load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
"go_test",
)
go_library(
name = "go_default_library",
srcs = [
"dao.cache.go",
"dao.go",
"multi.go",
"none.go",
"single.go",
],
importpath = "go-common/app/tool/cache/testdata",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//library/stat/prom:go_default_library",
"//library/sync/errgroup:go_default_library",
"//library/sync/pipeline/fanout:go_default_library",
],
)
go_test(
name = "go_default_test",
srcs = [
"multi_test.go",
"none_test.go",
"single_test.go",
],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)
| {
"pile_set_name": "Github"
} |
package yaml
import (
"io"
)
// Set the reader error and return false.
func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool {
parser.error = yaml_READER_ERROR
parser.problem = problem
parser.problem_offset = offset
parser.problem_value = value
return false
}
// Byte order marks.
const (
bom_UTF8 = "\xef\xbb\xbf"
bom_UTF16LE = "\xff\xfe"
bom_UTF16BE = "\xfe\xff"
)
// Determine the input stream encoding by checking the BOM symbol. If no BOM is
// found, the UTF-8 encoding is assumed. Return true on success, false on failure.
func yaml_parser_determine_encoding(parser *yaml_parser_t) bool {
// Ensure that we had enough bytes in the raw buffer.
for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 {
if !yaml_parser_update_raw_buffer(parser) {
return false
}
}
// Determine the encoding.
buf := parser.raw_buffer
pos := parser.raw_buffer_pos
avail := len(buf) - pos
if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] {
parser.encoding = yaml_UTF16LE_ENCODING
parser.raw_buffer_pos += 2
parser.offset += 2
} else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] {
parser.encoding = yaml_UTF16BE_ENCODING
parser.raw_buffer_pos += 2
parser.offset += 2
} else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] {
parser.encoding = yaml_UTF8_ENCODING
parser.raw_buffer_pos += 3
parser.offset += 3
} else {
parser.encoding = yaml_UTF8_ENCODING
}
return true
}
// Update the raw buffer.
func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool {
size_read := 0
// Return if the raw buffer is full.
if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) {
return true
}
// Return on EOF.
if parser.eof {
return true
}
// Move the remaining bytes in the raw buffer to the beginning.
if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) {
copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:])
}
parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos]
parser.raw_buffer_pos = 0
// Call the read handler to fill the buffer.
size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)])
parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read]
if err == io.EOF {
parser.eof = true
} else if err != nil {
return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1)
}
return true
}
// Ensure that the buffer contains at least `length` characters.
// Return true on success, false on failure.
//
// The length is supposed to be significantly less than the buffer size.
func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
if parser.read_handler == nil {
panic("read handler must be set")
}
// [Go] This function was changed to guarantee the requested length size at EOF.
// The fact we need to do this is pretty awful, but the description above implies
// that this should be the case, and there are tests that rely on it.
// If the EOF flag is set and the raw buffer is empty, do nothing.
if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) {
// [Go] ACTUALLY! Read the documentation of this function above.
// This is just broken. To return true, we need to have the
// given length in the buffer. Not doing that means every single
// check that calls this function to make sure the buffer has a
// given length ends up either panicking (in Go) or accessing invalid memory (as in C).
//return true
}
// Return if the buffer contains enough characters.
if parser.unread >= length {
return true
}
// Determine the input encoding if it is not known yet.
if parser.encoding == yaml_ANY_ENCODING {
if !yaml_parser_determine_encoding(parser) {
return false
}
}
// Move the unread characters to the beginning of the buffer.
buffer_len := len(parser.buffer)
if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len {
copy(parser.buffer, parser.buffer[parser.buffer_pos:])
buffer_len -= parser.buffer_pos
parser.buffer_pos = 0
} else if parser.buffer_pos == buffer_len {
buffer_len = 0
parser.buffer_pos = 0
}
// Open the whole buffer for writing, and cut it before returning.
parser.buffer = parser.buffer[:cap(parser.buffer)]
// Fill the buffer until it has enough characters.
first := true
for parser.unread < length {
// Fill the raw buffer if necessary.
if !first || parser.raw_buffer_pos == len(parser.raw_buffer) {
if !yaml_parser_update_raw_buffer(parser) {
parser.buffer = parser.buffer[:buffer_len]
return false
}
}
first = false
// Decode the raw buffer.
inner:
for parser.raw_buffer_pos != len(parser.raw_buffer) {
var value rune
var width int
raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos
// Decode the next character.
switch parser.encoding {
case yaml_UTF8_ENCODING:
// Decode a UTF-8 character. Check RFC 3629
// (http://www.ietf.org/rfc/rfc3629.txt) for more details.
//
// The following table (taken from the RFC) is used for
// decoding.
//
// Char. number range | UTF-8 octet sequence
// (hexadecimal) | (binary)
// --------------------+------------------------------------
// 0000 0000-0000 007F | 0xxxxxxx
// 0000 0080-0000 07FF | 110xxxxx 10xxxxxx
// 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx
// 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
//
// Additionally, the characters in the range 0xD800-0xDFFF
// are prohibited as they are reserved for use with UTF-16
// surrogate pairs.
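//
// For example, the octet sequence 0xC3 0xA9 decodes to U+00E9 ('é'):
// the leading octet gives width 2 (0xC3&0xE0 == 0xC0) and contributes
// the bits 0xC3&0x1F == 0x03, and (0x03<<6)+(0xA9&0x3F) == 0xE9.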
// Determine the length of the UTF-8 sequence.
octet := parser.raw_buffer[parser.raw_buffer_pos]
switch {
case octet&0x80 == 0x00:
width = 1
case octet&0xE0 == 0xC0:
width = 2
case octet&0xF0 == 0xE0:
width = 3
case octet&0xF8 == 0xF0:
width = 4
default:
// The leading octet is invalid.
return yaml_parser_set_reader_error(parser,
"invalid leading UTF-8 octet",
parser.offset, int(octet))
}
// Check if the raw buffer contains an incomplete character.
if width > raw_unread {
if parser.eof {
return yaml_parser_set_reader_error(parser,
"incomplete UTF-8 octet sequence",
parser.offset, -1)
}
break inner
}
// Decode the leading octet.
switch {
case octet&0x80 == 0x00:
value = rune(octet & 0x7F)
case octet&0xE0 == 0xC0:
value = rune(octet & 0x1F)
case octet&0xF0 == 0xE0:
value = rune(octet & 0x0F)
case octet&0xF8 == 0xF0:
value = rune(octet & 0x07)
default:
value = 0
}
// Check and decode the trailing octets.
for k := 1; k < width; k++ {
octet = parser.raw_buffer[parser.raw_buffer_pos+k]
// Check if the octet is valid.
if (octet & 0xC0) != 0x80 {
return yaml_parser_set_reader_error(parser,
"invalid trailing UTF-8 octet",
parser.offset+k, int(octet))
}
// Decode the octet.
value = (value << 6) + rune(octet&0x3F)
}
// Check the length of the sequence against the value.
switch {
case width == 1:
case width == 2 && value >= 0x80:
case width == 3 && value >= 0x800:
case width == 4 && value >= 0x10000:
default:
return yaml_parser_set_reader_error(parser,
"invalid length of a UTF-8 sequence",
parser.offset, -1)
}
// Check the range of the value.
if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF {
return yaml_parser_set_reader_error(parser,
"invalid Unicode character",
parser.offset, int(value))
}
case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING:
var low, high int
if parser.encoding == yaml_UTF16LE_ENCODING {
low, high = 0, 1
} else {
low, high = 1, 0
}
// The UTF-16 encoding is not as simple as one might
// naively think. Check RFC 2781
// (http://www.ietf.org/rfc/rfc2781.txt).
//
// Normally, two subsequent bytes describe a Unicode
// character. However a special technique (called a
// surrogate pair) is used for specifying character
// values larger than 0xFFFF.
//
// A surrogate pair consists of two pseudo-characters:
// high surrogate area (0xD800-0xDBFF)
// low surrogate area (0xDC00-0xDFFF)
//
// The following formulas are used for decoding
// and encoding characters using surrogate pairs:
//
// U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF)
// U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF)
// W1 = 110110yyyyyyyyyy
// W2 = 110111xxxxxxxxxx
//
// where U is the character value, W1 is the high surrogate
// area, W2 is the low surrogate area.
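//
// For example, U+1F600 is transmitted as the surrogate pair
// W1 = 0xD83D, W2 = 0xDE00: U' = 0x1F600-0x10000 = 0xF600, so
// W1 = 0xD800+(0xF600>>10) and W2 = 0xDC00+(0xF600&0x3FF).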
// Check for incomplete UTF-16 character.
if raw_unread < 2 {
if parser.eof {
return yaml_parser_set_reader_error(parser,
"incomplete UTF-16 character",
parser.offset, -1)
}
break inner
}
// Get the character.
value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) +
(rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8)
// Check for unexpected low surrogate area.
if value&0xFC00 == 0xDC00 {
return yaml_parser_set_reader_error(parser,
"unexpected low surrogate area",
parser.offset, int(value))
}
// Check for a high surrogate area.
if value&0xFC00 == 0xD800 {
width = 4
// Check for incomplete surrogate pair.
if raw_unread < 4 {
if parser.eof {
return yaml_parser_set_reader_error(parser,
"incomplete UTF-16 surrogate pair",
parser.offset, -1)
}
break inner
}
// Get the next character.
value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) +
(rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8)
// Check for a low surrogate area.
if value2&0xFC00 != 0xDC00 {
return yaml_parser_set_reader_error(parser,
"expected low surrogate area",
parser.offset+2, int(value2))
}
// Generate the value of the surrogate pair.
value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF)
} else {
width = 2
}
default:
panic("impossible")
}
// Check if the character is in the allowed range:
// #x9 | #xA | #xD | [#x20-#x7E] (8 bit)
// | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit)
// | [#x10000-#x10FFFF] (32 bit)
switch {
case value == 0x09:
case value == 0x0A:
case value == 0x0D:
case value >= 0x20 && value <= 0x7E:
case value == 0x85:
case value >= 0xA0 && value <= 0xD7FF:
case value >= 0xE000 && value <= 0xFFFD:
case value >= 0x10000 && value <= 0x10FFFF:
default:
return yaml_parser_set_reader_error(parser,
"control characters are not allowed",
parser.offset, int(value))
}
// Move the raw pointers.
parser.raw_buffer_pos += width
parser.offset += width
// Finally put the character into the buffer.
if value <= 0x7F {
// 0000 0000-0000 007F . 0xxxxxxx
parser.buffer[buffer_len+0] = byte(value)
buffer_len += 1
} else if value <= 0x7FF {
// 0000 0080-0000 07FF . 110xxxxx 10xxxxxx
parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6))
parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F))
buffer_len += 2
} else if value <= 0xFFFF {
// 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx
parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12))
parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F))
parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F))
buffer_len += 3
} else {
// 0001 0000-0010 FFFF . 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18))
parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F))
parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F))
parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F))
buffer_len += 4
}
parser.unread++
}
// On EOF, put NUL into the buffer and return.
if parser.eof {
parser.buffer[buffer_len] = 0
buffer_len++
parser.unread++
break
}
}
// [Go] Read the documentation of this function above. To return true,
// we need to have the given length in the buffer. Not doing that means
// every single check that calls this function to make sure the buffer
// has a given length ends up either panicking (in Go) or accessing invalid memory (as in C).
// This happens here due to the EOF above breaking early.
for buffer_len < length {
parser.buffer[buffer_len] = 0
buffer_len++
}
parser.buffer = parser.buffer[:buffer_len]
return true
}
| {
"pile_set_name": "Github"
} |
<?php
return [
'user_validate' => '自定义验证'
]; | {
"pile_set_name": "Github"
} |
<server>
<featureManager>
<feature>jaxws-2.2</feature>
<feature>servlet-3.1</feature>
<feature>jdbc-4.0</feature>
<feature>mpMetrics-3.0</feature>
<feature>monitor-1.0</feature>
</featureManager>
<include location="../fatTestPorts.xml"/>
<include location="serverJDBCConfig.xml"/>
<include location="serverConfig.xml"/>
</server>
| {
"pile_set_name": "Github"
} |
[
{
"category": "``neptune``",
"description": "Update neptune client to latest version",
"type": "api-change"
},
{
"category": "``elbv2``",
"description": "Update elbv2 client to latest version",
"type": "api-change"
}
] | {
"pile_set_name": "Github"
} |
# Text v2 plugin HTTP API
The text plugin name is `text_v2`, so all its routes are under
`/data/plugin/text_v2`.
## `/data/plugin/text_v2/tags`
Retrieves an index of tags containing text data.
Returns a dictionary mapping from `runName` (quoted string) to an
array of `tagName`. Here is an example:
```json
{
"text_demo_run": [
"simple_example/greeting",
"markdown_table/chocolate_study",
"higher_order_tensors/multiplication_table"
]
}
```
Runs without any text tags are omitted from the result.
## `/data/plugin/text_v2/text?run=foo&tag=bar`
Returns an array of text events for the given run and tag. Each event is
a dictionary with members `wall_time`, `step`, `string_array`, `original_shape`, and `truncated`,
where `wall_time` is a floating-point number of seconds since epoch, `step` is
an integer step counter, `string_array` is an n-dimensional array, `original_shape` is the
size of each dimension of the tensor provided to the text summarizer (note that this will
differ from the shape of `string_array` if truncation has occurred), and
`truncated` is a boolean indicating whether the size or dimensionality of the array
was reduced (truncated).
Example:
```json
{
"original_shape": [2, 2],
"step": 1,
"string_array": [["×", "**0**"], ["**0**", "0"]],
"wall_time": 1591289315.824522,
"truncated": false
}
```
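For illustration only, here is a minimal client sketch (not part of the plugin documentation) that queries both routes. It assumes the serving application (presumably a TensorBoard instance) is reachable at a base URL such as `http://localhost:6006`; the host, port, and any path prefix are assumptions to adjust for your deployment. Only the Python standard library is used.
```python
import json
from urllib.parse import urlencode
from urllib.request import urlopen

# Assumption: adjust BASE to wherever your instance is served.
BASE = "http://localhost:6006"

def get_json(path, **params):
    """Fetch a plugin route and decode its JSON response."""
    url = BASE + path
    if params:
        url += "?" + urlencode(params)
    with urlopen(url) as response:
        return json.load(response)

# Index of runs and their text tags.
tags_index = get_json("/data/plugin/text_v2/tags")

# Text events for every (run, tag) pair in the index.
for run, tag_names in tags_index.items():
    for tag in tag_names:
        for event in get_json("/data/plugin/text_v2/text", run=run, tag=tag):
            print(run, tag, event["step"], event["truncated"])
```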
| {
"pile_set_name": "Github"
} |
// <copyright file="PromptDialogViewModel.cs" company="Automate The Planet Ltd.">
// Copyright 2016 Automate The Planet Ltd.
// Licensed under the Apache License, Version 2.0 (the "License");
// You may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
// <author>Anton Angelov</author>
// <site>http://automatetheplanet.com/</site>
namespace TestCaseManagerCore.ViewModels
{
using System;
using System.Linq;
using AAngelov.Utilities.UI.Core;
using AAngelov.Utilities.UI.Managers;
/// <summary>
/// Holds PromptDialogView Properties
/// </summary>
public class PromptDialogViewModel : BaseNotifyPropertyChanged
{
/// <summary>
/// The content
/// </summary>
private string content;
/// <summary>
/// The is canceled
/// </summary>
private bool isCanceled;
/// <summary>
/// Gets or sets the Content.
/// </summary>
/// <value>
/// The Content.
/// </value>
public string Content
{
get
{
if (this.content == null)
{
this.content = UIRegistryManager.Instance.GetContentPromtDialog();
}
return this.content;
}
set
{
this.content = value;
UIRegistryManager.Instance.WriteTitleTitlePromtDialog(this.content);
this.NotifyPropertyChanged();
}
}
/// <summary>
/// Gets or sets a value indicating whether [is canceled].
/// </summary>
/// <value>
/// <c>true</c> if [is canceled]; otherwise, <c>false</c>.
/// </value>
public bool IsCanceled
{
get
{
return this.isCanceled;
}
set
{
this.isCanceled = value;
UIRegistryManager.Instance.WriteIsCanceledTitlePromtDialog(this.isCanceled);
this.NotifyPropertyChanged();
}
}
}
} | {
"pile_set_name": "Github"
} |
/*
* jdatadst.c
*
* Copyright (C) 1994-1996, Thomas G. Lane.
* This file is part of the Independent JPEG Group's software.
* For conditions of distribution and use, see the accompanying README file.
*
* This file contains compression data destination routines for the case of
* emitting JPEG data to a file (or any stdio stream). While these routines
* are sufficient for most applications, some will want to use a different
* destination manager.
* IMPORTANT: we assume that fwrite() will correctly transcribe an array of
* JOCTETs into 8-bit-wide elements on external storage. If char is wider
* than 8 bits on your machine, you may need to do some tweaking.
*/
/* this is not a core library module, so it doesn't define JPEG_INTERNALS */
#include "jinclude.h"
#include "jpeglib.h"
#include "jerror.h"
/* Expanded data destination object for stdio output */
typedef struct {
struct jpeg_destination_mgr pub; /* public fields */
FILE * outfile; /* target stream */
JOCTET * buffer; /* start of buffer */
} my_destination_mgr;
typedef my_destination_mgr * my_dest_ptr;
#define OUTPUT_BUF_SIZE 4096 /* choose an efficiently fwrite'able size */
/*
* Initialize destination --- called by jpeg_start_compress
* before any data is actually written.
*/
METHODDEF(void)
init_destination (j_compress_ptr cinfo)
{
my_dest_ptr dest = (my_dest_ptr) cinfo->dest;
/* Allocate the output buffer --- it will be released when done with image */
dest->buffer = (JOCTET *)
(*cinfo->mem->alloc_small) ((j_common_ptr) cinfo, JPOOL_IMAGE,
OUTPUT_BUF_SIZE * SIZEOF(JOCTET));
dest->pub.next_output_byte = dest->buffer;
dest->pub.free_in_buffer = OUTPUT_BUF_SIZE;
}
/*
* Empty the output buffer --- called whenever buffer fills up.
*
* In typical applications, this should write the entire output buffer
* (ignoring the current state of next_output_byte & free_in_buffer),
* reset the pointer & count to the start of the buffer, and return TRUE
* indicating that the buffer has been dumped.
*
* In applications that need to be able to suspend compression due to output
* overrun, a FALSE return indicates that the buffer cannot be emptied now.
* In this situation, the compressor will return to its caller (possibly with
* an indication that it has not accepted all the supplied scanlines). The
* application should resume compression after it has made more room in the
* output buffer. Note that there are substantial restrictions on the use of
* suspension --- see the documentation.
*
* When suspending, the compressor will back up to a convenient restart point
* (typically the start of the current MCU). next_output_byte & free_in_buffer
* indicate where the restart point will be if the current call returns FALSE.
* Data beyond this point will be regenerated after resumption, so do not
* write it out when emptying the buffer externally.
*/
METHODDEF(boolean)
empty_output_buffer (j_compress_ptr cinfo)
{
my_dest_ptr dest = (my_dest_ptr) cinfo->dest;
if (JFWRITE(dest->outfile, dest->buffer, OUTPUT_BUF_SIZE) !=
(size_t) OUTPUT_BUF_SIZE)
ERREXIT(cinfo, JERR_FILE_WRITE);
dest->pub.next_output_byte = dest->buffer;
dest->pub.free_in_buffer = OUTPUT_BUF_SIZE;
return TRUE;
}
/*
* Terminate destination --- called by jpeg_finish_compress
* after all data has been written. Usually needs to flush buffer.
*
* NB: *not* called by jpeg_abort or jpeg_destroy; surrounding
* application must deal with any cleanup that should happen even
* for error exit.
*/
METHODDEF(void)
term_destination (j_compress_ptr cinfo)
{
my_dest_ptr dest = (my_dest_ptr) cinfo->dest;
size_t datacount = OUTPUT_BUF_SIZE - dest->pub.free_in_buffer;
/* Write any data remaining in the buffer */
if (datacount > 0) {
if (JFWRITE(dest->outfile, dest->buffer, datacount) != datacount)
ERREXIT(cinfo, JERR_FILE_WRITE);
}
fflush(dest->outfile);
/* Make sure we wrote the output file OK */
if (ferror(dest->outfile))
ERREXIT(cinfo, JERR_FILE_WRITE);
}
/*
* Prepare for output to a stdio stream.
* The caller must have already opened the stream, and is responsible
* for closing it after finishing compression.
*/
GLOBAL(void)
jpeg_stdio_dest (j_compress_ptr cinfo, FILE * outfile)
{
my_dest_ptr dest;
/* The destination object is made permanent so that multiple JPEG images
* can be written to the same file without re-executing jpeg_stdio_dest.
* This makes it dangerous to use this manager and a different destination
* manager serially with the same JPEG object, because their private object
* sizes may be different. Caveat programmer.
*/
if (cinfo->dest == NULL) { /* first time for this JPEG object? */
cinfo->dest = (struct jpeg_destination_mgr *)
(*cinfo->mem->alloc_small) ((j_common_ptr) cinfo, JPOOL_PERMANENT,
SIZEOF(my_destination_mgr));
}
dest = (my_dest_ptr) cinfo->dest;
dest->pub.init_destination = init_destination;
dest->pub.empty_output_buffer = empty_output_buffer;
dest->pub.term_destination = term_destination;
dest->outfile = outfile;
}
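/*
 * Illustrative usage sketch (not part of the original file); it assumes a
 * struct jpeg_compress_struct "cinfo" that has already been initialized
 * with jpeg_create_compress():
 *
 *   FILE *outfile = fopen("image.jpg", "wb");
 *   if (outfile != NULL) {
 *     jpeg_stdio_dest(&cinfo, outfile);
 *     jpeg_start_compress(&cinfo, TRUE);
 *     ... write scanlines with jpeg_write_scanlines() ...
 *     jpeg_finish_compress(&cinfo);
 *     fclose(outfile);   // the caller, not the library, closes the stream
 *   }
 */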
| {
"pile_set_name": "Github"
} |
#include "rk_driver.h"
#include "rk_router.h"
/* ROUTING ROUTINES
* Redirection and routing routines, including
* multicast and broadcast lists. Lists are
* protected with Mutex. Enables complete
* compromise of any firewall.
*/
/* route a packet to another host using
* encrypted encapsulation
*/
void RedirectPacketTo(){
DbgPrint(("RedirectPacket called\n"));
}
/* get an encapsulated packet & send over interface */
void OnRoutedPacket(){
DbgPrint(("RoutedPacket called\n"));
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2009, Ingo Weinhold, [email protected].
* Distributed under the terms of the MIT License.
*/
#include "StackFrame.h"
#include <new>
#include "CpuState.h"
#include "FunctionInstance.h"
#include "Image.h"
#include "StackFrameDebugInfo.h"
#include "StackFrameValueInfos.h"
#include "StackFrameValues.h"
#include "Variable.h"
// #pragma mark - StackFrame
StackFrame::StackFrame(stack_frame_type type, CpuState* cpuState,
target_addr_t frameAddress, target_addr_t instructionPointer,
StackFrameDebugInfo* debugInfo)
:
fType(type),
fCpuState(cpuState),
fPreviousCpuState(NULL),
fFrameAddress(frameAddress),
fInstructionPointer(instructionPointer),
fReturnAddress(0),
fDebugInfo(debugInfo),
fImage(NULL),
fFunction(NULL),
fValues(NULL),
fValueInfos(NULL)
{
fCpuState->AcquireReference();
fDebugInfo->AcquireReference();
}
StackFrame::~StackFrame()
{
for (int32 i = 0; Variable* variable = fParameters.ItemAt(i); i++)
variable->ReleaseReference();
for (int32 i = 0; Variable* variable = fLocalVariables.ItemAt(i); i++)
variable->ReleaseReference();
SetImage(NULL);
SetFunction(NULL);
SetPreviousCpuState(NULL);
fDebugInfo->ReleaseReference();
fCpuState->ReleaseReference();
if (fValues != NULL)
fValues->ReleaseReference();
if (fValueInfos != NULL)
fValueInfos->ReleaseReference();
}
status_t
StackFrame::Init()
{
// create values map
fValues = new(std::nothrow) StackFrameValues;
if (fValues == NULL)
return B_NO_MEMORY;
status_t error = fValues->Init();
if (error != B_OK)
return error;
// create value infos map
fValueInfos = new(std::nothrow) StackFrameValueInfos;
if (fValueInfos == NULL)
return B_NO_MEMORY;
error = fValueInfos->Init();
if (error != B_OK)
return error;
return B_OK;
}
void
StackFrame::SetPreviousCpuState(CpuState* state)
{
if (fPreviousCpuState != NULL)
fPreviousCpuState->ReleaseReference();
fPreviousCpuState = state;
if (fPreviousCpuState != NULL)
fPreviousCpuState->AcquireReference();
}
void
StackFrame::SetReturnAddress(target_addr_t address)
{
fReturnAddress = address;
}
void
StackFrame::SetImage(Image* image)
{
if (fImage != NULL)
fImage->ReleaseReference();
fImage = image;
if (fImage != NULL)
fImage->AcquireReference();
}
void
StackFrame::SetFunction(FunctionInstance* function)
{
if (fFunction != NULL)
fFunction->ReleaseReference();
fFunction = function;
if (fFunction != NULL)
fFunction->AcquireReference();
}
int32
StackFrame::CountParameters() const
{
return fParameters.CountItems();
}
Variable*
StackFrame::ParameterAt(int32 index) const
{
return fParameters.ItemAt(index);
}
bool
StackFrame::AddParameter(Variable* parameter)
{
if (!fParameters.AddItem(parameter))
return false;
parameter->AcquireReference();
return true;
}
int32
StackFrame::CountLocalVariables() const
{
return fLocalVariables.CountItems();
}
Variable*
StackFrame::LocalVariableAt(int32 index) const
{
return fLocalVariables.ItemAt(index);
}
bool
StackFrame::AddLocalVariable(Variable* variable)
{
if (!fLocalVariables.AddItem(variable))
return false;
variable->AcquireReference();
return true;
}
void
StackFrame::AddListener(Listener* listener)
{
fListeners.Add(listener);
}
void
StackFrame::RemoveListener(Listener* listener)
{
fListeners.Remove(listener);
}
void
StackFrame::NotifyValueRetrieved(Variable* variable, TypeComponentPath* path)
{
for (ListenerList::Iterator it = fListeners.GetIterator();
Listener* listener = it.Next();) {
listener->StackFrameValueRetrieved(this, variable, path);
}
}
// #pragma mark - StackFrame
StackFrame::Listener::~Listener()
{
}
void
StackFrame::Listener::StackFrameValueRetrieved(StackFrame* stackFrame,
Variable* variable, TypeComponentPath* path)
{
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<script src="ContentScripts.js"></script>
<head>
<title>Component Creator Start</title>
<link rel="stylesheet" type="text/css" href="DWSIMstyle.css">
</head>
<body onload="LoadMenue()">
<!-- ================================================================================== -->
<!-- ==================== Begin of content ======================================= -->
<!-- ================================================================================== -->
<H2>UNIFAC Interaction Parameter Regression </H2>
<a href="UR_Case_Definition_1.htm" target="_parent"><img src="pictures/arrow-up.gif"></a>
<a href="UR_Data_Regression_3.htm" target="_parent"><img src="pictures/arrow-down.gif"></a>
<hr>
<H3>Model Definition</H3>
This page is used for setting up the regression case.
To set up the regression, execute the following steps:
<ol>
<li><b>Model:</b> Select the model of your choice.</li>
<li><b>Component 1/2:</b> As soon as you have selected the model, drop-down lists for selecting your components will appear.
Only components that have group definitions for the selected model are listed here.</li>
<li><b>Show parameters:</b> As soon as both components are selected, a button is shown to display
existing group interaction parameters. Missing group interaction parameters are displayed as "X" on a yellow
background. In the picture this is the case for the interaction groups "Aromatic Nitrile" and "H2O".</li>
<li><b>Parameter Selection:</b> Double-click on one of the yellow fields. This selects these two groups
for later regression on experimental data. <br>
If you select an existing parameter set for regression, it will
replace the original parameters of your model when saving to the database, so this is not recommended!</li>
<li><b>Groups & Parameters: </b>The selected main groups, with their names and group IDs
and their mutual interaction parameters, are displayed in the grey box.</li>
</ol>
<img src="Pictures/UR_02.png" width="70%" height="70%" border="0"><br><br>
</BODY>
</HTML> | {
"pile_set_name": "Github"
} |
# NOTE: Assertions have been autogenerated by utils/update_mca_test_checks.py
# RUN: llvm-mca -mtriple=x86_64-unknown-unknown -mcpu=btver2 -resource-pressure=false -retire-stats -iterations=1 < %s | FileCheck %s
vsqrtps %xmm0, %xmm2
vaddps %xmm0, %xmm1, %xmm2
vaddps %xmm0, %xmm1, %xmm2
vaddps %xmm0, %xmm1, %xmm2
vaddps %xmm0, %xmm1, %xmm2
vaddps %xmm0, %xmm1, %xmm2
vaddps %xmm0, %xmm1, %xmm2
vaddps %xmm0, %xmm1, %xmm2
vaddps %xmm0, %xmm1, %xmm2
vaddps %xmm0, %xmm1, %xmm2
vaddps %xmm0, %xmm1, %xmm2
vaddps %xmm0, %xmm1, %xmm2
vaddps %xmm0, %xmm1, %xmm2
vaddps %xmm0, %xmm1, %xmm2
vaddps %xmm0, %xmm1, %xmm2
vaddps %xmm0, %xmm1, %xmm2
# CHECK: Iterations: 1
# CHECK-NEXT: Instructions: 16
# CHECK-NEXT: Total Cycles: 31
# CHECK-NEXT: Total uOps: 16
# CHECK: Dispatch Width: 2
# CHECK-NEXT: uOps Per Cycle: 0.52
# CHECK-NEXT: IPC: 0.52
# CHECK-NEXT: Block RThroughput: 21.0
# CHECK: Instruction Info:
# CHECK-NEXT: [1]: #uOps
# CHECK-NEXT: [2]: Latency
# CHECK-NEXT: [3]: RThroughput
# CHECK-NEXT: [4]: MayLoad
# CHECK-NEXT: [5]: MayStore
# CHECK-NEXT: [6]: HasSideEffects (U)
# CHECK: [1] [2] [3] [4] [5] [6] Instructions:
# CHECK-NEXT: 1 21 21.00 vsqrtps %xmm0, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK-NEXT: 1 3 1.00 vaddps %xmm0, %xmm1, %xmm2
# CHECK: Retire Control Unit - number of cycles where we saw N instructions retired:
# CHECK-NEXT: [# retired], [# cycles]
# CHECK-NEXT: 0, 23 (74.2%)
# CHECK-NEXT: 2, 8 (25.8%)
# CHECK: Total ROB Entries: 64
# CHECK-NEXT: Max Used ROB Entries: 16 ( 25.0% )
# CHECK-NEXT: Average Used ROB Entries per cy: 11 ( 17.2% )
| {
"pile_set_name": "Github"
} |
<component name="libraryTable">
<library name="Maven: io.netty:netty-common:4.1.33.Final">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/io/netty/netty-common/4.1.33.Final/netty-common-4.1.33.Final.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/io/netty/netty-common/4.1.33.Final/netty-common-4.1.33.Final-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/io/netty/netty-common/4.1.33.Final/netty-common-4.1.33.Final-sources.jar!/" />
</SOURCES>
</library>
</component> | {
"pile_set_name": "Github"
} |
//
// SUAppcastItem.h
// Sparkle
//
// Created by Andy Matuschak on 3/12/06.
// Copyright 2006 Andy Matuschak. All rights reserved.
//
#ifndef SUAPPCASTITEM_H
#define SUAPPCASTITEM_H
#if __has_feature(modules)
@import Foundation;
#else
#import <Foundation/Foundation.h>
#endif
#import "SUExport.h"
@class SUSignatures;
SU_EXPORT @interface SUAppcastItem : NSObject
@property (copy, readonly) NSString *title;
@property (copy, readonly) NSString *dateString;
@property (copy, readonly) NSDate *date;
@property (copy, readonly) NSString *itemDescription;
@property (strong, readonly) NSURL *releaseNotesURL;
@property (strong, readonly) SUSignatures *signatures;
@property (copy, readonly) NSString *minimumSystemVersion;
@property (copy, readonly) NSString *maximumSystemVersion;
@property (strong, readonly) NSURL *fileURL;
@property (nonatomic, readonly) uint64_t contentLength;
@property (copy, readonly) NSString *versionString;
@property (copy, readonly) NSString *osString;
@property (copy, readonly) NSString *displayVersionString;
@property (copy, readonly) NSDictionary *deltaUpdates;
@property (strong, readonly) NSURL *infoURL;
@property (copy, readonly) NSNumber* phasedRolloutInterval;
// Initializes with data from a dictionary provided by the RSS class.
- (instancetype)initWithDictionary:(NSDictionary *)dict;
- (instancetype)initWithDictionary:(NSDictionary *)dict failureReason:(NSString **)error;
@property (getter=isDeltaUpdate, readonly) BOOL deltaUpdate;
@property (getter=isCriticalUpdate, readonly) BOOL criticalUpdate;
@property (getter=isMacOsUpdate, readonly) BOOL macOsUpdate;
@property (getter=isInformationOnlyUpdate, readonly) BOOL informationOnlyUpdate;
// Returns the dictionary provided in initWithDictionary; this might be useful later for extensions.
@property (readonly, copy) NSDictionary *propertiesDictionary;
- (NSURL *)infoURL;
@end
#endif
| {
"pile_set_name": "Github"
} |
stderr of test 'mergepart11` in directory 'sql/test/merge-partitions` itself:
# 09:47:13 >
# 09:47:13 > "mserver5" "--debug=10" "--set" "gdk_nr_threads=0" "--set" "mapi_open=true" "--set" "mapi_port=39288" "--set" "mapi_usock=/var/tmp/mtest-21121/.s.monetdb.39288" "--set" "monet_prompt=" "--forcemito" "--dbpath=/home/ferreira/repositories/MonetDB-merge-partitions/BUILD/var/MonetDB/mTests_sql_test_merge-partitions" "--set" "embedded_c=true"
# 09:47:13 >
# builtin opt gdk_dbpath = /home/ferreira/repositories/MonetDB-merge-partitions/BUILD/var/monetdb5/dbfarm/demo
# builtin opt gdk_debug = 0
# builtin opt gdk_vmtrim = no
# builtin opt monet_prompt = >
# builtin opt monet_daemon = no
# builtin opt mapi_port = 50000
# builtin opt mapi_open = false
# builtin opt mapi_autosense = false
# builtin opt sql_optimizer = default_pipe
# builtin opt sql_debug = 0
# cmdline opt gdk_nr_threads = 0
# cmdline opt mapi_open = true
# cmdline opt mapi_port = 39288
# cmdline opt mapi_usock = /var/tmp/mtest-21121/.s.monetdb.39288
# cmdline opt monet_prompt =
# cmdline opt gdk_dbpath = /home/ferreira/repositories/MonetDB-merge-partitions/BUILD/var/MonetDB/mTests_sql_test_merge-partitions
# cmdline opt embedded_c = true
# cmdline opt gdk_debug = 553648138
# 09:47:13 >
# 09:47:13 > "mclient" "-lsql" "-ftest" "-tnone" "-Eutf-8" "-i" "-e" "--host=/var/tmp/mtest-21121" "--port=39288"
# 09:47:13 >
MAPI = (monetdb) /var/tmp/mtest-21121/.s.monetdb.39288
QUERY = UPDATE moveaccrosspartitions SET a = a + 1 WHERE a % 50 = 0;
ERROR = !UPDATE: Update on the partitioned column is not possible at the moment
CODE = 42000
MAPI = (monetdb) /var/tmp/mtest-21121/.s.monetdb.39288
QUERY = UPDATE moveaccrosspartitions SET a = a - 50, b = 'p' || b || 's' WHERE a % 60 = 0;
ERROR = !UPDATE: Update on the partitioned column is not possible at the moment
CODE = 42000
MAPI = (monetdb) /var/tmp/mtest-21121/.s.monetdb.39288
QUERY = UPDATE moveaccrosspartitions SET a = a - 60 WHERE a % 10 = 0; --error
ERROR = !UPDATE: Update on the partitioned column is not possible at the moment
CODE = 42000
MAPI = (monetdb) /var/tmp/mtest-21121/.s.monetdb.39288
QUERY = UPDATE moveaccrosspartitions SET a = a + 100, b = 'moved' WHERE a % 10 = 0 AND a < 100;
ERROR = !UPDATE: Update on the partitioned column is not possible at the moment
CODE = 42000
# 09:47:13 >
# 09:47:13 > "Done."
# 09:47:13 >
| {
"pile_set_name": "Github"
} |
/**
* Copyright (c) 2016 DeepCortex GmbH <[email protected]>
* Authors:
* - Paul Asmuth <[email protected]>
*
* This program is free software: you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License ("the license") as
* published by the Free Software Foundation, either version 3 of the License,
* or any later version.
*
* In accordance with Section 7(e) of the license, the licensing of the Program
* under the license does not imply a trademark license. Therefore any rights,
* title and interest in our trademarks remain entirely with us.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the license for more details.
*
* You can be released from the requirements of the license by purchasing a
* commercial license. Buying such a license is mandatory as soon as you develop
* commercial activities involving this program without disclosing the source
* code of your own applications
*/
#ifndef _FNORD_SSTABLE_SSTABLEREPAIR_H
#define _FNORD_SSTABLE_SSTABLEREPAIR_H
#include <string>
namespace sstable {
class SSTableRepair {
public:
SSTableRepair(const std::string& filename);
bool checkAndRepair(bool repair = false);
protected:
bool checkAndRepairUnfinishedTable(bool repair);
const std::string filename_;
};
}
#endif
| {
"pile_set_name": "Github"
} |
import _ from 'lodash';
import PropTypes from 'prop-types';
import React, { Component } from 'react';
import { connect } from 'react-redux';
import { createSelector } from 'reselect';
import { saveRemotePathMapping, setRemotePathMappingValue } from 'Store/Actions/settingsActions';
import selectSettings from 'Store/Selectors/selectSettings';
import EditRemotePathMappingModalContent from './EditRemotePathMappingModalContent';
const newRemotePathMapping = {
host: '',
remotePath: '',
localPath: ''
};
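// Builds the options for a download client host dropdown: clients are grouped
// by the value of their 'host' field, and the names of the clients sharing a
// host become that option's hint text ({ key, value, hint } per host).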
const selectDownloadClientHosts = createSelector(
(state) => state.settings.downloadClients.items,
(downloadClients) => {
const hosts = downloadClients.reduce((acc, downloadClient) => {
const name = downloadClient.name;
const host = downloadClient.fields.find((field) => {
return field.name === 'host';
});
if (host) {
const group = acc[host.value] = acc[host.value] || [];
group.push(name);
}
return acc;
}, {});
return Object.keys(hosts).map((host) => {
return {
key: host,
value: host,
hint: `${hosts[host].join(', ')}`
};
});
}
);
function createRemotePathMappingSelector() {
return createSelector(
(state, { id }) => id,
(state) => state.settings.remotePathMappings,
selectDownloadClientHosts,
(id, remotePathMappings, downloadClientHosts) => {
const {
isFetching,
error,
isSaving,
saveError,
pendingChanges,
items
} = remotePathMappings;
const mapping = id ? _.find(items, { id }) : newRemotePathMapping;
const settings = selectSettings(mapping, pendingChanges, saveError);
return {
id,
isFetching,
error,
isSaving,
saveError,
item: settings.settings,
...settings,
downloadClientHosts
};
}
);
}
function createMapStateToProps() {
return createSelector(
createRemotePathMappingSelector(),
(remotePathMapping) => {
return {
...remotePathMapping
};
}
);
}
const mapDispatchToProps = {
dispatchSetRemotePathMappingValue: setRemotePathMappingValue,
dispatchSaveRemotePathMapping: saveRemotePathMapping
};
class EditRemotePathMappingModalContentConnector extends Component {
//
// Lifecycle
componentDidMount() {
if (!this.props.id) {
Object.keys(newRemotePathMapping).forEach((name) => {
this.props.dispatchSetRemotePathMappingValue({
name,
value: newRemotePathMapping[name]
});
});
}
}
componentDidUpdate(prevProps, prevState) {
if (prevProps.isSaving && !this.props.isSaving && !this.props.saveError) {
this.props.onModalClose();
}
}
//
// Listeners
onInputChange = ({ name, value }) => {
this.props.dispatchSetRemotePathMappingValue({ name, value });
}
onSavePress = () => {
this.props.dispatchSaveRemotePathMapping({ id: this.props.id });
}
//
// Render
render() {
return (
<EditRemotePathMappingModalContent
{...this.props}
onSavePress={this.onSavePress}
onInputChange={this.onInputChange}
/>
);
}
}
EditRemotePathMappingModalContentConnector.propTypes = {
id: PropTypes.number,
isSaving: PropTypes.bool.isRequired,
saveError: PropTypes.object,
item: PropTypes.object.isRequired,
dispatchSetRemotePathMappingValue: PropTypes.func.isRequired,
dispatchSaveRemotePathMapping: PropTypes.func.isRequired,
onModalClose: PropTypes.func.isRequired
};
export default connect(createMapStateToProps, mapDispatchToProps)(EditRemotePathMappingModalContentConnector);
| {
"pile_set_name": "Github"
} |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Sample configuration properties for the Gobblin cluster launcher
# File system URIs
fs.uri="file:///"
writer.fs.uri=${fs.uri}
state.store.fs.uri=${fs.uri}
# Writer related configuration properties
writer.destination.type=HDFS
writer.output.format=AVRO
writer.staging.dir=${gobblin.cluster.work.dir}/task-staging
writer.output.dir=${gobblin.cluster.work.dir}/task-output
# Data publisher related configuration properties
data.publisher.type=org.apache.gobblin.publisher.BaseDataPublisher
data.publisher.final.dir=${gobblin.cluster.work.dir}/job-output
data.publisher.replace.final.dir=false
# Directory where job/task state files are stored
state.store.dir=${gobblin.cluster.work.dir}/state-store
# Directory where error files from the quality checkers are stored
qualitychecker.row.err.file=${gobblin.cluster.work.dir}/err
# Disable job locking for now
job.lock.enabled=false
# Interval of task state reporting in milliseconds
task.status.reportintervalinms=1000
# If the job execution history server should be enabled
job.execinfo.server.enabled=false
# Enable metrics / events
metrics.enabled=false
| {
"pile_set_name": "Github"
} |
# Header, don't edit
NLF v6
# Language ID
1030
# Font and size - dash (-) means default
-
-
# Codepage - dash (-) means ANSI code page
1252
# RTL - anything else than RTL means LTR
-
# Translation by Claus Futtrup
# ^Branding
Nullsoft Install System %s
# ^SetupCaption
$(^Name) Installation
# ^UninstallCaption
$(^Name) Afinstallation
# ^LicenseSubCaption
: Licensaftale
# ^ComponentsSubCaption
: Installationsmuligheder
# ^DirSubCaption
: Installationsmappe
# ^InstallingSubCaption
: Installerer
# ^CompletedSubCaption
: Gennemført
# ^UnComponentsSubCaption
: Afinstallationsmuligheder
# ^UnDirSubCaption
: Afinstallationsmappe
# ^ConfirmSubCaption
: Bekræft
# ^UninstallingSubCaption
: Afinstallerer
# ^UnCompletedSubCaption
: Gennemført
# ^BackBtn
< &Tilbage
# ^NextBtn
&Næste >
# ^AgreeBtn
Jeg &accepterer
# ^AcceptBtn
Jeg &accepterer vilkårene i licensaftalen
# ^DontAcceptBtn
Jeg &accepterer ikke vilkårene i licensaftalen
# ^InstallBtn
&Installer
# ^UninstallBtn
&Afinstaller
# ^CancelBtn
Afbryd
# ^CloseBtn
&Luk
# ^BrowseBtn
G&ennemse...
# ^ShowDetailsBtn
Vis &detaljer
# ^ClickNext
Tryk Næste for at fortsætte.
# ^ClickInstall
Tryk Installer for at starte installationen.
# ^ClickUninstall
Tryk Afinstaller for at starte afinstallationen.
# ^Name
Navn
# ^Completed
Gennemført
# ^LicenseText
Læs venligst licensaftalen før installationen af $(^NameDA). Hvis du accepterer alle betingelser i aftalen, skal du trykke 'Jeg accepterer'.
# ^LicenseTextCB
Læs venligst licensaftalen før installationen af $(^NameDA). Hvis du accepterer alle betingelser i aftalen, skal du markere afkrydsningsfeltet nedenfor. $_CLICK
# ^LicenseTextRB
Læs venligst licensaftalen før installationen af $(^NameDA). Hvis du accepterer alle betingelser i aftalen, skal du vælge den første mulighed nedenfor. $_CLICK
# ^UnLicenseText
Læs venligst licensaftalen før afinstallationen af $(^NameDA). Hvis du accepterer alle betingelser i aftalen, skal du trykke 'Jeg accepterer'
# ^UnLicenseTextCB
Læs venligst licensaftalen før afinstallationen af $(^NameDA). Hvis du accepterer alle betingelser i aftalen, skal du markere afkrydsningsfeltet nedenfor. $_CLICK
# ^UnLicenseTextRB
Læs venligst licensaftalen før afinstallationen af $(^NameDA). Hvis du accepterer alle betingelser i aftalen, skal du vælge den første mulighed nedenfor $_CLICK
# ^Custom
Brugerdefineret
# ^ComponentsText
Marker de komponenter du vil installere, og fjern markeringen af de komponenter du ikke vil installere. $_CLICK
# ^ComponentsSubText1
Vælg installationstype:
# ^ComponentsSubText2_NoInstTypes
Vælg de komponenter der skal installeres:
# ^ComponentsSubText2
Eller vælg de tillægskomponenter komponenter du ønsker at installere:
# ^UnComponentsText
Marker de komponenter du vil afinstallere, og fjern markeringen af de komponenter du ikke vil afinstallere. $_CLICK
# ^UnComponentsSubText1
Vælg afinstallationstype:
# ^UnComponentsSubText2_NoInstTypes
Vælg de komponenter der skal afinstalleres:
# ^UnComponentsSubText2
Eller vælg de tillægskomponenter du ønsker at afinstallere:
# ^DirText
Installationsguiden vil installere $(^NameDA) i følgende mappe. For at installere i en anden mappe, tryk Gennemse og vælg en anden mappe. $_CLICK
# ^DirSubText
Destinationsmappe
# ^DirBrowseText
Vælg den mappe $(^NameDA) skal installeres i:
# ^UnDirText
Installationsguiden vil afinstallere $(^NameDA) fra følgende mappe. For at afinstallere fra en anden mappe, tryk Gennemse og vælg en anden mappe. $_CLICK
# ^UnDirSubText
""
# ^UnDirBrowseText
Vælg den mappe hvorfra $(^NameDA) skal afinstalleres:
# ^SpaceAvailable
"Ledig plads: "
# ^SpaceRequired
"Nødvendig plads: "
# ^UninstallingText
$(^NameDA) vil blive afinstalleret fra følgende mappe. $_CLICK
# ^UninstallingSubText
Afinstallerer fra:
# ^FileError
Fejl ved skrivning af fil: \r\n\t"$0"\r\nTryk Afbryd for at afbryde installationen,\r\nPrøv Igen for at prøve at skrive til filen, eller\r\nIgnorer for at springe over denne fil
# ^FileError_NoIgnore
Fejl ved åbning af fil: \r\n\t"$0"\r\nTryk Prøv Igen for at prøve at skrive til filen, eller\r\nAfbryd for at afbryde installationen
# ^CantWrite
"Kan ikke skrive: "
# ^CopyFailed
Kopiering mislykkedes
# ^CopyTo
"Kopier til "
# ^Registering
"Registrerer: "
# ^Unregistering
"Afregisterer: "
# ^SymbolNotFound
"Kunne ikke finde symbol: "
# ^CouldNotLoad
"Kunne ikke hente: "
# ^CreateFolder
"Opret mappe: "
# ^CreateShortcut
"Opret genvej: "
# ^CreatedUninstaller
"Afinstallationsguide oprettet: "
# ^Delete
"Slet fil: "
# ^DeleteOnReboot
"Slet ved genstart: "
# ^ErrorCreatingShortcut
"Fejl ved oprettelse af genvej: "
# ^ErrorCreating
"Fejl ved oprettelse: "
# ^ErrorDecompressing
Fejl ved udpakning af data! Installationsguiden skadet?
# ^ErrorRegistering
Fejl ved registrering af DLL
# ^ExecShell
"ExecShell: "
# ^Exec
"Kør: "
# ^Extract
"Udpak: "
# ^ErrorWriting
"Udpak: Fejl ved skrivning til fil "
# ^InvalidOpcode
Installationsguide i stykker: Ugyldig opcode
# ^NoOLE
"Ingen OLE for: "
# ^OutputFolder
"Outputmappe: "
# ^RemoveFolder
"Slet mappe: "
# ^RenameOnReboot
"Omdøb ved genstart: "
# ^Rename
"Omdøb: "
# ^Skipped
"Sprunget over: "
# ^CopyDetails
Kopier detaljer til udklipsholderen
# ^LogInstall
Log installationsproces
# ^Byte
B
# ^Kilo
K
# ^Mega
M
# ^Giga
G | {
"pile_set_name": "Github"
} |
using UnityEngine;
using System.Collections;
using LuaInterface;
public class TestLuaThread : MonoBehaviour
{
string script =
@"
function fib(n)
local a, b = 0, 1
while n > 0 do
a, b = b, a + b
n = n - 1
end
return a
end
function CoFunc(len)
print('Coroutine started')
local i = 0
for i = 0, len, 1 do
local flag = coroutine.yield(fib(i))
if not flag then
break
end
end
print('Coroutine ended')
end
function Test()
local co = coroutine.create(CoFunc)
return co
end
";
LuaState state = null;
LuaThread thread = null;
string tips = null;
void Start ()
{
#if UNITY_5 || UNITY_2017 || UNITY_2018
Application.logMessageReceived += ShowTips;
#else
Application.RegisterLogCallback(ShowTips);
#endif
new LuaResLoader();
state = new LuaState();
state.Start();
state.LogGC = true;
state.DoString(script);
LuaFunction func = state.GetFunction("Test");
func.BeginPCall();
func.PCall();
thread = func.CheckLuaThread();
thread.name = "LuaThread";
func.EndPCall();
func.Dispose();
func = null;
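// Start the Lua coroutine: 10 becomes CoFunc's 'len' argument, so the coroutine
// yields fib(0) .. fib(10), one value per resume, as long as each resume passes
// a truthy flag.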
thread.Resume(10);
}
void OnApplicationQuit()
{
if (thread != null)
{
thread.Dispose();
thread = null;
}
state.Dispose();
state = null;
#if UNITY_5 || UNITY_2017 || UNITY_2018
Application.logMessageReceived -= ShowTips;
#else
Application.RegisterLogCallback(null);
#endif
}
void ShowTips(string msg, string stackTrace, LogType type)
{
tips += msg;
tips += "\r\n";
}
void Update()
{
state.CheckTop();
state.Collect();
}
void OnGUI()
{
GUI.Label(new Rect(Screen.width / 2 - 300, Screen.height / 2 - 200, 600, 400), tips);
if (GUI.Button(new Rect(10, 50, 120, 40), "Resume Thead"))
{
int ret = -1;
if (thread != null && thread.Resume(true, out ret) == (int)LuaThreadStatus.LUA_YIELD)
{
Debugger.Log("lua yield: " + ret);
}
}
else if (GUI.Button(new Rect(10, 150, 120, 40), "Close Thread"))
{
if (thread != null)
{
thread.Dispose();
thread = null;
}
}
}
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<!--
Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/
-->
<html>
<head>
<title>CSS Reference</title>
<link rel="author" title="Daniel Holbert" href="mailto:[email protected]">
<meta charset="utf-8">
<style>
.container {
display: block;
padding: 1px 2px;
border: 1px solid black;
background: yellow;
margin-bottom: 5px;
margin-right: 5px;
float: left; /* For testing in "rows" of containers */
}
br { clear: both }
.big > .container {
height: 10px;
width: 16px;
}
.small > .container {
height: 2px;
width: 4px;
}
.container > * {
background: teal;
height: 6px;
width: 8px;
}
.big > .container > * { margin-left: 8px; }
.small > .container > * { margin-left: -4px; }
.big .alignStart { /* nothing special */ }
.big .alignCenter { margin-top: 2px }
.big .alignEnd { margin-top: 4px }
.small .alignStart { /* nothing special */ }
.small .alignCenter { margin-top: -2px }
.small .alignEnd { margin-top: -4px }
</style>
</head>
<body>
<div class="big">
<!-- The various align-content values, from
https://www.w3.org/TR/css-align-3/#propdef-align-content -->
<!-- normal -->
<div class="container"><div class="alignCenter"><!--normal--></div></div>
<br>
<!-- <baseline-position> -->
<div class="container"><div class="alignStart"><!--baseline--></div></div>
<div class="container"><div class="alignEnd"><!--last baseline--></div></div>
<br>
<!-- <content-distribution> -->
<div class="container"><div class="alignStart"><!--space-between--></div></div>
<div class="container"><div class="alignCenter"><!--space-around--></div></div>
<div class="container"><div class="alignCenter"><!--space-evenly--></div></div>
<div class="container"><div class="alignCenter"><!--stretch (+ align-self:center)--></div></div>
<br>
<!-- <content-position>, part 1: -->
<div class="container"><div class="alignCenter"><!--center--></div></div>
<div class="container"><div class="alignStart"><!--start--></div></div>
<div class="container"><div class="alignEnd"><!--end--></div></div>
<br>
<!-- <content-position>, part 2: -->
<div class="container"><div class="alignEnd"><!--flex-start--></div></div>
<div class="container"><div class="alignStart"><!--flex-end--></div></div>
<div class="container"><div class="alignStart"><!--left--></div></div>
<div class="container"><div class="alignStart"><!--right--></div></div>
<br>
</div>
<div class="small">
<!-- The various align-content values, from
https://www.w3.org/TR/css-align-3/#propdef-align-content -->
<!-- normal -->
<div class="container"><div class="alignCenter"><!--normal--></div></div>
<br>
<!-- <baseline-position> -->
<div class="container"><div class="alignStart"><!--baseline--></div></div>
<div class="container"><div class="alignEnd"><!--last baseline--></div></div>
<br>
<!-- <content-distribution> -->
<div class="container"><div class="alignStart"><!--space-between--></div></div>
<div class="container"><div class="alignCenter"><!--space-around--></div></div>
<div class="container"><div class="alignCenter"><!--space-evenly--></div></div>
<div class="container"><div class="alignCenter"><!--stretch (+ align-self:center)--></div></div>
<br>
<!-- <content-position>, part 1: -->
<div class="container"><div class="alignCenter"><!--center--></div></div>
<div class="container"><div class="alignStart"><!--start--></div></div>
<div class="container"><div class="alignEnd"><!--end--></div></div>
<br>
<!-- <content-position>, part 2: -->
<div class="container"><div class="alignEnd"><!--flex-start--></div></div>
<div class="container"><div class="alignStart"><!--flex-end--></div></div>
<div class="container"><div class="alignStart"><!--left--></div></div>
<div class="container"><div class="alignStart"><!--right--></div></div>
<br>
</div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
phase name id description
---------- -- -----------
parser 1 parse source into ASTs, perform simple desugaring
jspretyper 2 capture pre-typer only tree info (for Scala.js)
namer 3 resolve names, attach symbols to named trees
packageobjects 4 load package objects
typer 5 the meat and potatoes: type the trees
jsinterop 6 prepare ASTs for JavaScript interop
patmat 7 translate match expressions
superaccessors 8 add super accessors in traits and nested classes
extmethods 9 add extension methods for inline classes
pickler 10 serialize symbol tables
refchecks 11 reference/override checking, translate nested objects
xplicitinnerjs 12 make references to inner JS classes explicit
uncurry 13 uncurry, translate function values to anonymous classes
tailcalls 14 replace tail calls by jumps
specialize 15 @specialized-driven class and method specialization
xplicitlocaljs 16 make references to local JS classes explicit
explicitouter 17 this refs to outer pointers
erasure 18 erase types, add interfaces for traits
posterasure 19 clean up erased inline classes
lazyvals 20 allocate bitmaps, translate lazy vals into lazified defs
lambdalift 21 move nested functions to top level
constructors 22 move field definitions into constructors
flatten 23 eliminate inner classes
mixin 24 mixin composition
jscode 25 generate JavaScript code from ASTs
cleanup 26 platform-specific cleanups, generate reflective calls
delambdafy 27 remove lambdas
icode 28 generate portable intermediate code
jvm 29 generate JVM bytecode
ploogin 30 A sample phase that does so many things it's kind of hard...
terminal 31 the last phase during a compilation run
| {
"pile_set_name": "Github"
} |
<?php
// $Id: calendar-day-node.tpl.php,v 1.1.2.2 2010/12/31 23:31:28 karens Exp $
/**
* @file
* Template to display a view item as a calendar day node.
*
* $node
* A node object for this calendar item. Note this is
* not a complete node object, but it will have $node->nid
* that you can use to load the full object, and
* $node->type to tell the content type of the node.
*
* $fields
* An array of information for every field selected in the 'Fields'
* section of this view, formatted as requested in the View setup.
*
* Calendar info for this individual calendar item is in local time --
* the user timezone where configurable timezones are allowed and set,
* otherwise the site timezone. If this item has extends over more than
* one day, it has been broken apart into separate nodes for each calendar
* date and calendar_start will be no earlier than the start of
* the current day and calendar_end will be no later than the end
* of the current day.
*
* $calendar_start - A formatted datetime start date for this item.
* i.e. '2008-05-12 05:26:15'.
* $calendar_end - A formatted datetime end date for this item,
* the same as the start date except for fields that have from/to
* fields defined, like Date module dates.
* $calendar_start_date - a PHP date object for the start time.
* $calendar_end_date - a PHP date object for the end time.
*
* You can use PHP date functions on the date object to display date
* information in other ways, like:
*
* print date_format($calendar_start_date, 'l, j F Y - g:ia');
*
* @see template_preprocess_calendar_day_node.
*/
$node_class = (isset($node->class)) ? ' ' . $node->class : '';
?>
<div class="item<?php print $node_class?>">
<div class="view-item view-item-<?php print $view->name ?>">
<div class="calendar dayview">
<?php print theme('calendar_stripe_stripe', $node); ?>
<div class="<?php print $node->date_id ?> contents">
<?php foreach ($fields as $field): ?>
<div id="<?php print $field['id']; ?>" class="view-field view-data-<?php print $field['id'] ?>">
<?php if ($field['label']): ?>
<div class="view-label-<?php print $field['id'] ?>"><?php print $field['label'] ?></div>
<?php endif; ?>
<?php print $field['data']; ?>
</div>
<?php endforeach; ?>
</div>
</div>
</div>
</div> | {
"pile_set_name": "Github"
} |
package model.events
{
import flash.utils.ByteArray;
import flash.events.Event;
// Decision
public class AvatarQuestEvent extends Event
{
public static const GET_QUEST:String = 'get_quest';
public static const USE_QUEST:String = 'use_quest';
public var obj:*;
public var id:int;
public function AvatarQuestEvent(type:String, o:* = null, i:int = 0, bubbles:Boolean = false, cancelable:Boolean = false)
{
id = i
obj = o;
super(type, bubbles, cancelable);
}
public override function toString():String {
return formatToString("AvatarQuestEvent", "type", "obj", "id", "bubbles", "cancelable");
}
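// Illustrative usage sketch (not part of the original class); the
// 'dispatcher' and 'questData' names below are assumptions:
//
//   dispatcher.addEventListener(AvatarQuestEvent.GET_QUEST, onGetQuest);
//   dispatcher.dispatchEvent(new AvatarQuestEvent(AvatarQuestEvent.GET_QUEST, questData, 42));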
}
} | {
"pile_set_name": "Github"
} |
//////////////////////////////////////////////////////////////////////////////
//
// This file is part of the Corona game engine.
// For overview and more information on licensing please refer to README.md
// Home page: https://github.com/coronalabs/corona
// Contact: [email protected]
//
//////////////////////////////////////////////////////////////////////////////
#include "Core/Rtt_Build.h"
#include "Rtt_AndroidDisplayObject.h"
#include "NativeToJavaBridge.h"
#include "AndroidDisplayObjectRegistry.h"
#include "Rtt_Lua.h"
#include "Rtt_Runtime.h"
#include "Rtt_RenderingStream.h"
// ----------------------------------------------------------------------------
namespace Rtt
{
// ----------------------------------------------------------------------------
AndroidDisplayObject::AndroidDisplayObject(
const Rect& bounds, AndroidDisplayObjectRegistry *displayObjectRegistry, NativeToJavaBridge *ntjb )
: PlatformDisplayObject(),
fSelfBounds( bounds),
fView( NULL ),
fDisplayObjectRegistry( displayObjectRegistry ),
fId( AndroidDisplayObjectRegistry::INVALID_ID ),
fNativeToJavaBridge(ntjb)
{
// Fetch a unique ID for this display object from the registry.
if (fDisplayObjectRegistry)
{
fId = fDisplayObjectRegistry->Register(this);
}
// Convert the given x/y coordinates from a top-left reference point to a center reference point.
Translate(bounds.xMin + Rtt_RealDiv2(bounds.Width()), bounds.yMin + Rtt_RealDiv2(bounds.Height()));
fSelfBounds.MoveCenterToOrigin();
}
AndroidDisplayObject::~AndroidDisplayObject()
{
// Have the Java display object destroyed first.
if (fId != AndroidDisplayObjectRegistry::INVALID_ID)
{
fNativeToJavaBridge->DisplayObjectDestroy(fId);
}
// Remove this display object from the registry.
if (fDisplayObjectRegistry)
{
fDisplayObjectRegistry->Unregister(fId);
}
}
void
AndroidDisplayObject::InitializeView( void * view )
{
Rtt_ASSERT( ! fView );
Rtt_ASSERT( view );
fView = view;
}
void
AndroidDisplayObject::SetFocus()
{
fNativeToJavaBridge->DisplayObjectSetFocus( fId, true );
}
void
AndroidDisplayObject::DidMoveOffscreen()
{
}
void
AndroidDisplayObject::WillMoveOnscreen()
{
}
bool
AndroidDisplayObject::CanCull() const
{
// Disable culling for all native display objects.
// Note: This is needed so that the Build() function will get called when a native object
// is being moved partially or completely offscreen.
return false;
}
void
AndroidDisplayObject::Prepare( const Display& display )
{
Super::Prepare( display );
if ( ShouldPrepare() )
{
// First, update this object's cached scale factors in case the app window has been resized.
Rtt::Runtime *runtimePointer = fNativeToJavaBridge->GetRuntime();
Preinitialize(runtimePointer->GetDisplay());
// Update the native object's screen bounds.
Rect bounds;
GetScreenBounds(bounds);
fNativeToJavaBridge->DisplayObjectUpdateScreenBounds(
fId, bounds.xMin, bounds.yMin, bounds.Width(), bounds.Height());
}
}
void
AndroidDisplayObject::Draw( Renderer& renderer ) const
{
}
void
AndroidDisplayObject::GetSelfBounds( Rect& rect ) const
{
rect = fSelfBounds;
}
void
AndroidDisplayObject::SetSelfBounds( Real width, Real height )
{
if ( width > Rtt_REAL_0 )
{
fSelfBounds.Initialize( Rtt_RealDiv2(width), Rtt_RealDiv2(GetGeometricProperty(kHeight)) );
}
if ( height > Rtt_REAL_0 )
{
fSelfBounds.Initialize( Rtt_RealDiv2(GetGeometricProperty(kWidth)), Rtt_RealDiv2(height) );
}
// Causes prepare to be called which does the actual resizing
Invalidate( kGeometryFlag | kStageBoundsFlag | kTransformFlag );
}
bool
AndroidDisplayObject::HasBackground() const
{
return fNativeToJavaBridge->DisplayObjectGetBackground(fId);
}
void
AndroidDisplayObject::SetBackgroundVisible(bool isVisible)
{
fNativeToJavaBridge->DisplayObjectSetBackground(fId, isVisible);
}
int
AndroidDisplayObject::ValueForKey( lua_State *L, const char key[] ) const
{
Rtt_ASSERT( key );
int result = 1;
if ( strcmp( "isVisible", key ) == 0 )
{
bool visible = fNativeToJavaBridge->DisplayObjectGetVisible( fId );
lua_pushboolean( L, visible );
}
else if ( strcmp( "alpha", key ) == 0 )
{
float alpha = fNativeToJavaBridge->DisplayObjectGetAlpha( fId );
lua_pushnumber( L, alpha );
}
else if ( strcmp( "hasBackground", key ) == 0 )
{
lua_pushboolean( L, HasBackground() ? 1 : 0 );
}
else
{
result = 0;
}
return result;
}
bool
AndroidDisplayObject::SetValueForKey( lua_State *L, const char key[], int valueIndex )
{
Rtt_ASSERT( key );
bool result = true;
if ( strcmp( "isVisible", key ) == 0 )
{
bool visible = lua_toboolean( L, valueIndex );
fNativeToJavaBridge->DisplayObjectSetVisible( fId, visible );
}
else if ( strcmp( "alpha", key ) == 0 )
{
float alpha = lua_tonumber( L, valueIndex );
fNativeToJavaBridge->DisplayObjectSetAlpha( fId, alpha );
}
else if ( strcmp( "hasBackground", key ) == 0 )
{
bool isVisible = lua_toboolean( L, valueIndex ) ? true : false;
SetBackgroundVisible(isVisible);
}
else
{
result = false;
}
return result;
}
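// Illustrative sketch of the Lua-side view of the two key/value bridges above;
// the variable name 'nativeObject' is an assumption, not something defined in
// this file:
//
//   nativeObject.isVisible = false   -- routed through SetValueForKey()
//   print(nativeObject.alpha)        -- routed through ValueForKey()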
// ----------------------------------------------------------------------------
} // namespace Rtt
// ----------------------------------------------------------------------------
| {
"pile_set_name": "Github"
} |
# Make an object with a NoteProperty X whose expression returns $null
$x = 1 | Select-Object -Property @{Name = 'X'; Expression = {$null}}
# Works
$x | Group-Object -Property X
| {
"pile_set_name": "Github"
} |
/**
* @file
* SNMP message processing (RFC1157).
*/
/*
* Copyright (c) 2006 Axon Digital Design B.V., The Netherlands.
* Copyright (c) 2016 Elias Oenal.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
*
* Author: Christiaan Simons <[email protected]>
* Martin Hentschel <[email protected]>
* Elias Oenal <[email protected]>
*/
#include "lwip/apps/snmp_opts.h"
#if LWIP_SNMP /* don't build if not configured for use in lwipopts.h */
#include "snmp_msg.h"
#include "snmp_asn1.h"
#include "snmp_core_priv.h"
#include "lwip/ip_addr.h"
#include "lwip/stats.h"
#if LWIP_SNMP_V3
#include "lwip/apps/snmpv3.h"
#include "snmpv3_priv.h"
#ifdef LWIP_HOOK_FILENAME
#include LWIP_HOOK_FILENAME
#endif
#endif
#include <string.h>
#define SNMP_V3_AUTH_FLAG 0x01
#define SNMP_V3_PRIV_FLAG 0x02
/* Security levels */
#define SNMP_V3_NOAUTHNOPRIV 0x00
#define SNMP_V3_AUTHNOPRIV SNMP_V3_AUTH_FLAG
#define SNMP_V3_AUTHPRIV (SNMP_V3_AUTH_FLAG | SNMP_V3_PRIV_FLAG)
/* public (non-static) constants */
/** SNMP community string */
const char *snmp_community = SNMP_COMMUNITY;
/** SNMP community string for write access */
const char *snmp_community_write = SNMP_COMMUNITY_WRITE;
/** SNMP community string for sending traps */
const char *snmp_community_trap = SNMP_COMMUNITY_TRAP;
snmp_write_callback_fct snmp_write_callback = NULL;
void* snmp_write_callback_arg = NULL;
#if LWIP_SNMP_CONFIGURE_VERSIONS
static u8_t v1_enabled = 1;
static u8_t v2c_enabled = 1;
static u8_t v3_enabled = 1;
static u8_t
snmp_version_enabled(u8_t version)
{
LWIP_ASSERT("Invalid SNMP version", (version == SNMP_VERSION_1) || (version == SNMP_VERSION_2c)
#if LWIP_SNMP_V3
|| (version == SNMP_VERSION_3)
#endif
);
if (version == SNMP_VERSION_1) {
return v1_enabled;
}
else if (version == SNMP_VERSION_2c) {
return v2c_enabled;
}
#if LWIP_SNMP_V3
else { /* version == SNMP_VERSION_3 */
return v3_enabled;
}
#endif
}
u8_t
snmp_v1_enabled(void)
{
return snmp_version_enabled(SNMP_VERSION_1);
}
u8_t
snmp_v2c_enabled(void)
{
return snmp_version_enabled(SNMP_VERSION_2c);
}
u8_t
snmp_v3_enabled(void)
{
return snmp_version_enabled(SNMP_VERSION_3);
}
static void
snmp_version_enable(u8_t version, u8_t enable)
{
LWIP_ASSERT("Invalid SNMP version", (version == SNMP_VERSION_1) || (version == SNMP_VERSION_2c)
#if LWIP_SNMP_V3
|| (version == SNMP_VERSION_3)
#endif
);
if (version == SNMP_VERSION_1) {
v1_enabled = enable;
}
else if (version == SNMP_VERSION_2c) {
v2c_enabled = enable;
}
#if LWIP_SNMP_V3
else { /* version == SNMP_VERSION_3 */
v3_enabled = enable;
}
#endif
}
void
snmp_v1_enable(u8_t enable)
{
snmp_version_enable(SNMP_VERSION_1, enable);
}
void
snmp_v2c_enable(u8_t enable)
{
snmp_version_enable(SNMP_VERSION_2c, enable);
}
void
snmp_v3_enable(u8_t enable)
{
snmp_version_enable(SNMP_VERSION_3, enable);
}
#endif
/**
* @ingroup snmp_core
* Returns current SNMP community string.
* @return current SNMP community string
*/
const char *
snmp_get_community(void)
{
return snmp_community;
}
/**
* @ingroup snmp_core
* Sets SNMP community string.
* The string itself (its storage) must be valid throughout the whole life of
* program (or until it is changed to sth else).
*
* @param community is a pointer to new community string
*/
void
snmp_set_community(const char * const community)
{
LWIP_ASSERT("community string is too long!", strlen(community) <= SNMP_MAX_COMMUNITY_STR_LEN);
snmp_community = community;
}
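/* Illustrative usage sketch (not part of this file): because only the pointer
 * is stored, the string passed in must outlive the agent, e.g. a string
 * literal or a static buffer; the community value below is an assumption:
 *
 *   static const char my_community[] = "private";
 *   snmp_set_community(my_community);
 */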
/**
* @ingroup snmp_core
* Returns current SNMP write-access community string.
* @return current SNMP write-access community string
*/
const char *
snmp_get_community_write(void)
{
return snmp_community_write;
}
/**
* @ingroup snmp_traps
* Returns current SNMP community string used for sending traps.
* @return current SNMP community string used for sending traps
*/
const char *
snmp_get_community_trap(void)
{
return snmp_community_trap;
}
/**
* @ingroup snmp_core
* Sets SNMP community string for write-access.
* The string itself (its storage) must be valid throughout the whole life of
* program (or until it is changed to sth else).
*
* @param community is a pointer to new write-access community string
*/
void
snmp_set_community_write(const char * const community)
{
LWIP_ASSERT("community string must not be NULL", community != NULL);
LWIP_ASSERT("community string is too long!", strlen(community) <= SNMP_MAX_COMMUNITY_STR_LEN);
snmp_community_write = community;
}
/**
* @ingroup snmp_traps
* Sets SNMP community string used for sending traps.
* The string itself (its storage) must be valid throughout the whole life of
* program (or until it is changed to sth else).
*
* @param community is a pointer to new trap community string
*/
void
snmp_set_community_trap(const char * const community)
{
LWIP_ASSERT("community string is too long!", strlen(community) <= SNMP_MAX_COMMUNITY_STR_LEN);
snmp_community_trap = community;
}
/**
* @ingroup snmp_core
* Callback fired on every successful write access
*/
void
snmp_set_write_callback(snmp_write_callback_fct write_callback, void* callback_arg)
{
snmp_write_callback = write_callback;
snmp_write_callback_arg = callback_arg;
}
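/* Illustrative registration sketch (not part of this file), assuming the
 * snmp_write_callback_fct signature declared in snmp.h (written OID, OID
 * length, user argument); the callback name is an assumption. A real callback
 * would typically persist the changed configuration:
 *
 *   static void my_write_cb(const u32_t *oid, u8_t oid_len, void *arg)
 *   {
 *     LWIP_UNUSED_ARG(oid);
 *     LWIP_UNUSED_ARG(oid_len);
 *     LWIP_UNUSED_ARG(arg);
 *   }
 *   snmp_set_write_callback(my_write_cb, NULL);
 */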
/* ----------------------------------------------------------------------- */
/* forward declarations */
/* ----------------------------------------------------------------------- */
static err_t snmp_process_get_request(struct snmp_request *request);
static err_t snmp_process_getnext_request(struct snmp_request *request);
static err_t snmp_process_getbulk_request(struct snmp_request *request);
static err_t snmp_process_set_request(struct snmp_request *request);
static err_t snmp_parse_inbound_frame(struct snmp_request *request);
static err_t snmp_prepare_outbound_frame(struct snmp_request *request);
static err_t snmp_complete_outbound_frame(struct snmp_request *request);
static void snmp_execute_write_callbacks(struct snmp_request *request);
/* ----------------------------------------------------------------------- */
/* implementation */
/* ----------------------------------------------------------------------- */
void
snmp_receive(void *handle, struct pbuf *p, const ip_addr_t *source_ip, u16_t port)
{
err_t err;
struct snmp_request request;
memset(&request, 0, sizeof(request));
request.handle = handle;
request.source_ip = source_ip;
request.source_port = port;
request.inbound_pbuf = p;
snmp_stats.inpkts++;
err = snmp_parse_inbound_frame(&request);
if (err == ERR_OK) {
err = snmp_prepare_outbound_frame(&request);
if (err == ERR_OK) {
if (request.error_status == SNMP_ERR_NOERROR) {
/* only process frame if we do not already have an error to return (e.g. all readonly) */
if (request.request_type == SNMP_ASN1_CONTEXT_PDU_GET_REQ) {
err = snmp_process_get_request(&request);
} else if (request.request_type == SNMP_ASN1_CONTEXT_PDU_GET_NEXT_REQ) {
err = snmp_process_getnext_request(&request);
} else if (request.request_type == SNMP_ASN1_CONTEXT_PDU_GET_BULK_REQ) {
err = snmp_process_getbulk_request(&request);
} else if (request.request_type == SNMP_ASN1_CONTEXT_PDU_SET_REQ) {
err = snmp_process_set_request(&request);
}
}
#if LWIP_SNMP_V3
else {
struct snmp_varbind vb;
vb.next = NULL;
vb.prev = NULL;
vb.type = SNMP_ASN1_TYPE_COUNTER32;
vb.value_len = sizeof(u32_t);
switch (request.error_status) {
case SNMP_ERR_AUTHORIZATIONERROR:
{
static const u32_t oid[] = { 1, 3, 6, 1, 6, 3, 15, 1, 1, 5, 0 };
snmp_oid_assign(&vb.oid, oid, LWIP_ARRAYSIZE(oid));
vb.value = &snmp_stats.wrongdigests;
}
break;
case SNMP_ERR_UNKNOWN_ENGINEID:
{
static const u32_t oid[] = { 1, 3, 6, 1, 6, 3, 15, 1, 1, 4, 0 };
snmp_oid_assign(&vb.oid, oid, LWIP_ARRAYSIZE(oid));
vb.value = &snmp_stats.unknownengineids;
}
break;
case SNMP_ERR_UNKNOWN_SECURITYNAME:
{
static const u32_t oid[] = { 1, 3, 6, 1, 6, 3, 15, 1, 1, 3, 0 };
snmp_oid_assign(&vb.oid, oid, LWIP_ARRAYSIZE(oid));
vb.value = &snmp_stats.unknownusernames;
}
break;
case SNMP_ERR_UNSUPPORTED_SECLEVEL:
{
static const u32_t oid[] = { 1, 3, 6, 1, 6, 3, 15, 1, 1, 1, 0 };
snmp_oid_assign(&vb.oid, oid, LWIP_ARRAYSIZE(oid));
vb.value = &snmp_stats.unsupportedseclevels;
}
break;
case SNMP_ERR_NOTINTIMEWINDOW:
{
static const u32_t oid[] = { 1, 3, 6, 1, 6, 3, 15, 1, 1, 2, 0 };
snmp_oid_assign(&vb.oid, oid, LWIP_ARRAYSIZE(oid));
vb.value = &snmp_stats.notintimewindows;
}
break;
case SNMP_ERR_DECRYIPTION_ERROR:
{
static const u32_t oid[] = { 1, 3, 6, 1, 6, 3, 15, 1, 1, 6, 0 };
snmp_oid_assign(&vb.oid, oid, LWIP_ARRAYSIZE(oid));
vb.value = &snmp_stats.decryptionerrors;
}
break;
default:
/* Unknown or unhandled error_status */
err = ERR_ARG;
}
if (err == ERR_OK) {
snmp_append_outbound_varbind(&(request.outbound_pbuf_stream), &vb);
request.error_status = SNMP_ERR_NOERROR;
}
request.request_out_type = (SNMP_ASN1_CLASS_CONTEXT | SNMP_ASN1_CONTENTTYPE_CONSTRUCTED | SNMP_ASN1_CONTEXT_PDU_REPORT);
request.request_id = request.msg_id;
}
#endif
if (err == ERR_OK) {
err = snmp_complete_outbound_frame(&request);
if (err == ERR_OK) {
err = snmp_sendto(request.handle, request.outbound_pbuf, request.source_ip, request.source_port);
if ((request.request_type == SNMP_ASN1_CONTEXT_PDU_SET_REQ)
&& (request.error_status == SNMP_ERR_NOERROR)
&& (snmp_write_callback != NULL)) {
/* raise write notification for all written objects */
snmp_execute_write_callbacks(&request);
}
}
}
}
if (request.outbound_pbuf != NULL) {
pbuf_free(request.outbound_pbuf);
}
}
}
static u8_t
snmp_msg_getnext_validate_node_inst(struct snmp_node_instance* node_instance, void* validate_arg)
{
if (((node_instance->access & SNMP_NODE_INSTANCE_ACCESS_READ) != SNMP_NODE_INSTANCE_ACCESS_READ) || (node_instance->get_value == NULL)) {
return SNMP_ERR_NOSUCHINSTANCE;
}
if ((node_instance->asn1_type == SNMP_ASN1_TYPE_COUNTER64) && (((struct snmp_request*)validate_arg)->version == SNMP_VERSION_1)) {
/* according to RFC 2089 skip Counter64 objects in GetNext requests from v1 clients */
return SNMP_ERR_NOSUCHINSTANCE;
}
return SNMP_ERR_NOERROR;
}
static void
snmp_process_varbind(struct snmp_request *request, struct snmp_varbind *vb, u8_t get_next)
{
err_t err;
struct snmp_node_instance node_instance;
memset(&node_instance, 0, sizeof(node_instance));
if (get_next) {
struct snmp_obj_id result_oid;
request->error_status = snmp_get_next_node_instance_from_oid(vb->oid.id, vb->oid.len, snmp_msg_getnext_validate_node_inst, request, &result_oid, &node_instance);
if (request->error_status == SNMP_ERR_NOERROR) {
snmp_oid_assign(&vb->oid, result_oid.id, result_oid.len);
}
} else {
request->error_status = snmp_get_node_instance_from_oid(vb->oid.id, vb->oid.len, &node_instance);
if (request->error_status == SNMP_ERR_NOERROR) {
/* use 'getnext_validate' method for validation to avoid code duplication (some checks have to be executed here) */
request->error_status = snmp_msg_getnext_validate_node_inst(&node_instance, request);
if (request->error_status != SNMP_ERR_NOERROR) {
if (node_instance.release_instance != NULL) {
node_instance.release_instance(&node_instance);
}
}
}
}
if (request->error_status != SNMP_ERR_NOERROR) {
if (request->error_status >= SNMP_VARBIND_EXCEPTION_OFFSET) {
if ((request->version == SNMP_VERSION_2c) || request->version == SNMP_VERSION_3) {
/* in SNMP v2c a varbind related exception is stored in varbind and not in frame header */
vb->type = (SNMP_ASN1_CONTENTTYPE_PRIMITIVE | SNMP_ASN1_CLASS_CONTEXT | (request->error_status & SNMP_VARBIND_EXCEPTION_MASK));
vb->value_len = 0;
err = snmp_append_outbound_varbind(&(request->outbound_pbuf_stream), vb);
if (err == ERR_OK) {
/* we stored the exception in varbind -> go on */
request->error_status = SNMP_ERR_NOERROR;
} else if (err == ERR_BUF) {
request->error_status = SNMP_ERR_TOOBIG;
} else {
request->error_status = SNMP_ERR_GENERROR;
}
}
} else {
/* according to RFC 1157/1905, all other errors only return genError */
request->error_status = SNMP_ERR_GENERROR;
}
} else {
s16_t len = node_instance.get_value(&node_instance, vb->value);
vb->type = node_instance.asn1_type;
if(len >= 0) {
vb->value_len = (u16_t)len; /* cast is OK because we checked >= 0 above */
LWIP_ASSERT("SNMP_MAX_VALUE_SIZE is configured too low", (vb->value_len & ~SNMP_GET_VALUE_RAW_DATA) <= SNMP_MAX_VALUE_SIZE);
err = snmp_append_outbound_varbind(&request->outbound_pbuf_stream, vb);
if (err == ERR_BUF) {
request->error_status = SNMP_ERR_TOOBIG;
} else if (err != ERR_OK) {
request->error_status = SNMP_ERR_GENERROR;
}
} else {
request->error_status = SNMP_ERR_GENERROR;
}
if (node_instance.release_instance != NULL) {
node_instance.release_instance(&node_instance);
}
}
}
/**
* Service an internal or external event for SNMP GET.
*
* @param request points to the associated message process state
*/
static err_t
snmp_process_get_request(struct snmp_request *request)
{
snmp_vb_enumerator_err_t err;
struct snmp_varbind vb;
vb.value = request->value_buffer;
LWIP_DEBUGF(SNMP_DEBUG, ("SNMP get request\n"));
while (request->error_status == SNMP_ERR_NOERROR) {
err = snmp_vb_enumerator_get_next(&request->inbound_varbind_enumerator, &vb);
if (err == SNMP_VB_ENUMERATOR_ERR_OK) {
if ((vb.type == SNMP_ASN1_TYPE_NULL) && (vb.value_len == 0)) {
snmp_process_varbind(request, &vb, 0);
} else {
request->error_status = SNMP_ERR_GENERROR;
}
} else if (err == SNMP_VB_ENUMERATOR_ERR_EOVB) {
/* no more varbinds in request */
break;
} else if (err == SNMP_VB_ENUMERATOR_ERR_ASN1ERROR) {
/* malformed ASN.1, don't answer */
return ERR_ARG;
} else {
request->error_status = SNMP_ERR_GENERROR;
}
}
return ERR_OK;
}
/**
 * Service an internal or external event for SNMP GETNEXT.
*
* @param request points to the associated message process state
*/
static err_t
snmp_process_getnext_request(struct snmp_request *request)
{
snmp_vb_enumerator_err_t err;
struct snmp_varbind vb;
vb.value = request->value_buffer;
LWIP_DEBUGF(SNMP_DEBUG, ("SNMP get-next request\n"));
while (request->error_status == SNMP_ERR_NOERROR) {
err = snmp_vb_enumerator_get_next(&request->inbound_varbind_enumerator, &vb);
if (err == SNMP_VB_ENUMERATOR_ERR_OK) {
if ((vb.type == SNMP_ASN1_TYPE_NULL) && (vb.value_len == 0)) {
snmp_process_varbind(request, &vb, 1);
} else {
request->error_status = SNMP_ERR_GENERROR;
}
} else if (err == SNMP_VB_ENUMERATOR_ERR_EOVB) {
/* no more varbinds in request */
break;
} else if (err == SNMP_VB_ENUMERATOR_ERR_ASN1ERROR) {
/* malformed ASN.1, don't answer */
return ERR_ARG;
} else {
request->error_status = SNMP_ERR_GENERROR;
}
}
return ERR_OK;
}
/**
 * Service an internal or external event for SNMP GETBULK.
*
* @param request points to the associated message process state
*/
static err_t
snmp_process_getbulk_request(struct snmp_request *request)
{
snmp_vb_enumerator_err_t err;
s32_t non_repeaters = request->non_repeaters;
s32_t repetitions;
u16_t repetition_offset = 0;
struct snmp_varbind_enumerator repetition_varbind_enumerator;
struct snmp_varbind vb;
vb.value = request->value_buffer;
if (SNMP_LWIP_GETBULK_MAX_REPETITIONS > 0) {
repetitions = LWIP_MIN(request->max_repetitions, SNMP_LWIP_GETBULK_MAX_REPETITIONS);
} else {
repetitions = request->max_repetitions;
}
LWIP_DEBUGF(SNMP_DEBUG, ("SNMP get-bulk request\n"));
/* process non repeaters and first repetition */
while (request->error_status == SNMP_ERR_NOERROR) {
if (non_repeaters == 0) {
repetition_offset = request->outbound_pbuf_stream.offset;
if (repetitions == 0) {
/* do not resolve repeaters when repetitions is set to 0 */
break;
}
repetitions--;
}
err = snmp_vb_enumerator_get_next(&request->inbound_varbind_enumerator, &vb);
if (err == SNMP_VB_ENUMERATOR_ERR_EOVB) {
/* no more varbinds in request */
break;
} else if (err == SNMP_VB_ENUMERATOR_ERR_ASN1ERROR) {
/* malformed ASN.1, don't answer */
return ERR_ARG;
} else if ((err != SNMP_VB_ENUMERATOR_ERR_OK) || (vb.type != SNMP_ASN1_TYPE_NULL) || (vb.value_len != 0)) {
request->error_status = SNMP_ERR_GENERROR;
} else {
snmp_process_varbind(request, &vb, 1);
non_repeaters--;
}
}
/* process repetitions > 1 */
while ((request->error_status == SNMP_ERR_NOERROR) && (repetitions > 0) && (request->outbound_pbuf_stream.offset != repetition_offset)) {
u8_t all_endofmibview = 1;
snmp_vb_enumerator_init(&repetition_varbind_enumerator, request->outbound_pbuf, repetition_offset, request->outbound_pbuf_stream.offset - repetition_offset);
repetition_offset = request->outbound_pbuf_stream.offset; /* for next loop */
while (request->error_status == SNMP_ERR_NOERROR) {
vb.value = NULL; /* do NOT decode value (we enumerate outbound buffer here, so all varbinds have values assigned) */
err = snmp_vb_enumerator_get_next(&repetition_varbind_enumerator, &vb);
if (err == SNMP_VB_ENUMERATOR_ERR_OK) {
vb.value = request->value_buffer;
snmp_process_varbind(request, &vb, 1);
if (request->error_status != SNMP_ERR_NOERROR) {
/* already set correct error-index (here it cannot be taken from inbound varbind enumerator) */
request->error_index = request->non_repeaters + repetition_varbind_enumerator.varbind_count;
} else if (vb.type != (SNMP_ASN1_CONTENTTYPE_PRIMITIVE | SNMP_ASN1_CLASS_CONTEXT | SNMP_ASN1_CONTEXT_VARBIND_END_OF_MIB_VIEW)) {
all_endofmibview = 0;
}
} else if (err == SNMP_VB_ENUMERATOR_ERR_EOVB) {
/* no more varbinds in request */
break;
} else {
LWIP_DEBUGF(SNMP_DEBUG, ("Very strange, we cannot parse the varbind output that we created just before!"));
request->error_status = SNMP_ERR_GENERROR;
request->error_index = request->non_repeaters + repetition_varbind_enumerator.varbind_count;
}
}
if ((request->error_status == SNMP_ERR_NOERROR) && all_endofmibview) {
/* stop when all varbinds in a loop return EndOfMibView */
break;
}
repetitions--;
}
if (request->error_status == SNMP_ERR_TOOBIG) {
/* for GetBulk it is ok, if not all requested variables fit into the response -> just return the varbinds added so far */
request->error_status = SNMP_ERR_NOERROR;
}
return ERR_OK;
}
/**
* Service an internal or external event for SNMP SET.
*
* @param request points to the associated message process state
*/
static err_t
snmp_process_set_request(struct snmp_request *request)
{
snmp_vb_enumerator_err_t err;
struct snmp_varbind vb;
vb.value = request->value_buffer;
LWIP_DEBUGF(SNMP_DEBUG, ("SNMP set request\n"));
/* perform set test on all objects */
while (request->error_status == SNMP_ERR_NOERROR) {
err = snmp_vb_enumerator_get_next(&request->inbound_varbind_enumerator, &vb);
if (err == SNMP_VB_ENUMERATOR_ERR_OK) {
struct snmp_node_instance node_instance;
memset(&node_instance, 0, sizeof(node_instance));
request->error_status = snmp_get_node_instance_from_oid(vb.oid.id, vb.oid.len, &node_instance);
if (request->error_status == SNMP_ERR_NOERROR) {
if (node_instance.asn1_type != vb.type) {
request->error_status = SNMP_ERR_WRONGTYPE;
} else if (((node_instance.access & SNMP_NODE_INSTANCE_ACCESS_WRITE) != SNMP_NODE_INSTANCE_ACCESS_WRITE) || (node_instance.set_value == NULL)) {
request->error_status = SNMP_ERR_NOTWRITABLE;
} else {
if (node_instance.set_test != NULL) {
request->error_status = node_instance.set_test(&node_instance, vb.value_len, vb.value);
}
}
if (node_instance.release_instance != NULL) {
node_instance.release_instance(&node_instance);
}
}
} else if (err == SNMP_VB_ENUMERATOR_ERR_EOVB) {
/* no more varbinds in request */
break;
} else if (err == SNMP_VB_ENUMERATOR_ERR_INVALIDLENGTH) {
request->error_status = SNMP_ERR_WRONGLENGTH;
} else if (err == SNMP_VB_ENUMERATOR_ERR_ASN1ERROR) {
/* malformed ASN.1, don't answer */
return ERR_ARG;
} else {
request->error_status = SNMP_ERR_GENERROR;
}
}
/* perform real set operation on all objects */
if (request->error_status == SNMP_ERR_NOERROR) {
snmp_vb_enumerator_init(&request->inbound_varbind_enumerator, request->inbound_pbuf, request->inbound_varbind_offset, request->inbound_varbind_len);
while (request->error_status == SNMP_ERR_NOERROR) {
err = snmp_vb_enumerator_get_next(&request->inbound_varbind_enumerator, &vb);
if (err == SNMP_VB_ENUMERATOR_ERR_OK) {
struct snmp_node_instance node_instance;
memset(&node_instance, 0, sizeof(node_instance));
request->error_status = snmp_get_node_instance_from_oid(vb.oid.id, vb.oid.len, &node_instance);
if (request->error_status == SNMP_ERR_NOERROR) {
if (node_instance.set_value(&node_instance, vb.value_len, vb.value) != SNMP_ERR_NOERROR) {
if (request->inbound_varbind_enumerator.varbind_count == 1) {
request->error_status = SNMP_ERR_COMMITFAILED;
} else {
/* we cannot undo the set operations done so far */
request->error_status = SNMP_ERR_UNDOFAILED;
}
}
if (node_instance.release_instance != NULL) {
node_instance.release_instance(&node_instance);
}
}
} else if (err == SNMP_VB_ENUMERATOR_ERR_EOVB) {
/* no more varbinds in request */
break;
} else {
/* enumerating the varbinds worked the first time but not the second, although nothing should have changed in between ??? */
request->error_status = SNMP_ERR_GENERROR;
}
}
}
return ERR_OK;
}
#define PARSE_EXEC(code, retValue) \
if ((code) != ERR_OK) { \
LWIP_DEBUGF(SNMP_DEBUG, ("Malformed ASN.1 detected.\n")); \
snmp_stats.inasnparseerrs++; \
return retValue; \
}
#define PARSE_ASSERT(cond, retValue) \
if (!(cond)) { \
LWIP_DEBUGF(SNMP_DEBUG, ("SNMP parse assertion failed!: " # cond)); \
snmp_stats.inasnparseerrs++; \
return retValue; \
}
#define BUILD_EXEC(code, retValue) \
if ((code) != ERR_OK) { \
LWIP_DEBUGF(SNMP_DEBUG, ("SNMP error during creation of outbound frame!: " # code)); \
return retValue; \
}
#define IF_PARSE_EXEC(code) PARSE_EXEC(code, ERR_ARG)
#define IF_PARSE_ASSERT(code) PARSE_ASSERT(code, ERR_ARG)
/**
* Checks and decodes incoming SNMP message header, logs header errors.
*
 * @param request points to the current message request state
* @return
* - ERR_OK SNMP header is sane and accepted
* - ERR_VAL SNMP header is either malformed or rejected
*/
static err_t
snmp_parse_inbound_frame(struct snmp_request *request)
{
struct snmp_pbuf_stream pbuf_stream;
struct snmp_asn1_tlv tlv;
s32_t parent_tlv_value_len;
s32_t s32_value;
err_t err;
#if LWIP_SNMP_V3
snmpv3_auth_algo_t auth;
snmpv3_priv_algo_t priv;
#endif
IF_PARSE_EXEC(snmp_pbuf_stream_init(&pbuf_stream, request->inbound_pbuf, 0, request->inbound_pbuf->tot_len));
/* decode main container consisting of version, community and PDU */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT((tlv.type == SNMP_ASN1_TYPE_SEQUENCE) && (tlv.value_len == pbuf_stream.length));
parent_tlv_value_len = tlv.value_len;
/* decode version */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_INTEGER);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
IF_PARSE_EXEC(snmp_asn1_dec_s32t(&pbuf_stream, tlv.value_len, &s32_value));
if (((s32_value != SNMP_VERSION_1) &&
(s32_value != SNMP_VERSION_2c)
#if LWIP_SNMP_V3
&& (s32_value != SNMP_VERSION_3)
#endif
)
#if LWIP_SNMP_CONFIGURE_VERSIONS
|| (!snmp_version_enabled(s32_value))
#endif
)
{
/* unsupported SNMP version */
snmp_stats.inbadversions++;
return ERR_ARG;
}
request->version = (u8_t)s32_value;
#if LWIP_SNMP_V3
if (request->version == SNMP_VERSION_3) {
u16_t u16_value;
u16_t inbound_msgAuthenticationParameters_offset;
/* SNMPv3 doesn't use communities */
/* @todo: Differentiate read/write access */
strcpy((char*)request->community, snmp_community);
request->community_strlen = (u16_t)strlen(snmp_community);
/* RFC3414 globalData */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_SEQUENCE);
parent_tlv_value_len -= SNMP_ASN1_TLV_HDR_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
/* decode msgID */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_INTEGER);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
IF_PARSE_EXEC(snmp_asn1_dec_s32t(&pbuf_stream, tlv.value_len, &s32_value));
request->msg_id = s32_value;
/* decode msgMaxSize */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_INTEGER);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
IF_PARSE_EXEC(snmp_asn1_dec_s32t(&pbuf_stream, tlv.value_len, &s32_value));
request->msg_max_size = s32_value;
/* decode msgFlags */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_OCTET_STRING);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
IF_PARSE_EXEC(snmp_asn1_dec_s32t(&pbuf_stream, tlv.value_len, &s32_value));
request->msg_flags = (u8_t)s32_value;
/* decode msgSecurityModel */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_INTEGER);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
IF_PARSE_EXEC(snmp_asn1_dec_s32t(&pbuf_stream, tlv.value_len, &s32_value));
request->msg_security_model = s32_value;
/* RFC3414 msgSecurityParameters
* The User-based Security Model defines the contents of the OCTET
* STRING as a SEQUENCE.
*
* We skip the protective dummy OCTET STRING header
* to access the SEQUENCE header.
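 *
 * For orientation, the layout decoded below is roughly (sketch only):
 *
 *   OCTET STRING                       -- msgSecurityParameters wrapper
 *     SEQUENCE                         -- UsmSecurityParameters
 *       OCTET STRING msgAuthoritativeEngineID
 *       INTEGER      msgAuthoritativeEngineBoots
 *       INTEGER      msgAuthoritativeEngineTime
 *       OCTET STRING msgUserName
 *       OCTET STRING msgAuthenticationParameters
 *       OCTET STRING msgPrivacyParameters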
*/
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_OCTET_STRING);
parent_tlv_value_len -= SNMP_ASN1_TLV_HDR_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
/* msgSecurityParameters SEQUENCE header */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_SEQUENCE);
parent_tlv_value_len -= SNMP_ASN1_TLV_HDR_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
/* decode msgAuthoritativeEngineID */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_OCTET_STRING);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
IF_PARSE_EXEC(snmp_asn1_dec_raw(&pbuf_stream, tlv.value_len, request->msg_authoritative_engine_id,
&u16_value, SNMP_V3_MAX_ENGINE_ID_LENGTH));
request->msg_authoritative_engine_id_len = (u8_t)u16_value;
/* msgAuthoritativeEngineBoots */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_INTEGER);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
IF_PARSE_EXEC(snmp_asn1_dec_s32t(&pbuf_stream, tlv.value_len, &request->msg_authoritative_engine_boots));
/* msgAuthoritativeEngineTime */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_INTEGER);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
IF_PARSE_EXEC(snmp_asn1_dec_s32t(&pbuf_stream, tlv.value_len, &request->msg_authoritative_engine_time));
/* msgUserName */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_OCTET_STRING);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
IF_PARSE_EXEC(snmp_asn1_dec_raw(&pbuf_stream, tlv.value_len, request->msg_user_name,
&u16_value, SNMP_V3_MAX_USER_LENGTH));
request->msg_user_name_len = (u8_t)u16_value;
/* msgAuthenticationParameters */
memset(request->msg_authentication_parameters, 0, SNMP_V3_MAX_AUTH_PARAM_LENGTH);
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_OCTET_STRING);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
/* Remember position */
inbound_msgAuthenticationParameters_offset = pbuf_stream.offset;
LWIP_UNUSED_ARG(inbound_msgAuthenticationParameters_offset);
/* Read auth parameters */
/* IF_PARSE_ASSERT(tlv.value_len <= SNMP_V3_MAX_AUTH_PARAM_LENGTH); */
IF_PARSE_EXEC(snmp_asn1_dec_raw(&pbuf_stream, tlv.value_len, request->msg_authentication_parameters,
&u16_value, tlv.value_len));
request->msg_authentication_parameters_len = (u8_t)u16_value;
/* msgPrivacyParameters */
memset(request->msg_privacy_parameters, 0, SNMP_V3_MAX_PRIV_PARAM_LENGTH);
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_OCTET_STRING);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
IF_PARSE_EXEC(snmp_asn1_dec_raw(&pbuf_stream, tlv.value_len, request->msg_privacy_parameters,
&u16_value, SNMP_V3_MAX_PRIV_PARAM_LENGTH));
request->msg_privacy_parameters_len = (u8_t)u16_value;
/* validate securityParameters here (do this after decoding because we don't want to increase other counters for wrong frames)
* 1) securityParameters was correctly serialized if we reach here.
* 2) securityParameters are already cached.
 * 3) if msgAuthoritativeEngineID is unknown, zero-length or too long,
 *    report it via the usmStatsUnknownEngineIDs counter, see
 *    https://tools.ietf.org/html/rfc3414#section-7
*/
{
const char *eid;
u8_t eid_len;
snmpv3_get_engine_id(&eid, &eid_len);
if ((request->msg_authoritative_engine_id_len == 0) ||
(request->msg_authoritative_engine_id_len != eid_len) ||
(memcmp(eid, request->msg_authoritative_engine_id, eid_len) != 0)) {
snmp_stats.unknownengineids++;
request->msg_flags = 0; /* noauthnopriv */
request->error_status = SNMP_ERR_UNKNOWN_ENGINEID;
return ERR_OK;
}
}
/* 4) verify username */
if(snmpv3_get_user((char*)request->msg_user_name, &auth, NULL, &priv, NULL)) {
snmp_stats.unknownusernames++;
request->msg_flags = 0; /* noauthnopriv */
request->error_status = SNMP_ERR_UNKNOWN_SECURITYNAME;
return ERR_OK;
}
/* 5) verify security level */
switch(request->msg_flags & (SNMP_V3_AUTH_FLAG | SNMP_V3_PRIV_FLAG)) {
case SNMP_V3_NOAUTHNOPRIV:
if ((auth != SNMP_V3_AUTH_ALGO_INVAL) || (priv != SNMP_V3_PRIV_ALGO_INVAL)) {
/* Invalid security level for user */
snmp_stats.unsupportedseclevels++;
request->msg_flags = SNMP_V3_NOAUTHNOPRIV;
request->error_status = SNMP_ERR_UNSUPPORTED_SECLEVEL;
return ERR_OK;
}
break;
#if LWIP_SNMP_V3_CRYPTO
case SNMP_V3_AUTHNOPRIV:
if ((auth == SNMP_V3_AUTH_ALGO_INVAL) || (priv != SNMP_V3_PRIV_ALGO_INVAL)) {
/* Invalid security level for user */
snmp_stats.unsupportedseclevels++;
request->msg_flags = SNMP_V3_NOAUTHNOPRIV;
request->error_status = SNMP_ERR_UNSUPPORTED_SECLEVEL;
return ERR_OK;
}
break;
case SNMP_V3_AUTHPRIV:
if ((auth == SNMP_V3_AUTH_ALGO_INVAL) || (priv == SNMP_V3_PRIV_ALGO_INVAL)) {
/* Invalid security level for user */
snmp_stats.unsupportedseclevels++;
request->msg_flags = SNMP_V3_NOAUTHNOPRIV;
request->error_status = SNMP_ERR_UNSUPPORTED_SECLEVEL;
return ERR_OK;
}
break;
#endif
default:
snmp_stats.unsupportedseclevels++;
request->msg_flags = SNMP_V3_NOAUTHNOPRIV;
request->error_status = SNMP_ERR_UNSUPPORTED_SECLEVEL;
return ERR_OK;
}
/* 6) if securitylevel specifies authentication, authenticate message. */
#if LWIP_SNMP_V3_CRYPTO
if (request->msg_flags & SNMP_V3_AUTH_FLAG) {
const u8_t zero_arr[SNMP_V3_MAX_AUTH_PARAM_LENGTH] = { 0 };
u8_t key[20];
u8_t hmac[LWIP_MAX(SNMP_V3_SHA_LEN, SNMP_V3_MD5_LEN)];
struct snmp_pbuf_stream auth_stream;
if (request->msg_authentication_parameters_len > SNMP_V3_MAX_AUTH_PARAM_LENGTH) {
snmp_stats.wrongdigests++;
request->msg_flags = SNMP_V3_NOAUTHNOPRIV;
request->error_status = SNMP_ERR_AUTHORIZATIONERROR;
return ERR_OK;
}
/* Rewind stream */
IF_PARSE_EXEC(snmp_pbuf_stream_init(&auth_stream, request->inbound_pbuf, 0, request->inbound_pbuf->tot_len));
IF_PARSE_EXEC(snmp_pbuf_stream_seek_abs(&auth_stream, inbound_msgAuthenticationParameters_offset));
/* Set auth parameters to zero for verification */
IF_PARSE_EXEC(snmp_asn1_enc_raw(&auth_stream, zero_arr, request->msg_authentication_parameters_len));
/* Verify authentication */
IF_PARSE_EXEC(snmp_pbuf_stream_init(&auth_stream, request->inbound_pbuf, 0, request->inbound_pbuf->tot_len));
IF_PARSE_EXEC(snmpv3_get_user((char*)request->msg_user_name, &auth, key, NULL, NULL));
IF_PARSE_EXEC(snmpv3_auth(&auth_stream, request->inbound_pbuf->tot_len, key, auth, hmac));
if(memcmp(request->msg_authentication_parameters, hmac, SNMP_V3_MAX_AUTH_PARAM_LENGTH)) {
snmp_stats.wrongdigests++;
request->msg_flags = SNMP_V3_NOAUTHNOPRIV;
request->error_status = SNMP_ERR_AUTHORIZATIONERROR;
return ERR_OK;
}
/* 7) if securitylevel specifies authentication, verify engineboots, enginetime and lastenginetime */
{
s32_t boots = snmpv3_get_engine_boots_internal();
if ((request->msg_authoritative_engine_boots != boots) || (boots == 2147483647UL)) {
snmp_stats.notintimewindows++;
request->msg_flags = SNMP_V3_AUTHNOPRIV;
request->error_status = SNMP_ERR_NOTINTIMEWINDOW;
return ERR_OK;
}
}
{
s32_t time = snmpv3_get_engine_time_internal();
if (request->msg_authoritative_engine_time > time) {
snmp_stats.notintimewindows++;
request->msg_flags = SNMP_V3_AUTHNOPRIV;
request->error_status = SNMP_ERR_NOTINTIMEWINDOW;
return ERR_OK;
}
else if (time > 150) {
if (request->msg_authoritative_engine_time < time - 150) {
snmp_stats.notintimewindows++;
request->msg_flags = SNMP_V3_AUTHNOPRIV;
request->error_status = SNMP_ERR_NOTINTIMEWINDOW;
return ERR_OK;
}
}
}
}
#endif
/* 8) if securitylevel specifies privacy, decrypt message. */
#if LWIP_SNMP_V3_CRYPTO
if (request->msg_flags & SNMP_V3_PRIV_FLAG) {
/* Decrypt message */
u8_t key[20];
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_OCTET_STRING);
parent_tlv_value_len -= SNMP_ASN1_TLV_HDR_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
IF_PARSE_EXEC(snmpv3_get_user((char*)request->msg_user_name, NULL, NULL, &priv, key));
if(snmpv3_crypt(&pbuf_stream, tlv.value_len, key,
request->msg_privacy_parameters, request->msg_authoritative_engine_boots,
request->msg_authoritative_engine_time, priv, SNMP_V3_PRIV_MODE_DECRYPT) != ERR_OK) {
snmp_stats.decryptionerrors++;
request->msg_flags = SNMP_V3_AUTHNOPRIV;
request->error_status = SNMP_ERR_DECRYIPTION_ERROR;
return ERR_OK;
}
}
#endif
/* 9) calculate max size of scoped pdu?
* 10) securityname for user is retrieved from usertable?
* 11) security data is cached?
* 12)
*/
/* Scoped PDU
* Encryption context
*/
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_SEQUENCE);
parent_tlv_value_len -= SNMP_ASN1_TLV_HDR_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
/* contextEngineID */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_OCTET_STRING);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
IF_PARSE_EXEC(snmp_asn1_dec_raw(&pbuf_stream, tlv.value_len, request->context_engine_id,
&u16_value, SNMP_V3_MAX_ENGINE_ID_LENGTH));
request->context_engine_id_len = (u8_t)u16_value;
/* TODO: do we need to verify this contextengineid too? */
/* contextName */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_OCTET_STRING);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
IF_PARSE_EXEC(snmp_asn1_dec_raw(&pbuf_stream, tlv.value_len, request->context_name,
&u16_value, SNMP_V3_MAX_ENGINE_ID_LENGTH));
request->context_name_len = (u8_t)u16_value;
/* TODO: do we need to verify this contextname too? */
} else
#endif
{
/* decode community */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_OCTET_STRING);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
err = snmp_asn1_dec_raw(&pbuf_stream, tlv.value_len, request->community, &request->community_strlen, SNMP_MAX_COMMUNITY_STR_LEN);
if (err == ERR_MEM) {
/* community string does not fit in our buffer -> it's too long -> it's invalid */
request->community_strlen = 0;
snmp_pbuf_stream_seek(&pbuf_stream, tlv.value_len);
} else {
IF_PARSE_ASSERT(err == ERR_OK);
}
/* add zero terminator */
request->community[request->community_strlen] = 0;
}
/* decode PDU type (next container level) */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.value_len <= pbuf_stream.length);
request->inbound_padding_len = pbuf_stream.length - tlv.value_len;
parent_tlv_value_len = tlv.value_len;
/* validate PDU type */
switch(tlv.type) {
case (SNMP_ASN1_CLASS_CONTEXT | SNMP_ASN1_CONTENTTYPE_CONSTRUCTED | SNMP_ASN1_CONTEXT_PDU_GET_REQ):
/* GetRequest PDU */
snmp_stats.ingetrequests++;
break;
case (SNMP_ASN1_CLASS_CONTEXT | SNMP_ASN1_CONTENTTYPE_CONSTRUCTED | SNMP_ASN1_CONTEXT_PDU_GET_NEXT_REQ):
/* GetNextRequest PDU */
snmp_stats.ingetnexts++;
break;
case (SNMP_ASN1_CLASS_CONTEXT | SNMP_ASN1_CONTENTTYPE_CONSTRUCTED | SNMP_ASN1_CONTEXT_PDU_GET_BULK_REQ):
/* GetBulkRequest PDU */
if (request->version < SNMP_VERSION_2c) {
/* RFC2089: invalid, drop packet */
return ERR_ARG;
}
break;
case (SNMP_ASN1_CLASS_CONTEXT | SNMP_ASN1_CONTENTTYPE_CONSTRUCTED | SNMP_ASN1_CONTEXT_PDU_SET_REQ):
/* SetRequest PDU */
snmp_stats.insetrequests++;
break;
default:
/* unsupported input PDU for this agent (no parse error) */
LWIP_DEBUGF(SNMP_DEBUG, ("Unknown/Invalid SNMP PDU type received: %d", tlv.type)); \
return ERR_ARG;
break;
}
request->request_type = tlv.type & SNMP_ASN1_DATATYPE_MASK;
request->request_out_type = (SNMP_ASN1_CLASS_CONTEXT | SNMP_ASN1_CONTENTTYPE_CONSTRUCTED | SNMP_ASN1_CONTEXT_PDU_GET_RESP);
/* validate community (do this after decoding PDU type because we don't want to increase 'inbadcommunitynames' for wrong frame types) */
if (request->community_strlen == 0) {
/* community string was too long or really empty */
snmp_stats.inbadcommunitynames++;
snmp_authfail_trap();
return ERR_ARG;
} else if (request->request_type == SNMP_ASN1_CONTEXT_PDU_SET_REQ) {
if (snmp_community_write[0] == 0) {
/* our write community is empty, that means all our objects are readonly */
request->error_status = SNMP_ERR_NOTWRITABLE;
request->error_index = 1;
} else if (strncmp(snmp_community_write, (const char*)request->community, SNMP_MAX_COMMUNITY_STR_LEN) != 0) {
/* community name does not match */
snmp_stats.inbadcommunitynames++;
snmp_authfail_trap();
return ERR_ARG;
}
} else {
if (strncmp(snmp_community, (const char*)request->community, SNMP_MAX_COMMUNITY_STR_LEN) != 0) {
/* community name does not match */
snmp_stats.inbadcommunitynames++;
snmp_authfail_trap();
return ERR_ARG;
}
}
/* decode request ID */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_INTEGER);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
IF_PARSE_EXEC(snmp_asn1_dec_s32t(&pbuf_stream, tlv.value_len, &request->request_id));
/* decode error status / non-repeaters */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_INTEGER);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
if (request->request_type == SNMP_ASN1_CONTEXT_PDU_GET_BULK_REQ) {
IF_PARSE_EXEC(snmp_asn1_dec_s32t(&pbuf_stream, tlv.value_len, &request->non_repeaters));
if (request->non_repeaters < 0) {
/* RFC 1905, 4.2.3 */
request->non_repeaters = 0;
}
} else {
/* only check that the value is valid, don't touch 'request->error_status', maybe a response error status was already set above */
IF_PARSE_EXEC(snmp_asn1_dec_s32t(&pbuf_stream, tlv.value_len, &s32_value));
IF_PARSE_ASSERT(s32_value == SNMP_ERR_NOERROR);
}
/* decode error index / max-repetitions */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT(tlv.type == SNMP_ASN1_TYPE_INTEGER);
parent_tlv_value_len -= SNMP_ASN1_TLV_LENGTH(tlv);
IF_PARSE_ASSERT(parent_tlv_value_len > 0);
if (request->request_type == SNMP_ASN1_CONTEXT_PDU_GET_BULK_REQ) {
IF_PARSE_EXEC(snmp_asn1_dec_s32t(&pbuf_stream, tlv.value_len, &request->max_repetitions));
if (request->max_repetitions < 0) {
/* RFC 1905, 4.2.3 */
request->max_repetitions = 0;
}
} else {
IF_PARSE_EXEC(snmp_asn1_dec_s32t(&pbuf_stream, tlv.value_len, &request->error_index));
IF_PARSE_ASSERT(s32_value == 0);
}
/* decode varbind-list type (next container level) */
IF_PARSE_EXEC(snmp_asn1_dec_tlv(&pbuf_stream, &tlv));
IF_PARSE_ASSERT((tlv.type == SNMP_ASN1_TYPE_SEQUENCE) && (tlv.value_len <= pbuf_stream.length));
request->inbound_varbind_offset = pbuf_stream.offset;
request->inbound_varbind_len = pbuf_stream.length - request->inbound_padding_len;
snmp_vb_enumerator_init(&(request->inbound_varbind_enumerator), request->inbound_pbuf, request->inbound_varbind_offset, request->inbound_varbind_len);
return ERR_OK;
}
#define OF_BUILD_EXEC(code) BUILD_EXEC(code, ERR_ARG)
static err_t
snmp_prepare_outbound_frame(struct snmp_request *request)
{
struct snmp_asn1_tlv tlv;
struct snmp_pbuf_stream* pbuf_stream = &(request->outbound_pbuf_stream);
/* try allocating pbuf(s) for maximum response size */
request->outbound_pbuf = pbuf_alloc(PBUF_TRANSPORT, 1472, PBUF_RAM);
if (request->outbound_pbuf == NULL) {
return ERR_MEM;
}
snmp_pbuf_stream_init(pbuf_stream, request->outbound_pbuf, 0, request->outbound_pbuf->tot_len);
/* 'Message' sequence */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_SEQUENCE, 3, 0);
OF_BUILD_EXEC( snmp_ans1_enc_tlv(pbuf_stream, &tlv) );
/* version */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_INTEGER, 0, 0);
snmp_asn1_enc_s32t_cnt(request->version, &tlv.value_len);
OF_BUILD_EXEC( snmp_ans1_enc_tlv(pbuf_stream, &tlv) );
OF_BUILD_EXEC( snmp_asn1_enc_s32t(pbuf_stream, tlv.value_len, request->version) );
#if LWIP_SNMP_V3
if (request->version < SNMP_VERSION_3) {
#endif
/* community */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 0, request->community_strlen);
OF_BUILD_EXEC( snmp_ans1_enc_tlv(pbuf_stream, &tlv) );
OF_BUILD_EXEC( snmp_asn1_enc_raw(pbuf_stream, request->community, request->community_strlen) );
#if LWIP_SNMP_V3
} else {
const char* id;
/* globalData */
request->outbound_msg_global_data_offset = pbuf_stream->offset;
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_SEQUENCE, 1, 0);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
/* msgID */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_INTEGER, 0, 1);
snmp_asn1_enc_s32t_cnt(request->msg_id, &tlv.value_len);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
OF_BUILD_EXEC(snmp_asn1_enc_s32t(pbuf_stream, tlv.value_len, request->msg_id));
/* msgMaxSize */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_INTEGER, 0, 1);
snmp_asn1_enc_s32t_cnt(request->msg_max_size, &tlv.value_len);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
OF_BUILD_EXEC(snmp_asn1_enc_s32t(pbuf_stream, tlv.value_len, request->msg_max_size));
/* msgFlags */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 0, 1);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
OF_BUILD_EXEC(snmp_asn1_enc_raw(pbuf_stream, &request->msg_flags, 1));
/* msgSecurityModel */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_INTEGER, 0, 1);
snmp_asn1_enc_s32t_cnt(request->msg_security_model, &tlv.value_len);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
OF_BUILD_EXEC(snmp_asn1_enc_s32t(pbuf_stream, tlv.value_len, request->msg_security_model));
/* end of msgGlobalData */
request->outbound_msg_global_data_end = pbuf_stream->offset;
/* msgSecurityParameters */
request->outbound_msg_security_parameters_str_offset = pbuf_stream->offset;
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 1, 0);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
request->outbound_msg_security_parameters_seq_offset = pbuf_stream->offset;
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_SEQUENCE, 1, 0);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
/* msgAuthoritativeEngineID */
snmpv3_get_engine_id(&id, &request->msg_authoritative_engine_id_len);
MEMCPY(request->msg_authoritative_engine_id, id, request->msg_authoritative_engine_id_len);
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 0, request->msg_authoritative_engine_id_len);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
OF_BUILD_EXEC(snmp_asn1_enc_raw(pbuf_stream, request->msg_authoritative_engine_id, request->msg_authoritative_engine_id_len));
request->msg_authoritative_engine_time = snmpv3_get_engine_time();
request->msg_authoritative_engine_boots = snmpv3_get_engine_boots();
/* msgAuthoritativeEngineBoots */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_INTEGER, 0, 0);
snmp_asn1_enc_s32t_cnt(request->msg_authoritative_engine_boots, &tlv.value_len);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
OF_BUILD_EXEC(snmp_asn1_enc_s32t(pbuf_stream, tlv.value_len, request->msg_authoritative_engine_boots));
/* msgAuthoritativeEngineTime */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_INTEGER, 0, 0);
snmp_asn1_enc_s32t_cnt(request->msg_authoritative_engine_time, &tlv.value_len);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
OF_BUILD_EXEC(snmp_asn1_enc_s32t(pbuf_stream, tlv.value_len, request->msg_authoritative_engine_time));
/* msgUserName */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 0, request->msg_user_name_len);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
OF_BUILD_EXEC(snmp_asn1_enc_raw(pbuf_stream, request->msg_user_name, request->msg_user_name_len));
#if LWIP_SNMP_V3_CRYPTO
/* msgAuthenticationParameters */
if (request->msg_flags & SNMP_V3_AUTH_FLAG) {
memset(request->msg_authentication_parameters, 0, SNMP_V3_MAX_AUTH_PARAM_LENGTH);
request->outbound_msg_authentication_parameters_offset = pbuf_stream->offset;
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 1, SNMP_V3_MAX_AUTH_PARAM_LENGTH);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
OF_BUILD_EXEC(snmp_asn1_enc_raw(pbuf_stream, request->msg_authentication_parameters, SNMP_V3_MAX_AUTH_PARAM_LENGTH));
} else
#endif
{
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 0, 0);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
}
#if LWIP_SNMP_V3_CRYPTO
/* msgPrivacyParameters */
if (request->msg_flags & SNMP_V3_PRIV_FLAG) {
snmpv3_build_priv_param(request->msg_privacy_parameters);
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 0, SNMP_V3_MAX_PRIV_PARAM_LENGTH);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
OF_BUILD_EXEC(snmp_asn1_enc_raw(pbuf_stream, request->msg_privacy_parameters, SNMP_V3_MAX_PRIV_PARAM_LENGTH));
} else
#endif
{
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 0, 0);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv) );
}
/* End of msgSecurityParameters, so we can calculate the length of this sequence later */
request->outbound_msg_security_parameters_end = pbuf_stream->offset;
#if LWIP_SNMP_V3_CRYPTO
/* For encryption we have to encapsulate the payload in an octet string */
if (request->msg_flags & SNMP_V3_PRIV_FLAG) {
request->outbound_scoped_pdu_string_offset = pbuf_stream->offset;
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 3, 0);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
}
#endif
/* Scoped PDU
* Encryption context
*/
request->outbound_scoped_pdu_seq_offset = pbuf_stream->offset;
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_SEQUENCE, 3, 0);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
/* contextEngineID */
snmpv3_get_engine_id(&id, &request->context_engine_id_len);
MEMCPY(request->context_engine_id, id, request->context_engine_id_len);
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 0, request->context_engine_id_len);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
OF_BUILD_EXEC(snmp_asn1_enc_raw(pbuf_stream, request->context_engine_id, request->context_engine_id_len));
/* contextName */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 0, request->context_name_len);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
OF_BUILD_EXEC(snmp_asn1_enc_raw(pbuf_stream, request->context_name, request->context_name_len));
}
#endif
/* 'PDU' sequence */
request->outbound_pdu_offset = pbuf_stream->offset;
SNMP_ASN1_SET_TLV_PARAMS(tlv, request->request_out_type, 3, 0);
OF_BUILD_EXEC( snmp_ans1_enc_tlv(pbuf_stream, &tlv) );
/* request ID */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_INTEGER, 0, 0);
snmp_asn1_enc_s32t_cnt(request->request_id, &tlv.value_len);
OF_BUILD_EXEC( snmp_ans1_enc_tlv(pbuf_stream, &tlv) );
OF_BUILD_EXEC( snmp_asn1_enc_s32t(pbuf_stream, tlv.value_len, request->request_id) );
/* error status */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_INTEGER, 0, 1);
OF_BUILD_EXEC( snmp_ans1_enc_tlv(pbuf_stream, &tlv) );
request->outbound_error_status_offset = pbuf_stream->offset;
OF_BUILD_EXEC( snmp_pbuf_stream_write(pbuf_stream, 0) );
/* error index */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_INTEGER, 0, 1);
OF_BUILD_EXEC( snmp_ans1_enc_tlv(pbuf_stream, &tlv) );
request->outbound_error_index_offset = pbuf_stream->offset;
OF_BUILD_EXEC( snmp_pbuf_stream_write(pbuf_stream, 0) );
/* 'VarBindList' sequence */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_SEQUENCE, 3, 0);
OF_BUILD_EXEC( snmp_ans1_enc_tlv(pbuf_stream, &tlv) );
request->outbound_varbind_offset = pbuf_stream->offset;
return ERR_OK;
}
/** Calculate the length of a varbind list */
err_t
snmp_varbind_length(struct snmp_varbind *varbind, struct snmp_varbind_len *len)
{
/* calculate required lengths */
snmp_asn1_enc_oid_cnt(varbind->oid.id, varbind->oid.len, &len->oid_value_len);
snmp_asn1_enc_length_cnt(len->oid_value_len, &len->oid_len_len);
if (varbind->value_len == 0) {
len->value_value_len = 0;
} else if (varbind->value_len & SNMP_GET_VALUE_RAW_DATA) {
len->value_value_len = varbind->value_len & (~SNMP_GET_VALUE_RAW_DATA);
} else {
switch (varbind->type) {
case SNMP_ASN1_TYPE_INTEGER:
if (varbind->value_len != sizeof (s32_t)) {
return ERR_VAL;
}
snmp_asn1_enc_s32t_cnt(*((s32_t*) varbind->value), &len->value_value_len);
break;
case SNMP_ASN1_TYPE_COUNTER:
case SNMP_ASN1_TYPE_GAUGE:
case SNMP_ASN1_TYPE_TIMETICKS:
if (varbind->value_len != sizeof (u32_t)) {
return ERR_VAL;
}
snmp_asn1_enc_u32t_cnt(*((u32_t*) varbind->value), &len->value_value_len);
break;
case SNMP_ASN1_TYPE_OCTET_STRING:
case SNMP_ASN1_TYPE_IPADDR:
case SNMP_ASN1_TYPE_OPAQUE:
len->value_value_len = varbind->value_len;
break;
case SNMP_ASN1_TYPE_NULL:
if (varbind->value_len != 0) {
return ERR_VAL;
}
len->value_value_len = 0;
break;
case SNMP_ASN1_TYPE_OBJECT_ID:
if ((varbind->value_len & 0x03) != 0) {
return ERR_VAL;
}
snmp_asn1_enc_oid_cnt((u32_t*) varbind->value, varbind->value_len >> 2, &len->value_value_len);
break;
case SNMP_ASN1_TYPE_COUNTER64:
if (varbind->value_len != (2 * sizeof (u32_t))) {
return ERR_VAL;
}
snmp_asn1_enc_u64t_cnt((u32_t*) varbind->value, &len->value_value_len);
break;
default:
/* unsupported type */
return ERR_VAL;
}
}
snmp_asn1_enc_length_cnt(len->value_value_len, &len->value_len_len);
len->vb_value_len = 1 + len->oid_len_len + len->oid_value_len + 1 + len->value_len_len + len->value_value_len;
snmp_asn1_enc_length_cnt(len->vb_value_len, &len->vb_len_len);
return ERR_OK;
}
#define OVB_BUILD_EXEC(code) BUILD_EXEC(code, ERR_ARG)
err_t
snmp_append_outbound_varbind(struct snmp_pbuf_stream *pbuf_stream, struct snmp_varbind* varbind)
{
struct snmp_asn1_tlv tlv;
struct snmp_varbind_len len;
err_t err;
err = snmp_varbind_length(varbind, &len);
if (err != ERR_OK) {
return err;
}
/* check length already before adding first data because in case of GetBulk,
* data added so far is returned and therefore no partial data shall be added
*/
if ((1 + len.vb_len_len + len.vb_value_len) > pbuf_stream->length) {
return ERR_BUF;
}
/* 'VarBind' sequence */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_SEQUENCE, len.vb_len_len, len.vb_value_len);
OVB_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
/* VarBind OID */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OBJECT_ID, len.oid_len_len, len.oid_value_len);
OVB_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
OVB_BUILD_EXEC(snmp_asn1_enc_oid(pbuf_stream, varbind->oid.id, varbind->oid.len));
/* VarBind value */
SNMP_ASN1_SET_TLV_PARAMS(tlv, varbind->type, len.value_len_len, len.value_value_len);
OVB_BUILD_EXEC(snmp_ans1_enc_tlv(pbuf_stream, &tlv));
if (len.value_value_len > 0) {
if (varbind->value_len & SNMP_GET_VALUE_RAW_DATA) {
OVB_BUILD_EXEC(snmp_asn1_enc_raw(pbuf_stream, (u8_t*) varbind->value, len.value_value_len));
} else {
switch (varbind->type) {
case SNMP_ASN1_TYPE_INTEGER:
OVB_BUILD_EXEC(snmp_asn1_enc_s32t(pbuf_stream, len.value_value_len, *((s32_t*) varbind->value)));
break;
case SNMP_ASN1_TYPE_COUNTER:
case SNMP_ASN1_TYPE_GAUGE:
case SNMP_ASN1_TYPE_TIMETICKS:
OVB_BUILD_EXEC(snmp_asn1_enc_u32t(pbuf_stream, len.value_value_len, *((u32_t*) varbind->value)));
break;
case SNMP_ASN1_TYPE_OCTET_STRING:
case SNMP_ASN1_TYPE_IPADDR:
case SNMP_ASN1_TYPE_OPAQUE:
OVB_BUILD_EXEC(snmp_asn1_enc_raw(pbuf_stream, (u8_t*) varbind->value, len.value_value_len));
len.value_value_len = varbind->value_len;
break;
case SNMP_ASN1_TYPE_OBJECT_ID:
OVB_BUILD_EXEC(snmp_asn1_enc_oid(pbuf_stream, (u32_t*) varbind->value, varbind->value_len / sizeof (u32_t)));
break;
case SNMP_ASN1_TYPE_COUNTER64:
OVB_BUILD_EXEC(snmp_asn1_enc_u64t(pbuf_stream, len.value_value_len, (u32_t*) varbind->value));
break;
default:
LWIP_ASSERT("Unknown variable type", 0);
break;
}
}
}
return ERR_OK;
}
static err_t
snmp_complete_outbound_frame(struct snmp_request *request)
{
struct snmp_asn1_tlv tlv;
u16_t frame_size;
u8_t outbound_padding = 0;
if (request->version == SNMP_VERSION_1) {
if (request->error_status != SNMP_ERR_NOERROR) {
/* map v2c error codes to v1 compliant error code (according to RFC 2089) */
switch (request->error_status) {
/* mapping of implementation specific "virtual" error codes
* (during processing of frame we already stored them in error_status field,
* so no need to check all varbinds here for those exceptions as suggested by RFC) */
case SNMP_ERR_NOSUCHINSTANCE:
case SNMP_ERR_NOSUCHOBJECT:
case SNMP_ERR_ENDOFMIBVIEW:
request->error_status = SNMP_ERR_NOSUCHNAME;
break;
/* mapping according to RFC */
case SNMP_ERR_WRONGVALUE:
case SNMP_ERR_WRONGENCODING:
case SNMP_ERR_WRONGTYPE:
case SNMP_ERR_WRONGLENGTH:
case SNMP_ERR_INCONSISTENTVALUE:
request->error_status = SNMP_ERR_BADVALUE;
break;
case SNMP_ERR_NOACCESS:
case SNMP_ERR_NOTWRITABLE:
case SNMP_ERR_NOCREATION:
case SNMP_ERR_INCONSISTENTNAME:
case SNMP_ERR_AUTHORIZATIONERROR:
request->error_status = SNMP_ERR_NOSUCHNAME;
break;
case SNMP_ERR_RESOURCEUNAVAILABLE:
case SNMP_ERR_COMMITFAILED:
case SNMP_ERR_UNDOFAILED:
default:
request->error_status = SNMP_ERR_GENERROR;
break;
}
}
} else {
if (request->request_type == SNMP_ASN1_CONTEXT_PDU_SET_REQ) {
/* map error codes according to RFC 1905 (4.2.5, The SetRequest-PDU): return 'NotWritable' for unknown OIDs */
switch (request->error_status) {
case SNMP_ERR_NOSUCHINSTANCE:
case SNMP_ERR_NOSUCHOBJECT:
case SNMP_ERR_ENDOFMIBVIEW:
request->error_status = SNMP_ERR_NOTWRITABLE;
break;
default:
break;
}
}
if (request->error_status >= SNMP_VARBIND_EXCEPTION_OFFSET) {
/* should never occur because v2 frames store exceptions directly inside varbinds and not as frame error_status */
LWIP_DEBUGF(SNMP_DEBUG, ("snmp_complete_outbound_frame() > Found v2 request with varbind exception code stored as error status!\n"));
return ERR_ARG;
}
}
if ((request->error_status != SNMP_ERR_NOERROR) || (request->request_type == SNMP_ASN1_CONTEXT_PDU_SET_REQ)) {
/* all inbound vars are returned in response without any modification for error responses and successful set requests*/
struct snmp_pbuf_stream inbound_stream;
OF_BUILD_EXEC( snmp_pbuf_stream_init(&inbound_stream, request->inbound_pbuf, request->inbound_varbind_offset, request->inbound_varbind_len) );
OF_BUILD_EXEC( snmp_pbuf_stream_init(&(request->outbound_pbuf_stream), request->outbound_pbuf, request->outbound_varbind_offset, request->outbound_pbuf->tot_len - request->outbound_varbind_offset) );
snmp_pbuf_stream_writeto(&inbound_stream, &(request->outbound_pbuf_stream), 0);
}
frame_size = request->outbound_pbuf_stream.offset;
#if LWIP_SNMP_V3 && LWIP_SNMP_V3_CRYPTO
/* Calculate padding for encryption */
if (request->version == SNMP_VERSION_3 && (request->msg_flags & SNMP_V3_PRIV_FLAG)) {
u8_t i;
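/* round the scoped PDU up to a multiple of 8 octets so the block-based
 * privacy (encryption) algorithm can process the whole frame */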
outbound_padding = (8 - (u8_t)((frame_size - request->outbound_scoped_pdu_seq_offset) & 0x07)) & 0x07;
for (i = 0; i < outbound_padding; i++) {
snmp_pbuf_stream_write(&request->outbound_pbuf_stream, 0);
}
}
#endif
/* complete missing length in 'Message' sequence ; 'Message' tlv is located at the beginning (offset 0) */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_SEQUENCE, 3, frame_size + outbound_padding - 1 - 3); /* - type - length_len(fixed, see snmp_prepare_outbound_frame()) */
OF_BUILD_EXEC( snmp_pbuf_stream_init(&(request->outbound_pbuf_stream), request->outbound_pbuf, 0, request->outbound_pbuf->tot_len) );
OF_BUILD_EXEC( snmp_ans1_enc_tlv(&(request->outbound_pbuf_stream), &tlv) );
#if LWIP_SNMP_V3
if (request->version == SNMP_VERSION_3) {
/* complete missing length in 'globalData' sequence */
/* - type - length_len(fixed, see snmp_prepare_outbound_frame()) */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_SEQUENCE, 1, request->outbound_msg_global_data_end
- request->outbound_msg_global_data_offset - 1 - 1);
OF_BUILD_EXEC(snmp_pbuf_stream_seek_abs(&(request->outbound_pbuf_stream), request->outbound_msg_global_data_offset));
OF_BUILD_EXEC(snmp_ans1_enc_tlv(&(request->outbound_pbuf_stream), &tlv));
/* complete missing length in 'msgSecurityParameters' sequence */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 1, request->outbound_msg_security_parameters_end
- request->outbound_msg_security_parameters_str_offset - 1 - 1);
OF_BUILD_EXEC(snmp_pbuf_stream_seek_abs(&(request->outbound_pbuf_stream), request->outbound_msg_security_parameters_str_offset));
OF_BUILD_EXEC(snmp_ans1_enc_tlv(&(request->outbound_pbuf_stream), &tlv));
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_SEQUENCE, 1, request->outbound_msg_security_parameters_end
- request->outbound_msg_security_parameters_seq_offset - 1 - 1);
OF_BUILD_EXEC(snmp_pbuf_stream_seek_abs(&(request->outbound_pbuf_stream), request->outbound_msg_security_parameters_seq_offset));
OF_BUILD_EXEC(snmp_ans1_enc_tlv(&(request->outbound_pbuf_stream), &tlv));
/* complete missing length in scoped PDU sequence */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_SEQUENCE, 3, frame_size - request->outbound_scoped_pdu_seq_offset - 1 - 3);
OF_BUILD_EXEC(snmp_pbuf_stream_seek_abs(&(request->outbound_pbuf_stream), request->outbound_scoped_pdu_seq_offset));
OF_BUILD_EXEC(snmp_ans1_enc_tlv(&(request->outbound_pbuf_stream), &tlv));
}
#endif
/* complete missing length in 'PDU' sequence */
SNMP_ASN1_SET_TLV_PARAMS(tlv, request->request_out_type, 3,
frame_size - request->outbound_pdu_offset - 1 - 3); /* - type - length_len(fixed, see snmp_prepare_outbound_frame()) */
OF_BUILD_EXEC( snmp_pbuf_stream_seek_abs(&(request->outbound_pbuf_stream), request->outbound_pdu_offset) );
OF_BUILD_EXEC( snmp_ans1_enc_tlv(&(request->outbound_pbuf_stream), &tlv) );
/* process and encode final error status */
if (request->error_status != 0) {
u16_t len;
snmp_asn1_enc_s32t_cnt(request->error_status, &len);
if (len != 1) {
/* error, we only reserved one byte for it */
return ERR_ARG;
}
OF_BUILD_EXEC( snmp_pbuf_stream_seek_abs(&(request->outbound_pbuf_stream), request->outbound_error_status_offset) );
OF_BUILD_EXEC( snmp_asn1_enc_s32t(&(request->outbound_pbuf_stream), len, request->error_status) );
/* for compatibility with v1, log statistics; in v2 (RFC 1907) these statistics are obsolete */
switch (request->error_status) {
case SNMP_ERR_TOOBIG:
snmp_stats.outtoobigs++;
break;
case SNMP_ERR_NOSUCHNAME:
snmp_stats.outnosuchnames++;
break;
case SNMP_ERR_BADVALUE:
snmp_stats.outbadvalues++;
break;
case SNMP_ERR_GENERROR:
default:
snmp_stats.outgenerrs++;
break;
}
if (request->error_status == SNMP_ERR_TOOBIG) {
request->error_index = 0; /* defined by RFC 1157 */
} else if (request->error_index == 0) {
/* set index to varbind where error occurred (if not already set before, e.g. during GetBulk processing) */
request->error_index = request->inbound_varbind_enumerator.varbind_count;
}
} else {
if (request->request_type == SNMP_ASN1_CONTEXT_PDU_SET_REQ) {
snmp_stats.intotalsetvars += request->inbound_varbind_enumerator.varbind_count;
} else {
snmp_stats.intotalreqvars += request->inbound_varbind_enumerator.varbind_count;
}
}
/* encode final error index*/
if (request->error_index != 0) {
u16_t len;
snmp_asn1_enc_s32t_cnt(request->error_index, &len);
if (len != 1) {
/* error, we only reserved one byte for it */
return ERR_VAL;
}
OF_BUILD_EXEC( snmp_pbuf_stream_seek_abs(&(request->outbound_pbuf_stream), request->outbound_error_index_offset) );
OF_BUILD_EXEC( snmp_asn1_enc_s32t(&(request->outbound_pbuf_stream), len, request->error_index) );
}
/* complete missing length in 'VarBindList' sequence ; 'VarBindList' tlv is located directly before varbind offset */
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_SEQUENCE, 3, frame_size - request->outbound_varbind_offset);
OF_BUILD_EXEC( snmp_pbuf_stream_seek_abs(&(request->outbound_pbuf_stream), request->outbound_varbind_offset - 1 - 3) ); /* - type - length_len(fixed, see snmp_prepare_outbound_frame()) */
OF_BUILD_EXEC( snmp_ans1_enc_tlv(&(request->outbound_pbuf_stream), &tlv) );
/* Authenticate response */
#if LWIP_SNMP_V3 && LWIP_SNMP_V3_CRYPTO
/* Encrypt response */
if (request->version == SNMP_VERSION_3 && (request->msg_flags & SNMP_V3_PRIV_FLAG)) {
u8_t key[20];
snmpv3_priv_algo_t algo;
/* complete missing length in PDU sequence */
OF_BUILD_EXEC(snmp_pbuf_stream_init(&request->outbound_pbuf_stream, request->outbound_pbuf, 0, request->outbound_pbuf->tot_len));
OF_BUILD_EXEC(snmp_pbuf_stream_seek_abs(&(request->outbound_pbuf_stream), request->outbound_scoped_pdu_string_offset));
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 3, frame_size + outbound_padding
- request->outbound_scoped_pdu_string_offset - 1 - 3);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(&(request->outbound_pbuf_stream), &tlv));
OF_BUILD_EXEC(snmpv3_get_user((char*)request->msg_user_name, NULL, NULL, &algo, key));
OF_BUILD_EXEC(snmpv3_crypt(&request->outbound_pbuf_stream, tlv.value_len, key,
request->msg_privacy_parameters, request->msg_authoritative_engine_boots,
request->msg_authoritative_engine_time, algo, SNMP_V3_PRIV_MODE_ENCRYPT));
}
if (request->version == SNMP_VERSION_3 && (request->msg_flags & SNMP_V3_AUTH_FLAG)) {
u8_t key[20];
snmpv3_auth_algo_t algo;
u8_t hmac[20];
OF_BUILD_EXEC(snmpv3_get_user((char*)request->msg_user_name, &algo, key, NULL, NULL));
OF_BUILD_EXEC(snmp_pbuf_stream_init(&(request->outbound_pbuf_stream),
request->outbound_pbuf, 0, request->outbound_pbuf->tot_len));
OF_BUILD_EXEC(snmpv3_auth(&request->outbound_pbuf_stream, frame_size + outbound_padding, key, algo, hmac));
MEMCPY(request->msg_authentication_parameters, hmac, SNMP_V3_MAX_AUTH_PARAM_LENGTH);
OF_BUILD_EXEC(snmp_pbuf_stream_init(&request->outbound_pbuf_stream,
request->outbound_pbuf, 0, request->outbound_pbuf->tot_len));
OF_BUILD_EXEC(snmp_pbuf_stream_seek_abs(&request->outbound_pbuf_stream,
request->outbound_msg_authentication_parameters_offset));
SNMP_ASN1_SET_TLV_PARAMS(tlv, SNMP_ASN1_TYPE_OCTET_STRING, 1, SNMP_V3_MAX_AUTH_PARAM_LENGTH);
OF_BUILD_EXEC(snmp_ans1_enc_tlv(&request->outbound_pbuf_stream, &tlv));
OF_BUILD_EXEC(snmp_asn1_enc_raw(&request->outbound_pbuf_stream,
request->msg_authentication_parameters, SNMP_V3_MAX_AUTH_PARAM_LENGTH));
}
#endif
pbuf_realloc(request->outbound_pbuf, frame_size + outbound_padding);
snmp_stats.outgetresponses++;
snmp_stats.outpkts++;
return ERR_OK;
}
static void
snmp_execute_write_callbacks(struct snmp_request *request)
{
struct snmp_varbind_enumerator inbound_varbind_enumerator;
struct snmp_varbind vb;
snmp_vb_enumerator_init(&inbound_varbind_enumerator, request->inbound_pbuf, request->inbound_varbind_offset, request->inbound_varbind_len);
vb.value = NULL; /* do NOT decode value (we enumerate outbound buffer here, so all varbinds have values assigned, which we don't need here) */
while (snmp_vb_enumerator_get_next(&inbound_varbind_enumerator, &vb) == SNMP_VB_ENUMERATOR_ERR_OK) {
snmp_write_callback(vb.oid.id, vb.oid.len, snmp_write_callback_arg);
}
}
/* ----------------------------------------------------------------------- */
/* VarBind enumerator methods */
/* ----------------------------------------------------------------------- */
void
snmp_vb_enumerator_init(struct snmp_varbind_enumerator* enumerator, struct pbuf* p, u16_t offset, u16_t length)
{
snmp_pbuf_stream_init(&(enumerator->pbuf_stream), p, offset, length);
enumerator->varbind_count = 0;
}
#define VB_PARSE_EXEC(code) PARSE_EXEC(code, SNMP_VB_ENUMERATOR_ERR_ASN1ERROR)
#define VB_PARSE_ASSERT(code) PARSE_ASSERT(code, SNMP_VB_ENUMERATOR_ERR_ASN1ERROR)
snmp_vb_enumerator_err_t
snmp_vb_enumerator_get_next(struct snmp_varbind_enumerator* enumerator, struct snmp_varbind* varbind)
{
struct snmp_asn1_tlv tlv;
u16_t varbind_len;
err_t err;
if (enumerator->pbuf_stream.length == 0)
{
return SNMP_VB_ENUMERATOR_ERR_EOVB;
}
enumerator->varbind_count++;
/* decode varbind itself (parent container of a varbind) */
VB_PARSE_EXEC(snmp_asn1_dec_tlv(&(enumerator->pbuf_stream), &tlv));
VB_PARSE_ASSERT((tlv.type == SNMP_ASN1_TYPE_SEQUENCE) && (tlv.value_len <= enumerator->pbuf_stream.length));
varbind_len = tlv.value_len;
/* decode varbind name (object id) */
VB_PARSE_EXEC(snmp_asn1_dec_tlv(&(enumerator->pbuf_stream), &tlv));
VB_PARSE_ASSERT((tlv.type == SNMP_ASN1_TYPE_OBJECT_ID) && (SNMP_ASN1_TLV_LENGTH(tlv) < varbind_len) && (tlv.value_len < enumerator->pbuf_stream.length));
VB_PARSE_EXEC(snmp_asn1_dec_oid(&(enumerator->pbuf_stream), tlv.value_len, varbind->oid.id, &(varbind->oid.len), SNMP_MAX_OBJ_ID_LEN));
varbind_len -= SNMP_ASN1_TLV_LENGTH(tlv);
/* decode varbind value */
VB_PARSE_EXEC(snmp_asn1_dec_tlv(&(enumerator->pbuf_stream), &tlv));
VB_PARSE_ASSERT((SNMP_ASN1_TLV_LENGTH(tlv) == varbind_len) && (tlv.value_len <= enumerator->pbuf_stream.length));
varbind->type = tlv.type;
/* shall the value be decoded ? */
if (varbind->value != NULL) {
switch (varbind->type) {
case SNMP_ASN1_TYPE_INTEGER:
VB_PARSE_EXEC(snmp_asn1_dec_s32t(&(enumerator->pbuf_stream), tlv.value_len, (s32_t*)varbind->value));
varbind->value_len = sizeof(s32_t);
break;
case SNMP_ASN1_TYPE_COUNTER:
case SNMP_ASN1_TYPE_GAUGE:
case SNMP_ASN1_TYPE_TIMETICKS:
VB_PARSE_EXEC(snmp_asn1_dec_u32t(&(enumerator->pbuf_stream), tlv.value_len, (u32_t*)varbind->value));
varbind->value_len = sizeof(u32_t);
break;
case SNMP_ASN1_TYPE_OCTET_STRING:
case SNMP_ASN1_TYPE_OPAQUE:
err = snmp_asn1_dec_raw(&(enumerator->pbuf_stream), tlv.value_len, (u8_t*)varbind->value, &varbind->value_len, SNMP_MAX_VALUE_SIZE);
if (err == ERR_MEM) {
return SNMP_VB_ENUMERATOR_ERR_INVALIDLENGTH;
}
VB_PARSE_ASSERT(err == ERR_OK);
break;
case SNMP_ASN1_TYPE_NULL:
varbind->value_len = 0;
break;
case SNMP_ASN1_TYPE_OBJECT_ID:
/* misuse tlv.length_len as OID_length transporter */
err = snmp_asn1_dec_oid(&(enumerator->pbuf_stream), tlv.value_len, (u32_t*)varbind->value, &tlv.length_len, SNMP_MAX_OBJ_ID_LEN);
if (err == ERR_MEM) {
return SNMP_VB_ENUMERATOR_ERR_INVALIDLENGTH;
}
VB_PARSE_ASSERT(err == ERR_OK);
varbind->value_len = tlv.length_len * sizeof(u32_t);
break;
case SNMP_ASN1_TYPE_IPADDR:
if (tlv.value_len == 4) {
/* must be exactly 4 octets! */
VB_PARSE_EXEC(snmp_asn1_dec_raw(&(enumerator->pbuf_stream), tlv.value_len, (u8_t*)varbind->value, &varbind->value_len, SNMP_MAX_VALUE_SIZE));
} else {
VB_PARSE_ASSERT(0);
}
break;
case SNMP_ASN1_TYPE_COUNTER64:
VB_PARSE_EXEC(snmp_asn1_dec_u64t(&(enumerator->pbuf_stream), tlv.value_len, (u32_t*)varbind->value));
varbind->value_len = 2 * sizeof(u32_t);
break;
default:
VB_PARSE_ASSERT(0);
break;
}
} else {
snmp_pbuf_stream_seek(&(enumerator->pbuf_stream), tlv.value_len);
varbind->value_len = tlv.value_len;
}
return SNMP_VB_ENUMERATOR_ERR_OK;
}
#endif /* LWIP_SNMP */
| {
"pile_set_name": "Github"
} |
name: build
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
new:
runs-on: ubuntu-latest
name: Testing New Projects
steps:
- name: Set up Golang
uses: actions/[email protected]
with:
go-version: 1.13
- uses: actions/[email protected]
with:
path: go/src/sigs.k8s.io/apiserver-builder-alpha/
- uses: actions/cache@v2
with:
path: |
${{ github.workspace }}/go/pkg/mod
${{ github.workspace }}/.cache/bazel
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
restore-keys: |
${{ runner.os }}-go-
- name: Workspace Preparing
env:
GOPATH: ${{ github.workspace }}/go
working-directory: ${{ github.workspace }}/go/src/sigs.k8s.io/apiserver-builder-alpha/
run: bash scripts/workspace-install.sh
- name: Install
working-directory: ${{ github.workspace }}/go/src/sigs.k8s.io/apiserver-builder-alpha/
env:
GOPATH: ${{ github.workspace }}/go
run: make install
- name: Testing on new project
working-directory: ${{ github.workspace }}/go/src/sigs.k8s.io/apiserver-builder-alpha/test
env:
GOPATH: ${{ github.workspace }}/go
GOBIN: ${{ github.workspace }}/go/bin
run: |
export PATH=${PATH}:${GOBIN}
make test
basic-example-build:
runs-on: ubuntu-latest
name: Testing Basic Example
steps:
- name: Set up Golang
uses: actions/[email protected]
with:
go-version: 1.13
- uses: actions/[email protected]
with:
path: go/src/sigs.k8s.io/apiserver-builder-alpha/
- uses: actions/cache@v2
with:
path: |
${{ github.workspace }}/go/pkg/mod
${{ github.workspace }}/.cache/bazel
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
restore-keys: |
${{ runner.os }}-go-
- name: Workspace Preparing
env:
GOPATH: ${{ github.workspace }}/go
working-directory: ${{ github.workspace }}/go/src/sigs.k8s.io/apiserver-builder-alpha/
run: bash scripts/workspace-install.sh
- name: Install
working-directory: ${{ github.workspace }}/go/src/sigs.k8s.io/apiserver-builder-alpha/
env:
GOPATH: ${{ github.workspace }}/go
run: make install
- name: Testing on examples
working-directory: ${{ github.workspace }}/go/src/sigs.k8s.io/apiserver-builder-alpha/example/basic
env:
GOPATH: ${{ github.workspace }}/go
GOBIN: ${{ github.workspace }}/go/bin
run: |
export PATH=${PATH}:${GOBIN}
make test
basic-example-container:
runs-on: ubuntu-latest
name: Testing Basic Example Container Build
steps:
- name: Set up Golang
uses: actions/[email protected]
with:
go-version: 1.13
- uses: actions/[email protected]
with:
path: go/src/sigs.k8s.io/apiserver-builder-alpha/
- name: Create k8s Kind Cluster
uses: helm/[email protected]
- name: Workspace Preparing
env:
GOPATH: ${{ github.workspace }}/go
working-directory: ${{ github.workspace }}/go/src/sigs.k8s.io/apiserver-builder-alpha/
run: bash scripts/workspace-install.sh
- name: Install
working-directory: ${{ github.workspace }}/go/src/sigs.k8s.io/apiserver-builder-alpha/
env:
GOPATH: ${{ github.workspace }}/go
run: make install
- name: Building and running container images
working-directory: ${{ github.workspace }}/go/src/sigs.k8s.io/apiserver-builder-alpha/example/basic
env:
GOPATH: ${{ github.workspace }}/go
GOBIN: ${{ github.workspace }}/go/bin
run: |
export PATH=${PATH}:${GOBIN}
export IMAGE="example.io/basic"
apiserver-boot build container --image ${IMAGE}
kind load docker-image ${IMAGE} --name chart-testing
kubectl create -f config/apiserver.yaml
sleep 120 # Hold for 2 min
kubectl api-resources \
|| kubectl get pod -o name | grep basic-example | xargs -I {} kubectl logs {} -c apiserver
kubectl create -f sample/festival.yaml
sleep 5 # Hold for 5 sec
kubectl get fs festival-example -o jsonpath="{.spec.invited}" | grep 1 # successfully processed by controller
kine-example-container:
runs-on: ubuntu-latest
name: Testing Kine Example Container Build
services:
mariadb:
image: mariadb:latest
ports:
- 3306
env:
MYSQL_USER: test
MYSQL_PASSWORD: test
MYSQL_DATABASE: test
MYSQL_ROOT_PASSWORD: rootpassword
options: --health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3
steps:
- name: Set up Golang
uses: actions/[email protected]
with:
go-version: 1.13
- uses: actions/[email protected]
with:
path: go/src/sigs.k8s.io/apiserver-builder-alpha/
- name: Create k8s Kind Cluster
uses: helm/[email protected]
- name: Workspace Preparing
env:
GOPATH: ${{ github.workspace }}/go
working-directory: ${{ github.workspace }}/go/src/sigs.k8s.io/apiserver-builder-alpha/
run: |
bash scripts/workspace-install.sh
sudo systemctl start mysql.service
- name: Install
working-directory: ${{ github.workspace }}/go/src/sigs.k8s.io/apiserver-builder-alpha/
env:
GOPATH: ${{ github.workspace }}/go
run: make install
- name: Building and running container images
working-directory: ${{ github.workspace }}/go/src/sigs.k8s.io/apiserver-builder-alpha/example/kine
env:
GOPATH: ${{ github.workspace }}/go
GOBIN: ${{ github.workspace }}/go/bin
run: |
export PATH=${PATH}:${GOBIN}
export IMAGE="example.io/kine"
apiserver-boot build container --image ${IMAGE}
kind load docker-image ${IMAGE} --name chart-testing
kubectl create -f config/apiserver.yaml
sleep 120 # Hold for 2 min
kubectl api-resources \
|| kubectl get pod -o name | grep kine-example | xargs -I {} kubectl logs {} -c apiserver
#kubectl create -f sample/festival.yaml
#sleep 5 # Hold for 1min
#kubectl get fs festival-example -o jsonpath="{.spec.invited}" | grep 1 # successfully processed by controller
| {
"pile_set_name": "Github"
} |
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
module content.mojom;
enum RendererType {
// Media playback uses RendererImpl, which is the same as other
// platforms. For video codecs supported by v4l2 (e.g. h264), decoding is still
// accelerated by hardware. Video will be rendered on the graphics plane.
DEFAULT_RENDERER = 0,
// Enables Mojo Renderer (MojoRenderer) for media playback.
// On Chromecast, CMA renderer is enabled here.
MOJO_RENDERER = 1,
// Enables RemotingRenderer for playing remoting media in
// blink::WebMediaPlayer.
REMOTING_RENDERER = 2,
};
| {
"pile_set_name": "Github"
} |
//! This account contains the clock slot, epoch, and leader_schedule_epoch
//!
pub use crate::clock::Clock;
use crate::sysvar::Sysvar;
crate::declare_sysvar_id!("SysvarC1ock11111111111111111111111111111111", Clock);
impl Sysvar for Clock {}
| {
"pile_set_name": "Github"
} |
// Copyright David Abrahams 2002.
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef PREPROCESSOR_DWA200247_HPP
# define PREPROCESSOR_DWA200247_HPP
# include <boost/preprocessor/cat.hpp>
# include <boost/preprocessor/comma_if.hpp>
# include <boost/preprocessor/repeat.hpp>
# include <boost/preprocessor/tuple/elem.hpp>
// stuff that should be in the preprocessor library
# define BOOST_PYTHON_APPLY(x) BOOST_PP_CAT(BOOST_PYTHON_APPLY_, x)
# define BOOST_PYTHON_APPLY_BOOST_PYTHON_ITEM(v) v
# define BOOST_PYTHON_APPLY_BOOST_PYTHON_NIL
// cv-qualifiers
# if !defined(__MWERKS__) || __MWERKS__ > 0x2407
# define BOOST_PYTHON_CV_COUNT 4
# else
# define BOOST_PYTHON_CV_COUNT 1
# endif
# ifndef BOOST_PYTHON_MAX_ARITY
# define BOOST_PYTHON_MAX_ARITY 15
# endif
# ifndef BOOST_PYTHON_MAX_BASES
# define BOOST_PYTHON_MAX_BASES 10
# endif
# define BOOST_PYTHON_CV_QUALIFIER(i) \
BOOST_PYTHON_APPLY( \
BOOST_PP_TUPLE_ELEM(4, i, BOOST_PYTHON_CV_QUALIFIER_I) \
)
# define BOOST_PYTHON_CV_QUALIFIER_I \
( \
BOOST_PYTHON_NIL, \
BOOST_PYTHON_ITEM(const), \
BOOST_PYTHON_ITEM(volatile), \
BOOST_PYTHON_ITEM(const volatile) \
)
// enumerators
# define BOOST_PYTHON_UNARY_ENUM(c, text) BOOST_PP_REPEAT(c, BOOST_PYTHON_UNARY_ENUM_I, text)
# define BOOST_PYTHON_UNARY_ENUM_I(z, n, text) BOOST_PP_COMMA_IF(n) text ## n
# define BOOST_PYTHON_BINARY_ENUM(c, a, b) BOOST_PP_REPEAT(c, BOOST_PYTHON_BINARY_ENUM_I, (a, b))
# define BOOST_PYTHON_BINARY_ENUM_I(z, n, _) BOOST_PP_COMMA_IF(n) BOOST_PP_CAT(BOOST_PP_TUPLE_ELEM(2, 0, _), n) BOOST_PP_CAT(BOOST_PP_TUPLE_ELEM(2, 1, _), n)
# define BOOST_PYTHON_ENUM_WITH_DEFAULT(c, text, def) BOOST_PP_REPEAT(c, BOOST_PYTHON_ENUM_WITH_DEFAULT_I, (text, def))
# define BOOST_PYTHON_ENUM_WITH_DEFAULT_I(z, n, _) BOOST_PP_COMMA_IF(n) BOOST_PP_CAT(BOOST_PP_TUPLE_ELEM(2, 0, _), n) = BOOST_PP_TUPLE_ELEM(2, 1, _)
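// For illustration only (not referenced anywhere): what the enumerators above expand to
//   BOOST_PYTHON_UNARY_ENUM(3, A)        ==> A0 , A1 , A2
//   BOOST_PYTHON_BINARY_ENUM(3, T, a)    ==> T0 a0 , T1 a1 , T2 a2
//   BOOST_PYTHON_ENUM_WITH_DEFAULT(2, class T, int) ==> class T0 = int , class T1 = int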
// fixed text (no commas)
# define BOOST_PYTHON_FIXED(z, n, text) text
// flags
# define BOOST_PYTHON_FUNCTION_POINTER 0x0001
# define BOOST_PYTHON_POINTER_TO_MEMBER 0x0002
#endif // PREPROCESSOR_DWA200247_HPP
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8" ?>
<container xmlns="http://symfony.com/schema/dic/services"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://symfony.com/schema/dic/services http://symfony.com/schema/dic/services/services-1.0.xsd">
<services>
<service id="sulu_core.doctrine.references" class="Sulu\Component\Doctrine\ReferencesOption">
<argument type="service" id="doctrine"/>
<!-- the $targetEntityMapping argument is set by the ResolveTargetEntitiesPass -->
<argument type="collection" />
<tag name="doctrine.event_subscriber"/>
</service>
</services>
</container>
| {
"pile_set_name": "Github"
} |
# OPcache (Opcode Cache)
----
### Custom Files
*Live configuration files and boilerplates used by the current SlickStack installation; these may be altered by LittleBizzy or Ubuntu for better performance.*
* (coming soon)
### Default Files
*Raw files from the vendor’s original release (e.g. official repo), not altered by other providers, included for reference and documentation purposes.*
* (coming soon)
### Related Links
*Links and resources related to the above module that may help users understand its functionality, or how it interacts with other LEMP stack modules.*
* (coming soon)
----
*Last updated: Jul 17, 2019*
| {
"pile_set_name": "Github"
} |
#pragma once
#include "Scenes/Hexus/CardData/CardData.h"
class CardBinary3 : public CardData
{
public:
CardBinary3();
virtual ~CardBinary3();
};
| {
"pile_set_name": "Github"
} |
/* array structure interface version 3 declarations */
#if !defined(PG_ARRAYINTER_HEADER)
#define PG_ARRAYINTER_HEADER
static const int PAI_CONTIGUOUS = 0x01;
static const int PAI_FORTRAN = 0x02;
static const int PAI_ALIGNED = 0x100;
static const int PAI_NOTSWAPPED = 0x200;
static const int PAI_WRITEABLE = 0x400;
static const int PAI_ARR_HAS_DESCR = 0x800;
typedef struct {
int two; /* contains the integer 2 -- simple sanity check */
int nd; /* number of dimensions */
char typekind; /* kind in array -- character code of typestr */
int itemsize; /* size of each element */
int flags; /* flags indicating how the data should be */
/* interpreted */
Py_intptr_t *shape; /* A length-nd array of shape information */
Py_intptr_t *strides; /* A length-nd array of stride information */
void *data; /* A pointer to the first element of the array */
PyObject *descr; /* NULL or a data-description */
} PyArrayInterface;
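/* Illustrative sketch only (not part of this header's API): a consumer that
 * obtained a PyArrayInterface pointer would typically validate it before
 * touching `data`, along these lines:
 *
 *   const int wanted = PAI_CONTIGUOUS | PAI_ALIGNED | PAI_NOTSWAPPED;
 *   if (inter->two == 2 && inter->typekind == 'f' && inter->itemsize == 8 &&
 *       (inter->flags & wanted) == wanted) {
 *       double *values = (double *)inter->data;  // layout given by nd/shape/strides
 *   }
 */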
#endif
| {
"pile_set_name": "Github"
} |
package emr
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
// ApmAppInfoList is a nested struct in emr response
type ApmAppInfoList struct {
ApmAppInfo []ApmAppInfo `json:"ApmAppInfo" xml:"ApmAppInfo"`
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<!--
copyright : (C) 2003-2004 Bernhard Wymann
email : [email protected]
version : $Id: software-requirements.html,v 1.1 2004/11/30 14:02:05 berniw Exp $
Permission is granted to copy, distribute and/or modify this document
under the terms of the GNU Free Documentation License, Version 1.2
or any later version published by the Free Software Foundation;
with no Invariant Sections, no Front-Cover Texts, and no Back-Cover
Texts. A copy of the license is included in the section entitled "GNU
Free Documentation License".
-->
<head>
<title>TORCS Software Requirements</title>
<link rel="stylesheet" type="text/css" href="../../css/format.css"/>
<meta http-equiv="content-type" content="text/html; charset=ISO-8859-1"/>
<meta name="description" content="requirements"/>
<meta name="author" content="Bernhard Wymann"/>
<meta name="keywords" content="torcs, requirements, berniw, bernhard wymann"/>
<script src="../../js/utilities.js" type="text/javascript"></script>
</head>
<body bgcolor="#ffffff">
<table class="maincontent">
<tr>
<td class="maincontent">
<h1>TORCS Requirements</h1>
<h2>Software</h2>
<h3>Introduction</h3>
<p>
You need a working OpenGL/DRI driver, development tools and some additional libraries. Most often you
can go into your Linux distribution setup and simply click something like "development machine" and
all necessary tools will be installed. You need gcc, header files for OpenGL, GLUT, GLU, XFree86 and libc.
OpenGL driver setup is most often supported by the distribution; consult its documentation.
Below we will check if the basic requirements are fulfilled. If you have already compiled OpenGL
applications and you are sure your hardware is set up correctly, <a href="./packages.html">skip</a>
the rest of this page.
</p>
<h3>Checking OpenGL/DRI</h3>
<p>
Start your XFree86 (if not already done), open a terminal and run as normal user (the $ means the
prompt of a normal user, # the prompt of root).
</p>
<p>
<tt>$ glxinfo | grep direct</tt></p>
<p>The result should look like this:</p>
<p><tt>direct rendering: Yes</tt></p>
<p>
If that's not the case, you have to check your OpenGL setup.
</p>
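<p>
If direct rendering is not reported, one way to narrow the problem down (just a hint, the exact
messages depend on your driver) is to look for the DRI related messages in the X server log:
</p>
<p><tt>$ grep -i dri /var/log/XFree86.0.log</tt></p>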
<h3>Checking GLUT</h3>
<p>
For rpm based distributions, run the following command (we run a query over the whole package database):
</p>
<p><tt>$ rpm -qa | grep glut</tt></p>
<p>The result should look something like this (this depends on the package names):</p>
<p>
<tt>
mesaglut-3.4.2-42<br/>
mesaglut-devel-3.4.2-42<br/>
</tt>
</p>
<p>
If the result lists nothing, you have to install GLUT. Use the tool provided by your distribution. Make
sure that you also install glut.h. If just one package shows up, you can list the package content and search
for glut.h:
</p>
<p><tt>$ rpm -ql NAME | grep glut.h</tt></p>
<p>
where NAME is, in the above example, mesaglut or mesaglut-devel (without version numbers). If
nothing shows up, glut.h is not installed, so search your distribution and install it.
</p>
<h3>Checking Libpng</h3>
<p>
For rpm based distributions, run the following command:
</p>
<p><tt>$ rpm -qa | grep png</tt></p>
<p>The result should look something like this (this depends on the package names):</p>
<p><tt>libpng-2.1.0.12-153</tt></p>
<p>
If the result lists nothing, you have to install libpng with its header files (e.g. png.h).<br/><br/>
</p>
</td>
</tr>
</table>
<table class="navigation_foot">
<tr>
<td class="navigation_foot">
<a href="./hardware-requirements.html">
<p style="text-align:left;">Back</p>
</a>
</td>
<td class="navigation_foot">
<a href="./packages.html">
<p style="text-align:right;">Hey, now let's get the files!</p>
</a>
</td>
</tr>
</table>
</body>
</html>
| {
"pile_set_name": "Github"
} |
0000000000000000000000000000000000000000 c607fc30883e335def28cd686b51f6cfa02b06ec Edward Thomson <[email protected]> 1352096711 -0600 branch: Created from c607fc30883e335def28cd686b51f6cfa02b06ec
c607fc30883e335def28cd686b51f6cfa02b06ec ebc09d0137cfb0c26697aed0109fb943ad906f3f Edward Thomson <[email protected]> 1352096764 -0600 commit: existing file
ebc09d0137cfb0c26697aed0109fb943ad906f3f 3b47b031b3e55ae11e14a05260b1c3ffd6838d55 Edward Thomson <[email protected]> 1352096815 -0600 commit: 5alt-2
| {
"pile_set_name": "Github"
} |
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Storyshots @components/atoms/NotificationTemplate default 1`] = `
.c0 {
position: relative;
margin: 20px;
}
.c5 {
position: absolute;
width: 100%;
height: 100%;
border: 3px solid rgba(0,0,0,0.2);
top: 0;
left: 0;
pointer-events: none;
box-sizing: border-box;
}
.c1 {
width: 25rem;
padding: 1rem 1.5rem;
background-color: #fff;
box-shadow: 0px 6px 15px 3px rgba(0,0,0,0.25);
position: fixed;
bottom: 1rem;
right: 1rem;
border-left: 0.4rem solid;
border-color: #3ed256;
}
.c2 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-box-pack: justify;
-webkit-justify-content: space-between;
-ms-flex-pack: justify;
justify-content: space-between;
}
.c3 {
text-transform: uppercase;
font-weight: 800;
-webkit-letter-spacing: 0.5px;
-moz-letter-spacing: 0.5px;
-ms-letter-spacing: 0.5px;
letter-spacing: 0.5px;
margin: 0 1.5rem 0 0;
}
.c4 {
cursor: pointer;
}
.c4 path {
-webkit-transition: 0.3s;
transition: 0.3s;
}
.c4:hover path {
fill: #13bebb;
}
<div
className="c0"
>
<div
className="c1"
data-test="alert"
>
<div
className="c2"
>
<p
className="c3"
>
test
</p>
<button
className="c4"
onClick={[Function]}
>
<svg
height={15}
viewBox="0 0 32 32"
width={15}
>
<path
d="M16 12.8l-12.8-12.8-3.2 3.2 12.8 12.8-12.8 12.8 3.2 3.2 12.8-12.8 12.8 12.8 3.2-3.2-12.8-12.8 12.8-12.8-3.2-3.2-12.8 12.8z"
fill="#c4c4c4"
/>
</svg>
</button>
</div>
</div>
<div
className="c5"
/>
</div>
`;
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
from model import layers
__all__ = ['conv2d_block']
def conv2d_block(
inputs,
n_channels,
kernel_size=(3, 3),
strides=(2, 2),
mode='SAME',
use_batch_norm=True,
activation='relu',
is_training=True,
data_format='NHWC',
conv2d_hparams=None,
batch_norm_hparams=None,
name='conv2d'
):
if not isinstance(conv2d_hparams, tf.contrib.training.HParams):
raise ValueError("The paramater `conv2d_hparams` is not of type `HParams`")
if not isinstance(batch_norm_hparams, tf.contrib.training.HParams) and use_batch_norm:
raise ValueError("The paramater `conv2d_hparams` is not of type `HParams`")
with tf.variable_scope(name):
if mode != 'SAME_RESNET':
net = layers.conv2d(
inputs,
n_channels=n_channels,
kernel_size=kernel_size,
strides=strides,
padding=mode,
data_format=data_format,
use_bias=not use_batch_norm,
trainable=is_training,
kernel_initializer=conv2d_hparams.kernel_initializer,
bias_initializer=conv2d_hparams.bias_initializer,
)
else: # Special padding mode for ResNet models
if strides == (1, 1):
net = layers.conv2d(
inputs,
n_channels=n_channels,
kernel_size=kernel_size,
strides=strides,
padding='SAME',
data_format=data_format,
use_bias=not use_batch_norm,
trainable=is_training,
kernel_initializer=conv2d_hparams.kernel_initializer,
bias_initializer=conv2d_hparams.bias_initializer,
)
else:
rate = 1 # Unused (for 'a trous' convolutions)
kernel_height_effective = kernel_size[0] + (kernel_size[0] - 1) * (rate - 1)
pad_h_beg = (kernel_height_effective - 1) // 2
pad_h_end = kernel_height_effective - 1 - pad_h_beg
kernel_width_effective = kernel_size[1] + (kernel_size[1] - 1) * (rate - 1)
pad_w_beg = (kernel_width_effective - 1) // 2
pad_w_end = kernel_width_effective - 1 - pad_w_beg
padding = [[0, 0], [pad_h_beg, pad_h_end], [pad_w_beg, pad_w_end], [0, 0]]
if data_format == 'NCHW':
padding = [padding[0], padding[3], padding[1], padding[2]]
padded_inputs = tf.pad(inputs, padding)
net = layers.conv2d(
padded_inputs, # inputs,
n_channels=n_channels,
kernel_size=kernel_size,
strides=strides,
padding='VALID',
data_format=data_format,
use_bias=not use_batch_norm,
trainable=is_training,
kernel_initializer=conv2d_hparams.kernel_initializer,
bias_initializer=conv2d_hparams.bias_initializer,
)
if use_batch_norm:
net = layers.batch_norm(
net,
decay=batch_norm_hparams.decay,
epsilon=batch_norm_hparams.epsilon,
scale=batch_norm_hparams.scale,
center=batch_norm_hparams.center,
is_training=is_training,
data_format=data_format,
param_initializers=batch_norm_hparams.param_initializers
)
if activation == 'relu':
net = layers.relu(net, name='relu')
elif activation == 'tanh':
net = layers.tanh(net, name='tanh')
elif activation != 'linear' and activation is not None:
raise KeyError('Invalid activation type: `%s`' % activation)
return net
| {
"pile_set_name": "Github"
} |
#pragma once
#include <stdint.h>
#include <kern/sched_prim.h>
#include "libudis86/extern.h"
struct op {
// one of either. order here matters.
union {
x86_saved_state64_t *state64;
x86_saved_state32_t *state32;
};
enum {
SAVEDSTATE_64,
SAVEDSTATE_32,
} state_flavor;
// just another version of the above
x86_saved_state_t *state;
// disassembly object
ud_t *ud_obj;
// boolean flag
uint8_t ring0;
};
typedef struct op op_t;
/**
* Trap handlers, analogous to a program's entry point
* @param state: xnu's trap.c saved thread state
*/
int opemu_ktrap(x86_saved_state_t *state);
void opemu_utrap(x86_saved_state_t *state) __attribute__((noreturn));
/**
* Forward declarations of private xnu functions
*/
extern void mach_call_munger(x86_saved_state_t *state);
extern void unix_syscall(x86_saved_state_t *);
extern void mach_call_munger64(x86_saved_state_t *state);
extern void unix_syscall64(x86_saved_state_t *);
int retrieve_reg(/*const*/ x86_saved_state_t *, const ud_type_t, uint64_t *);
/**
* Entry points for the "plugins"
*/
extern int op_sse3x_run(const op_t*);
extern int op_sse3_run(const op_t*);
| {
"pile_set_name": "Github"
} |
/*
Copyright 2012-2013 CNR-ISTI, http://isti.cnr.it
Institute of Information Science and Technologies
of the Italian National Research Council
See the NOTICE file distributed with this work for additional
information regarding copyright ownership
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.bubblecloud.zigbee.api.cluster.impl.api.general.commissioning;
/**
* @author <a href="mailto:[email protected]">Manlio Bacco</a>
* @version $LastChangedRevision$ ($LastChangedDate$)
* @since 0.8.0
*/
public interface RestartDeviceOptions {
/* startup mode
* 0b000: restart installing and using current startup parameters set
* 0b001: restart using and not replacing current set of stack attributes *
*/
byte[] getStartupMode();
/* immediate
* 1: restart either immediately on receipt of the RestartDeviceRequest frame if delay 0
* or immediately after prescribed delay and jitter has transpired if not
* 0: restart until after prescribed delay and jitter (if any) has transpired
* but can also wait for a "convenient" moment (i.e. all pending frames have been transmitted)
*/
byte getImmediate();
// byte[] getReserved();
}
| {
"pile_set_name": "Github"
} |
/*
* HID driver for LC Power Model RC1000MCE
*
* Copyright (c) 2011 Chris Schlund
* based on hid-topseed module
*/
/*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*/
#include <linux/device.h>
#include <linux/hid.h>
#include <linux/module.h>
#include "hid-ids.h"
#define ts_map_key_clear(c) hid_map_usage_clear(hi, usage, bit, max, \
EV_KEY, (c))
static int ts_input_mapping(struct hid_device *hdev, struct hid_input *hi,
struct hid_field *field, struct hid_usage *usage,
unsigned long **bit, int *max)
{
if ((usage->hid & HID_USAGE_PAGE) != 0xffbc0000)
return 0;
switch (usage->hid & HID_USAGE) {
case 0x046: ts_map_key_clear(KEY_YELLOW); break;
case 0x047: ts_map_key_clear(KEY_GREEN); break;
case 0x049: ts_map_key_clear(KEY_BLUE); break;
case 0x04a: ts_map_key_clear(KEY_RED); break;
case 0x00d: ts_map_key_clear(KEY_HOME); break;
case 0x025: ts_map_key_clear(KEY_TV); break;
case 0x048: ts_map_key_clear(KEY_VCR); break;
case 0x024: ts_map_key_clear(KEY_MENU); break;
default:
return 0;
}
return 1;
}
static const struct hid_device_id ts_devices[] = {
{ HID_USB_DEVICE( USB_VENDOR_ID_LCPOWER, USB_DEVICE_ID_LCPOWER_LC1000) },
{ }
};
MODULE_DEVICE_TABLE(hid, ts_devices);
static struct hid_driver ts_driver = {
.name = "LC RC1000MCE",
.id_table = ts_devices,
.input_mapping = ts_input_mapping,
};
static int __init ts_init(void)
{
return hid_register_driver(&ts_driver);
}
static void __exit ts_exit(void)
{
hid_unregister_driver(&ts_driver);
}
module_init(ts_init);
module_exit(ts_exit);
MODULE_LICENSE("GPL");
| {
"pile_set_name": "Github"
} |
<?php
/*
* This file is part of SwiftMailer.
* (c) 2004-2009 Chris Corbyn
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/**
* Logs events in the Transport system.
*
* @author Chris Corbyn
*/
interface Swift_Plugins_Logger
{
/**
* Add a log entry.
*
* @param string $entry
*/
public function add($entry);
/**
* Clear the log contents.
*/
public function clear();
/**
* Get this log as a string.
*
* @return string
*/
public function dump();
}
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
log4j.rootLogger=INFO, console, file
# console appender
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.follow=true
log4j.appender.console.layout=org.apache.log4j.EnhancedPatternLayout
log4j.appender.console.layout.ConversionPattern=[%p] [%c{1}] %m%n%throwable{0}
# file appender
log4j.appender.file=org.apache.log4j.FileAppender
log4j.appender.file.File=${pio.log.dir}/pio.log
log4j.appender.file.layout=org.apache.log4j.EnhancedPatternLayout
log4j.appender.file.layout.ConversionPattern=%d %-5p %c [%t] - %m%n
# quiet some packages that are too verbose
log4j.logger.org.elasticsearch=WARN
log4j.logger.org.apache.hadoop=WARN
log4j.logger.org.apache.hadoop.hbase.zookeeper=ERROR
log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
log4j.logger.org.apache.spark=WARN
log4j.logger.org.apache.zookeeper=ERROR
log4j.logger.org.eclipse.jetty=WARN
log4j.logger.org.spark-project.jetty=WARN
log4j.logger.akka=WARN
| {
"pile_set_name": "Github"
} |
#include <stdlib.h>
#include <glib.h>
#include <gmodule.h>
#include <check.h>
#include <string.h>
#include <stdio.h>
#include "md5.h"
/* From RFC 1321 */
struct md5_test {
const char *str;
md5_byte_t expected[16];
} tests[] = {
{ "",
{ 0xd4, 0x1d, 0x8c, 0xd9, 0x8f, 0x00, 0xb2, 0x04, 0xe9, 0x80, 0x09, 0x98, 0xec, 0xf8, 0x42, 0x7e } },
{ "a",
{ 0x0c, 0xc1, 0x75, 0xb9, 0xc0, 0xf1, 0xb6, 0xa8, 0x31, 0xc3, 0x99, 0xe2, 0x69, 0x77, 0x26, 0x61 } },
{ "abc",
{ 0x90, 0x01, 0x50, 0x98, 0x3c, 0xd2, 0x4f, 0xb0, 0xd6, 0x96, 0x3f, 0x7d, 0x28, 0xe1, 0x7f, 0x72 } },
{ "message digest",
{ 0xf9, 0x6b, 0x69, 0x7d, 0x7c, 0xb7, 0x93, 0x8d, 0x52, 0x5a, 0x2f, 0x31, 0xaa, 0xf1, 0x61, 0xd0 } },
{ "abcdefghijklmnopqrstuvwxyz",
{ 0xc3, 0xfc, 0xd3, 0xd7, 0x61, 0x92, 0xe4, 0x00, 0x7d, 0xfb, 0x49, 0x6c, 0xca, 0x67, 0xe1, 0x3b } },
{ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
{ 0xd1, 0x74, 0xab, 0x98, 0xd2, 0x77, 0xd9, 0xf5, 0xa5, 0x61, 0x1c, 0x2c, 0x9f, 0x41, 0x9d, 0x9f } },
{ "12345678901234567890123456789012345678901234567890123456789012345678901234567890",
{ 0x57, 0xed, 0xf4, 0xa2, 0x2b, 0xe3, 0xc9, 0x55, 0xac, 0x49, 0xda, 0x2e, 0x21, 0x07, 0xb6, 0x7a } },
{ NULL },
};
START_TEST(check_sums)
{
int i;
for (i = 0; tests[i].str; i++) {
md5_byte_t sum[16];
tcase_fn_start(tests[i].str, __FILE__, __LINE__);
md5_state_t state;
md5_init(&state);
md5_append(&state, (const md5_byte_t *) tests[i].str, strlen(tests[i].str));
md5_finish(&state, sum);
fail_if(memcmp(tests[i].expected, sum, 16) != 0, "%s failed", tests[i].str);
}
}
END_TEST
Suite *md5_suite(void)
{
Suite *s = suite_create("MD5");
TCase *tc_core = tcase_create("Core");
suite_add_tcase(s, tc_core);
tcase_add_test(tc_core, check_sums);
return s;
}
| {
"pile_set_name": "Github"
} |
Shader "Hidden/PostProcessing/Debug/Vectorscope"
{
HLSLINCLUDE
#pragma exclude_renderers gles gles3 d3d11_9x
#pragma target 4.5
#include "../StdLib.hlsl"
#include "../Colors.hlsl"
StructuredBuffer<uint> _VectorscopeBuffer;
float3 _Params; // x: width, y: height, z: exposure, w: unused
float Tonemap(float x, float exposure)
{
const float a = 6.2;
const float b = 0.5;
const float c = 1.7;
const float d = 0.06;
x *= exposure;
x = max(0.0, x - 0.004);
x = (x * (a * x + b)) / (x * (a * x + c) + d);
return x * x;
}
float4 Frag(VaryingsDefault i) : SV_Target
{
i.texcoord.x = 1.0 - i.texcoord.x;
float2 uv = i.texcoord - (0.5).xx;
float3 c = YCbCrToRgb(float3(0.5, uv.x, uv.y));
float dist = sqrt(dot(uv, uv));
float delta = fwidth(dist) * 0.5;
float alphaOut = 1.0 - smoothstep(0.5 - delta, 0.5 + delta, dist);
uint2 uvI = i.texcoord.xy * _Params.xy;
uint v = _VectorscopeBuffer[uvI.x + uvI.y * _Params.x];
float vt = saturate(Tonemap(v, _Params.z));
float4 color = float4(lerp(c, (0.0).xxx, vt), 1.0);
return color;
}
ENDHLSL
SubShader
{
Cull Off ZWrite Off ZTest Always
Pass
{
HLSLPROGRAM
#pragma vertex VertDefault
#pragma fragment Frag
ENDHLSL
}
}
}
| {
"pile_set_name": "Github"
} |
//
// DateTimeAxis.cs
//
// Author:
// Lluis Sanchez Gual
//
// Copyright (C) 2005 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
namespace MonoDevelop.Components.Chart
{
public class DateTimeAxis: Axis
{
public DateTimeAxis ()
{
}
public DateTimeAxis (bool showLabels): base (showLabels)
{
}
protected override TickEnumerator CreateTickEnumerator (double minTickStep)
{
long val = (long) minTickStep;
int scale;
if (val > TimeSpan.TicksPerDay * 30 * 365)
return null;
else if (val > TimeSpan.TicksPerDay * 30)
scale = 7;
else if (val > TimeSpan.TicksPerDay)
scale = 6;
else if (val > TimeSpan.TicksPerHour)
scale = 5;
else if (val > TimeSpan.TicksPerMinute * 15)
scale = 4;
else if (val > TimeSpan.TicksPerMinute)
scale = 3;
else if (val > TimeSpan.TicksPerSecond * 15)
scale = 2;
else if (val > TimeSpan.TicksPerSecond)
scale = 1;
else
scale = 0;
return new DateTimeTickEnumerator (scale);
}
}
internal class DateTimeTickEnumerator: TickEnumerator
{
int scale;
DateTime current;
public DateTimeTickEnumerator (int scale)
{
this.scale = scale;
}
public override void Init (double startValue)
{
DateTime t = new DateTime ((long)startValue);
DateTime nt;
switch (scale) {
case 0: nt = new DateTime (t.Year, t.Month, t.Day, t.Hour, t.Minute, t.Second); break;
case 1: nt = new DateTime (t.Year, t.Month, t.Day, t.Hour, t.Minute, (t.Second / 15) * 15); break;
case 2: nt = new DateTime (t.Year, t.Month, t.Day, t.Hour, t.Minute, 0); break;
case 3: nt = new DateTime (t.Year, t.Month, t.Day, t.Hour, (t.Minute / 15) * 15, 0); break;
case 4: nt = new DateTime (t.Year, t.Month, t.Day, t.Hour, 0, 0); break;
case 5: nt = new DateTime (t.Year, t.Month, t.Day); break;
case 6: nt = new DateTime (t.Year, t.Month, 1); break;
default: nt = new DateTime (t.Year, 1, 1); break;
}
current = nt;
}
public override void MoveNext ()
{
switch (scale) {
case 0: current = current.AddSeconds (1); break;
case 1: current = current.AddSeconds (15); break;
case 2: current = current.AddMinutes (1); break;
case 3: current = current.AddMinutes (15); break;
case 4: current = current.AddHours (1); break;
case 5: current = current.AddDays (1); break;
case 6: current = current.AddMonths (1); break;
case 7: current = current.AddYears (1); break;
}
}
public override void MovePrevious ()
{
switch (scale) {
case 0: current = current.AddSeconds (-1); break;
case 1: current = current.AddSeconds (-15); break;
case 2: current = current.AddMinutes (-1); break;
case 3: current = current.AddMinutes (-15); break;
case 4: current = current.AddHours (-1); break;
case 5: current = current.AddDays (-1); break;
case 6: current = current.AddMonths (-1); break;
case 7: current = current.AddYears (-1); break;
}
}
public override double CurrentValue {
get { return (double) current.Ticks; }
}
public override string CurrentLabel {
get {
switch (scale) {
case 0: case 1: return string.Format ("{0}:{1:00}:{2:00}", current.Hour, current.Minute, current.Second);
case 2: case 3: return string.Format ("{0}:{1:00}", current.Hour, current.Minute);
case 4: return string.Format ("{0}:00", current.Hour);
case 5: return current.ToShortDateString ();
case 6: return string.Format ("{0}/{1}", current.Month, current.Year);
default: return string.Format ("{0}", current.Year);
}
}
}
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE sun-application-client PUBLIC '-//Sun Microsystems, Inc.//DTD Sun ONE Application Server 8.0 Application Client 1.4//EN' 'http://www.sun.com/software/sunone/appserver/dtds/sun-application-client_1_4-0.dtd'>
<!--
Copyright (c) 2017, 2018 Oracle and/or its affiliates. All rights reserved.
This program and the accompanying materials are made available under the
terms of the Eclipse Public License v. 2.0, which is available at
http://www.eclipse.org/legal/epl-2.0.
This Source Code may also be made available under the following Secondary
Licenses when the conditions for such availability set forth in the
Eclipse Public License v. 2.0 are satisfied: GNU General Public License,
version 2 with the GNU Classpath Exception, which is available at
https://www.gnu.org/software/classpath/license.html.
SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0
-->
<sun-application-client>
<service-ref>
<service-ref-name>com.sun.s1asdev.security.wss.roles2.client.Client/service</service-ref-name>
<port-info>
<wsdl-port>
<namespaceURI>http://ejbws.roles2.wss.security.s1asdev.sun.com</namespaceURI>
<localpart>HelloEjbPort</localpart>
</wsdl-port>
<message-security-binding auth-layer="SOAP"
provider-id="XWS_ClientProvider">
<message-security>
<message/>
<request-protection auth-source="sender"/>
<response-protection auth-source="content"/>
</message-security>
</message-security-binding>
</port-info>
</service-ref>
</sun-application-client>
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2018 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package rbac
import (
"bufio"
"bytes"
"context"
"errors"
"io"
"strings"
"github.com/casbin/casbin/model"
"github.com/casbin/casbin/persist"
"github.com/casbin/casbin/util"
etcd "github.com/coreos/etcd/client"
)
const etcdPolicyKey = "/casbinPolicy"
// EtcdAdapter represents the etcd adapter for policy persistence, can load policy
// from etcd or save policy to etcd.
type EtcdAdapter struct {
kapi etcd.KeysAPI
}
// NewEtcdAdapter is the constructor for EtcdAdapter.
func NewEtcdAdapter(kapi etcd.KeysAPI) (*EtcdAdapter, error) {
return &EtcdAdapter{kapi: kapi}, nil
}
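// Rough usage sketch (names assume the coreos/etcd v2 client and the casbin v1
// enforcer constructor; adjust to the versions actually vendored):
//
//   c, _ := etcd.New(etcd.Config{Endpoints: []string{"http://127.0.0.1:2379"}})
//   a, _ := NewEtcdAdapter(etcd.NewKeysAPI(c))
//   e := casbin.NewEnforcer("rbac_model.conf", a) // LoadPolicy is called by the enforcer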
// LoadPolicy loads policy from etcd.
func (a *EtcdAdapter) LoadPolicy(model model.Model) error {
resp, err := a.kapi.Get(context.Background(), etcdPolicyKey, nil)
if err != nil {
return err
}
buf := bufio.NewReader(bytes.NewReader([]byte(resp.Node.Value)))
for {
line, err := buf.ReadString('\n')
line = strings.TrimSpace(line)
persist.LoadPolicyLine(line, model)
if err != nil {
if err == io.EOF {
return nil
}
return err
}
}
}
// SavePolicy saves policy to etcd.
func (a *EtcdAdapter) SavePolicy(model model.Model) error {
var tmp bytes.Buffer
for ptype, ast := range model["p"] {
for _, rule := range ast.Policy {
tmp.WriteString(ptype + ", ")
tmp.WriteString(util.ArrayToString(rule))
tmp.WriteString("\n")
}
}
for ptype, ast := range model["g"] {
for _, rule := range ast.Policy {
tmp.WriteString(ptype + ", ")
tmp.WriteString(util.ArrayToString(rule))
tmp.WriteString("\n")
}
}
s := strings.TrimRight(tmp.String(), "\n")
_, err := a.kapi.Set(context.Background(), etcdPolicyKey, s, nil)
return err
}
// AddPolicy adds a policy rule to the storage.
func (a *EtcdAdapter) AddPolicy(sec string, ptype string, rule []string) error {
return errors.New("not implemented")
}
// RemovePolicy removes a policy rule from the storage.
func (a *EtcdAdapter) RemovePolicy(sec string, ptype string, rule []string) error {
return errors.New("not implemented")
}
// RemoveFilteredPolicy removes policy rules that match the filter from the storage.
func (a *EtcdAdapter) RemoveFilteredPolicy(sec string, ptype string, fieldIndex int, fieldValues ...string) error {
return errors.New("not implemented")
}
| {
"pile_set_name": "Github"
} |
#if GOOGLE_CUDA
#define EIGEN_USE_GPU
#define WARPS_PER_BLOCK 1
#define THREADS_PER_WARP 32
#include <stdio.h>
#include <iostream>
#include "correlation_kernel.h"
#include "tensorflow/core/framework/register_types.h"
#include "tensorflow/core/framework/types.h"
#include "tensorflow/core/framework/tensor_types.h"
#include "tensorflow/core/platform/types.h"
#include "tensorflow/core/util/cuda_kernel_helper.h"
namespace tensorflow {
typedef Eigen::GpuDevice GPUDevice;
__global__ void CorrelateData(int batch_size,
int out_width,
int out_height,
int out_channels,
int out_count,
int max_displacement,
int neighborhood_grid_radius,
int neighborhood_grid_width,
int kernel_radius,
int kernel_size,
int stride_1,
int stride_2,
int in_width_padded,
int in_height_padded,
int in_channels,
const float *input_a,
const float *input_b,
float *output) {
extern __shared__ char patch_data_char[];
float *patch_data = (float *)patch_data_char;
// First (upper left) position of kernel upper-left corner in current center
// position of neighborhood in image 1
int x1 = blockIdx.x * stride_1 + max_displacement;
int y1 = blockIdx.y * stride_1 + max_displacement;
int item = blockIdx.z;
int ch_off = threadIdx.x;
  // Load 3D patch into shared memory
// HEIGHT
for (int j = 0; j < kernel_size; j++) {
// WIDTH
for (int i = 0; i < kernel_size; i++) {
int ji_off = ((j * kernel_size) + i) * in_channels;
// CHANNELS
for (int ch = ch_off; ch < in_channels; ch += (WARPS_PER_BLOCK * THREADS_PER_WARP)) {
int idx1 = ((item * in_height_padded + y1 + j) * in_width_padded + x1 + i) *
in_channels + ch;
int idxPatchData = ji_off + ch;
patch_data[idxPatchData] = input_a[idx1];
}
}
}
__syncthreads();
__shared__ float sum[WARPS_PER_BLOCK * THREADS_PER_WARP];
// Compute correlation
for (int out_channel = 0; out_channel < out_channels; out_channel++) {
sum[ch_off] = 0;
int s2o = (out_channel % neighborhood_grid_width - neighborhood_grid_radius) * stride_2;
int s2p = (out_channel / neighborhood_grid_width - neighborhood_grid_radius) * stride_2;
int x2 = x1 + s2o;
int y2 = y1 + s2p;
// HEIGHT
for (int j = 0; j < kernel_size; j++) {
// WIDTH
for (int i = 0; i < kernel_size; i++) {
int ji_off = ((j * kernel_size) + i) * in_channels;
// CHANNELS
for (int ch = ch_off; ch < in_channels; ch += (WARPS_PER_BLOCK * THREADS_PER_WARP)) {
int idxPatchData = ji_off + ch;
int idx2 = ((item * in_height_padded + y2 + j) * in_width_padded + x2 + i) *
in_channels + ch;
sum[ch_off] += patch_data[idxPatchData] * input_b[idx2];
}
}
}
__syncthreads();
if (ch_off == 0) {
float total_sum = 0;
for (int idx = 0; idx < WARPS_PER_BLOCK * THREADS_PER_WARP; idx++) {
total_sum += sum[idx];
}
const int sumelems = kernel_size * kernel_size * in_channels;
const int index = (blockIdx.y * out_width + blockIdx.x) * out_channels + out_channel;
/* from Caffe: const int index = ((out_channel * out_height +
blockIdx.y) * out_width) + blockIdx.x; */
output[index + item * out_count] = total_sum / (float)sumelems;
// Caffe, NKHW: ((n * K + k) * H + h) * W + w at point (n, k, h, w)
// TF, NHWK: ((n * H + h) * W + w) * K + k at point (n, h, w, k)
// n = 0
// caffe: ((k * H + h) * W + w) + n * K * H * W
// tf: (h * W + w) * K + k + n * H * W * K
}
}
}
void Correlation(const GPUDevice& device,
const float *input_a,
const float *input_b,
const int batch_size,
const int out_height,
const int out_width,
const int out_channels,
const int out_count,
const int in_height_padded,
const int in_width_padded,
const int in_channels,
int max_displacement,
int neighborhood_grid_radius,
int neighborhood_grid_width,
int kernel_radius,
int kernel_size,
int stride_1,
int stride_2,
float *output) {
dim3 totalBlocksCorr(out_width, out_height, batch_size);
  dim3 threadsPerBlock(THREADS_PER_WARP * WARPS_PER_BLOCK);
const int shared_memory_per_block = (kernel_size * kernel_size) * in_channels;
CorrelateData << < totalBlocksCorr, threadsPerBlock, shared_memory_per_block * sizeof(float),
device.stream() >> > (
batch_size, out_width, out_height, out_channels, out_count,
max_displacement, neighborhood_grid_radius, neighborhood_grid_width, kernel_radius,
kernel_size, stride_1, stride_2, in_width_padded, in_height_padded, in_channels,
input_a, input_b, output);
}
} // end namespace tensorflow
#endif // GOOGLE_CUDA
| {
"pile_set_name": "Github"
} |
import $ from 'jquery';
/* Dialog Patterns */
function clear_errors(sel) {
sel.find(".dialog-error").remove();
sel.find(".has-error").removeClass("has-error");
sel.find(".dialog-wrapper").off(".dialog-error");
sel.off(".dialog-error");
return sel;
}
function field_error(target, error) {
var wrapper = target.parent();
var next, refresh;
if (!wrapper.is(".dialog-wrapper")) {
wrapper = $("<div class='dialog-wrapper'>").insertBefore(target);
/*
* Some bootstrap plugins replace html controls with their own
* stuff, so we have to account for that here.
*/
next = target.next();
if (next.is(".bootstrap-select") && next.selectpicker) {
next.remove();
refresh = next.selectpicker;
}
target.remove().appendTo(wrapper);
if (refresh)
refresh.call(target);
}
var message;
if (error.message)
message = $("<div class='dialog-error help-block'>").text(error.message);
wrapper.addClass("has-error").append(message);
if (!wrapper.hasClass("error-keep")) {
wrapper.on("keypress.dialog-error change.dialog-error", function() {
wrapper.removeClass("has-error")
.find(".dialog-error.help-block")
.css("visibility", "hidden");
});
}
}
function global_error(sel, error) {
var alert = $("<div class='pf-c-alert pf-m-danger pf-m-inline dialog-error' aria-label='inline danger alert'>");
var text = error.message || error.toString();
$("<div class='pf-c-alert__icon'>").append($("<span class='pficon pficon-error-circle-o'>"))
.prependTo(alert);
$("<h4 class='pf-c-alert__title'>").text(text)
.appendTo(alert);
/* Always log global dialog errors for easier debugging */
console.warn(text);
var footer = sel.find(".modal-footer");
if (footer.length)
alert.prependTo(footer);
else
alert.appendTo(sel);
}
function display_errors(sel, errors) {
clear_errors(sel);
/* The list of errors can also be passed as an array */
if (errors.length == 1 && $.isArray(errors[0]))
errors = errors[0];
var any = false;
errors.forEach(function(error) {
var target;
if (error) {
target = sel.find(error.target);
/* Errors for a specific field added below that field */
if (target && target.length)
field_error(target, error);
else
global_error(sel, error);
any = true;
}
});
if (!any)
return;
/* When dialog is shown again, remove all mods */
sel.on("show.bs.modal.dialog-error", function() {
clear_errors(sel);
});
}
function DialogWait(promise, handle) {
this.promise = promise;
this.disabled = [];
this.handle = handle;
}
function clear_wait(sel) {
var data = sel.data("dialog-wait");
sel.data("dialog-wait", null);
sel.find(".dialog-wait-ct").remove();
sel.find(".btn").off(".dialog-wait");
sel.off(".dialog-wait");
if (data) {
data.disabled.forEach(function(ctl) {
ctl.removeAttr("disabled");
});
}
}
function display_wait(sel, promise, handle) {
clear_wait(sel);
if (!promise) {
if (handle)
sel.modal("hide");
return sel;
}
/* Clear all errors in the dialog */
if (handle)
display_errors(sel, []);
var wait = $("<div class='dialog-wait-ct pull-right'>");
$("<div class='spinner spinner-sm'>").appendTo(wait);
var message = $("<span>").appendTo(wait);
sel.find(".modal-footer button").first()
.before(wait);
var data = new DialogWait(promise, handle);
sel.data("dialog-wait", data);
var cancellation = promise.cancel || promise.close;
var cancelled = false;
/* Disable everything and stash previous disabled state */
var controls = sel.find(".form-control").add(".btn", sel);
if (cancellation)
controls = controls.not("[data-dismiss]").not(".btn-cancel");
controls.each(function() {
var ctl = $(this);
if (!ctl.attr("disabled")) {
data.disabled.push(ctl);
ctl.attr("disabled", "disabled");
}
});
sel.find(".btn[data-dismiss], .btn-cancel").on("click.dialog-wait", function() {
cancelled = true;
if (cancellation)
cancellation.apply(promise);
return false;
});
/* When dialog is shown again, remove all mods */
sel.on("hide.bs.modal.dialog-wait", function() {
clear_wait(sel);
});
/*
* There is no way to remove a callback from a promise
* so we have to be careful to only react if still
* processing the same promise.
*/
function restore() {
var state;
var data = sel.data("dialog-wait");
if (data && data.promise === promise) {
clear_wait(sel);
state = promise.state();
if (cancelled || (state == "resolved" && data.handle))
sel.modal('hide');
else if (state == "rejected" && data.handle)
display_errors(sel, [arguments[0]]);
}
}
function update(arg) {
var data = sel.data("dialog-wait");
if (data && data.promise === promise) {
if (typeof arg !== "string")
arg = "";
message.text(arg);
}
}
promise
.always(restore)
.progress(update);
return sel;
}
$.fn.dialog = function dialog(action /* ... */) {
if (action === "failure")
return display_errors(this, Array.prototype.slice.call(arguments, 1));
else if (action === "wait")
return display_wait(this, arguments[1]);
else if (action === "promise")
return display_wait(this, arguments[1], true);
else if (action === "clear_errors")
return clear_errors(this);
else
console.warn("unknown dialog action: " + action);
};
window.addEventListener("hashchange", function() {
$(".modal").modal("hide");
});
/* ----------------------------------------------------------------------------
* Sliders
*
* <div class="slider" value="0.5">
* <div class="slider-bar">
* <div class="slider-thumb"></div>
* </div>
* <div class="slider-bar">
* <!-- optional left overs -->
* </div>
* </div>
*
* A slider control. The first div.slider-bar is the one that is resized.
* The value will be bounded between 0 and 1 as a floating point number.
*
* The following div.slider-bar if present is resized to fill the remainder
* of the slider if not given a specific size. You can put more div.slider-bar
* inside it to reflect squashing other previous allocations.
*
 * If the following div.slider-bar elements have a width specified, then the
* slider supports the concept of overflowing. If the slider overflows
* it will get the .slider-warning class and go a bit red.
*
 * On document creation any div.slider elements are automatically turned into
 * sliders.
*
* Slider has the following extra read/write properties:
*
* .value: the floating point value the slider is set to.
 * .disabled: whether to display slider as disabled and refuse interaction.
*
* Slider has this event:
*
* on('change'): fired when the slider changes, passes value as additional arg.
*/
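/* A minimal usage sketch (the element id and log message are illustrative):
 *
 *   var $slider = $("#storage-slider").slider("refresh");
 *   $slider.on("change", function(ev, value) {
 *       console.log("slider moved to", value);
 *   });
 *   $slider[0].value = 0.25;    // bounded to [0, 1]; resizes the first slider-bar
 *   $slider[0].disabled = true; // refuses further mouse interaction
 */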
function resize_flex(slider, flex, total, part) {
var value = 0;
if (part > total)
value = 1;
else if (part < 0 || isNaN(part))
value = 0;
else if (!isNaN(total) && total > 0 && part >= 0)
value = (part / total);
$(flex).css('width', (value * 100) + "%")
.next("div")
.css('margin-left', $(flex).css('width'));
/* Set the property and the attribute */
slider.value = value;
}
function update_value(slider) {
resize_flex(slider, $(slider).children("div.slider-bar")
.first()[0], 1, slider.value);
}
function check_overflow(slider) {
$(slider).toggleClass("slider-warning",
slider.offsetWidth < slider.scrollWidth);
}
function setup_slider(slider) {
$(slider).attr('unselectable', 'on');
Object.defineProperty(slider, "value", {
get: function() {
return parseFloat(this.getAttribute("value"));
},
set: function(v) {
var s = String(v);
if (s != this.getAttribute("value")) {
this.setAttribute("value", v);
update_value(slider);
check_overflow(slider);
}
}
});
Object.defineProperty(slider, "disabled", {
get: function() {
if (!this.hasAttribute("disabled"))
return false;
return this.getAttribute("disabled").toLowerCase() != "false";
},
set: function(v) {
this.setAttribute("disabled", v ? "true" : "false");
}
});
update_value(slider);
check_overflow(slider);
$(slider).on("change", function() {
update_value(slider);
$(slider).toggleClass("slider-disabled", slider.disabled);
});
if (slider.disabled)
$(slider).addClass("slider-disabled");
$(slider).on("mousedown", function(ev) {
if (slider.disabled)
return true; /* default action */
var flex;
var offset = $(slider).offset().left;
if ($(ev.target).hasClass("slider-thumb")) {
var hitx = (ev.offsetX || ev.clientX - $(ev.target).offset().left);
offset += (hitx - $(ev.target).outerWidth() / 2);
flex = $(ev.target).parent()[0];
} else {
flex = $(slider).children("div.slider-bar")
.first()[0];
resize_flex(slider, flex, $(slider).width(), (ev.pageX - offset));
$(slider).trigger("change", [slider.value]);
check_overflow(slider);
}
$(document)
.on("mousemove.slider", function(ev) {
resize_flex(slider, flex, $(slider).width(), (ev.pageX - offset));
$(slider).trigger("change", [slider.value]);
check_overflow(slider);
return false;
})
.on("mouseup.slider", function(ev) {
$(document)
.off("mousemove.slider")
.off("mouseup.slider");
return false;
});
return false; /* no default action */
});
}
function setup_sliders() {
$("div.slider").each(function() {
setup_slider(this);
});
}
$.fn.slider = function Slider(action) {
var sel = this;
if (arguments.length === 0 || action == "refresh") {
sel.each(function() {
setup_slider(this);
});
return sel;
} else {
console.warn("unknown slider action: " + action);
}
};
$(document).ready(setup_sliders);
| {
"pile_set_name": "Github"
} |
/********************************************************************************
* Dionaea
* - catches bugs -
*
*
*
* Copyright (C) 2009 Paul Baecher & Markus Koetter
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
*
* contact [email protected]
*
*******************************************************************************/
#include <ev.h>
#include <glib.h>
#include <emu/emu.h>
#include <emu/emu_shellcode.h>
#include <emu/emu_log.h>
#include <emu/emu_cpu.h>
#include <emu/emu_cpu_data.h>
#include "dionaea.h"
#include "processor.h"
#include "log.h"
#include "incident.h"
#include "threads.h"
#define D_LOG_DOMAIN "emu"
#include "module.h"
struct processor proc_emu =
{
.name = "emu",
.new = proc_emu_ctx_new,
.free = proc_emu_ctx_free,
.cfg = proc_emu_ctx_cfg_new,
.thread_io_in = proc_emu_on_io_in,
};
void *proc_emu_ctx_cfg_new(gchar *group_name)
{
g_debug("%s node", __PRETTY_FUNCTION__);
struct emu_config *conf = g_malloc0(sizeof(struct emu_config));
GError *error = NULL;
conf->limits.files = g_key_file_get_integer(g_dionaea->config, group_name, "config.limits.files", &error);
if (error != NULL)
goto err;
conf->limits.filesize = g_key_file_get_integer(g_dionaea->config, group_name, "config.limits.filesize", &error);
if (error != NULL)
goto err;
conf->limits.sockets = g_key_file_get_integer(g_dionaea->config, group_name, "config.limits.sockets", &error);
if (error != NULL)
goto err;
conf->limits.steps = g_key_file_get_integer(g_dionaea->config, group_name, "config.limits.steps", &error);
if (error != NULL)
goto err;
conf->limits.idle = g_key_file_get_integer(g_dionaea->config, group_name, "config.limits.idle", &error);
if (error != NULL)
goto err;
conf->limits.listen = g_key_file_get_double(g_dionaea->config, group_name, "config.limits.listen", &error);
if (error != NULL)
goto err;
conf->limits.sustain = g_key_file_get_double(g_dionaea->config, group_name, "config.limits.sustain", &error);
if (error != NULL)
goto err;
conf->limits.cpu = g_key_file_get_double(g_dionaea->config, group_name, "config.limits.cpu", &error);
if (error != NULL)
goto err;
g_debug(
" files %i filesize %i sockets %i steps %i idle %f listen %f sustain %f cpu %f ",
conf->limits.files,
conf->limits.filesize,
conf->limits.sockets,
conf->limits.steps,
conf->limits.idle,
conf->limits.listen,
conf->limits.sustain,
conf->limits.cpu
);
// g_error("STOP");
return conf;
err:
g_warning("configuration for emulation is incomplete");
if (error != NULL)
g_warning("%s", error->message);
g_clear_error(&error);
g_free(conf);
return NULL;
}
void *proc_emu_ctx_new(void *cfg)
{
if( cfg == NULL )
{
g_error("emulation needs configuration");
}
struct emu_ctx *ctx = g_malloc0(sizeof(struct emu_ctx));
ctx->config = cfg;
return ctx;
}
void proc_emu_ctx_free(void *ctx)
{
g_free(ctx);
}
void proc_emu_on_io_in(struct connection *con, struct processor_data *pd)
{
g_debug("%s con %p pd %p", __PRETTY_FUNCTION__, con, pd);
struct emu_ctx *ctx = pd->ctx;
int offset = MAX(ctx->offset-300, 0);
void *streamdata = NULL;
int32_t size = bistream_get_stream(pd->bistream, bistream_in, offset, -1, &streamdata);
int ret = 0;
if( size != -1 )
{
struct emu *e = emu_new();
#if 0
emu_cpu_debugflag_set(emu_cpu_get(e), instruction_string);
emu_log_level_set(emu_logging_get(e),EMU_LOG_DEBUG);
#endif
ret = emu_shellcode_test(e, streamdata, size);
emu_free(e);
ctx->offset += size;
if( ret >= 0 )
{
struct incident *ix = incident_new("dionaea.shellcode.detected");
GAsyncQueue *aq = g_async_queue_ref(g_dionaea->threads->cmds);
g_async_queue_push(aq, async_cmd_new(async_incident_report, ix));
g_async_queue_unref(aq);
ev_async_send(g_dionaea->loop, &g_dionaea->threads->trigger);
g_debug("shellcode found offset %i", ret);
profile(ctx->config, con, streamdata, size, ret);
pd->state = processor_done;
}
g_free(streamdata);
}
}
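/*
 * Configuration sketch (the group name and the values are illustrative; the
 * keys are exactly the ones read by proc_emu_ctx_cfg_new() above):
 *
 *   [processor.emu]
 *   config.limits.files=3
 *   config.limits.filesize=524288
 *   config.limits.sockets=3
 *   config.limits.steps=1000000
 *   config.limits.idle=30
 *   config.limits.listen=30
 *   config.limits.sustain=120
 *   config.limits.cpu=120.0
 */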
| {
"pile_set_name": "Github"
} |
package srslog
import (
"errors"
"io"
"net"
)
// unixSyslog opens a connection to the syslog daemon running on the
// local machine using a Unix domain socket. This function exists because of
// Solaris support as implemented by gccgo. On Solaris you can not
// simply open a TCP connection to the syslog daemon. The gccgo
// sources have a syslog_solaris.go file that implements unixSyslog to
// return a type that satisfies the serverConn interface and simply calls the C
// library syslog function.
func unixSyslog() (conn serverConn, err error) {
logTypes := []string{"unixgram", "unix"}
logPaths := []string{"/dev/log", "/var/run/syslog", "/var/run/log"}
for _, network := range logTypes {
for _, path := range logPaths {
conn, err := net.Dial(network, path)
if err != nil {
continue
} else {
return &localConn{conn: conn}, nil
}
}
}
return nil, errors.New("Unix syslog delivery error")
}
// localConn adheres to the serverConn interface, allowing us to send syslog
// messages to the local syslog daemon over a Unix domain socket.
type localConn struct {
conn io.WriteCloser
}
// writeString formats syslog messages using time.Stamp instead of time.RFC3339,
// and omits the hostname (because it is expected to be used locally).
func (n *localConn) writeString(framer Framer, formatter Formatter, p Priority, hostname, tag, msg string) error {
if framer == nil {
framer = DefaultFramer
}
if formatter == nil {
formatter = UnixFormatter
}
_, err := n.conn.Write([]byte(framer(formatter(p, hostname, tag, msg))))
return err
}
// close the (local) network connection
func (n *localConn) close() error {
return n.conn.Close()
}
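// Usage sketch (tag and message are illustrative): callers normally reach
// unixSyslog through the package-level constructors rather than directly;
// nil framer/formatter fall back to DefaultFramer/UnixFormatter in
// writeString above.
//
//	w, err := srslog.New(srslog.LOG_INFO|srslog.LOG_DAEMON, "mytag")
//	if err != nil {
//		log.Fatal("could not reach the local syslog daemon: ", err)
//	}
//	defer w.Close()
//	w.Info("hello from the local socket")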
| {
"pile_set_name": "Github"
} |
! Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
! See https://llvm.org/LICENSE.txt for license information.
! SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
!
module my_container
type container
integer i
real r
contains
procedure :: init => init_container
procedure :: xi => extract_i
procedure :: xr => extract_r
generic :: extract => xi, xr
procedure :: assign_to_i
procedure :: assign_to_r
procedure,pass(second) :: addition => addit
procedure,pass(second) :: addition_array
generic :: assignment(=) => assign_to_i, assign_to_r
generic :: operator(+) => addition
generic :: operator(+) => addition_array
procedure,pass(second) :: add_array
end type container
contains
integer function extract_i(this, ii) RESULT(iii)
class(container) :: this
integer ii
iii = this%i
end function extract_i
real function extract_r(this, rr) RESULT(rrr)
class(container) :: this
real rr
rrr = this%r
end function extract_r
subroutine init_container(this, ic, ir)
class(container) :: this
integer :: ic
real :: ir
this%i = ic
this%r = ir
end subroutine init_container
subroutine assign_to_i(this, src)
class(container),intent(inout) :: this
integer,intent(in) :: src
this%i = src
end subroutine assign_to_i
subroutine assign_to_r(this, src)
class(container),intent(inout) :: this
real,intent(in):: src
this%r = src
end subroutine assign_to_r
type(container) function addit(first, second) RESULT(ttt)
class(container),intent(in) :: second
type(container),intent(in) :: first
type(container) :: tt
tt%i = first%i + second%i
tt%r = first%r + second%r
ttt = tt
tt = second
end function addit
type(container) function addition_array(first, second) RESULT(ttt)
class(container),intent(in) :: second
type(container),intent(in) :: first(:)
type(container) :: tt
integer sz
sz = size(first)
do i=1, sz
tt%i = first(i)%i + second%i
tt%r = first(i)%r + second%r
enddo
ttt = tt
end function addition_array
type(container) function add_array(first, second) RESULT(ttt)
class(container),intent(in) :: second
type(container),intent(in) :: first(:)
ttt = first + second
end function add_array
end module my_container
program prg
USE CHECK_MOD
use my_container
class(container),allocatable :: t
class(container),allocatable :: t2
type(container) :: t3
type(container) :: t_array(10)
integer ei
real er
character ec(10)
logical rslt(6)
logical expect(6)
integer i
real r
rslt = .false.
expect = .true.
allocate(t)
allocate(t2)
call t%init(23,4.5)
ei = 0
er = 0.0
er = t%extract(1.0)
ei = t%extract(1)
rslt(1) = er .eq. 4.5
rslt(2) = ei .eq. 23
t2 = ei
t2 = er
er = t2%extract(1.0)
ei = t2%extract(1)
rslt(3) = er .eq. 4.5
rslt(4) = ei .eq. 23
do i=1,10
r = i
call t_array(i)%init(i,r)
enddo
call t%init(0,0.0)
!t3 = t_array + t
t3 = t%add_array(t_array)
rslt(5) = t3%extract(1.0) .eq. 10.0
rslt(6) = t3%extract(1) .eq. 10
call check(rslt,expect,6)
end program prg
| {
"pile_set_name": "Github"
} |
require "spec_helper"
require "omnicontacts"
require "omnicontacts/middleware/base_oauth"
describe OmniContacts::Middleware::BaseOAuth do
before(:all) do
class TestProvider < OmniContacts::Middleware::BaseOAuth
def initialize app, consumer_key, consumer_secret, options = {}
super app, options
end
def redirect_path
"#{ MOUNT_PATH }testprovider/callback"
end
def self.mock_session
@mock_session ||= {}
end
def session
TestProvider.mock_session
end
end
OmniContacts.integration_test.enabled = true
end
let(:app) {
Rack::Builder.new do |b|
b.use TestProvider, "consumer_id", "consumer_secret"
b.run lambda { |env| [200, {"Content-Type" => "text/html"}, ["Hello World"]] }
end.to_app
}
it "should return a preconfigured list of contacts" do
OmniContacts.integration_test.mock(:testprovider, :email => "[email protected]")
get "#{ MOUNT_PATH }testprovider"
get "#{ MOUNT_PATH }testprovider/callback"
last_request.env["omnicontacts.contacts"].first[:email].should eq("[email protected]")
end
it "should redirect to failure url" do
OmniContacts.integration_test.mock(:testprovider, "some_error" )
get "#{ MOUNT_PATH }testprovider"
get "#{MOUNT_PATH }testprovider/callback"
last_response.should be_redirect
last_response.headers["location"].should eq("#{ MOUNT_PATH }failure?error_message=internal_error&importer=testprovider")
end
it "should pass through state query params to the failure url" do
OmniContacts.integration_test.mock(:testprovider, "some_error" )
get "#{MOUNT_PATH }testprovider/callback?state=/parent/resource/id"
last_response.headers["location"].should eq("#{ MOUNT_PATH }failure?error_message=internal_error&importer=testprovider&state=/parent/resource/id")
end
it "should store request params in session" do
OmniContacts.integration_test.mock(:testprovider, :email => "[email protected]")
get "#{ MOUNT_PATH }testprovider?foo=bar"
app.session['omnicontacts.params'].should eq({'foo' => 'bar'})
end
it "should pass the params from session to callback environment " do
OmniContacts.integration_test.mock(:testprovider, :email => "[email protected]")
app.session.merge!({'omnicontacts.params' => {'foo' => 'bar'}})
get "#{MOUNT_PATH }testprovider/callback?state=/parent/resource/id"
last_request.env["omnicontacts.params"].should eq({'foo' => 'bar'})
end
it "should pass the params from session on failure" do
OmniContacts.integration_test.mock(:testprovider, "some_error" )
get "#{ MOUNT_PATH }testprovider"
app.session.merge!({'omnicontacts.params' => {'foo' => 'bar'}})
get "#{MOUNT_PATH }testprovider/callback"
last_response.should be_redirect
last_response.headers["location"].should be_include("foo=bar")
end
after(:all) do
OmniContacts.integration_test.enabled = false
OmniContacts.integration_test.clear_mocks
end
end
| {
"pile_set_name": "Github"
} |
<!--
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
-->
<link rel="import" href="../polymer/polymer.html">
<link rel="import" href="../iron-fit-behavior/iron-fit-behavior.html">
<link rel="import" href="../iron-resizable-behavior/iron-resizable-behavior.html">
<link rel="import" href="iron-overlay-manager.html">
<link rel="import" href="iron-focusables-helper.html">
<script>
(function() {
'use strict';
/** @polymerBehavior */
Polymer.IronOverlayBehaviorImpl = {
properties: {
/**
* True if the overlay is currently displayed.
*/
opened: {
observer: '_openedChanged',
type: Boolean,
value: false,
notify: true
},
/**
* True if the overlay was canceled when it was last closed.
*/
canceled: {
observer: '_canceledChanged',
readOnly: true,
type: Boolean,
value: false
},
/**
* Set to true to display a backdrop behind the overlay. It traps the focus
* within the light DOM of the overlay.
*/
withBackdrop: {
observer: '_withBackdropChanged',
type: Boolean
},
/**
* Set to true to disable auto-focusing the overlay or child nodes with
       * the `autofocus` attribute when the overlay is opened.
*/
noAutoFocus: {
type: Boolean,
value: false
},
/**
* Set to true to disable canceling the overlay with the ESC key.
*/
noCancelOnEscKey: {
type: Boolean,
value: false
},
/**
* Set to true to disable canceling the overlay by clicking outside it.
*/
noCancelOnOutsideClick: {
type: Boolean,
value: false
},
/**
* Contains the reason(s) this overlay was last closed (see `iron-overlay-closed`).
* `IronOverlayBehavior` provides the `canceled` reason; implementers of the
* behavior can provide other reasons in addition to `canceled`.
*/
closingReason: {
// was a getter before, but needs to be a property so other
// behaviors can override this.
type: Object
},
/**
* Set to true to enable restoring of focus when overlay is closed.
*/
restoreFocusOnClose: {
type: Boolean,
value: false
},
/**
* Set to true to keep overlay always on top.
*/
alwaysOnTop: {
type: Boolean
},
/**
* Shortcut to access to the overlay manager.
* @private
* @type {Polymer.IronOverlayManagerClass}
*/
_manager: {
type: Object,
value: Polymer.IronOverlayManager
},
/**
* The node being focused.
* @type {?Node}
*/
_focusedChild: {
type: Object
}
},
listeners: {
'iron-resize': '_onIronResize'
},
/**
* The backdrop element.
* @type {Element}
*/
get backdropElement() {
return this._manager.backdropElement;
},
/**
* Returns the node to give focus to.
* @type {Node}
*/
get _focusNode() {
return this._focusedChild || Polymer.dom(this).querySelector('[autofocus]') || this;
},
/**
* Array of nodes that can receive focus (overlay included), ordered by `tabindex`.
     * This is used to retrieve the first and last focusable nodes in order
* to wrap the focus for overlays `with-backdrop`.
*
* If you know what is your content (specifically the first and last focusable children),
* you can override this method to return only `[firstFocusable, lastFocusable];`
* @type {Array<Node>}
* @protected
*/
get _focusableNodes() {
return Polymer.IronFocusablesHelper.getTabbableNodes(this);
},
ready: function() {
// Used to skip calls to notifyResize and refit while the overlay is animating.
this.__isAnimating = false;
// with-backdrop needs tabindex to be set in order to trap the focus.
// If it is not set, IronOverlayBehavior will set it, and remove it if with-backdrop = false.
this.__shouldRemoveTabIndex = false;
// Used for wrapping the focus on TAB / Shift+TAB.
this.__firstFocusableNode = this.__lastFocusableNode = null;
// Used by __onNextAnimationFrame to cancel any previous callback.
this.__raf = null;
// Focused node before overlay gets opened. Can be restored on close.
this.__restoreFocusNode = null;
this._ensureSetup();
},
attached: function() {
// Call _openedChanged here so that position can be computed correctly.
if (this.opened) {
this._openedChanged(this.opened);
}
this._observer = Polymer.dom(this).observeNodes(this._onNodesChange);
},
detached: function() {
Polymer.dom(this).unobserveNodes(this._observer);
this._observer = null;
if (this.__raf) {
window.cancelAnimationFrame(this.__raf);
this.__raf = null;
}
this._manager.removeOverlay(this);
},
/**
* Toggle the opened state of the overlay.
*/
toggle: function() {
this._setCanceled(false);
this.opened = !this.opened;
},
/**
* Open the overlay.
*/
open: function() {
this._setCanceled(false);
this.opened = true;
},
/**
* Close the overlay.
*/
close: function() {
this._setCanceled(false);
this.opened = false;
},
/**
* Cancels the overlay.
* @param {Event=} event The original event
*/
cancel: function(event) {
var cancelEvent = this.fire('iron-overlay-canceled', event, {cancelable: true});
if (cancelEvent.defaultPrevented) {
return;
}
this._setCanceled(true);
this.opened = false;
},
/**
* Invalidates the cached tabbable nodes. To be called when any of the focusable
* content changes (e.g. a button is disabled).
*/
invalidateTabbables: function() {
this.__firstFocusableNode = this.__lastFocusableNode = null;
},
_ensureSetup: function() {
if (this._overlaySetup) {
return;
}
this._overlaySetup = true;
this.style.outline = 'none';
this.style.display = 'none';
},
/**
* Called when `opened` changes.
* @param {boolean=} opened
* @protected
*/
_openedChanged: function(opened) {
if (opened) {
this.removeAttribute('aria-hidden');
} else {
this.setAttribute('aria-hidden', 'true');
}
// Defer any animation-related code on attached
// (_openedChanged gets called again on attached).
if (!this.isAttached) {
return;
}
this.__isAnimating = true;
// Use requestAnimationFrame for non-blocking rendering.
this.__onNextAnimationFrame(this.__openedChanged);
},
_canceledChanged: function() {
this.closingReason = this.closingReason || {};
this.closingReason.canceled = this.canceled;
},
_withBackdropChanged: function() {
// If tabindex is already set, no need to override it.
if (this.withBackdrop && !this.hasAttribute('tabindex')) {
this.setAttribute('tabindex', '-1');
this.__shouldRemoveTabIndex = true;
} else if (this.__shouldRemoveTabIndex) {
this.removeAttribute('tabindex');
this.__shouldRemoveTabIndex = false;
}
if (this.opened && this.isAttached) {
this._manager.trackBackdrop();
}
},
/**
     * Tasks which must occur before opening, e.g. making the element visible.
* @protected
*/
_prepareRenderOpened: function() {
// Store focused node.
this.__restoreFocusNode = this._manager.deepActiveElement;
// Needed to calculate the size of the overlay so that transitions on its size
// will have the correct starting points.
this._preparePositioning();
this.refit();
this._finishPositioning();
// Safari will apply the focus to the autofocus element when displayed
// for the first time, so we make sure to return the focus where it was.
if (this.noAutoFocus && document.activeElement === this._focusNode) {
this._focusNode.blur();
this.__restoreFocusNode.focus();
}
},
/**
* Tasks which cause the overlay to actually open; typically play an animation.
* @protected
*/
_renderOpened: function() {
this._finishRenderOpened();
},
/**
* Tasks which cause the overlay to actually close; typically play an animation.
* @protected
*/
_renderClosed: function() {
this._finishRenderClosed();
},
/**
* Tasks to be performed at the end of open action. Will fire `iron-overlay-opened`.
* @protected
*/
_finishRenderOpened: function() {
this.notifyResize();
this.__isAnimating = false;
this.fire('iron-overlay-opened');
},
/**
* Tasks to be performed at the end of close action. Will fire `iron-overlay-closed`.
* @protected
*/
_finishRenderClosed: function() {
// Hide the overlay.
this.style.display = 'none';
// Reset z-index only at the end of the animation.
this.style.zIndex = '';
this.notifyResize();
this.__isAnimating = false;
this.fire('iron-overlay-closed', this.closingReason);
},
_preparePositioning: function() {
this.style.transition = this.style.webkitTransition = 'none';
this.style.transform = this.style.webkitTransform = 'none';
this.style.display = '';
},
_finishPositioning: function() {
// First, make it invisible & reactivate animations.
this.style.display = 'none';
// Force reflow before re-enabling animations so that they don't start.
// Set scrollTop to itself so that Closure Compiler doesn't remove this.
this.scrollTop = this.scrollTop;
this.style.transition = this.style.webkitTransition = '';
this.style.transform = this.style.webkitTransform = '';
// Now that animations are enabled, make it visible again
this.style.display = '';
// Force reflow, so that following animations are properly started.
// Set scrollTop to itself so that Closure Compiler doesn't remove this.
this.scrollTop = this.scrollTop;
},
/**
* Applies focus according to the opened state.
* @protected
*/
_applyFocus: function() {
if (this.opened) {
if (!this.noAutoFocus) {
this._focusNode.focus();
}
}
else {
this._focusNode.blur();
this._focusedChild = null;
// Restore focus.
if (this.restoreFocusOnClose && this.__restoreFocusNode) {
this.__restoreFocusNode.focus();
}
this.__restoreFocusNode = null;
// If many overlays get closed at the same time, one of them would still
// be the currentOverlay even if already closed, and would call _applyFocus
// infinitely, so we check for this not to be the current overlay.
var currentOverlay = this._manager.currentOverlay();
if (currentOverlay && this !== currentOverlay) {
currentOverlay._applyFocus();
}
}
},
/**
* Cancels (closes) the overlay. Call when click happens outside the overlay.
* @param {!Event} event
* @protected
*/
_onCaptureClick: function(event) {
if (!this.noCancelOnOutsideClick) {
this.cancel(event);
}
},
/**
* Keeps track of the focused child. If withBackdrop, traps focus within overlay.
* @param {!Event} event
* @protected
*/
_onCaptureFocus: function (event) {
if (!this.withBackdrop) {
return;
}
var path = Polymer.dom(event).path;
if (path.indexOf(this) === -1) {
event.stopPropagation();
this._applyFocus();
} else {
this._focusedChild = path[0];
}
},
/**
* Handles the ESC key event and cancels (closes) the overlay.
* @param {!Event} event
* @protected
*/
_onCaptureEsc: function(event) {
if (!this.noCancelOnEscKey) {
this.cancel(event);
}
},
/**
* Handles TAB key events to track focus changes.
* Will wrap focus for overlays withBackdrop.
* @param {!Event} event
* @protected
*/
_onCaptureTab: function(event) {
if (!this.withBackdrop) {
return;
}
this.__ensureFirstLastFocusables();
// TAB wraps from last to first focusable.
// Shift + TAB wraps from first to last focusable.
var shift = event.shiftKey;
var nodeToCheck = shift ? this.__firstFocusableNode : this.__lastFocusableNode;
var nodeToSet = shift ? this.__lastFocusableNode : this.__firstFocusableNode;
var shouldWrap = false;
if (nodeToCheck === nodeToSet) {
// If nodeToCheck is the same as nodeToSet, it means we have an overlay
// with 0 or 1 focusables; in either case we still need to trap the
// focus within the overlay.
shouldWrap = true;
} else {
// In dom=shadow, the manager will receive focus changes on the main
// root but not the ones within other shadow roots, so we can't rely on
// _focusedChild, but we should check the deepest active element.
var focusedNode = this._manager.deepActiveElement;
// If the active element is not the nodeToCheck but the overlay itself,
// it means the focus is about to go outside the overlay, hence we
// should prevent that (e.g. user opens the overlay and hit Shift+TAB).
shouldWrap = (focusedNode === nodeToCheck || focusedNode === this);
}
if (shouldWrap) {
// When the overlay contains the last focusable element of the document
// and it's already focused, pressing TAB would move the focus outside
// the document (e.g. to the browser search bar). Similarly, when the
// overlay contains the first focusable element of the document and it's
// already focused, pressing Shift+TAB would move the focus outside the
// document (e.g. to the browser search bar).
// In both cases, we would not receive a focus event, but only a blur.
// In order to achieve focus wrapping, we prevent this TAB event and
        // force the focus. This also prevents the focus from temporarily moving
        // outside the overlay, which might cause scrolling.
event.preventDefault();
this._focusedChild = nodeToSet;
this._applyFocus();
}
},
/**
* Refits if the overlay is opened and not animating.
* @protected
*/
_onIronResize: function() {
if (this.opened && !this.__isAnimating) {
this.__onNextAnimationFrame(this.refit);
}
},
/**
* Will call notifyResize if overlay is opened.
* Can be overridden in order to avoid multiple observers on the same node.
* @protected
*/
_onNodesChange: function() {
if (this.opened && !this.__isAnimating) {
// It might have added focusable nodes, so invalidate cached values.
this.invalidateTabbables();
this.notifyResize();
}
},
/**
* Will set first and last focusable nodes if any of them is not set.
* @private
*/
__ensureFirstLastFocusables: function() {
if (!this.__firstFocusableNode || !this.__lastFocusableNode) {
var focusableNodes = this._focusableNodes;
this.__firstFocusableNode = focusableNodes[0];
this.__lastFocusableNode = focusableNodes[focusableNodes.length - 1];
}
},
/**
* Tasks executed when opened changes: prepare for the opening, move the
* focus, update the manager, render opened/closed.
* @private
*/
__openedChanged: function() {
if (this.opened) {
// Make overlay visible, then add it to the manager.
this._prepareRenderOpened();
this._manager.addOverlay(this);
// Move the focus to the child node with [autofocus].
this._applyFocus();
this._renderOpened();
} else {
// Remove overlay, then restore the focus before actually closing.
this._manager.removeOverlay(this);
this._applyFocus();
this._renderClosed();
}
},
/**
* Executes a callback on the next animation frame, overriding any previous
* callback awaiting for the next animation frame. e.g.
* `__onNextAnimationFrame(callback1) && __onNextAnimationFrame(callback2)`;
* `callback1` will never be invoked.
* @param {!Function} callback Its `this` parameter is the overlay itself.
* @private
*/
__onNextAnimationFrame: function(callback) {
if (this.__raf) {
window.cancelAnimationFrame(this.__raf);
}
var self = this;
this.__raf = window.requestAnimationFrame(function nextAnimationFrame() {
self.__raf = null;
callback.call(self);
});
}
};
/**
Use `Polymer.IronOverlayBehavior` to implement an element that can be hidden or shown, and displays
on top of other content. It includes an optional backdrop, and can be used to implement a variety
of UI controls including dialogs and drop downs. Multiple overlays may be displayed at once.
See the [demo source code](https://github.com/PolymerElements/iron-overlay-behavior/blob/master/demo/simple-overlay.html)
for an example.
### Closing and canceling
An overlay may be hidden by closing or canceling. The difference between close and cancel is user
intent. Closing generally implies that the user acknowledged the content on the overlay. By default,
it will cancel whenever the user taps outside it or presses the escape key. This behavior is
configurable with the `no-cancel-on-esc-key` and the `no-cancel-on-outside-click` properties.
`close()` should be called explicitly by the implementer when the user interacts with a control
in the overlay element. When the dialog is canceled, the overlay fires an 'iron-overlay-canceled'
event. Call `preventDefault` on this event to prevent the overlay from closing.
### Positioning
By default the element is sized and positioned to fit and centered inside the window. You can
position and size it manually using CSS. See `Polymer.IronFitBehavior`.
### Backdrop
Set the `with-backdrop` attribute to display a backdrop behind the overlay. The backdrop is
appended to `<body>` and is of type `<iron-overlay-backdrop>`. See its doc page for styling
options.
In addition, `with-backdrop` will wrap the focus within the content in the light DOM.
Override the [`_focusableNodes` getter](#Polymer.IronOverlayBehavior:property-_focusableNodes)
to achieve a different behavior.
### Limitations
The element is styled to appear on top of other content by setting its `z-index` property. You
must ensure no element has a stacking context with a higher `z-index` than its parent stacking
context. You should place this element as a child of `<body>` whenever possible.
@demo demo/index.html
@polymerBehavior
*/
Polymer.IronOverlayBehavior = [Polymer.IronFitBehavior, Polymer.IronResizableBehavior, Polymer.IronOverlayBehaviorImpl];
/**
* Fired after the overlay opens.
* @event iron-overlay-opened
*/
/**
* Fired when the overlay is canceled, but before it is closed.
* @event iron-overlay-canceled
* @param {Event} event The closing of the overlay can be prevented
* by calling `event.preventDefault()`. The `event.detail` is the original event that
* originated the canceling (e.g. ESC keyboard event or click event outside the overlay).
*/
/**
* Fired after the overlay closes.
* @event iron-overlay-closed
* @param {Event} event The `event.detail` is the `closingReason` property
* (contains `canceled`, whether the overlay was canceled).
*/
})();
</script>
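<!--
  Usage sketch (the element name `simple-overlay` is illustrative; it stands for
  any custom element whose behaviors array includes Polymer.IronOverlayBehavior):

    <simple-overlay id="overlay" with-backdrop no-cancel-on-outside-click>
      <h2>Settings</h2>
      <button autofocus>Close me</button>
    </simple-overlay>
    <script>
      var overlay = document.querySelector('#overlay');
      overlay.addEventListener('iron-overlay-closed', function(e) {
        console.log('closed, canceled =', e.detail.canceled);
      });
      overlay.open();
    </script>
-->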
| {
"pile_set_name": "Github"
} |
/*!
* socket.io-node
* Copyright(c) 2011 LearnBoost <[email protected]>
* MIT Licensed
*/
/**
* Module requirements.
*/
var HTTPTransport = require('./http');
/**
* Exports the constructor.
*/
exports = module.exports = HTTPPolling;
/**
* HTTP polling constructor.
*
* @api public.
*/
function HTTPPolling (mng, data, req) {
HTTPTransport.call(this, mng, data, req);
};
/**
* Inherits from HTTPTransport.
*
* @api public.
*/
HTTPPolling.prototype.__proto__ = HTTPTransport.prototype;
/**
* Transport name
*
* @api public
*/
HTTPPolling.prototype.name = 'httppolling';
/**
* Override setHandlers
*
* @api private
*/
HTTPPolling.prototype.setHandlers = function () {
HTTPTransport.prototype.setHandlers.call(this);
this.socket.removeListener('end', this.bound.end);
this.socket.removeListener('close', this.bound.close);
};
/**
* Removes heartbeat timeouts for polling.
*/
HTTPPolling.prototype.setHeartbeatInterval = function () {
return this;
};
/**
* Handles a request
*
* @api private
*/
HTTPPolling.prototype.handleRequest = function (req) {
HTTPTransport.prototype.handleRequest.call(this, req);
if (req.method == 'GET') {
var self = this;
this.pollTimeout = setTimeout(function () {
self.packet({ type: 'noop' });
self.log.debug(self.name + ' closed due to exceeded duration');
}, this.manager.get('polling duration') * 1000);
this.log.debug('setting poll timeout');
}
};
/**
* Clears polling timeout
*
* @api private
*/
HTTPPolling.prototype.clearPollTimeout = function () {
if (this.pollTimeout) {
clearTimeout(this.pollTimeout);
this.pollTimeout = null;
this.log.debug('clearing poll timeout');
}
return this;
};
/**
* Override clear timeouts to clear the poll timeout
*
* @api private
*/
HTTPPolling.prototype.clearTimeouts = function () {
HTTPTransport.prototype.clearTimeouts.call(this);
this.clearPollTimeout();
};
/**
* doWrite to clear poll timeout
*
* @api private
*/
HTTPPolling.prototype.doWrite = function () {
this.clearPollTimeout();
};
/**
* Performs a write.
*
* @api private.
*/
HTTPPolling.prototype.write = function (data, close) {
this.doWrite(data);
this.response.end();
this.onClose();
};
/**
* Override end.
*
* @api private
*/
HTTPPolling.prototype.end = function (reason) {
this.clearPollTimeout();
return HTTPTransport.prototype.end.call(this, reason);
};
| {
"pile_set_name": "Github"
} |
package org.jdownloader.gui.packagehistorycontroller;
import org.appwork.storage.Storable;
import org.appwork.utils.StringUtils;
public class DownloadPath implements Storable, HistoryEntry {
@SuppressWarnings("unused")
private DownloadPath(/* Storable */) {
}
public DownloadPath(String myPath) {
name = myPath;
time = System.currentTimeMillis();
}
public String getName() {
return name;
}
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj instanceof DownloadPath) {
return StringUtils.equals(getName(), ((DownloadPath) obj).getName());
}
return false;
}
public int hashCode() {
return name == null ? "".hashCode() : name.hashCode();
}
public void setName(String path) {
this.name = path;
}
public long getTime() {
return time;
}
public void setTime(long time) {
this.time = time;
}
private String name;
private long time;
private static int compare(long x, long y) {
return (x < y) ? 1 : ((x == y) ? 0 : -1);
}
@Override
public int compareTo(HistoryEntry o) {
return compare(getTime(), o.getTime());
}
}
| {
"pile_set_name": "Github"
} |
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package rules
import (
"fmt"
"go/ast"
"github.com/securego/gosec"
)
type weakKeyStrength struct {
gosec.MetaData
calls gosec.CallList
bits int
}
func (w *weakKeyStrength) ID() string {
return w.MetaData.ID
}
func (w *weakKeyStrength) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) {
if callExpr := w.calls.ContainsPkgCallExpr(n, c, false); callExpr != nil {
if bits, err := gosec.GetInt(callExpr.Args[1]); err == nil && bits < (int64)(w.bits) {
return gosec.NewIssue(c, n, w.ID(), w.What, w.Severity, w.Confidence), nil
}
}
return nil, nil
}
// NewWeakKeyStrength builds a rule that detects RSA keys < 2048 bits
func NewWeakKeyStrength(id string, conf gosec.Config) (gosec.Rule, []ast.Node) {
calls := gosec.NewCallList()
calls.Add("crypto/rsa", "GenerateKey")
bits := 2048
return &weakKeyStrength{
calls: calls,
bits: bits,
MetaData: gosec.MetaData{
ID: id,
Severity: gosec.Medium,
Confidence: gosec.High,
What: fmt.Sprintf("RSA keys should be at least %d bits", bits),
},
}, []ast.Node{(*ast.CallExpr)(nil)}
}
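// Example of code this rule flags (variable names are illustrative; only the
// second argument to rsa.GenerateKey matters):
//
//	weakKey, err := rsa.GenerateKey(rand.Reader, 1024)   // reported: below 2048 bits
//	strongKey, err := rsa.GenerateKey(rand.Reader, 4096) // not reported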
| {
"pile_set_name": "Github"
} |
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <unistd.h>
char t[] = "H\xb8H\xb9H\xbaH\xbbH\xbcH\xbdH\xbeH\xbfI\xb8I\xb9I\xbaI\xbbI\xbcI\xbdI\xbeI\xbf";
int rd;
inline void* random_mmap(int perm) {
size_t r;
void *addr;
read(rd, &r, 6);
addr = (void*) (r & 0x7ffffffff000ll);
addr = mmap(addr, 4096, perm, MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED, -1, 0);
if(addr == MAP_FAILED) exit(1);
return addr;
}
int main() {
char c[16];
memset(c, 0xee, 16);
alarm(30);
rd = open("/dev/urandom", O_RDONLY);
printf("%#llx\n", __builtin_return_address(0));
fflush(stdout);
register void* addr = random_mmap(PROT_WRITE | PROT_EXEC);
void *stk = random_mmap(PROT_WRITE | PROT_READ);
memset(stk, 0xee, 0x1000);
char *p = (char*)addr;
for(int i=0,j=0;i<16;i++) {
*p = t[j++]; p++;
*p = t[j++]; p++;
if(i == 4)
*(size_t *)p = stk + 2048;
else
read(rd, p, 8);
p += 8;
}
close(rd);
*p = (unsigned char)0xc3;
read(0, c, 16);
memcpy(stk + 2048, c, 16);
__asm__("jmp *%0" : : "r" (addr));
_exit(0);
}
| {
"pile_set_name": "Github"
} |
# Settings for the GCC(1) cpu-type "core2":
#
# Intel Core2 CPU with 64-bit extensions, MMX, SSE, SSE2, SSE3 and SSSE3
# instruction set support.
#
# This tune is recommended for the Intel Core 2 CPU family, including Conroe,
# Merom and beyond, as well as the first Atom CPUs, Diamondville, and beyond.
#
DEFAULTTUNE ?= "core2-32"
# Include the previous tune to pull in PACKAGE_EXTRA_ARCHS
require conf/machine/include/tune-i686.inc
# Extra tune features
TUNEVALID[core2] = "Enable core2 specific processor optimizations"
TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'core2', ' -march=core2 -mtune=core2 -msse3 -mfpmath=sse', '', d)}"
# Extra tune selections
AVAILTUNES += "core2-32"
TUNE_FEATURES_tune-core2-32 = "${TUNE_FEATURES_tune-x86} core2"
BASE_LIB_tune-core2-32 = "lib"
TUNE_PKGARCH_tune-core2-32 = "core2-32"
PACKAGE_EXTRA_ARCHS_tune-core2-32 = "${PACKAGE_EXTRA_ARCHS_tune-i686} core2-32"
QEMU_EXTRAOPTIONS_core2-32 = " -cpu n270"
AVAILTUNES += "core2-64"
TUNE_FEATURES_tune-core2-64 = "${TUNE_FEATURES_tune-x86-64} core2"
BASE_LIB_tune-core2-64 = "lib64"
TUNE_PKGARCH_tune-core2-64 = "core2-64"
PACKAGE_EXTRA_ARCHS_tune-core2-64 = "${PACKAGE_EXTRA_ARCHS_tune-x86-64} core2-64"
QEMU_EXTRAOPTIONS_core2-64 = " -cpu core2duo"
AVAILTUNES += "core2-64-x32"
TUNE_FEATURES_tune-core2-64-x32 = "${TUNE_FEATURES_tune-x86-64-x32} core2"
BASE_LIB_tune-core2-64-x32 = "libx32"
TUNE_PKGARCH_tune-core2-64-x32 = "core2-64-x32"
PACKAGE_EXTRA_ARCHS_tune-core2-64-x32 = "${PACKAGE_EXTRA_ARCHS_tune-x86-64-x32} core2-64-x32"
QEMU_EXTRAOPTIONS_core2-64-x32 = " -cpu core2duo"
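# Usage sketch (the machine name is illustrative): a machine configuration picks
# one of the tunes defined above by requiring this file and setting DEFAULTTUNE,
# e.g. in conf/machine/my-core2-machine.conf:
#
#   require conf/machine/include/tune-core2.inc
#   DEFAULTTUNE = "core2-64"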
| {
"pile_set_name": "Github"
} |
#ifndef MC__EDIT_ASPELL_H
#define MC__EDIT_ASPELL_H
#include "lib/global.h" /* include <glib.h> */
/*** typedefs(not structures) and defined constants **********************************************/
/*** enums ***************************************************************************************/
/*** structures declarations (and typedefs of structures)*****************************************/
/*** global variables defined in .c file *********************************************************/
/*** declarations of public functions ************************************************************/
void aspell_init (void);
void aspell_clean (void);
gboolean aspell_check (const char *word, const int word_size);
unsigned int aspell_suggest (GArray * suggest, const char *word, const int word_size);
void aspell_array_clean (GArray * array);
unsigned int aspell_get_lang_list (GArray * lang_list);
const char *aspell_get_lang (void);
gboolean aspell_set_lang (const char *lang);
gboolean aspell_add_to_dict (const char *word, const int word_size);
/*** inline functions ****************************************************************************/
#endif /* MC__EDIT_ASPELL_H */
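/*
 * Usage sketch (language code, word and its byte length are illustrative):
 *
 *   aspell_init ();
 *   if (aspell_set_lang ("en_US") && !aspell_check ("mispeling", 9))
 *       printf ("not in dictionary\n");
 *   aspell_clean ();
 */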
| {
"pile_set_name": "Github"
} |
{
"documentation": "https://lucene.apache.org/solr/guide/authentication-and-authorization-plugins.html",
"description":"Shows the configuration for authorization, including the classes (type of authorization), permissions, user-roles, and other parameters.",
"methods": [
"GET"
],
"url": {
"paths": [
"/cluster/security/authorization"
]
}
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2016 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#ifndef SkSVGTransformableNode_DEFINED
#define SkSVGTransformableNode_DEFINED
#include "experimental/svg/model/SkSVGNode.h"
#include "include/core/SkMatrix.h"
class SkSVGTransformableNode : public SkSVGNode {
public:
~SkSVGTransformableNode() override = default;
void setTransform(const SkSVGTransformType& t) { fTransform = t; }
protected:
SkSVGTransformableNode(SkSVGTag);
bool onPrepareToRender(SkSVGRenderContext*) const override;
void onSetAttribute(SkSVGAttribute, const SkSVGValue&) override;
void mapToParent(SkPath*) const;
private:
// FIXME: should be sparse
SkSVGTransformType fTransform;
using INHERITED = SkSVGNode;
};
#endif // SkSVGTransformableNode_DEFINED
| {
"pile_set_name": "Github"
} |
extern "C"
{
#include "cgc_stdio.h"
}
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2018, 2020, Oracle Corporation and/or its affiliates.
// Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
package oracle.kubernetes.operator.helpers;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import com.meterware.simplestub.Memento;
import com.meterware.simplestub.StaticStubSupport;
import io.kubernetes.client.openapi.models.V1ConfigMap;
import io.kubernetes.client.openapi.models.V1ObjectMeta;
import oracle.kubernetes.operator.LabelConstants;
import oracle.kubernetes.operator.calls.FailureStatusSourceException;
import oracle.kubernetes.operator.work.Packet;
import oracle.kubernetes.operator.work.Step;
import oracle.kubernetes.utils.TestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static com.meterware.simplestub.Stub.createStrictStub;
import static oracle.kubernetes.operator.KubernetesConstants.SCRIPT_CONFIG_MAP_NAME;
import static oracle.kubernetes.operator.ProcessingConstants.SCRIPT_CONFIG_MAP;
import static oracle.kubernetes.operator.helpers.KubernetesTestSupport.CONFIG_MAP;
import static oracle.kubernetes.operator.logging.MessageKeys.CM_CREATED;
import static oracle.kubernetes.operator.logging.MessageKeys.CM_EXISTS;
import static oracle.kubernetes.operator.logging.MessageKeys.CM_REPLACED;
import static oracle.kubernetes.utils.LogMatcher.containsFine;
import static oracle.kubernetes.utils.LogMatcher.containsInfo;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.sameInstance;
import static org.hamcrest.junit.MatcherAssert.assertThat;
public class ConfigMapHelperTest {
static final String[] SCRIPT_NAMES = {
"livenessProbe.sh",
"readState.sh",
"start-server.py",
"startServer.sh",
"stop-server.py",
"stopServer.sh",
"introspectDomain.sh",
"introspectDomain.py",
"startNodeManager.sh",
"utils.py",
"utils.sh",
"wlst.sh",
"tailLog.sh",
"monitorLog.sh",
"model_diff.py",
"modelInImage.sh",
"wdt_create_filter.py",
"model_filters.json",
"encryption_util.py"
};
private static final String DOMAIN_NS = "namespace";
private static final String OPERATOR_NS = "operator";
private static final String ADDITIONAL_NAME = "additional.sh";
private static final String[] PARTIAL_SCRIPT_NAMES = {"livenessProbe.sh", ADDITIONAL_NAME};
private static final String[] COMBINED_SCRIPT_NAMES = combine(SCRIPT_NAMES, PARTIAL_SCRIPT_NAMES);
private final V1ConfigMap defaultConfigMap = defineDefaultConfigMap();
private final RetryStrategyStub retryStrategy = createStrictStub(RetryStrategyStub.class);
private final KubernetesTestSupport testSupport = new KubernetesTestSupport();
private final List<Memento> mementos = new ArrayList<>();
private final List<LogRecord> logRecords = new ArrayList<>();
@SuppressWarnings("SameParameterValue")
private static String[] combine(String[] first, String[] second) {
return Stream.of(first, second).flatMap(Stream::of).distinct().toArray(String[]::new);
}
private static Map<String, String> nameOnlyScriptMap(String... scriptNames) {
return Stream.of(scriptNames).collect(Collectors.toMap(s -> s, s -> ""));
}
private V1ConfigMap defineDefaultConfigMap() {
return defineConfigMap(SCRIPT_NAMES);
}
private V1ConfigMap defineConfigMap(String... scriptNames) {
return new V1ConfigMap()
.apiVersion("v1")
.kind("ConfigMap")
.metadata(createMetadata())
.data(nameOnlyScriptMap(scriptNames));
}
private V1ObjectMeta createMetadata() {
return new V1ObjectMeta()
.name(SCRIPT_CONFIG_MAP_NAME)
.namespace(DOMAIN_NS)
.putLabelsItem(LabelConstants.OPERATORNAME_LABEL, OPERATOR_NS)
.putLabelsItem(LabelConstants.CREATEDBYOPERATOR_LABEL, "true");
}
/**
* Setup test.
* @throws Exception on failure
*/
@Before
public void setUp() throws Exception {
mementos.add(
TestUtils.silenceOperatorLogger()
.collectLogMessages(logRecords, CM_CREATED, CM_EXISTS, CM_REPLACED)
.withLogLevel(Level.FINE));
mementos.add(testSupport.install());
mementos.add(TestComparator.install());
}
/**
* Tear down test.
* @throws Exception on failure
*/
@After
public void tearDown() throws Exception {
for (Memento memento : mementos) {
memento.revert();
}
testSupport.throwOnCompletionFailure();
}
@Test
public void whenUnableToReadConfigMap_reportFailure() {
testSupport.failOnResource(CONFIG_MAP, SCRIPT_CONFIG_MAP_NAME, DOMAIN_NS, 401);
Step scriptConfigMapStep = ConfigMapHelper.createScriptConfigMapStep(OPERATOR_NS, DOMAIN_NS);
testSupport.runSteps(scriptConfigMapStep);
testSupport.verifyCompletionThrowable(FailureStatusSourceException.class);
}
@Test
public void whenNoConfigMap_createIt() {
testSupport.runSteps(ConfigMapHelper.createScriptConfigMapStep(OPERATOR_NS, DOMAIN_NS));
assertThat(testSupport.getResources(CONFIG_MAP), notNullValue());
assertThat(logRecords, containsInfo(CM_CREATED));
}
@Test
public void whenNoConfigMap_retryOnFailure() {
testSupport.addRetryStrategy(retryStrategy);
testSupport.failOnCreate(CONFIG_MAP, SCRIPT_CONFIG_MAP_NAME, DOMAIN_NS, 401);
Step scriptConfigMapStep = ConfigMapHelper.createScriptConfigMapStep(OPERATOR_NS, DOMAIN_NS);
testSupport.runSteps(scriptConfigMapStep);
testSupport.verifyCompletionThrowable(FailureStatusSourceException.class);
assertThat(retryStrategy.getConflictStep(), sameInstance(scriptConfigMapStep));
}
@Test
public void whenMatchingConfigMapExists_addToPacket() {
testSupport.defineResources(defaultConfigMap);
Packet packet = testSupport.runSteps(ConfigMapHelper.createScriptConfigMapStep(OPERATOR_NS, DOMAIN_NS));
assertThat(logRecords, containsFine(CM_EXISTS));
assertThat(packet, hasEntry(SCRIPT_CONFIG_MAP, defaultConfigMap));
}
@Test
public void whenExistingConfigMapIsMissingData_replaceIt() {
testSupport.defineResources(defineConfigMap(PARTIAL_SCRIPT_NAMES));
testSupport.runSteps(ConfigMapHelper.createScriptConfigMapStep(OPERATOR_NS, DOMAIN_NS));
assertThat(logRecords, containsInfo(CM_REPLACED));
assertThat(getScriptConfigKeys(), containsInAnyOrder(COMBINED_SCRIPT_NAMES));
}
private Collection<String> getScriptConfigKeys() {
return Optional.ofNullable(getScriptConfigMap())
.map(V1ConfigMap::getData)
.map(Map::keySet)
.orElseGet(Collections::emptySet);
}
private V1ConfigMap getScriptConfigMap() {
final List<V1ConfigMap> configMaps = testSupport.getResources(CONFIG_MAP);
return configMaps.stream().filter(this::isScriptConfigMap).findFirst().orElse(null);
}
private boolean isScriptConfigMap(@Nonnull V1ConfigMap map) {
return Optional.ofNullable(map.getMetadata()).map(this::isScriptConfigMapMetaData).orElse(false);
}
private boolean isScriptConfigMapMetaData(V1ObjectMeta meta) {
return SCRIPT_CONFIG_MAP_NAME.equals(meta.getName()) && DOMAIN_NS.equals(meta.getNamespace());
}
@Test
public void whenExistingConfigMapHasExtraData_dontRemoveIt() {
testSupport.defineResources(defineConfigMap(PARTIAL_SCRIPT_NAMES));
testSupport.runSteps(ConfigMapHelper.createScriptConfigMapStep(OPERATOR_NS, DOMAIN_NS));
assertThat(logRecords, containsInfo(CM_REPLACED));
assertThat(getScriptConfigKeys(), hasItem(ADDITIONAL_NAME));
}
// An implementation of the comparator that tests only the keys in the maps
static class TestComparator extends ConfigMapHelper.ConfigMapComparator {
static Memento install() throws NoSuchFieldException {
return StaticStubSupport.install(ConfigMapHelper.class, "COMPARATOR", new TestComparator());
}
@Override
boolean containsAllData(Map<String, String> actual, Map<String, String> expected) {
return actual.keySet().containsAll(expected.keySet());
}
}
}
| {
"pile_set_name": "Github"
} |
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Bonsai.Areas.Admin.Utils;
using Bonsai.Areas.Admin.ViewModels.Common;
using Bonsai.Areas.Admin.ViewModels.Components;
using Microsoft.AspNetCore.Mvc;
namespace Bonsai.Areas.Admin.Components
{
/// <summary>
/// Renders the page numbers for a list.
/// </summary>
public class ListPaginatorComponent: ViewComponent
{
/// <summary>
/// Renders the pagination control.
/// </summary>
public async Task<IViewComponentResult> InvokeAsync(string url, ListRequestVM request, int pageCount)
{
var result = new List<ListPaginatorPageVM>();
var pages = GetPageNumbers(request.Page, pageCount);
var prevPage = (int?) null;
foreach (var page in pages)
{
if(prevPage != null && prevPage != page - 1)
result.Add(new ListPaginatorPageVM { Title = "..." });
var clone = ListRequestVM.Clone(request);
clone.Page = page;
var cloneUrl = ListRequestHelper.GetUrl(url, clone);
result.Add(new ListPaginatorPageVM
{
Url = cloneUrl,
Title = (page + 1).ToString(),
IsCurrent = page == request.Page
});
prevPage = page;
}
return View("~/Areas/Admin/Views/Components/ListPaginator.cshtml", result);
}
/// <summary>
/// Returns the page IDs.
/// </summary>
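        /// <remarks>
        /// Illustrative example (assumed values): with Margin = 2, current = 5 and count = 20,
        /// this yields 0, 1, 3, 4, 5, 6, 7, 18, 19: the first two pages, a window around the
        /// current page and the last two pages; InvokeAsync then inserts "..." in the gaps.
        /// </remarks>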
private IEnumerable<int> GetPageNumbers(int current, int count)
{
const int Margin = 2;
return Enumerable.Range(0, Margin)
.Concat(Enumerable.Range(current - Margin, Margin * 2 + 1))
.Concat(Enumerable.Range(count - Margin, Margin))
.Where(x => x >= 0 && x < count)
.Distinct();
}
}
}
| {
"pile_set_name": "Github"
} |
<Canvas>
<Kind>42</Kind>
<Name>MATERIAL 1Settings</Name>
<IsMinified>1</IsMinified>
<XPosition>1.000000000</XPosition>
<YPosition>170.000000000</YPosition>
</Canvas>
<Widget>
<Kind>25</Kind>
<Name>AR</Name>
<Value>0.267015696</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>AG</Name>
<Value>0.225130811</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>AB</Name>
<Value>0.481675237</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>AR</Name>
<Value>0.800000012</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>AG</Name>
<Value>0.800000012</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>AB</Name>
<Value>0.800000012</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>ER</Name>
<Value>0.141361251</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>EG</Name>
<Value>0.209424004</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>EB</Name>
<Value>0.167539105</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>SR</Name>
<Value>0.235602096</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>SG</Name>
<Value>0.240837619</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>SB</Name>
<Value>0.575916052</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>SHINY</Name>
<Value>63.054187775</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>AH</Name>
<Value>0.693877637</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>AS</Name>
<Value>0.532608688</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>AV</Name>
<Value>0.481675237</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>DH</Name>
<Value>0.000000000</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>DS</Name>
<Value>0.000000000</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>DV</Name>
<Value>0.800000012</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>EV</Name>
<Value>0.397435576</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>ES</Name>
<Value>0.324999779</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>EV</Name>
<Value>0.209424004</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>SH</Name>
<Value>0.664102614</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>SS</Name>
<Value>0.590909004</Value>
</Widget>
<Widget>
<Kind>25</Kind>
<Name>SV</Name>
<Value>0.575916052</Value>
</Widget>
| {
"pile_set_name": "Github"
} |
/*-
* * Copyright 2016 Skymind, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*/
package org.datavec.api.writable;
import com.google.common.math.DoubleMath;
import org.datavec.api.io.WritableComparable;
import org.datavec.api.io.WritableComparator;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
/** A WritableComparable for ints. */
public class IntWritable implements WritableComparable {
private int value;
public IntWritable() {}
public IntWritable(@JsonProperty("value") int value) {
set(value);
}
/** Set the value of this IntWritable. */
public void set(int value) {
this.value = value;
}
/** Return the value of this IntWritable. */
public int get() {
return value;
}
public void readFields(DataInput in) throws IOException {
value = in.readInt();
}
@Override
public void writeType(DataOutput out) throws IOException {
out.writeShort(WritableType.Int.typeIdx());
}
public void write(DataOutput out) throws IOException {
out.writeInt(value);
}
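    /**
     * Fuzzy equality against other numeric writables within the given tolerance.
     * Illustrative example (assumed values):
     * new IntWritable(3).fuzzyEquals(new DoubleWritable(3.0000001), 1e-5) returns true,
     * since the absolute difference is below the tolerance.
     */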
public boolean fuzzyEquals(Writable o, double tolerance) {
double other;
if (o instanceof IntWritable){
other = ((IntWritable) o).toDouble();
} else if (o instanceof LongWritable) {
other = ((LongWritable) o).toDouble();
} else if (o instanceof ByteWritable) {
other = ((ByteWritable) o).toDouble();
} else if (o instanceof DoubleWritable) {
other = ((DoubleWritable) o).toDouble();
} else if (o instanceof FloatWritable) {
other = ((FloatWritable) o).toDouble();
} else { return false; }
return DoubleMath.fuzzyEquals(this.value, other, tolerance);
}
    /** Returns true iff <code>o</code> is an IntWritable with the same value. */
public boolean equals(Object o) {
if (o instanceof ByteWritable){
ByteWritable other = (ByteWritable) o;
return this.value == other.get();
}
if (o instanceof IntWritable) {
IntWritable other = (IntWritable) o;
return this.value == other.get();
}
if (o instanceof LongWritable) {
LongWritable other = (LongWritable) o;
return this.value == other.get();
} else {
return false;
}
}
public int hashCode() {
return value;
}
/** Compares two IntWritables. */
public int compareTo(Object o) {
int thisValue = this.value;
int thatValue = ((IntWritable) o).value;
return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
}
public String toString() {
return Integer.toString(value);
}
/** A Comparator optimized for IntWritable. */
public static class Comparator extends WritableComparator {
public Comparator() {
super(IntWritable.class);
}
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
int thisValue = readInt(b1, s1);
int thatValue = readInt(b2, s2);
return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
}
}
static { // register this comparator
WritableComparator.define(IntWritable.class, new Comparator());
}
@Override
public double toDouble() {
return value;
}
@Override
public float toFloat() {
return value;
}
@Override
public int toInt() {
return value;
}
@Override
public long toLong() {
return value;
}
@Override
public WritableType getType() {
return WritableType.Int;
}
}
| {
"pile_set_name": "Github"
} |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
import com.adobe.test.Assert;
/**
File Name: nested.as
Description: tests nested Vectors: Vector.<Vector.<int>>
*
*/
// var SECTION="";
// var VERSION = "ECMA_1";
var v1:Vector.<Vector.<int>> = new <Vector.<int>> [
new <int>[0,1],
new <int>[2,3],
new <int>[4,5]];
Assert.expectEq("push nested vector.<int>",
"0,1,2,3,4,5",
v1.toString());
var v3:Vector.<Vector.<String>>=new <Vector.<String>>
[
new <String>['one','two'],
new <String>['three','four'],
new <String>['five','six'],
]
Assert.expectEq("push nested vector.<String>",
"one,two,three,four,five,six",
v3.toString());
Assert.expectEq("push nested vector.<String> as value",
"one,two,three,four,five,six",
new <Vector.<String>>
[
new <String>['one','two'],
new <String>['three','four'],
new <String>['five','six'],
]
.toString());
var v5:Vector.<Vector.<Vector.<int>>> =
new <Vector.<Vector.<int>>>[
new <Vector.<int>>[ new <int>[0,1]],
new <Vector.<int>>[ new <int>[2,3]],
new <Vector.<int>>[ new <int>[4,5]],
];
Assert.expectEq("push nested vector.<vector.<int>>",
"0,1,2,3,4,5",
v5.toString());
Assert.expectEq("nested vector.<vector.<int>> as value",
"0,1,2,3,4,5",
new <Vector.<Vector.<int>>>[
new <Vector.<int>>[ new <int>[0,1]],
new <Vector.<int>>[ new <int>[2,3]],
new <Vector.<int>>[ new <int>[4,5]],
].toString());
| {
"pile_set_name": "Github"
} |
#!/bin/sh
make -f makefile.std
sh png2pnm.sh
sh pnm2png.sh
| {
"pile_set_name": "Github"
} |
/*
* PNM image format
* Copyright (c) 2002, 2003 Fabrice Bellard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/imgutils.h"
#include "avcodec.h"
#include "pnm.h"
static inline int pnm_space(int c)
{
return c == ' ' || c == '\n' || c == '\r' || c == '\t';
}
static void pnm_get(PNMContext *sc, char *str, int buf_size)
{
char *s;
int c;
/* skip spaces and comments */
for (;;) {
c = *sc->bytestream++;
if (c == '#') {
do {
c = *sc->bytestream++;
} while (c != '\n' && sc->bytestream < sc->bytestream_end);
} else if (!pnm_space(c)) {
break;
}
}
s = str;
while (sc->bytestream < sc->bytestream_end && !pnm_space(c)) {
if ((s - str) < buf_size - 1)
*s++ = c;
c = *sc->bytestream++;
}
*s = '\0';
}
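/* Illustrative header accepted by the parser below (example values):
 * "P6\n# a comment\n640 480\n255\n" yields type 6, a 640x480 frame and
 * maxval 255, which maps to PIX_FMT_RGB24. */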
int ff_pnm_decode_header(AVCodecContext *avctx, PNMContext * const s)
{
char buf1[32], tuple_type[32];
int h, w, depth, maxval;
pnm_get(s, buf1, sizeof(buf1));
s->type= buf1[1]-'0';
if(buf1[0] != 'P')
return -1;
if (s->type==1 || s->type==4) {
avctx->pix_fmt = PIX_FMT_MONOWHITE;
} else if (s->type==2 || s->type==5) {
if (avctx->codec_id == CODEC_ID_PGMYUV)
avctx->pix_fmt = PIX_FMT_YUV420P;
else
avctx->pix_fmt = PIX_FMT_GRAY8;
} else if (s->type==3 || s->type==6) {
avctx->pix_fmt = PIX_FMT_RGB24;
} else if (s->type==7) {
w = -1;
h = -1;
maxval = -1;
depth = -1;
tuple_type[0] = '\0';
for (;;) {
pnm_get(s, buf1, sizeof(buf1));
if (!strcmp(buf1, "WIDTH")) {
pnm_get(s, buf1, sizeof(buf1));
w = strtol(buf1, NULL, 10);
} else if (!strcmp(buf1, "HEIGHT")) {
pnm_get(s, buf1, sizeof(buf1));
h = strtol(buf1, NULL, 10);
} else if (!strcmp(buf1, "DEPTH")) {
pnm_get(s, buf1, sizeof(buf1));
depth = strtol(buf1, NULL, 10);
} else if (!strcmp(buf1, "MAXVAL")) {
pnm_get(s, buf1, sizeof(buf1));
maxval = strtol(buf1, NULL, 10);
} else if (!strcmp(buf1, "TUPLTYPE") ||
/* libavcodec used to write invalid files */
!strcmp(buf1, "TUPLETYPE")) {
pnm_get(s, tuple_type, sizeof(tuple_type));
} else if (!strcmp(buf1, "ENDHDR")) {
break;
} else {
return -1;
}
}
/* check that all tags are present */
if (w <= 0 || h <= 0 || maxval <= 0 || depth <= 0 || tuple_type[0] == '\0' || av_image_check_size(w, h, 0, avctx))
return -1;
avctx->width = w;
avctx->height = h;
s->maxval = maxval;
if (depth == 1) {
if (maxval == 1) {
avctx->pix_fmt = PIX_FMT_MONOBLACK;
} else if (maxval == 255) {
avctx->pix_fmt = PIX_FMT_GRAY8;
} else {
avctx->pix_fmt = PIX_FMT_GRAY16BE;
}
} else if (depth == 2) {
if (maxval == 255)
avctx->pix_fmt = PIX_FMT_GRAY8A;
} else if (depth == 3) {
if (maxval < 256) {
avctx->pix_fmt = PIX_FMT_RGB24;
} else {
avctx->pix_fmt = PIX_FMT_RGB48BE;
}
} else if (depth == 4) {
if (maxval < 256) {
avctx->pix_fmt = PIX_FMT_RGBA;
} else {
avctx->pix_fmt = PIX_FMT_RGBA64BE;
}
} else {
return -1;
}
return 0;
} else {
return -1;
}
pnm_get(s, buf1, sizeof(buf1));
avctx->width = atoi(buf1);
if (avctx->width <= 0)
return -1;
pnm_get(s, buf1, sizeof(buf1));
avctx->height = atoi(buf1);
if(avctx->height <= 0 || av_image_check_size(avctx->width, avctx->height, 0, avctx))
return -1;
if (avctx->pix_fmt != PIX_FMT_MONOWHITE && avctx->pix_fmt != PIX_FMT_MONOBLACK) {
pnm_get(s, buf1, sizeof(buf1));
s->maxval = atoi(buf1);
if (s->maxval <= 0) {
av_log(avctx, AV_LOG_ERROR, "Invalid maxval: %d\n", s->maxval);
s->maxval = 255;
}
if (s->maxval >= 256) {
if (avctx->pix_fmt == PIX_FMT_GRAY8) {
avctx->pix_fmt = PIX_FMT_GRAY16BE;
if (s->maxval != 65535)
avctx->pix_fmt = PIX_FMT_GRAY16;
} else if (avctx->pix_fmt == PIX_FMT_RGB24) {
if (s->maxval > 255)
avctx->pix_fmt = PIX_FMT_RGB48BE;
} else {
av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format\n");
avctx->pix_fmt = PIX_FMT_NONE;
return -1;
}
}
}else
s->maxval=1;
/* more check if YUV420 */
if (avctx->pix_fmt == PIX_FMT_YUV420P) {
if ((avctx->width & 1) != 0)
return -1;
h = (avctx->height * 2);
if ((h % 3) != 0)
return -1;
h /= 3;
avctx->height = h;
}
return 0;
}
av_cold int ff_pnm_end(AVCodecContext *avctx)
{
PNMContext *s = avctx->priv_data;
if (s->picture.data[0])
avctx->release_buffer(avctx, &s->picture);
return 0;
}
av_cold int ff_pnm_init(AVCodecContext *avctx)
{
PNMContext *s = avctx->priv_data;
avcodec_get_frame_defaults(&s->picture);
avctx->coded_frame = &s->picture;
return 0;
}
| {
"pile_set_name": "Github"
} |
/*=========================================================================
Program: GDCM (Grassroots DICOM). A DICOM library
Copyright (c) 2006-2011 Mathieu Malaterre
All rights reserved.
See Copyright.txt or http://gdcm.sourceforge.net/Copyright.html for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================*/
#if 0
#ifndef GDCMMACROENTRY_H
#define GDCMMACROENTRY_H
#include "gdcmTypes.h"
#include "gdcmType.h"
#include <string>
namespace gdcm
{
/**
* \brief Class for representing a ModuleEntry
* \note bla
* \sa DictEntry
*/
class GDCM_EXPORT MacroEntry
{
public:
MacroEntry(const char *name = "", const char *type = "3", const char *description = ""):Name(name)/*,Type(type)*/,DescriptionField(description) {
DataElementType = Type::GetTypeType(type);
}
virtual ~MacroEntry() {} // important
friend std::ostream& operator<<(std::ostream& _os, const MacroEntry &_val);
void SetName(const char *name) { Name = name; }
const char *GetName() const { return Name.c_str(); }
void SetType(const Type &type) { DataElementType = type; }
const Type &GetType() const { return DataElementType; }
/*
* WARNING: 'Description' is currently a std::string, but it might change in the future
* do not expect it to remain the same, and always use the ModuleEntry::Description typedef
* instead.
*/
typedef std::string Description;
void SetDescription(const char *d) { DescriptionField = d; }
const Description & GetDescription() const { return DescriptionField; }
protected:
// PS 3.3 repeats the name of an attribute, but often contains typos
// for now we will not use this info, but instead access the DataDict instead
std::string Name;
// An attribute, encoded as a Data Element, may or may not be required in a
// Data Set, depending on that Attribute's Data Element Type.
Type DataElementType;
// TODO: for now contains the raw description (with enumerated values, defined terms...)
Description DescriptionField;
};
//-----------------------------------------------------------------------------
inline std::ostream& operator<<(std::ostream& _os, const MacroEntry &_val)
{
_os << _val.Name << "\t" << _val.DataElementType << "\t" << _val.DescriptionField;
return _os;
}
} // end namespace gdcm
#endif //GDCMMODULEENTRY_H
#endif
#ifndef GDCMMACROENTRY_H
#define GDCMMACROENTRY_H
#include "gdcmModuleEntry.h"
#endif
| {
"pile_set_name": "Github"
} |
include_directories(${ZQCNN_INCLUDE_DIRS})
file(GLOB ZQ_GEMM_SRC ${CMAKE_CURRENT_LIST_DIR}/math/*.c)
add_library(ZQ_GEMM ${ZQ_GEMM_SRC}) | {
"pile_set_name": "Github"
} |
deleteStyle=Haluatko varmasti poistaa tyylin \'%S\'?
deleteStyleTitle=Poistetaanko tyyli?
deleteStyleOK=Poista
extensions.{46551EC9-40F0-4e47-8E18-8E5CF550CFB8}.description=Stailaa netti Stylishilla, tyylien muokkausohjelmalla.
| {
"pile_set_name": "Github"
} |
---
title: 'New Apache Oozie Workflow, Coordinator & Bundle Editors'
author: admin
type: post
date: 2015-04-02T16:39:22+00:00
url: /new-apache-oozie-workflow-coordinator-bundle-editors/
sf_thumbnail_type:
- none
sf_thumbnail_link_type:
- link_to_post
sf_detail_type:
- none
sf_page_title:
- 1
sf_page_title_style:
- standard
sf_no_breadcrumbs:
- 1
sf_page_title_bg:
- none
sf_page_title_text_style:
- light
sf_background_image_size:
- cover
sf_social_sharing:
- 1
sf_sidebar_config:
- left-sidebar
sf_left_sidebar:
- Sidebar-2
sf_right_sidebar:
- Sidebar-1
sf_caption_position:
- caption-right
sf_remove_promo_bar:
- 1
categories:
- Scheduling
---
Oozie was one of the [initial major][1] apps in Hue. We are continuously investing in making it better and have just made a major jump in its editor (to learn about the improvements in the Dashboard, see <a href="https://gethue.com/oozie-dashboard-improvements/" target="_blank" rel="noopener noreferrer">the other post</a>).
This revamp of the Oozie Editor brings a new look and requires much less knowledge of [Oozie][2]! Workflows now support tens of [new functionalities][3] and require just a few clicks to be set up!
{{< youtube ON15jrXpqeI >}}
The files used in the videos come with the [Oozie Examples][4].
In the new interface, only the most important properties of an action need to be filled in, and quick links are offered for verifying paths and other jobs. Hive and Pig script files are parsed in order to extract their parameters and propose them directly with autocomplete. The advanced functionality of an action is available in a new kind of popup that creates much less friction, as it simply overlaps the current node.
<figure><a href="https://cdn.gethue.com/uploads/2015/03/new-oozie-1024x557.png"><img src="https://cdn.gethue.com/uploads/2015/03/new-oozie-1024x557.png" /></a><figcaption>New Editor</figcaption></figure>
<figure><a href="https://cdn.gethue.com/uploads/2015/03/oozie-v2-editor-1024x602.png"><img src="https://cdn.gethue.com/uploads/2015/03/oozie-v2-editor-1024x602.png" /></a><figcaption>New Editor (edit mode)</figcaption></figure>
<figure><a href="https://cdn.gethue.com/uploads/2015/03/old-oozie-1024x561.png"><img src="https://cdn.gethue.com/uploads/2015/03/old-oozie-1024x561.png" /></a><figcaption>Old Editor</figcaption></figure>
Two new actions have been added:
- HiveServer2
- Spark
[<img src="https://cdn.gethue.com/uploads/2015/03/new-spark-hs2-actions.png" />][8]
And the user experience of Pig and Sub-workflows is simplified.
Decision node support has been improved, and copying an existing action is now just a matter of drag & drop. More layouts are now possible, as the 'ok' and 'end' nodes can be changed individually.
[<img src="https://cdn.gethue.com/uploads/2015/03/oozie-avanced-action-options.png" />][9]
Coordinators have been vastly improved! The notion of Oozie datasets is not needed anymore. The editor pulls the parameters of your workflow and offers 3 types of inputs:
- **parameters**: constant or Oozie EL function like time
- **input path**: parameterize an input path dependency and wait for it to exist, e.g.
- **output path**: like an input path but does not need to exist for starting the job
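For instance (hypothetical values), an input path could be parameterized as `/data/logs/${YEAR}/${MONTH}/${DAY}`: the coordinator materializes the time variables at each run and waits for that directory to exist before starting the workflow.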
[<img src="https://cdn.gethue.com/uploads/2015/03/oozie-new-coordinator-1024x376.png" />][10]
The dreaded UTC time zone format is now provided directly, either by the calendar or with some helper widgets.
[<img src="https://cdn.gethue.com/uploads/2015/03/oozie-new-submit-popup.png" />][11]
**Sum-up**
In addition to providing a friendlier end-user experience, this new architecture opens the way for innovations.
First, it makes it easy to add new Oozie actions in the editor. But most importantly, workflows are persisted using the new Hue document model, meaning their [import/export][12] is simplified and will soon be available directly from the UI. This model also enables the future generation of workflows by simply dragging & dropping saved Hive, Pig, and Spark jobs directly into the workflow. No need to manually duplicate your queries on HDFS!
This also opens the door to one-click scheduling of any job saved in Hue, as coordinators are now much simpler to use. While we continue to polish the new editor, the [Dashboard section][13] of the app will see a major revamp next!
As usual feel free to comment on the [hue-user][14] list or [@gethue][15]!
**Note**
Old workflows are not automatically converted to the new format. Hue will try to import them for you, and will open them in the old editor in case of problems.
[<img src="https://cdn.gethue.com/uploads/2015/03/oozie-import-try-1024x566.png" />][16]
A new [export and import][17] is planned for Hue 4. It will let you export workflows in both XML / JSON Hue format and import from Hue’s format.
[1]: https://gethue.com/category/oozie/
[2]: http://oozie.apache.org/
[3]: https://issues.cloudera.org/browse/HUE-2180
[4]: https://github.com/cloudera/hue/tree/master/apps/oozie/examples/workflows
[5]: https://cdn.gethue.com/uploads/2015/03/new-oozie.png
[6]: https://cdn.gethue.com/uploads/2015/03/oozie-v2-editor.png
[7]: https://cdn.gethue.com/uploads/2015/03/old-oozie.png
[8]: https://cdn.gethue.com/uploads/2015/03/new-spark-hs2-actions.png
[9]: https://cdn.gethue.com/uploads/2015/03/oozie-avanced-action-options.png
[10]: https://cdn.gethue.com/uploads/2015/03/oozie-new-coordinator.png
[11]: https://cdn.gethue.com/uploads/2015/03/oozie-new-submit-popup.png
[12]: https://gethue.com/export-and-import-your-oozie-workflows/
[13]: https://issues.cloudera.org/browse/HUE-2644
[14]: http://groups.google.com/a/cloudera.org/group/hue-user
[15]: https://twitter.com/gethue
[16]: https://cdn.gethue.com/uploads/2015/03/oozie-import-try.png
[17]: https://issues.cloudera.org/browse/HUE-1660
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<keywordspec name="Example"
type="LIBRARY"
scope="GLOBAL"
format="ROBOT"
namedargs="false"
source="Example.py"
lineno="8"
generated="2020-04-01T13:28:56Z"
specversion="2">
<version>42</version>
<scope>THIS IS DEPRECATED AND NOT USED IF THERE IS "source" ATTRIBUTE.</scope>
<namedargs>SAME AS THE ABOVE!</namedargs>
<doc>Library for `libdoc.py` testing purposes.
This library is only used in an example and it doesn't do anything useful.</doc>
<init lineno="12">
<arguments>
</arguments>
<doc>Creates new Example test library 1</doc>
</init>
<init>
<arguments>
<arg>arg</arg>
</arguments>
<doc>Creates new Example test library 2</doc>
</init>
<init>
<arguments>
<arg>i</arg>
</arguments>
<doc>Creates new Example test library 3</doc>
</init>
<kw name="Keyword">
<arguments>
<arg>arg</arg>
</arguments>
<doc>Takes one `arg` and *does nothing* with it.
Example:
| Your Keyword | xxx |
| Your Keyword | yyy |
See `My Keyword` for no more information.</doc>
<tags>
<tag>tag1</tag>
<tag>tag2</tag>
</tags>
</kw>
<kw name="My Keyword" lineno="42" source="Example.py">
<arguments>
</arguments>
<doc>Does nothing &amp; &lt;doc&gt; has "stuff" to 'escape'!! and ignored indentation
Tags: in spec these wont become tags</doc>
<tags/>
</kw>
<kw name="Non Ascii Doc" lineno="666" source="Different!">
<arguments>
</arguments>
<doc>Hyvää yötä.
Спасибо!</doc>
</kw>
</keywordspec>
| {
"pile_set_name": "Github"
} |
//
// main.m
// ZMJSchedule
//
// Created by Jason on 2018/2/6.
// Copyright © 2018年 keshiim. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "AppDelegate.h"
int main(int argc, char * argv[]) {
@autoreleasepool {
return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
}
}
| {
"pile_set_name": "Github"
} |
documentTitle: openFDA › Drugs
label: API categories
title: Drugs
description: "The U.S. Food and Drug Administration (FDA) regulates over-the-counter and prescription drugs in the United States, including biological therapeutics and generic drugs. This work covers more than just medicines. For example, fluoride toothpaste, antiperspirants, dandruff shampoos, and sunscreens are all considered drugs."
path: /apis/drug
type: noun
| {
"pile_set_name": "Github"
} |
#ifndef POSIX_TIME_PRE133_OPERATORS_HPP___
#define POSIX_TIME_PRE133_OPERATORS_HPP___
/* Copyright (c) 2002-2004 CrystalClear Software, Inc.
* Use, modification and distribution is subject to the
* Boost Software License, Version 1.0. (See accompanying
* file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
* Author: Jeff Garland, Bart Garst
* $Date$
*/
/*! @file posix_time_pre133_operators.hpp
* These input and output operators are for use with the
* pre 1.33 version of the date_time libraries io facet code.
* The operators used in version 1.33 and later can be found
* in posix_time_io.hpp */
#include <iostream>
#include <string>
#include <sstream>
#include "boost/date_time/compiler_config.hpp"
#include "boost/date_time/gregorian/gregorian.hpp"
#include "boost/date_time/posix_time/posix_time_duration.hpp"
#include "boost/date_time/posix_time/ptime.hpp"
#include "boost/date_time/posix_time/time_period.hpp"
#include "boost/date_time/time_parsing.hpp"
namespace boost {
namespace posix_time {
//The following code is removed for configurations with poor std::locale support (eg: MSVC6, gcc 2.9x)
#ifndef BOOST_DATE_TIME_NO_LOCALE
#if defined(USE_DATE_TIME_PRE_1_33_FACET_IO)
//! ostream operator for posix_time::time_duration
template <class charT, class traits>
inline
std::basic_ostream<charT, traits>&
operator<<(std::basic_ostream<charT, traits>& os, const time_duration& td)
{
typedef boost::date_time::ostream_time_duration_formatter<time_duration, charT> duration_formatter;
duration_formatter::duration_put(td, os);
return os;
}
//! ostream operator for posix_time::ptime
template <class charT, class traits>
inline
std::basic_ostream<charT, traits>&
operator<<(std::basic_ostream<charT, traits>& os, const ptime& t)
{
typedef boost::date_time::ostream_time_formatter<ptime, charT> time_formatter;
time_formatter::time_put(t, os);
return os;
}
//! ostream operator for posix_time::time_period
template <class charT, class traits>
inline
std::basic_ostream<charT, traits>&
operator<<(std::basic_ostream<charT, traits>& os, const time_period& tp)
{
typedef boost::date_time::ostream_time_period_formatter<time_period, charT> period_formatter;
period_formatter::period_put(tp, os);
return os;
}
#endif // USE_DATE_TIME_PRE_1_33_FACET_IO
/******** input streaming ********/
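  // The extractor below reads a single whitespace-delimited duration such as
  // "10:22:15" or "1:02:03.000005" (illustrative examples) and parses it via
  // parse_delimited_time_duration.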
template<class charT>
inline
std::basic_istream<charT>& operator>>(std::basic_istream<charT>& is, time_duration& td)
{
// need to create a std::string and parse it
std::basic_string<charT> inp_s;
std::stringstream out_ss;
is >> inp_s;
typename std::basic_string<charT>::iterator b = inp_s.begin();
// need to use both iterators because there is no requirement
// for the data held by a std::basic_string<> be terminated with
// any marker (such as '\0').
typename std::basic_string<charT>::iterator e = inp_s.end();
while(b != e){
out_ss << is.narrow(*b, 0);
++b;
}
td = date_time::parse_delimited_time_duration<time_duration>(out_ss.str());
return is;
}
template<class charT>
inline
std::basic_istream<charT>& operator>>(std::basic_istream<charT>& is, ptime& pt)
{
gregorian::date d(not_a_date_time);
time_duration td(0,0,0);
is >> d >> td;
pt = ptime(d, td);
return is;
}
/** operator>> for time_period. time_period must be in
* "[date time_duration/date time_duration]" format. */
template<class charT>
inline
std::basic_istream<charT>& operator>>(std::basic_istream<charT>& is, time_period& tp)
{
gregorian::date d(not_a_date_time);
time_duration td(0,0,0);
ptime beg(d, td);
ptime end(beg);
std::basic_string<charT> s;
// get first date string and remove leading '['
is >> s;
{
std::basic_stringstream<charT> ss;
ss << s.substr(s.find('[')+1);
ss >> d;
}
// get first time_duration & second date string, remove the '/'
// and split into 2 strings
is >> s;
{
std::basic_stringstream<charT> ss;
ss << s.substr(0, s.find('/'));
ss >> td;
}
beg = ptime(d, td);
{
std::basic_stringstream<charT> ss;
ss << s.substr(s.find('/')+1);
ss >> d;
}
// get last time_duration and remove the trailing ']'
is >> s;
{
std::basic_stringstream<charT> ss;
ss << s.substr(0, s.find(']'));
ss >> td;
}
end = ptime(d, td);
tp = time_period(beg,end);
return is;
}
#endif //BOOST_DATE_TIME_NO_LOCALE
} } // namespaces
#endif // POSIX_TIME_PRE133_OPERATORS_HPP___
| {
"pile_set_name": "Github"
} |
<?php
if ( ! defined( 'ABSPATH' ) ) {
exit;
}
class AMPFORWP_Blurb_Widget extends WP_Widget {
/*--------------------------------------------------*/
/* Constructor
/*--------------------------------------------------*/
/**
* Specifies the classname and description, instantiates the widget,
* loads localization files, and includes necessary stylesheets and JavaScript.
*/
public function __construct() {
// Hooks fired when the Widget is activated and deactivated
register_activation_hook( __FILE__, array( $this, 'activate' ) );
register_deactivation_hook( __FILE__, array( $this, 'deactivate' ) );
parent::__construct(
'ampforwp-blurb',
esc_html__( 'AMP Blurb Module', 'accelerated-mobile-pages' ),
array( //
'classname' => 'ampforwp-blurb',
'description' => esc_html__( 'Displays Icon, headline and description. Best for showing features.', 'accelerated-mobile-pages' )
)
);
add_action( 'admin_enqueue_scripts', array( $this, 'register_admin_scripts' ) );
} // end constructor
/*--------------------------------------------------*/
/* Widget API Functions
/*--------------------------------------------------*/
/**
* Outputs the content of the widget.
*
* @param array args The array of form elements
* @param array instance The current instance of the widget
*/
public function widget( $args, $instance ) {
$output = "";
extract( $args, EXTR_SKIP );
$title = ( ! empty( $instance['title'] ) ) ? $instance['title'] : '';
$title = apply_filters( 'widget_title', $title, $instance, $this->id_base );
$features = ( ! empty( $instance['features'] ) ) ? $instance['features'] : array();
echo $before_widget;
$output .= '<div class="amp-wp-content amp_cb_module amp_cb_blurb">';
if ( $title ) {
$output .= '<div class="amp_module_title"><span>';
$output .= esc_html( $title );
$output .= '</span></div>';
}
$output .= '<div class="flex-grid">';
foreach( $features as $feature ) {
$output .= '<div class="clmn">';
if ( $feature['image'] ) {
$output .= '<img src="'. esc_url($feature['image']) .'" height="80" width="80" alt="" />';
}
$output .= '<div class="amp_cb_content">';
$output .= '<h4>'.esc_html( $feature['title']).'</h4>';
$output .= '<p>' .esc_html( $feature['description']).'</p>';
$output .= '</div>';
$output .= '</div>';
}
$output .= '</div></div>'; // flex-grid & amp_cb_module
$sanitizer = new AMPFORWP_Content( $output, array(),
apply_filters( 'ampforwp_content_sanitizers',array( 'AMP_Img_Sanitizer' => array(),'AMP_Style_Sanitizer' => array() ) ) );
$sanitized_output = $sanitizer->get_amp_content();
if( $sanitized_output ) {
echo $sanitized_output; // amphtml content, no kses
}
echo $after_widget;
} // end widget
/**
* Processes the widget's options to be saved.
*
* @param array new_instance The new instance of values to be generated via the update.
* @param array old_instance The previous instance of values before the update.
*/
public function update( $new_instance, $old_instance ) {
$instance = $old_instance;
$instance['title'] = strip_tags($new_instance['title']);
		$features = array();
		foreach ( (array) $new_instance['features'] as $feature ) {
			// Store the sanitized copy of each feature.
			$feature['title'] = strip_tags($feature['title']);
			$feature['description'] = strip_tags($feature['description']);
			$feature['image'] = strip_tags($feature['image']);
			$features[] = $feature;
		}
		$instance['features'] = $features;
return $instance;
} // end widget
/**
* Generates the administration form for the widget.
*
* @param array instance The array of keys and values for the widget.
*/
public function form( $instance ) {
$instance = wp_parse_args(
(array) $instance
);
$title = isset( $instance['title'] ) ? esc_attr( $instance['title'] ) : ''; ?>
<p><label for="<?php echo esc_attr($this->get_field_id( 'title' )); ?>"><?php esc_attr_e( 'Title:' ); ?></label>
<input class="widefat" id="<?php echo esc_attr($this->get_field_id( 'title' )); ?>" name="<?php echo esc_attr($this->get_field_name( 'title' )); ?>" type="text" value="<?php echo esc_attr($title); ?>" /></p>
<?php
$features = ( ! empty( $instance['features'] ) ) ? $instance['features'] : array(); ?>
<span class="ampforwp-blurb-additional">
<?php
$c = 0;
if ( count( $features ) > 0 ) {
foreach( $features as $feature ) {
if ( isset( $feature['title'] ) || isset( $feature['description'] ) ) { ?>
<div class="widget">
<div class="widget-top"><div class="widget-title"><h3><?php echo esc_attr($feature['title']);?><span class="in-widget-title"></span></h3></div>
</div>
<div class="widget-inside">
<p>
<label for="<?php echo esc_attr($this->get_field_name( 'features' )) . '['.$c.'][title]'; ?>"><?php esc_attr_e( 'Title:' ); ?></label>
<input class="widefat" id="<?php echo esc_attr($this->get_field_id( 'features' )) .'-'. $c.'-title'; ?>" name="<?php echo esc_attr($this->get_field_name( 'features' )) . '['.$c.'][title]'; ?>" type="text" value="<?php echo esc_attr($feature['title']); ?>" />
<label for="<?php echo esc_attr($this->get_field_name( 'features' )) . '['.$c.'][description]'; ?>"><?php esc_attr_e( 'Description:' ); ?></label>
<textarea class="widefat" id="<?php echo esc_attr($this->get_field_id( 'features' )) .'-'. $c.'-description'; ?>" name="<?php echo esc_attr($this->get_field_name( 'features' )) . '['.$c.'][description]'; ?>" rows="6" cols="50"><?php echo esc_attr($feature['description']); ?></textarea> <span class="clear"></span>
</p>
<p>
<label for="<?php echo esc_attr($this->get_field_name( 'features' )) . '['.$c.'][image]'; ?>"><?php esc_attr_e( 'Image:' ); ?></label>
<input type="button" class="select-img-<?php echo esc_attr($c);?> button left" style="width:auto;" value="Select Image" onclick="ampSelectImage('<?php echo esc_attr($c);?>');"/>
<input type="button" style="display:none" name="removeimg" id="remove-img-<?php echo esc_attr($c);?>" class="button button-secondary remove-img-button" data-count-type="<?php echo esc_attr($c);?>" value="Remove Image" onclick="removeImage('<?php echo esc_attr($c);?>')">
<img src="<?php echo esc_url($instance['features']["$c"]['image']) ?>" class="preview-image block-image-<?php echo esc_attr($c);?>" >
<input type="hidden" id="amp-img-field-<?php echo esc_attr($c);?>" class="img<?php echo esc_attr($c);?>" style="width:auto;" name="<?php echo esc_attr($this->get_field_name( 'features' )) . '['.$c.'][image]'; ?>" id="<?php echo esc_attr($this->get_field_name( 'features' )) . '['.$c.'][image]';?>'" value="<?php echo esc_attr($instance['features']["$c"]['image']) ?>" />
</p>
<p> <a class="ampforwp-blurb-remove delete button left"><?php esc_attr_e('Remove Feature','accelerated-mobile-pages')?></a> </p>
</div>
</div>
<?php
$c = $c +1;
}
}
} ?>
</span>
<a class="ampforwp-blurb-add button left"> <?php esc_attr_e('Add Feature','accelerated-mobile-pages'); ?> </a>
<?php
} // end form
/*--------------------------------------------------*/
/* Public Functions
/*--------------------------------------------------*/
/**
* Registers and enqueues admin-specific JavaScript.
*/
public function register_admin_scripts() {
wp_enqueue_script('media-upload');
wp_enqueue_script('thickbox');
wp_enqueue_style('thickbox');
$builder_data['amp_icon_check'] = AMPFORWP_IMAGE_DIR . '/amp-icon-check.png';
wp_localize_script( 'ampforwp-builder-script', 'builder_script_data', $builder_data );
wp_enqueue_script( 'ampforwp-builder-script', plugins_url('/modules/js/amp.js' , dirname(__FILE__) ) , array( 'jquery' ), false, true );
} // end register_admin_scripts
} // end class
add_action( 'widgets_init', 'ampforwp_register_blurb_widget' );
function ampforwp_register_blurb_widget(){
register_widget( 'AMPFORWP_Blurb_Widget' );
} | {
"pile_set_name": "Github"
} |
#ifndef helloscrollbarengine_h
#define helloscrollbarengine_h
/*************************************************************************
* Copyright (C) 2014 by Hugo Pereira Da Costa <[email protected]> *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA . *
*************************************************************************/
#include "helloscrollbardata.h"
#include "hellowidgetstateengine.h"
namespace hello
{
//* stores scrollbar hovered action and timeLine
class ScrollBarEngine: public WidgetStateEngine
{
Q_OBJECT
public:
//* constructor
explicit ScrollBarEngine( QObject* parent ):
WidgetStateEngine( parent )
{}
//* destructor
virtual ~ScrollBarEngine()
{}
//* register scrollbar
virtual bool registerWidget( QWidget*, AnimationModes );
//*@name accessors
//@{
using WidgetStateEngine::isAnimated;
using WidgetStateEngine::opacity;
//* true if widget is animated
virtual bool isAnimated( const QObject*, AnimationMode, QStyle::SubControl control );
//* true if widget is animated
virtual AnimationMode animationMode( const QObject* object, QStyle::SubControl control );
//* animation opacity
virtual qreal opacity( const QObject* object, QStyle::SubControl control );
//* return true if given subcontrol is hovered
virtual bool isHovered( const QObject* object, QStyle::SubControl control )
{
if( DataMap<WidgetStateData>::Value data = this->data( object, AnimationHover ) )
{
return static_cast<const ScrollBarData*>( data.data() )->isHovered( control );
} else return false;
}
//* control rect associated to object
virtual QRect subControlRect( const QObject* object, QStyle::SubControl control )
{
if( DataMap<WidgetStateData>::Value data = this->data( object, AnimationHover ) )
{
return static_cast<const ScrollBarData*>( data.data() )->subControlRect( control );
} else return QRect();
}
//* mouse position
virtual QPoint position( const QObject* object )
{
if( DataMap<WidgetStateData>::Value data = this->data( object, AnimationHover ) )
{
return static_cast<const ScrollBarData*>( data.data() )->position();
} else return QPoint( -1, -1 );
}
//@}
//*@name modifiers
//@{
//* control rect
virtual void setSubControlRect( const QObject* object, QStyle::SubControl control, const QRect& rect )
{
if( DataMap<WidgetStateData>::Value data = this->data( object, AnimationHover ) )
{ static_cast<ScrollBarData*>( data.data() )->setSubControlRect( control, rect ); }
}
//@}
};
}
#endif
| {
"pile_set_name": "Github"
} |
//===-- MCJIT.cpp - MC-based Just-in-Time Compiler ------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "MCJIT.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ExecutionEngine/GenericValue.h"
#include "llvm/ExecutionEngine/JITEventListener.h"
#include "llvm/ExecutionEngine/MCJIT.h"
#include "llvm/ExecutionEngine/SectionMemoryManager.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/LegacyPassManager.h"
#include "llvm/IR/Mangler.h"
#include "llvm/IR/Module.h"
#include "llvm/Object/Archive.h"
#include "llvm/Object/ObjectFile.h"
#include "llvm/Support/DynamicLibrary.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MemoryBuffer.h"
#include <mutex>
using namespace llvm;
namespace {
static struct RegisterJIT {
RegisterJIT() { MCJIT::Register(); }
} JITRegistrator;
}
extern "C" void LLVMLinkInMCJIT() {
}
ExecutionEngine *
MCJIT::createJIT(std::unique_ptr<Module> M, std::string *ErrorStr,
std::shared_ptr<MCJITMemoryManager> MemMgr,
std::shared_ptr<LegacyJITSymbolResolver> Resolver,
std::unique_ptr<TargetMachine> TM) {
// Try to register the program as a source of symbols to resolve against.
//
// FIXME: Don't do this here.
sys::DynamicLibrary::LoadLibraryPermanently(nullptr, nullptr);
if (!MemMgr || !Resolver) {
auto RTDyldMM = std::make_shared<SectionMemoryManager>();
if (!MemMgr)
MemMgr = RTDyldMM;
if (!Resolver)
Resolver = RTDyldMM;
}
return new MCJIT(std::move(M), std::move(TM), std::move(MemMgr),
std::move(Resolver));
}
MCJIT::MCJIT(std::unique_ptr<Module> M, std::unique_ptr<TargetMachine> TM,
std::shared_ptr<MCJITMemoryManager> MemMgr,
std::shared_ptr<LegacyJITSymbolResolver> Resolver)
: ExecutionEngine(TM->createDataLayout(), std::move(M)), TM(std::move(TM)),
Ctx(nullptr), MemMgr(std::move(MemMgr)),
Resolver(*this, std::move(Resolver)), Dyld(*this->MemMgr, this->Resolver),
ObjCache(nullptr) {
// FIXME: We are managing our modules, so we do not want the base class
// ExecutionEngine to manage them as well. To avoid double destruction
// of the first (and only) module added in ExecutionEngine constructor
// we remove it from EE and will destruct it ourselves.
//
// It may make sense to move our module manager (based on SmallStPtr) back
// into EE if the JIT and Interpreter can live with it.
// If so, additional functions: addModule, removeModule, FindFunctionNamed,
// runStaticConstructorsDestructors could be moved back to EE as well.
//
std::unique_ptr<Module> First = std::move(Modules[0]);
Modules.clear();
if (First->getDataLayout().isDefault())
First->setDataLayout(getDataLayout());
OwnedModules.addModule(std::move(First));
RegisterJITEventListener(JITEventListener::createGDBRegistrationListener());
}
MCJIT::~MCJIT() {
std::lock_guard<sys::Mutex> locked(lock);
Dyld.deregisterEHFrames();
for (auto &Obj : LoadedObjects)
if (Obj)
notifyFreeingObject(*Obj);
Archives.clear();
}
void MCJIT::addModule(std::unique_ptr<Module> M) {
std::lock_guard<sys::Mutex> locked(lock);
if (M->getDataLayout().isDefault())
M->setDataLayout(getDataLayout());
OwnedModules.addModule(std::move(M));
}
bool MCJIT::removeModule(Module *M) {
std::lock_guard<sys::Mutex> locked(lock);
return OwnedModules.removeModule(M);
}
void MCJIT::addObjectFile(std::unique_ptr<object::ObjectFile> Obj) {
std::unique_ptr<RuntimeDyld::LoadedObjectInfo> L = Dyld.loadObject(*Obj);
if (Dyld.hasError())
report_fatal_error(Dyld.getErrorString());
notifyObjectLoaded(*Obj, *L);
LoadedObjects.push_back(std::move(Obj));
}
void MCJIT::addObjectFile(object::OwningBinary<object::ObjectFile> Obj) {
std::unique_ptr<object::ObjectFile> ObjFile;
std::unique_ptr<MemoryBuffer> MemBuf;
std::tie(ObjFile, MemBuf) = Obj.takeBinary();
addObjectFile(std::move(ObjFile));
Buffers.push_back(std::move(MemBuf));
}
void MCJIT::addArchive(object::OwningBinary<object::Archive> A) {
Archives.push_back(std::move(A));
}
void MCJIT::setObjectCache(ObjectCache* NewCache) {
std::lock_guard<sys::Mutex> locked(lock);
ObjCache = NewCache;
}
std::unique_ptr<MemoryBuffer> MCJIT::emitObject(Module *M) {
assert(M && "Can not emit a null module");
std::lock_guard<sys::Mutex> locked(lock);
// Materialize all globals in the module if they have not been
// materialized already.
cantFail(M->materializeAll());
// This must be a module which has already been added but not loaded to this
// MCJIT instance, since these conditions are tested by our caller,
// generateCodeForModule.
legacy::PassManager PM;
// The RuntimeDyld will take ownership of this shortly
SmallVector<char, 4096> ObjBufferSV;
raw_svector_ostream ObjStream(ObjBufferSV);
// Turn the machine code intermediate representation into bytes in memory
// that may be executed.
if (TM->addPassesToEmitMC(PM, Ctx, ObjStream, !getVerifyModules()))
report_fatal_error("Target does not support MC emission!");
// Initialize passes.
PM.run(*M);
// Flush the output buffer to get the generated code into memory
std::unique_ptr<MemoryBuffer> CompiledObjBuffer(
new SmallVectorMemoryBuffer(std::move(ObjBufferSV)));
// If we have an object cache, tell it about the new object.
// Note that we're using the compiled image, not the loaded image (as below).
if (ObjCache) {
// MemoryBuffer is a thin wrapper around the actual memory, so it's OK
// to create a temporary object here and delete it after the call.
MemoryBufferRef MB = CompiledObjBuffer->getMemBufferRef();
ObjCache->notifyObjectCompiled(M, MB);
}
return CompiledObjBuffer;
}
void MCJIT::generateCodeForModule(Module *M) {
// Get a thread lock to make sure we aren't trying to load multiple times
std::lock_guard<sys::Mutex> locked(lock);
// This must be a module which has already been added to this MCJIT instance.
assert(OwnedModules.ownsModule(M) &&
"MCJIT::generateCodeForModule: Unknown module.");
// Re-compilation is not supported
if (OwnedModules.hasModuleBeenLoaded(M))
return;
std::unique_ptr<MemoryBuffer> ObjectToLoad;
// Try to load the pre-compiled object from cache if possible
if (ObjCache)
ObjectToLoad = ObjCache->getObject(M);
assert(M->getDataLayout() == getDataLayout() && "DataLayout Mismatch");
// If the cache did not contain a suitable object, compile the object
if (!ObjectToLoad) {
ObjectToLoad = emitObject(M);
assert(ObjectToLoad && "Compilation did not produce an object.");
}
// Load the object into the dynamic linker.
// MCJIT now owns the ObjectImage pointer (via its LoadedObjects list).
Expected<std::unique_ptr<object::ObjectFile>> LoadedObject =
object::ObjectFile::createObjectFile(ObjectToLoad->getMemBufferRef());
if (!LoadedObject) {
std::string Buf;
raw_string_ostream OS(Buf);
logAllUnhandledErrors(LoadedObject.takeError(), OS);
OS.flush();
report_fatal_error(Buf);
}
std::unique_ptr<RuntimeDyld::LoadedObjectInfo> L =
Dyld.loadObject(*LoadedObject.get());
if (Dyld.hasError())
report_fatal_error(Dyld.getErrorString());
notifyObjectLoaded(*LoadedObject.get(), *L);
Buffers.push_back(std::move(ObjectToLoad));
LoadedObjects.push_back(std::move(*LoadedObject));
OwnedModules.markModuleAsLoaded(M);
}
void MCJIT::finalizeLoadedModules() {
std::lock_guard<sys::Mutex> locked(lock);
// Resolve any outstanding relocations.
Dyld.resolveRelocations();
OwnedModules.markAllLoadedModulesAsFinalized();
// Register EH frame data for any module we own which has been loaded
Dyld.registerEHFrames();
// Set page permissions.
MemMgr->finalizeMemory();
}
// FIXME: Rename this.
void MCJIT::finalizeObject() {
std::lock_guard<sys::Mutex> locked(lock);
// Generate code for module is going to move objects out of the 'added' list,
// so we need to copy that out before using it:
SmallVector<Module*, 16> ModsToAdd;
for (auto M : OwnedModules.added())
ModsToAdd.push_back(M);
for (auto M : ModsToAdd)
generateCodeForModule(M);
finalizeLoadedModules();
}
void MCJIT::finalizeModule(Module *M) {
std::lock_guard<sys::Mutex> locked(lock);
// This must be a module which has already been added to this MCJIT instance.
assert(OwnedModules.ownsModule(M) && "MCJIT::finalizeModule: Unknown module.");
// If the module hasn't been compiled, just do that.
if (!OwnedModules.hasModuleBeenLoaded(M))
generateCodeForModule(M);
finalizeLoadedModules();
}
JITSymbol MCJIT::findExistingSymbol(const std::string &Name) {
if (void *Addr = getPointerToGlobalIfAvailable(Name))
return JITSymbol(static_cast<uint64_t>(
reinterpret_cast<uintptr_t>(Addr)),
JITSymbolFlags::Exported);
return Dyld.getSymbol(Name);
}
Module *MCJIT::findModuleForSymbol(const std::string &Name,
bool CheckFunctionsOnly) {
StringRef DemangledName = Name;
if (DemangledName[0] == getDataLayout().getGlobalPrefix())
DemangledName = DemangledName.substr(1);
std::lock_guard<sys::Mutex> locked(lock);
// If it hasn't already been generated, see if it's in one of our modules.
for (ModulePtrSet::iterator I = OwnedModules.begin_added(),
E = OwnedModules.end_added();
I != E; ++I) {
Module *M = *I;
Function *F = M->getFunction(DemangledName);
if (F && !F->isDeclaration())
return M;
if (!CheckFunctionsOnly) {
GlobalVariable *G = M->getGlobalVariable(DemangledName);
if (G && !G->isDeclaration())
return M;
// FIXME: Do we need to worry about global aliases?
}
}
// We didn't find the symbol in any of our modules.
return nullptr;
}
uint64_t MCJIT::getSymbolAddress(const std::string &Name,
bool CheckFunctionsOnly) {
std::string MangledName;
{
raw_string_ostream MangledNameStream(MangledName);
Mangler::getNameWithPrefix(MangledNameStream, Name, getDataLayout());
}
if (auto Sym = findSymbol(MangledName, CheckFunctionsOnly)) {
if (auto AddrOrErr = Sym.getAddress())
return *AddrOrErr;
else
report_fatal_error(AddrOrErr.takeError());
} else if (auto Err = Sym.takeError())
report_fatal_error(Sym.takeError());
return 0;
}
JITSymbol MCJIT::findSymbol(const std::string &Name,
bool CheckFunctionsOnly) {
std::lock_guard<sys::Mutex> locked(lock);
// First, check to see if we already have this symbol.
if (auto Sym = findExistingSymbol(Name))
return Sym;
for (object::OwningBinary<object::Archive> &OB : Archives) {
object::Archive *A = OB.getBinary();
// Look for our symbols in each Archive
auto OptionalChildOrErr = A->findSym(Name);
if (!OptionalChildOrErr)
report_fatal_error(OptionalChildOrErr.takeError());
auto &OptionalChild = *OptionalChildOrErr;
if (OptionalChild) {
// FIXME: Support nested archives?
Expected<std::unique_ptr<object::Binary>> ChildBinOrErr =
OptionalChild->getAsBinary();
if (!ChildBinOrErr) {
// TODO: Actually report errors helpfully.
consumeError(ChildBinOrErr.takeError());
continue;
}
std::unique_ptr<object::Binary> &ChildBin = ChildBinOrErr.get();
if (ChildBin->isObject()) {
std::unique_ptr<object::ObjectFile> OF(
static_cast<object::ObjectFile *>(ChildBin.release()));
// This causes the object file to be loaded.
addObjectFile(std::move(OF));
// The address should be here now.
if (auto Sym = findExistingSymbol(Name))
return Sym;
}
}
}
// If it hasn't already been generated, see if it's in one of our modules.
Module *M = findModuleForSymbol(Name, CheckFunctionsOnly);
if (M) {
generateCodeForModule(M);
// Check the RuntimeDyld table again, it should be there now.
return findExistingSymbol(Name);
}
// If a LazyFunctionCreator is installed, use it to get/create the function.
// FIXME: Should we instead have a LazySymbolCreator callback?
if (LazyFunctionCreator) {
auto Addr = static_cast<uint64_t>(
reinterpret_cast<uintptr_t>(LazyFunctionCreator(Name)));
return JITSymbol(Addr, JITSymbolFlags::Exported);
}
return nullptr;
}
uint64_t MCJIT::getGlobalValueAddress(const std::string &Name) {
std::lock_guard<sys::Mutex> locked(lock);
uint64_t Result = getSymbolAddress(Name, false);
if (Result != 0)
finalizeLoadedModules();
return Result;
}
uint64_t MCJIT::getFunctionAddress(const std::string &Name) {
std::lock_guard<sys::Mutex> locked(lock);
uint64_t Result = getSymbolAddress(Name, true);
if (Result != 0)
finalizeLoadedModules();
return Result;
}
// Deprecated. Use getFunctionAddress instead.
void *MCJIT::getPointerToFunction(Function *F) {
std::lock_guard<sys::Mutex> locked(lock);
Mangler Mang;
SmallString<128> Name;
TM->getNameWithPrefix(Name, F, Mang);
if (F->isDeclaration() || F->hasAvailableExternallyLinkage()) {
bool AbortOnFailure = !F->hasExternalWeakLinkage();
void *Addr = getPointerToNamedFunction(Name, AbortOnFailure);
updateGlobalMapping(F, Addr);
return Addr;
}
Module *M = F->getParent();
bool HasBeenAddedButNotLoaded = OwnedModules.hasModuleBeenAddedButNotLoaded(M);
// Make sure the relevant module has been compiled and loaded.
if (HasBeenAddedButNotLoaded)
generateCodeForModule(M);
else if (!OwnedModules.hasModuleBeenLoaded(M)) {
// If this function doesn't belong to one of our modules, we're done.
// FIXME: Asking for the pointer to a function that hasn't been registered,
// and isn't a declaration (which is handled above) should probably
// be an assertion.
return nullptr;
}
// FIXME: Should the Dyld be retaining module information? Probably not.
//
// This is the accessor for the target address, so make sure to check the
// load address of the symbol, not the local address.
return (void*)Dyld.getSymbol(Name).getAddress();
}
void MCJIT::runStaticConstructorsDestructorsInModulePtrSet(
bool isDtors, ModulePtrSet::iterator I, ModulePtrSet::iterator E) {
for (; I != E; ++I) {
ExecutionEngine::runStaticConstructorsDestructors(**I, isDtors);
}
}
void MCJIT::runStaticConstructorsDestructors(bool isDtors) {
// Execute global ctors/dtors for each module in the program.
runStaticConstructorsDestructorsInModulePtrSet(
isDtors, OwnedModules.begin_added(), OwnedModules.end_added());
runStaticConstructorsDestructorsInModulePtrSet(
isDtors, OwnedModules.begin_loaded(), OwnedModules.end_loaded());
runStaticConstructorsDestructorsInModulePtrSet(
isDtors, OwnedModules.begin_finalized(), OwnedModules.end_finalized());
}
Function *MCJIT::FindFunctionNamedInModulePtrSet(StringRef FnName,
ModulePtrSet::iterator I,
ModulePtrSet::iterator E) {
for (; I != E; ++I) {
Function *F = (*I)->getFunction(FnName);
if (F && !F->isDeclaration())
return F;
}
return nullptr;
}
GlobalVariable *MCJIT::FindGlobalVariableNamedInModulePtrSet(StringRef Name,
bool AllowInternal,
ModulePtrSet::iterator I,
ModulePtrSet::iterator E) {
for (; I != E; ++I) {
GlobalVariable *GV = (*I)->getGlobalVariable(Name, AllowInternal);
if (GV && !GV->isDeclaration())
return GV;
}
return nullptr;
}
Function *MCJIT::FindFunctionNamed(StringRef FnName) {
Function *F = FindFunctionNamedInModulePtrSet(
FnName, OwnedModules.begin_added(), OwnedModules.end_added());
if (!F)
F = FindFunctionNamedInModulePtrSet(FnName, OwnedModules.begin_loaded(),
OwnedModules.end_loaded());
if (!F)
F = FindFunctionNamedInModulePtrSet(FnName, OwnedModules.begin_finalized(),
OwnedModules.end_finalized());
return F;
}
GlobalVariable *MCJIT::FindGlobalVariableNamed(StringRef Name, bool AllowInternal) {
GlobalVariable *GV = FindGlobalVariableNamedInModulePtrSet(
Name, AllowInternal, OwnedModules.begin_added(), OwnedModules.end_added());
if (!GV)
GV = FindGlobalVariableNamedInModulePtrSet(Name, AllowInternal, OwnedModules.begin_loaded(),
OwnedModules.end_loaded());
if (!GV)
GV = FindGlobalVariableNamedInModulePtrSet(Name, AllowInternal, OwnedModules.begin_finalized(),
OwnedModules.end_finalized());
return GV;
}
GenericValue MCJIT::runFunction(Function *F, ArrayRef<GenericValue> ArgValues) {
assert(F && "Function *F was null at entry to run()");
void *FPtr = getPointerToFunction(F);
finalizeModule(F->getParent());
assert(FPtr && "Pointer to fn's code was null after getPointerToFunction");
FunctionType *FTy = F->getFunctionType();
Type *RetTy = FTy->getReturnType();
assert((FTy->getNumParams() == ArgValues.size() ||
(FTy->isVarArg() && FTy->getNumParams() <= ArgValues.size())) &&
"Wrong number of arguments passed into function!");
assert(FTy->getNumParams() == ArgValues.size() &&
"This doesn't support passing arguments through varargs (yet)!");
// Handle some common cases first. These cases correspond to common `main'
// prototypes.
if (RetTy->isIntegerTy(32) || RetTy->isVoidTy()) {
switch (ArgValues.size()) {
case 3:
if (FTy->getParamType(0)->isIntegerTy(32) &&
FTy->getParamType(1)->isPointerTy() &&
FTy->getParamType(2)->isPointerTy()) {
int (*PF)(int, char **, const char **) =
(int(*)(int, char **, const char **))(intptr_t)FPtr;
// Call the function.
GenericValue rv;
rv.IntVal = APInt(32, PF(ArgValues[0].IntVal.getZExtValue(),
(char **)GVTOP(ArgValues[1]),
(const char **)GVTOP(ArgValues[2])));
return rv;
}
break;
case 2:
if (FTy->getParamType(0)->isIntegerTy(32) &&
FTy->getParamType(1)->isPointerTy()) {
int (*PF)(int, char **) = (int(*)(int, char **))(intptr_t)FPtr;
// Call the function.
GenericValue rv;
rv.IntVal = APInt(32, PF(ArgValues[0].IntVal.getZExtValue(),
(char **)GVTOP(ArgValues[1])));
return rv;
}
break;
case 1:
if (FTy->getNumParams() == 1 &&
FTy->getParamType(0)->isIntegerTy(32)) {
GenericValue rv;
int (*PF)(int) = (int(*)(int))(intptr_t)FPtr;
rv.IntVal = APInt(32, PF(ArgValues[0].IntVal.getZExtValue()));
return rv;
}
break;
}
}
// Handle cases where no arguments are passed first.
if (ArgValues.empty()) {
GenericValue rv;
switch (RetTy->getTypeID()) {
default: llvm_unreachable("Unknown return type for function call!");
case Type::IntegerTyID: {
unsigned BitWidth = cast<IntegerType>(RetTy)->getBitWidth();
if (BitWidth == 1)
rv.IntVal = APInt(BitWidth, ((bool(*)())(intptr_t)FPtr)());
else if (BitWidth <= 8)
rv.IntVal = APInt(BitWidth, ((char(*)())(intptr_t)FPtr)());
else if (BitWidth <= 16)
rv.IntVal = APInt(BitWidth, ((short(*)())(intptr_t)FPtr)());
else if (BitWidth <= 32)
rv.IntVal = APInt(BitWidth, ((int(*)())(intptr_t)FPtr)());
else if (BitWidth <= 64)
rv.IntVal = APInt(BitWidth, ((int64_t(*)())(intptr_t)FPtr)());
else
llvm_unreachable("Integer types > 64 bits not supported");
return rv;
}
case Type::VoidTyID:
rv.IntVal = APInt(32, ((int(*)())(intptr_t)FPtr)());
return rv;
case Type::FloatTyID:
rv.FloatVal = ((float(*)())(intptr_t)FPtr)();
return rv;
case Type::DoubleTyID:
rv.DoubleVal = ((double(*)())(intptr_t)FPtr)();
return rv;
case Type::X86_FP80TyID:
case Type::FP128TyID:
case Type::PPC_FP128TyID:
llvm_unreachable("long double not supported yet");
case Type::PointerTyID:
return PTOGV(((void*(*)())(intptr_t)FPtr)());
}
}
report_fatal_error("MCJIT::runFunction does not support full-featured "
"argument passing. Please use "
"ExecutionEngine::getFunctionAddress and cast the result "
"to the desired function pointer type.");
}
void *MCJIT::getPointerToNamedFunction(StringRef Name, bool AbortOnFailure) {
if (!isSymbolSearchingDisabled()) {
if (auto Sym = Resolver.findSymbol(Name)) {
if (auto AddrOrErr = Sym.getAddress())
return reinterpret_cast<void*>(
static_cast<uintptr_t>(*AddrOrErr));
} else if (auto Err = Sym.takeError())
report_fatal_error(std::move(Err));
}
/// If a LazyFunctionCreator is installed, use it to get/create the function.
if (LazyFunctionCreator)
if (void *RP = LazyFunctionCreator(Name))
return RP;
if (AbortOnFailure) {
report_fatal_error("Program used external function '"+Name+
"' which could not be resolved!");
}
return nullptr;
}
void MCJIT::RegisterJITEventListener(JITEventListener *L) {
if (!L)
return;
std::lock_guard<sys::Mutex> locked(lock);
EventListeners.push_back(L);
}
void MCJIT::UnregisterJITEventListener(JITEventListener *L) {
if (!L)
return;
std::lock_guard<sys::Mutex> locked(lock);
auto I = find(reverse(EventListeners), L);
if (I != EventListeners.rend()) {
std::swap(*I, EventListeners.back());
EventListeners.pop_back();
}
}
void MCJIT::notifyObjectLoaded(const object::ObjectFile &Obj,
const RuntimeDyld::LoadedObjectInfo &L) {
uint64_t Key =
static_cast<uint64_t>(reinterpret_cast<uintptr_t>(Obj.getData().data()));
std::lock_guard<sys::Mutex> locked(lock);
MemMgr->notifyObjectLoaded(this, Obj);
for (unsigned I = 0, S = EventListeners.size(); I < S; ++I) {
EventListeners[I]->notifyObjectLoaded(Key, Obj, L);
}
}
void MCJIT::notifyFreeingObject(const object::ObjectFile &Obj) {
uint64_t Key =
static_cast<uint64_t>(reinterpret_cast<uintptr_t>(Obj.getData().data()));
std::lock_guard<sys::Mutex> locked(lock);
for (JITEventListener *L : EventListeners)
L->notifyFreeingObject(Key);
}
JITSymbol
LinkingSymbolResolver::findSymbol(const std::string &Name) {
auto Result = ParentEngine.findSymbol(Name, false);
if (Result)
return Result;
if (ParentEngine.isSymbolSearchingDisabled())
return nullptr;
return ClientResolver->findSymbol(Name);
}
void LinkingSymbolResolver::anchor() {}
| {
"pile_set_name": "Github"
} |
class WizardController {
constructor($log, $scope, mockService, $state, socketService, $translate, uiSettingsService,
$window, matchmediaService, themeManager, modalService, $filter, authService) {
'ngInject';
this.$log = $log;
this.mockService = mockService;
this.$state = $state;
this.$scope = $scope;
this.socketService = socketService;
this.$translate = $translate;
this.$window = $window;
this.uiSettingsService = uiSettingsService;
this.matchmediaService = matchmediaService;
this.modalService = modalService;
this.filteredTranslate = $filter('translate');
this.themeManager = themeManager;
this.authService = authService;
this.init();
}
init() {
this.$window.contentWrapper.style.zIndex = 5;
this.$window.wizardScrim.style.display = 'block';
this.registerListener();
this.initService();
this.wizardDetails = {
steps: []
};
this.wizardData = {
deviceName: '',
showI2sOption: false
};
//Start mock
// this.wizardDetails = this.mockService.get('wizard');
// let browserLanguage = this.setBrowserLangAsDefault();
// console.log(this.wizardDetails);
//End mock
this.$scope.$on('$destroy', () => {
try {
this.$window.contentWrapper.style.zIndex = 1;
this.$window.wizardScrim.style.display = 'none';
} catch(e) {}
});
}
gotoStep(step) {
let emitPayload;
this.$log.log('go from step', this.currentStep.name.toLowerCase());
//From step actions
switch (this.currentStep.name.toLowerCase()) {
case 'language':
this.$log.debug('setLanguage', this.wizardDetails.language);
this.wizardDetails.language.disallowReload = true;
this.socketService.emit('setLanguage', this.wizardDetails.language);
//Prefetching network scan
this.socketService.emit('getWirelessNetworks', '');
this.$translate.use(this.wizardDetails.language.defaultLanguage.code);
break;
case 'name':
if (this.deviceNameform.$valid) {
this.$log.debug('setDeviceName', this.wizardDetails.deviceName);
this.socketService.emit('setDeviceName', this.wizardDetails.deviceName);
}
break;
case 'output':
if (this.wizardDetails.outputDevices.i2s && this.wizardDetails.outputDevices.i2s.enabled) {
emitPayload = {
'i2s': true,
'i2sid': {
'value': this.wizardData.selectedI2s.id,
'label': this.wizardData.selectedI2s.name
},
'output_device': {
'value': 1,
'label': this.wizardData.selectedI2s.name
}
};
} else {
emitPayload = {
'i2s': false,
'output_device': {
'value': this.wizardData.selectedDevice.id,
'label': this.wizardData.selectedDevice.name
}
};
}
this.$log.debug('setOutputDevices', emitPayload);
this.socketService.emit('setOutputDevices', emitPayload);
break;
case 'advancedsettings':
this.$log.debug('setExperienceAdvancedSettings', this.wizardDetails.experienceAdvancedSettings.status.id);
this.socketService.emit('setExperienceAdvancedSettings', this.wizardDetails.experienceAdvancedSettings.status.id);
break;
}
this.currentStep = step;
//To step actions
switch (this.currentStep.name.toLowerCase()) {
case 'name':
if (!this.wizardDetails.deviceName) {
this.socketService.emit('getDeviceName');
}
break;
case 'output':
this.socketService.emit('getOutputDevices');
break;
case 'advancedsettings':
this.socketService.emit('getExperienceAdvancedSettings');
break;
case 'devicecode':
this.socketService.emit('getDeviceActivationStatus');
break;
case 'done':
this.socketService.emit('getDonePage');
break;
}
}
setDonationAmount(amount) {
this.wizardData.donationAmount = amount;
}
subscribeToMailList() {
if (this.mailListForm.$valid) {
this.CgMailChimpService.subscribe(this.wizardData.mailListData)
.then(() => {
this.wizardData.mailListSubscribed = true;
},
(reason) => {
this.$log.log('Mailchimp subscription error', reason);
});
}
}
gotoNextStep() {
this.gotoStep(this.wizardDetails.steps[this.getStepIndex() + 1]);
}
gotoPrevStep() {
this.gotoStep(this.wizardDetails.steps[this.getStepIndex() - 1]);
}
getStepIndex(step = this.currentStep) {
return this.wizardDetails.steps.indexOf(step);
}
isLastStep() {
return this.getStepIndex(this.currentStep) === this.wizardDetails.steps.length - 1;
}
isFirstStep() {
return this.getStepIndex(this.currentStep) === 0;
}
getStepPos() {
return (this.getStepIndex(this.currentStep)+1) + '/' + this.wizardDetails.steps.length;
}
goToAccountSetup() {
this.socketService.emit('setWizardAction', {action: 'close'});
return this.$state.go('myvolumio.signup');
}
isAccountSetupRequired() {
if (this.authService.isEnabled && this.authService.user === null) {
return true;
} else {
return false;
}
}
done() {
this.$state.go('volumio.playback');
this.socketService.emit('setWizardAction', {action: 'close'});
}
skipWizard() {
this.socketService.emit('setWizardAction', {action: 'skip'});
this.$state.go('volumio.playback');
}
addToBookmark(event) {
let bookmarkURL = window.location.href;
let bookmarkTitle = document.title;
if ('addToHomescreen' in window && window.addToHomescreen.isCompatible) {
// Mobile browsers
window.addToHomescreen({autostart: false, startDelay: 0}).show(true);
} else if (window.sidebar && window.sidebar.addPanel) {
// Firefox version < 23
window.sidebar.addPanel(bookmarkTitle, bookmarkURL, '');
} else if ((window.sidebar && /Firefox/i.test(navigator.userAgent)) || (window.opera && window.print)) {
// Firefox version >= 23 and Opera Hotlist
$(this).attr({
href: bookmarkURL,
title: bookmarkTitle,
rel: 'sidebar'
}).off(event);
return true;
} else if (window.external && ('AddFavorite' in window.external)) {
// IE Favorite
window.external.AddFavorite(bookmarkURL, bookmarkTitle);
} else {
// Other browsers (mainly WebKit - Chrome/Safari)
alert('Press ' + (/Mac/i.test(navigator.userAgent) ? 'Cmd' : 'Ctrl') + '+D to bookmark this page.');
}
}
donate(currency) {
let
templateUrl = 'app/components/modals/modal-crypto.html',
controller = 'ModalCryptoController',
params = {name:'Bitcoin',address:'3HxbUDkrQAuX2XPPLg8xYNuEA8tJ2s6UjF'};
switch(currency) {
case 'ltc':
params = {name:'Litecoin',address:'LKjJWNwYMGfhmCRBj11yLFjfM1yG3TKTCZ', alias: 'ltc'};
break;
case 'xrp':
params = {name:'Ripple',address:'rnCRvU45awv3ZySymzLCLFq3HCKfma2Utb', alias: 'xrp'};
break;
default:
params = {name:'Bitcoin',address:'3HxbUDkrQAuX2XPPLg8xYNuEA8tJ2s6UjF', alias: 'btc'};
}
this.modalService.openModal(
controller,
templateUrl,
params,
'md');
}
setBrowserLangAsDefault() {
const browserLang = this.uiSettingsService.getBrowserDefaultLanguage();
let isLangAvailable = this.wizardDetails.language.available.find((lang) => {
return lang.code === browserLang;
});
if (isLangAvailable) {
this.wizardDetails.language.defaultLanguage = isLangAvailable;
this.$translate.use(isLangAvailable.code);
} else {
this.$translate.use('en');
}
}
setDeviceCode() {
var emitPayload = {code: this.wizardDetails.deviceCode.code};
this.socketService.emit('setDeviceActivationCode', emitPayload);
}
registerListener() {
this.socketService.on('pushWizardSteps', (data) => {
this.$log.debug('pushWizardSteps', data);
this.wizardDetails.steps = data;
this.currentStep = this.wizardDetails.steps[0];
});
this.socketService.on('pushAvailableLanguages', (data) => {
this.$log.debug('pushAvailableLanguages', data);
this.wizardDetails.language = data;
this.wizardData.defaultLanguage = this.wizardDetails.language.defaultLanguage.code;
this.setBrowserLangAsDefault();
});
this.socketService.on('pushDeviceName', (data) => {
this.$log.debug('pushDeviceName', data);
this.wizardDetails.deviceName = data;
});
this.socketService.on('pushOutputDevices', (data) => {
this.$log.debug('pushOutputDevices', data);
this.wizardDetails.outputDevices = data;
this.wizardData.selectedDevice = {name: data.devices.active.name, id: data.devices.active.id};
if (data.i2s && data.i2s.enabled) {
this.wizardData.selectedI2s = {name: data.i2s.active};
}
});
this.socketService.on('pushDonePage', (data) => {
this.$log.debug('pushDonePage', data);
this.wizardDetails.done = data;
if (data.donation) {
this.setDonationAmount(data.donationAmount.donationAmount);
}
});
this.socketService.on('pushDeviceActivationCodeResult', (data) => {
this.$log.debug('pushDeviceActivationCodeResult', data);
this.wizardDetails.deviceCode.activated = data.activated;
this.wizardDetails.deviceCode.error = data.error;
});
this.socketService.on('pushDeviceActivationStatus', (data) => {
this.$log.debug('pushDeviceActivationStatus', data);
this.wizardDetails.deviceCode = {};
this.wizardDetails.deviceCode.alreadyActivated = data.alreadyActivated;
this.wizardDetails.deviceCode.message = this.filteredTranslate(data.message) + ' ' + data.plan;
if (data.email) {
this.wizardDetails.deviceCode.message = this.wizardDetails.deviceCode.message + ' ' + this.filteredTranslate('MYVOLUMIO.AND_ASSOCIATED_WITH_ACCOUNT') + ' ' + data.email;
}
});
this.socketService.on('pushExperienceAdvancedSettings', (data) => {
this.$log.debug('pushExperienceAdvancedSettings', data);
this.wizardDetails.experienceAdvancedSettings = data;
});
this.socketService.on('closeWizard', () => {
this.$log.debug('closeWizard');
//this.$state.go('volumio.playback');
});
this.$scope.$on('$destroy', () => {
this.socketService.off('pushWizardSteps');
this.socketService.off('pushAvailableLanguages');
this.socketService.off('pushDeviceName');
});
}
initService() {
this.socketService.emit('getWizardSteps');
this.socketService.emit('getAvailableLanguages');
}
isVolumio3Theme(){
return this.themeManager.theme === 'volumio3';
}
}
export default WizardController;
| {
"pile_set_name": "Github"
} |
fails:Struct.new keyword_init: true option new class instantiation raises ArgumentError when passed not declared keyword argument
fails:Struct.new raises ArgumentError when there is a duplicate member
fails:Struct.new keyword_init: true option raises when there is a duplicate member
| {
"pile_set_name": "Github"
} |
/*
vim-hybrid theme by w0ng (https://github.com/w0ng/vim-hybrid)
*/
/*background color*/
.hljs {
display: block;
overflow-x: auto;
padding: 0.5em;
background: #1d1f21;
}
/*selection color*/
.hljs::selection,
.hljs span::selection {
background: #373b41;
}
.hljs::-moz-selection,
.hljs span::-moz-selection {
background: #373b41;
}
/*foreground color*/
.hljs {
color: #c5c8c6;
}
/*color: fg_yellow*/
.hljs-title,
.hljs-name {
color: #f0c674;
}
/*color: fg_comment*/
.hljs-comment,
.hljs-meta,
.hljs-meta .hljs-keyword {
color: #707880;
}
/*color: fg_red*/
.hljs-number,
.hljs-symbol,
.hljs-literal,
.hljs-deletion,
.hljs-link {
color: #cc6666;
}
/*color: fg_green*/
.hljs-string,
.hljs-doctag,
.hljs-addition,
.hljs-regexp,
.hljs-selector-attr,
.hljs-selector-pseudo {
color: #b5bd68;
}
/*color: fg_purple*/
.hljs-attribute,
.hljs-code,
.hljs-selector-id {
color: #b294bb;
}
/*color: fg_blue*/
.hljs-keyword,
.hljs-selector-tag,
.hljs-bullet,
.hljs-tag {
color: #81a2be;
}
/*color: fg_aqua*/
.hljs-subst,
.hljs-variable,
.hljs-template-tag,
.hljs-template-variable {
color: #8abeb7;
}
/*color: fg_orange*/
.hljs-type,
.hljs-built_in,
.hljs-builtin-name,
.hljs-quote,
.hljs-section,
.hljs-selector-class {
color: #de935f;
}
.hljs-emphasis {
font-style: italic;
}
.hljs-strong {
font-weight: bold;
}
| {
"pile_set_name": "Github"
} |
0
SECTION
2
HEADER
9
$ACADVER
1
AC1009
9
$DWGCODEPAGE
3
ansi_1252
9
$INSBASE
10
0.0
20
0.0
30
0.0
9
$EXTMIN
10
1.000000000000000E+20
20
1.000000000000000E+20
30
1.000000000000000E+20
9
$EXTMAX
10
-1.000000000000000E+20
20
-1.000000000000000E+20
30
-1.000000000000000E+20
9
$LIMMIN
10
0.0
20
0.0
9
$LIMMAX
10
12.0
20
9.0
9
$ORTHOMODE
70
0
9
$REGENMODE
70
1
9
$FILLMODE
70
1
9
$QTEXTMODE
70
0
9
$MIRRTEXT
70
1
9
$DRAGMODE
70
2
9
$LTSCALE
40
1.0
9
$OSMODE
70
37
9
$ATTMODE
70
1
9
$TEXTSIZE
40
0.2
9
$TRACEWID
40
0.05
9
$TEXTSTYLE
7
STANDARD
9
$CLAYER
8
DEFAULT
9
$CELTYPE
6
BYLAYER
9
$CECOLOR
62
256
9
$DIMSCALE
40
1.0
9
$DIMASZ
40
0.18
9
$DIMEXO
40
0.0625
9
$DIMDLI
40
0.38
9
$DIMRND
40
0.0
9
$DIMDLE
40
0.0
9
$DIMEXE
40
0.18
9
$DIMTP
40
0.0
9
$DIMTM
40
0.0
9
$DIMTXT
40
0.18
9
$DIMCEN
40
0.09
9
$DIMTSZ
40
0.0
9
$DIMTOL
70
0
9
$DIMLIM
70
0
9
$DIMTIH
70
1
9
$DIMTOH
70
1
9
$DIMSE1
70
0
9
$DIMSE2
70
0
9
$DIMTAD
70
0
9
$DIMZIN
70
0
9
$DIMBLK
1
9
$DIMASO
70
1
9
$DIMSHO
70
1
9
$DIMPOST
1
9
$DIMAPOST
1
9
$DIMALT
70
0
9
$DIMALTD
70
2
9
$DIMALTF
40
25.4
9
$DIMLFAC
40
1.0
9
$DIMTOFL
70
0
9
$DIMTVP
40
0.0
9
$DIMTIX
70
0
9
$DIMSOXD
70
0
9
$DIMSAH
70
0
9
$DIMBLK1
1
9
$DIMBLK2
1
9
$DIMSTYLE
2
STANDARD_ACAD
9
$DIMCLRD
70
0
9
$DIMCLRE
70
0
9
$DIMCLRT
70
0
9
$DIMTFAC
40
1.0
9
$DIMGAP
40
0.09
9
$LUNITS
70
2
9
$LUPREC
70
3
9
$SKETCHINC
40
0.1
9
$FILLETRAD
40
0.5
9
$AUNITS
70
0
9
$AUPREC
70
0
9
$MENU
1
.
9
$ELEVATION
40
0.0
9
$PELEVATION
40
0.0
9
$THICKNESS
40
0.0
9
$LIMCHECK
70
0
9
$CHAMFERA
40
0.0
9
$CHAMFERB
40
0.0
9
$SKPOLY
70
0
9
$TDCREATE
40
2458652.55362765
9
$TDUPDATE
40
2458652.553645012
9
$TDINDWG
40
0.0000000116
9
$TDUSRTIMER
40
0.0000000116
9
$USRTIMER
70
1
9
$ANGBASE
50
0.0
9
$ANGDIR
70
0
9
$PDMODE
70
0
9
$PDSIZE
40
0.0
9
$PLINEWID
40
0.0
9
$COORDS
70
1
9
$SPLFRAME
70
0
9
$SPLINETYPE
70
6
9
$SPLINESEGS
70
8
9
$ATTDIA
70
0
9
$ATTREQ
70
1
9
$HANDLING
70
1
9
$HANDSEED
5
90
9
$SURFTAB1
70
6
9
$SURFTAB2
70
6
9
$SURFTYPE
70
6
9
$SURFU
70
6
9
$SURFV
70
6
9
$UCSNAME
2
9
$UCSORG
10
0.0
20
0.0
30
0.0
9
$UCSXDIR
10
1.0
20
0.0
30
0.0
9
$UCSYDIR
10
0.0
20
1.0
30
0.0
9
$PUCSNAME
2
9
$PUCSORG
10
0.0
20
0.0
30
0.0
9
$PUCSXDIR
10
1.0
20
0.0
30
0.0
9
$PUCSYDIR
10
0.0
20
1.0
30
0.0
9
$USERI1
70
0
9
$USERI2
70
0
9
$USERI3
70
0
9
$USERI4
70
0
9
$USERI5
70
0
9
$USERR1
40
0.0
9
$USERR2
40
0.0
9
$USERR3
40
0.0
9
$USERR4
40
0.0
9
$USERR5
40
0.0
9
$WORLDVIEW
70
1
9
$SHADEDGE
70
3
9
$SHADEDIF
70
70
9
$TILEMODE
70
1
9
$MAXACTVP
70
64
9
$PLIMCHECK
70
0
9
$PEXTMIN
10
1.000000000000000E+20
20
1.000000000000000E+20
30
1.000000000000000E+20
9
$PEXTMAX
10
-1.000000000000000E+20
20
-1.000000000000000E+20
30
-1.000000000000000E+20
9
$PLIMMIN
10
0.0
20
0.0
9
$PLIMMAX
10
12.0
20
9.0
9
$UNITMODE
70
0
9
$VISRETAIN
70
1
9
$PLINEGEN
70
0
9
$PSLTSCALE
70
1
0
ENDSEC
0
SECTION
2
TABLES
0
TABLE
2
VPORT
70
1
0
VPORT
2
*ACTIVE
70
0
10
0.0
20
0.0
11
1.0
21
1.0
12
10.42990654205607
22
4.5
13
0.0
23
0.0
14
0.5
24
0.5
15
0.5
25
0.5
16
0.0
26
0.0
36
1.0
17
0.0
27
0.0
37
0.0
40
9.0
41
1.972972972850329
42
50.0
43
0.0
44
0.0
50
0.0
51
0.0
71
0
72
100
73
1
74
3
75
0
76
0
77
0
78
0
0
ENDTAB
0
TABLE
2
LTYPE
70
1
0
LTYPE
2
CONTINUOUS
70
0
3
Solid line
72
65
73
0
40
0.0
0
ENDTAB
0
TABLE
2
LAYER
70
2
0
LAYER
2
0
70
0
62
7
6
CONTINUOUS
0
LAYER
2
DEFAULT
70
0
62
7
6
CONTINUOUS
0
ENDTAB
0
TABLE
2
STYLE
70
1
0
STYLE
2
STANDARD
70
0
40
0.0
41
1.0
50
0.0
71
0
42
0.2
3
txt
4
0
ENDTAB
0
TABLE
2
VIEW
70
0
0
ENDTAB
0
TABLE
2
UCS
70
0
0
ENDTAB
0
TABLE
2
APPID
70
1
0
APPID
2
ACAD
70
0
0
ENDTAB
0
TABLE
2
DIMSTYLE
70
1
0
DIMSTYLE
2
STANDARD_ACAD
70
0
3
4
5
6
7
40
1.0
41
0.18
42
0.0625
43
0.38
44
0.18
45
0.0
46
0.0
47
0.0
48
0.0
140
0.18
141
0.09
142
0.0
143
25.4
144
1.0
145
0.0
146
1.0
147
0.09
71
0
72
0
73
1
74
1
75
0
76
0
77
0
78
0
170
0
171
2
172
0
173
0
174
0
175
0
176
0
177
0
178
0
0
ENDTAB
0
ENDSEC
0
SECTION
2
BLOCKS
0
BLOCK
8
0
2
$MODEL_SPACE
70
0
10
0.0
20
0.0
30
0.0
3
$MODEL_SPACE
1
0
ENDBLK
5
21
8
0
0
BLOCK
67
1
8
0
2
$PAPER_SPACE
70
0
10
0.0
20
0.0
30
0.0
3
$PAPER_SPACE
1
0
ENDBLK
5
1D
67
1
8
0
0
ENDSEC
0
SECTION
2
ENTITIES
0
POLYLINE
5
6F
8
DEFAULT
66
1
10
0.0
20
0.0
30
0.0
70
1
0
VERTEX
5
70
8
DEFAULT
10
25.0
20
25.0
30
0.0
0
VERTEX
5
71
8
DEFAULT
10
35.0
20
25.0
30
0.0
0
VERTEX
5
72
8
DEFAULT
10
35.0
20
35.0
30
0.0
0
VERTEX
5
73
8
DEFAULT
10
25.0
20
35.0
30
0.0
0
SEQEND
5
74
8
DEFAULT
0
POLYLINE
5
75
8
DEFAULT
66
1
10
0.0
20
0.0
30
0.0
70
1
0
VERTEX
5
76
8
DEFAULT
10
125.0
20
25.0
30
0.0
0
VERTEX
5
77
8
DEFAULT
10
135.0
20
25.0
30
0.0
0
VERTEX
5
78
8
DEFAULT
10
135.0
20
35.0
30
0.0
0
VERTEX
5
79
8
DEFAULT
10
125.0
20
35.0
30
0.0
0
SEQEND
5
7A
8
DEFAULT
0
CIRCLE
5
7B
8
DEFAULT
10
230.0
20
30.0
30
0.0
40
4.999999999999972
0
CIRCLE
5
7C
8
DEFAULT
10
230.0
20
30.0
30
0.0
40
30.0
0
POLYLINE
5
7D
8
DEFAULT
66
1
10
0.0
20
0.0
30
0.0
70
1
0
VERTEX
5
7E
8
DEFAULT
10
130.0
20
-12.42640687119285
30
0.0
0
VERTEX
5
7F
8
DEFAULT
10
172.4264068711929
20
30.0
30
0.0
0
VERTEX
5
80
8
DEFAULT
10
130.0
20
72.42640687119285
30
0.0
0
VERTEX
5
81
8
DEFAULT
10
87.57359312880715
20
30.0
30
0.0
0
SEQEND
5
82
8
DEFAULT
0
POLYLINE
5
83
8
DEFAULT
66
1
10
0.0
20
0.0
30
0.0
70
1
0
VERTEX
5
84
8
DEFAULT
10
0.0
20
0.0
30
0.0
0
VERTEX
5
85
8
DEFAULT
10
60.0
20
0.0
30
0.0
0
VERTEX
5
86
8
DEFAULT
10
60.0
20
60.0
30
0.0
0
VERTEX
5
87
8
DEFAULT
10
0.0
20
60.0
30
0.0
0
SEQEND
5
88
8
DEFAULT
0
ENDSEC
0
EOF
| {
"pile_set_name": "Github"
} |
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package net
import "errors"
var (
errInvalidInterface = errors.New("invalid network interface")
errInvalidInterfaceIndex = errors.New("invalid network interface index")
errInvalidInterfaceName = errors.New("invalid network interface name")
errNoSuchInterface = errors.New("no such network interface")
errNoSuchMulticastInterface = errors.New("no such multicast network interface")
)
// Interface represents a mapping between network interface name
// and index. It also represents network interface facility
// information.
type Interface struct {
Index int // positive integer that starts at one, zero is never used
MTU int // maximum transmission unit
Name string // e.g., "en0", "lo0", "eth0.100"
HardwareAddr HardwareAddr // IEEE MAC-48, EUI-48 and EUI-64 form
Flags Flags // e.g., FlagUp, FlagLoopback, FlagMulticast
}
type Flags uint
const (
FlagUp Flags = 1 << iota // interface is up
FlagBroadcast // interface supports broadcast access capability
FlagLoopback // interface is a loopback interface
FlagPointToPoint // interface belongs to a point-to-point link
FlagMulticast // interface supports multicast access capability
)
var flagNames = []string{
"up",
"broadcast",
"loopback",
"pointtopoint",
"multicast",
}
func (f Flags) String() string {
s := ""
for i, name := range flagNames {
if f&(1<<uint(i)) != 0 {
if s != "" {
s += "|"
}
s += name
}
}
if s == "" {
s = "0"
}
return s
}
// Addrs returns interface addresses for a specific interface.
func (ifi *Interface) Addrs() ([]Addr, error) {
if ifi == nil {
return nil, errInvalidInterface
}
return interfaceAddrTable(ifi)
}
// MulticastAddrs returns multicast, joined group addresses for
// a specific interface.
func (ifi *Interface) MulticastAddrs() ([]Addr, error) {
if ifi == nil {
return nil, errInvalidInterface
}
return interfaceMulticastAddrTable(ifi)
}
// Interfaces returns a list of the system's network interfaces.
func Interfaces() ([]Interface, error) {
return interfaceTable(0)
}
// InterfaceAddrs returns a list of the system's network interface
// addresses.
func InterfaceAddrs() ([]Addr, error) {
return interfaceAddrTable(nil)
}
// InterfaceByIndex returns the interface specified by index.
func InterfaceByIndex(index int) (*Interface, error) {
if index <= 0 {
return nil, errInvalidInterfaceIndex
}
ift, err := interfaceTable(index)
if err != nil {
return nil, err
}
return interfaceByIndex(ift, index)
}
func interfaceByIndex(ift []Interface, index int) (*Interface, error) {
for _, ifi := range ift {
if index == ifi.Index {
return &ifi, nil
}
}
return nil, errNoSuchInterface
}
// InterfaceByName returns the interface specified by name.
func InterfaceByName(name string) (*Interface, error) {
if name == "" {
return nil, errInvalidInterfaceName
}
ift, err := interfaceTable(0)
if err != nil {
return nil, err
}
for _, ifi := range ift {
if name == ifi.Name {
return &ifi, nil
}
}
return nil, errNoSuchInterface
}
| {
"pile_set_name": "Github"
} |
/*
* websupport.js
* ~~~~~~~~~~~~~
*
* sphinx.websupport utilities for all documentation.
*
* :copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS.
* :license: BSD, see LICENSE for details.
*
*/
(function($) {
$.fn.autogrow = function() {
return this.each(function() {
var textarea = this;
$.fn.autogrow.resize(textarea);
$(textarea)
.focus(function() {
textarea.interval = setInterval(function() {
$.fn.autogrow.resize(textarea);
}, 500);
})
.blur(function() {
clearInterval(textarea.interval);
});
});
};
$.fn.autogrow.resize = function(textarea) {
var lineHeight = parseInt($(textarea).css('line-height'), 10);
var lines = textarea.value.split('\n');
var columns = textarea.cols;
var lineCount = 0;
$.each(lines, function() {
lineCount += Math.ceil(this.length / columns) || 1;
});
var height = lineHeight * (lineCount + 1);
$(textarea).css('height', height);
};
})(jQuery);
(function($) {
var comp, by;
function init() {
initEvents();
initComparator();
}
function initEvents() {
$(document).on("click", 'a.comment-close', function(event) {
event.preventDefault();
hide($(this).attr('id').substring(2));
});
$(document).on("click", 'a.vote', function(event) {
event.preventDefault();
handleVote($(this));
});
$(document).on("click", 'a.reply', function(event) {
event.preventDefault();
openReply($(this).attr('id').substring(2));
});
$(document).on("click", 'a.close-reply', function(event) {
event.preventDefault();
closeReply($(this).attr('id').substring(2));
});
$(document).on("click", 'a.sort-option', function(event) {
event.preventDefault();
handleReSort($(this));
});
$(document).on("click", 'a.show-proposal', function(event) {
event.preventDefault();
showProposal($(this).attr('id').substring(2));
});
$(document).on("click", 'a.hide-proposal', function(event) {
event.preventDefault();
hideProposal($(this).attr('id').substring(2));
});
$(document).on("click", 'a.show-propose-change', function(event) {
event.preventDefault();
showProposeChange($(this).attr('id').substring(2));
});
$(document).on("click", 'a.hide-propose-change', function(event) {
event.preventDefault();
hideProposeChange($(this).attr('id').substring(2));
});
$(document).on("click", 'a.accept-comment', function(event) {
event.preventDefault();
acceptComment($(this).attr('id').substring(2));
});
$(document).on("click", 'a.delete-comment', function(event) {
event.preventDefault();
deleteComment($(this).attr('id').substring(2));
});
$(document).on("click", 'a.comment-markup', function(event) {
event.preventDefault();
toggleCommentMarkupBox($(this).attr('id').substring(2));
});
}
/**
* Set comp, which is a comparator function used for sorting and
* inserting comments into the list.
*/
function setComparator() {
// If the first three letters are "asc", sort in ascending order
// and remove the prefix.
if (by.substring(0,3) == 'asc') {
var i = by.substring(3);
comp = function(a, b) { return a[i] - b[i]; };
} else {
// Otherwise sort in descending order.
comp = function(a, b) { return b[by] - a[by]; };
}
// Reset link styles and format the selected sort option.
$('a.sel').attr('href', '#').removeClass('sel');
$('a.by' + by).removeAttr('href').addClass('sel');
}
/**
* Create a comp function. If the user has preferences stored in
* the sortBy cookie, use those, otherwise use the default.
*/
function initComparator() {
by = 'rating'; // Default to sort by rating.
// If the sortBy cookie is set, use that instead.
if (document.cookie.length > 0) {
var start = document.cookie.indexOf('sortBy=');
if (start != -1) {
start = start + 7;
var end = document.cookie.indexOf(";", start);
if (end == -1) {
end = document.cookie.length;
by = unescape(document.cookie.substring(start, end));
}
}
}
setComparator();
}
/**
* Show a comment div.
*/
function show(id) {
$('#ao' + id).hide();
$('#ah' + id).show();
var context = $.extend({id: id}, opts);
var popup = $(renderTemplate(popupTemplate, context)).hide();
popup.find('textarea[name="proposal"]').hide();
popup.find('a.by' + by).addClass('sel');
var form = popup.find('#cf' + id);
form.submit(function(event) {
event.preventDefault();
addComment(form);
});
$('#s' + id).after(popup);
popup.slideDown('fast', function() {
getComments(id);
});
}
/**
* Hide a comment div.
*/
function hide(id) {
$('#ah' + id).hide();
$('#ao' + id).show();
var div = $('#sc' + id);
div.slideUp('fast', function() {
div.remove();
});
}
/**
* Perform an ajax request to get comments for a node
* and insert the comments into the comments tree.
*/
function getComments(id) {
$.ajax({
type: 'GET',
url: opts.getCommentsURL,
data: {node: id},
success: function(data, textStatus, request) {
var ul = $('#cl' + id);
var speed = 100;
$('#cf' + id)
.find('textarea[name="proposal"]')
.data('source', data.source);
if (data.comments.length === 0) {
ul.html('<li>No comments yet.</li>');
ul.data('empty', true);
} else {
// If there are comments, sort them and put them in the list.
var comments = sortComments(data.comments);
speed = data.comments.length * 100;
appendComments(comments, ul);
ul.data('empty', false);
}
$('#cn' + id).slideUp(speed + 200);
ul.slideDown(speed);
},
error: function(request, textStatus, error) {
showError('Oops, there was a problem retrieving the comments.');
},
dataType: 'json'
});
}
/**
* Add a comment via ajax and insert the comment into the comment tree.
*/
function addComment(form) {
var node_id = form.find('input[name="node"]').val();
var parent_id = form.find('input[name="parent"]').val();
var text = form.find('textarea[name="comment"]').val();
var proposal = form.find('textarea[name="proposal"]').val();
if (text == '') {
showError('Please enter a comment.');
return;
}
// Disable the form that is being submitted.
form.find('textarea,input').attr('disabled', 'disabled');
// Send the comment to the server.
$.ajax({
type: "POST",
url: opts.addCommentURL,
dataType: 'json',
data: {
node: node_id,
parent: parent_id,
text: text,
proposal: proposal
},
success: function(data, textStatus, error) {
// Reset the form.
if (node_id) {
hideProposeChange(node_id);
}
form.find('textarea')
.val('')
.add(form.find('input'))
.removeAttr('disabled');
var ul = $('#cl' + (node_id || parent_id));
if (ul.data('empty')) {
$(ul).empty();
ul.data('empty', false);
}
insertComment(data.comment);
var ao = $('#ao' + node_id);
ao.find('img').attr({'src': opts.commentBrightImage});
if (node_id) {
// if this was a "root" comment, remove the commenting box
// (the user can get it back by reopening the comment popup)
$('#ca' + node_id).slideUp();
}
},
error: function(request, textStatus, error) {
form.find('textarea,input').removeAttr('disabled');
showError('Oops, there was a problem adding the comment.');
}
});
}
/**
* Recursively append comments to the main comment list and children
* lists, creating the comment tree.
*/
function appendComments(comments, ul) {
$.each(comments, function() {
var div = createCommentDiv(this);
ul.append($(document.createElement('li')).html(div));
appendComments(this.children, div.find('ul.comment-children'));
// To avoid stagnating data, don't store the comments children in data.
this.children = null;
div.data('comment', this);
});
}
/**
* After adding a new comment, it must be inserted in the correct
* location in the comment tree.
*/
function insertComment(comment) {
var div = createCommentDiv(comment);
// To avoid stagnating data, don't store the comments children in data.
comment.children = null;
div.data('comment', comment);
var ul = $('#cl' + (comment.node || comment.parent));
var siblings = getChildren(ul);
var li = $(document.createElement('li'));
li.hide();
// Determine where in the parents children list to insert this comment.
for(var i=0; i < siblings.length; i++) {
if (comp(comment, siblings[i]) <= 0) {
$('#cd' + siblings[i].id)
.parent()
.before(li.html(div));
li.slideDown('fast');
return;
}
}
// If we get here, this comment rates lower than all the others,
// or it is the only comment in the list.
ul.append(li.html(div));
li.slideDown('fast');
}
function acceptComment(id) {
$.ajax({
type: 'POST',
url: opts.acceptCommentURL,
data: {id: id},
success: function(data, textStatus, request) {
$('#cm' + id).fadeOut('fast');
$('#cd' + id).removeClass('moderate');
},
error: function(request, textStatus, error) {
showError('Oops, there was a problem accepting the comment.');
}
});
}
function deleteComment(id) {
$.ajax({
type: 'POST',
url: opts.deleteCommentURL,
data: {id: id},
success: function(data, textStatus, request) {
var div = $('#cd' + id);
if (data == 'delete') {
// Moderator mode: remove the comment and all children immediately
div.slideUp('fast', function() {
div.remove();
});
return;
}
// User mode: only mark the comment as deleted
div
.find('span.user-id:first')
.text('[deleted]').end()
.find('div.comment-text:first')
.text('[deleted]').end()
.find('#cm' + id + ', #dc' + id + ', #ac' + id + ', #rc' + id +
', #sp' + id + ', #hp' + id + ', #cr' + id + ', #rl' + id)
.remove();
var comment = div.data('comment');
comment.username = '[deleted]';
comment.text = '[deleted]';
div.data('comment', comment);
},
error: function(request, textStatus, error) {
showError('Oops, there was a problem deleting the comment.');
}
});
}
function showProposal(id) {
$('#sp' + id).hide();
$('#hp' + id).show();
$('#pr' + id).slideDown('fast');
}
function hideProposal(id) {
$('#hp' + id).hide();
$('#sp' + id).show();
$('#pr' + id).slideUp('fast');
}
function showProposeChange(id) {
$('#pc' + id).hide();
$('#hc' + id).show();
var textarea = $('#pt' + id);
textarea.val(textarea.data('source'));
$.fn.autogrow.resize(textarea[0]);
textarea.slideDown('fast');
}
function hideProposeChange(id) {
$('#hc' + id).hide();
$('#pc' + id).show();
var textarea = $('#pt' + id);
textarea.val('').removeAttr('disabled');
textarea.slideUp('fast');
}
function toggleCommentMarkupBox(id) {
$('#mb' + id).toggle();
}
/** Handle when the user clicks on a sort by link. */
function handleReSort(link) {
var classes = link.attr('class').split(/\s+/);
for (var i=0; i<classes.length; i++) {
if (classes[i] != 'sort-option') {
by = classes[i].substring(2);
}
}
setComparator();
// Save/update the sortBy cookie.
var expiration = new Date();
expiration.setDate(expiration.getDate() + 365);
document.cookie= 'sortBy=' + escape(by) +
';expires=' + expiration.toUTCString();
$('ul.comment-ul').each(function(index, ul) {
var comments = getChildren($(ul), true);
comments = sortComments(comments);
appendComments(comments, $(ul).empty());
});
}
/**
* Function to process a vote when a user clicks an arrow.
*/
function handleVote(link) {
if (!opts.voting) {
showError("You'll need to login to vote.");
return;
}
var id = link.attr('id');
if (!id) {
// Didn't click on one of the voting arrows.
return;
}
// If it is an unvote, the new vote value is 0,
// Otherwise it's 1 for an upvote, or -1 for a downvote.
var value = 0;
if (id.charAt(1) != 'u') {
value = id.charAt(0) == 'u' ? 1 : -1;
}
// The data to be sent to the server.
var d = {
comment_id: id.substring(2),
value: value
};
// Swap the vote and unvote links.
link.hide();
$('#' + id.charAt(0) + (id.charAt(1) == 'u' ? 'v' : 'u') + d.comment_id)
.show();
// The div the comment is displayed in.
var div = $('div#cd' + d.comment_id);
var data = div.data('comment');
// If this is not an unvote, and the other vote arrow has
// already been pressed, unpress it.
if ((d.value !== 0) && (data.vote === d.value * -1)) {
$('#' + (d.value == 1 ? 'd' : 'u') + 'u' + d.comment_id).hide();
$('#' + (d.value == 1 ? 'd' : 'u') + 'v' + d.comment_id).show();
}
// Update the comments rating in the local data.
data.rating += (data.vote === 0) ? d.value : (d.value - data.vote);
data.vote = d.value;
div.data('comment', data);
// Change the rating text.
div.find('.rating:first')
.text(data.rating + ' point' + (data.rating == 1 ? '' : 's'));
// Send the vote information to the server.
$.ajax({
type: "POST",
url: opts.processVoteURL,
data: d,
error: function(request, textStatus, error) {
showError('Oops, there was a problem casting that vote.');
}
});
}
/**
* Open a reply form used to reply to an existing comment.
*/
function openReply(id) {
// Swap out the reply link for the hide link
$('#rl' + id).hide();
$('#cr' + id).show();
// Add the reply li to the children ul.
var div = $(renderTemplate(replyTemplate, {id: id})).hide();
$('#cl' + id)
.prepend(div)
// Setup the submit handler for the reply form.
.find('#rf' + id)
.submit(function(event) {
event.preventDefault();
addComment($('#rf' + id));
closeReply(id);
})
.find('input[type=button]')
.click(function() {
closeReply(id);
});
div.slideDown('fast', function() {
$('#rf' + id).find('textarea').focus();
});
}
/**
* Close the reply form opened with openReply.
*/
function closeReply(id) {
// Remove the reply div from the DOM.
$('#rd' + id).slideUp('fast', function() {
$(this).remove();
});
// Swap out the hide link for the reply link
$('#cr' + id).hide();
$('#rl' + id).show();
}
/**
* Recursively sort a tree of comments using the comp comparator.
*/
function sortComments(comments) {
comments.sort(comp);
$.each(comments, function() {
this.children = sortComments(this.children);
});
return comments;
}
/**
* Get the children comments from a ul. If recursive is true,
* recursively include childrens' children.
*/
function getChildren(ul, recursive) {
var children = [];
ul.children().children("[id^='cd']")
.each(function() {
var comment = $(this).data('comment');
if (recursive)
comment.children = getChildren($(this).find('#cl' + comment.id), true);
children.push(comment);
});
return children;
}
/** Create a div to display a comment in. */
function createCommentDiv(comment) {
if (!comment.displayed && !opts.moderator) {
return $('<div class="moderate">Thank you! Your comment will show up '
+ 'once it has been approved by a moderator.</div>');
}
// Prettify the comment rating.
comment.pretty_rating = comment.rating + ' point' +
(comment.rating == 1 ? '' : 's');
// Make a class (for displaying not yet moderated comments differently)
comment.css_class = comment.displayed ? '' : ' moderate';
// Create a div for this comment.
var context = $.extend({}, opts, comment);
var div = $(renderTemplate(commentTemplate, context));
// If the user has voted on this comment, highlight the correct arrow.
if (comment.vote) {
var direction = (comment.vote == 1) ? 'u' : 'd';
div.find('#' + direction + 'v' + comment.id).hide();
div.find('#' + direction + 'u' + comment.id).show();
}
if (opts.moderator || comment.text != '[deleted]') {
div.find('a.reply').show();
if (comment.proposal_diff)
div.find('#sp' + comment.id).show();
if (opts.moderator && !comment.displayed)
div.find('#cm' + comment.id).show();
if (opts.moderator || (opts.username == comment.username))
div.find('#dc' + comment.id).show();
}
return div;
}
/**
* A simple template renderer. Placeholders such as <%id%> are replaced
* by context['id'] with items being escaped. Placeholders such as <#id#>
* are not escaped.
*/
function renderTemplate(template, context) {
var esc = $(document.createElement('div'));
function handle(ph, escape) {
var cur = context;
$.each(ph.split('.'), function() {
cur = cur[this];
});
return escape ? esc.text(cur || "").html() : cur;
}
return template.replace(/<([%#])([\w\.]*)\1>/g, function() {
return handle(arguments[2], arguments[1] == '%' ? true : false);
});
}
/** Flash an error message briefly. */
function showError(message) {
$(document.createElement('div')).attr({'class': 'popup-error'})
.append($(document.createElement('div'))
.attr({'class': 'error-message'}).text(message))
.appendTo('body')
.fadeIn("slow")
.delay(2000)
.fadeOut("slow");
}
/** Add a link the user uses to open the comments popup. */
$.fn.comment = function() {
return this.each(function() {
var id = $(this).attr('id').substring(1);
var count = COMMENT_METADATA[id];
var title = count + ' comment' + (count == 1 ? '' : 's');
var image = count > 0 ? opts.commentBrightImage : opts.commentImage;
var addcls = count == 0 ? ' nocomment' : '';
$(this)
.append(
$(document.createElement('a')).attr({
href: '#',
'class': 'sphinx-comment-open' + addcls,
id: 'ao' + id
})
.append($(document.createElement('img')).attr({
src: image,
alt: 'comment',
title: title
}))
.click(function(event) {
event.preventDefault();
show($(this).attr('id').substring(2));
})
)
.append(
$(document.createElement('a')).attr({
href: '#',
'class': 'sphinx-comment-close hidden',
id: 'ah' + id
})
.append($(document.createElement('img')).attr({
src: opts.closeCommentImage,
alt: 'close',
title: 'close'
}))
.click(function(event) {
event.preventDefault();
hide($(this).attr('id').substring(2));
})
);
});
};
var opts = {
processVoteURL: '/_process_vote',
addCommentURL: '/_add_comment',
getCommentsURL: '/_get_comments',
acceptCommentURL: '/_accept_comment',
deleteCommentURL: '/_delete_comment',
commentImage: '/static/_static/comment.png',
closeCommentImage: '/static/_static/comment-close.png',
loadingImage: '/static/_static/ajax-loader.gif',
commentBrightImage: '/static/_static/comment-bright.png',
upArrow: '/static/_static/up.png',
downArrow: '/static/_static/down.png',
upArrowPressed: '/static/_static/up-pressed.png',
downArrowPressed: '/static/_static/down-pressed.png',
voting: false,
moderator: false
};
if (typeof COMMENT_OPTIONS != "undefined") {
opts = jQuery.extend(opts, COMMENT_OPTIONS);
}
var popupTemplate = '\
<div class="sphinx-comments" id="sc<%id%>">\
<p class="sort-options">\
Sort by:\
<a href="#" class="sort-option byrating">best rated</a>\
<a href="#" class="sort-option byascage">newest</a>\
<a href="#" class="sort-option byage">oldest</a>\
</p>\
<div class="comment-header">Comments</div>\
<div class="comment-loading" id="cn<%id%>">\
loading comments... <img src="<%loadingImage%>" alt="" /></div>\
<ul id="cl<%id%>" class="comment-ul"></ul>\
<div id="ca<%id%>">\
<p class="add-a-comment">Add a comment\
(<a href="#" class="comment-markup" id="ab<%id%>">markup</a>):</p>\
<div class="comment-markup-box" id="mb<%id%>">\
reStructured text markup: <i>*emph*</i>, <b>**strong**</b>, \
<code>``code``</code>, \
code blocks: <code>::</code> and an indented block after blank line</div>\
<form method="post" id="cf<%id%>" class="comment-form" action="">\
<textarea name="comment" cols="80"></textarea>\
<p class="propose-button">\
<a href="#" id="pc<%id%>" class="show-propose-change">\
Propose a change ▹\
</a>\
<a href="#" id="hc<%id%>" class="hide-propose-change">\
Propose a change ▿\
</a>\
</p>\
<textarea name="proposal" id="pt<%id%>" cols="80"\
spellcheck="false"></textarea>\
<input type="submit" value="Add comment" />\
<input type="hidden" name="node" value="<%id%>" />\
<input type="hidden" name="parent" value="" />\
</form>\
</div>\
</div>';
var commentTemplate = '\
<div id="cd<%id%>" class="sphinx-comment<%css_class%>">\
<div class="vote">\
<div class="arrow">\
<a href="#" id="uv<%id%>" class="vote" title="vote up">\
<img src="<%upArrow%>" />\
</a>\
<a href="#" id="uu<%id%>" class="un vote" title="vote up">\
<img src="<%upArrowPressed%>" />\
</a>\
</div>\
<div class="arrow">\
<a href="#" id="dv<%id%>" class="vote" title="vote down">\
<img src="<%downArrow%>" id="da<%id%>" />\
</a>\
<a href="#" id="du<%id%>" class="un vote" title="vote down">\
<img src="<%downArrowPressed%>" />\
</a>\
</div>\
</div>\
<div class="comment-content">\
<p class="tagline comment">\
<span class="user-id"><%username%></span>\
<span class="rating"><%pretty_rating%></span>\
<span class="delta"><%time.delta%></span>\
</p>\
<div class="comment-text comment"><#text#></div>\
<p class="comment-opts comment">\
<a href="#" class="reply hidden" id="rl<%id%>">reply ▹</a>\
<a href="#" class="close-reply" id="cr<%id%>">reply ▿</a>\
<a href="#" id="sp<%id%>" class="show-proposal">proposal ▹</a>\
<a href="#" id="hp<%id%>" class="hide-proposal">proposal ▿</a>\
<a href="#" id="dc<%id%>" class="delete-comment hidden">delete</a>\
<span id="cm<%id%>" class="moderation hidden">\
<a href="#" id="ac<%id%>" class="accept-comment">accept</a>\
</span>\
</p>\
<pre class="proposal" id="pr<%id%>">\
<#proposal_diff#>\
</pre>\
<ul class="comment-children" id="cl<%id%>"></ul>\
</div>\
<div class="clearleft"></div>\
</div>\
</div>';
var replyTemplate = '\
<li>\
<div class="reply-div" id="rd<%id%>">\
<form id="rf<%id%>">\
<textarea name="comment" cols="80"></textarea>\
<input type="submit" value="Add reply" />\
<input type="button" value="Cancel" />\
<input type="hidden" name="parent" value="<%id%>" />\
<input type="hidden" name="node" value="" />\
</form>\
</div>\
</li>';
$(document).ready(function() {
init();
});
})(jQuery);
$(document).ready(function() {
// add comment anchors for all paragraphs that are commentable
$('.sphinx-has-comment').comment();
// highlight search words in search results
$("div.context").each(function() {
var params = $.getQueryParameters();
var terms = (params.q) ? params.q[0].split(/\s+/) : [];
var result = $(this);
$.each(terms, function() {
result.highlightText(this.toLowerCase(), 'highlighted');
});
});
// directly open comment window if requested
var anchor = document.location.hash;
if (anchor.substring(0, 9) == '#comment-') {
$('#ao' + anchor.substring(9)).click();
document.location.hash = '#s' + anchor.substring(9);
}
});
| {
"pile_set_name": "Github"
} |
@model Kooboo.CMS.Sites.Models.FileResource
@{
ViewBag.Title = "Create file";
Layout = "~/Views/Shared/Blank.cshtml";
}
@section Panel{
<ul class="panel">
<li>
<a data-ajaxform="">
@Html.IconImage("save") @("Save".Localize())</a>
</li>
@foreach (var group in Kooboo.CMS.Sites.Extension.UI.TopToolbar.ToolbarButtons.GetToolbarButtons(ViewContext.RequestContext))
{
@Html.Partial("_ToolbarGroupButtons", group)
}
<li>
<a href="@ViewContext.RequestContext.GetRequestValue("return")">
@Html.IconImage("cancel") @("Back".Localize())</a>
</li>
</ul>
}
<div class="common-form">
<h1 class="title">@ViewBag.Title</h1>
@using (Html.BeginForm())
{
@Html.ValidationSummary(true)
@Html.Hidden("folderPath", ViewContext.RequestContext.GetRequestValue("folderPath"))
@Html.Hidden("type", ViewContext.RequestContext.GetRequestValue("type"))
@Html.Hidden("SiteName", ViewContext.RequestContext.GetRequestValue("SiteName"))
<table>
<tbody>
@Html.EditorFor(m => m.Name)
@Html.EditorFor(m => m.FileExtension)
</tbody>
</table>
<table>
<tbody>
@Html.EditorFor(m => m.Body)
</tbody>
</table>
}
</div>
| {
"pile_set_name": "Github"
} |
import de.fayard.versions.setupVersionPlaceholdersResolving
buildscript {
repositories {
gradlePluginPortal()
mavenCentral()
}
// Didn't find a way to use classpath from composite build, so we are hardcoding a
// version expected to be released in one of the configured repositories.
dependencies.classpath("de.fayard:refreshVersions:0.8.3")
}
pluginManagement {
plugins {
id("com.gradle.enterprise").version("3.0")
}
repositories {
gradlePluginPortal()
mavenLocal()
mavenCentral()
maven("https://dl.bintray.com/kotlin/kotlin-eap")
}
}
setupVersionPlaceholdersResolving()
rootProject.name = "sample-bleeding-edge"
| {
"pile_set_name": "Github"
} |
/*
* This file is part of RskJ
* Copyright (C) 2018 RSK Labs Ltd.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package co.rsk.jsonrpc;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Test;
import java.io.IOException;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.*;
public class JsonRpcResultResponseTest {
private ObjectMapper serializer = new ObjectMapper();
@Test
public void serializeResponseWithResult() throws IOException {
String message = "{\"jsonrpc\":\"2.0\",\"id\":48,\"result\":true}";
assertThat(
serializer.writeValueAsString(
new JsonRpcResultResponse(48, new JsonRpcBooleanResult(true))
),
is(message)
);
}
} | {
"pile_set_name": "Github"
} |
/****************************************************************************
**
** Copyright (C) 2016 Klaralvdalens Datakonsult AB (KDAB).
** Contact: https://www.qt.io/licensing/
**
** This file is part of the Qt3D module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 3 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL3 included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 3 requirements
** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 2.0 or (at your option) the GNU General
** Public license version 3 or any later version approved by the KDE Free
** Qt Foundation. The licenses are as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-2.0.html and
** https://www.gnu.org/licenses/gpl-3.0.html.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QT3DRENDER_QPAINTEDTEXTURE_P_H
#define QT3DRENDER_QPAINTEDTEXTURE_P_H
//
// W A R N I N G
// -------------
//
// This file is not part of the Qt API. It exists for the convenience
// of other Qt classes. This header file may change from version to
// version without notice, or even be removed.
//
// We mean it.
//
#include <Qt3DRender/private/qabstracttextureimage_p.h>
#include <Qt3DRender/qtextureimagedatagenerator.h>
#include <Qt3DRender/qpaintedtextureimage.h>
#include <Qt3DRender/private/qt3drender_global_p.h>
QT_BEGIN_NAMESPACE
class QImage;
class QPainter;
namespace Qt3DRender {
class Q_3DRENDERSHARED_PRIVATE_EXPORT QPaintedTextureImagePrivate : public QAbstractTextureImagePrivate
{
public:
QPaintedTextureImagePrivate();
~QPaintedTextureImagePrivate();
Q_DECLARE_PUBLIC(QPaintedTextureImage)
QSize m_imageSize;
qreal m_devicePixelRatio;
QScopedPointer<QImage> m_image;
QTextureImageDataGeneratorPtr m_currentGenerator;
// gets increased each time the image is re-painted.
// used to distinguish between different generators
quint64 m_generation;
void repaint();
};
class QPaintedTextureImageDataGenerator : public QTextureImageDataGenerator
{
public:
QPaintedTextureImageDataGenerator(const QImage &image, int gen, Qt3DCore::QNodeId texId);
~QPaintedTextureImageDataGenerator();
// Will be executed from within a QAspectJob
QTextureImageDataPtr operator ()() final;
bool operator ==(const QTextureImageDataGenerator &other) const final;
QT3D_FUNCTOR(QPaintedTextureImageDataGenerator)
private:
QImage m_image;
quint64 m_generation;
Qt3DCore::QNodeId m_paintedTextureImageId;
};
} // namespace Qt3DRender
QT_END_NAMESPACE
#endif // QT3DRENDER_QPAINTEDTEXTURE_P_H
| {
"pile_set_name": "Github"
} |
**Senate Calendar API**
=======================
.. note:: Assembly calendar data is not sent to us at this time; chamber must be 'senate'.
Get a single calendar
---------------------
**Usage**
::
Full calendar:
/api/3/calendars/{year}/{calendarNumber}
Floor calendar:
/api/3/calendars/{year}/{calendarNumber}/floor
Supplemental calendar:
/api/3/calendars/{year}/{calendarNumber}/{versionCharacter}
Active list:
/api/3/calendars/{year}/{calendarNumber}/{sequenceNumber}
**Optional Params**
+-----------+---------+---------------------------------------------------------------------------------------------+
| Parameter | Values | Description |
+===========+=========+=============================================================================================+
| full | boolean | (default true) Set to true to get a full calendar response instead of a summary. |
+-----------+---------+---------------------------------------------------------------------------------------------+
**Examples**
::
/api/3/calendars/2014/54 (Get calendar 54 of 2014)
/api/3/calendars/2014/54?full=false (Get a summary of calendar 54)
/api/3/calendars/2014/54/0 (Get the base active list for calendar 54)
/api/3/calendars/2014/54/floor (Get the floor calendar for calendar 54)
/api/3/calendars/2014/54/B (Get supplemental calendar B of calendar 54)
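For reference, here is a minimal client sketch in Python. It is an illustration only: the base
URL and the key query parameter are assumptions, so substitute whatever host and authentication
your deployment actually uses.

.. code-block:: python

    import requests

    # Hypothetical base URL and API key; adjust these for your deployment.
    BASE_URL = "http://localhost:8080"
    API_KEY = "YOUR_API_KEY"

    def get_calendar(year, calendar_number, full=True):
        """Fetch a single calendar and return the parsed JSON result."""
        url = "{}/api/3/calendars/{}/{}".format(BASE_URL, year, calendar_number)
        resp = requests.get(url, params={"key": API_KEY, "full": str(full).lower()})
        resp.raise_for_status()
        payload = resp.json()
        if not payload.get("success"):
            raise RuntimeError(payload.get("message", "request failed"))
        return payload["result"]

    calendar = get_calendar(2014, 54)
    print(calendar["calDate"], calendar["activeLists"]["size"])
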
**Sample Responses**
Full calendar:
.. code-block:: javascript
{
"success": true,
"message": "",
"responseType": "calendar",
"result": {
"year": 2014, // Year the calendar was published
"calendarNumber": 54, // Incremental identifier for calendars within a year
"floorCalendar": {...}, // See supplemental/floor calendar response result
"supplementalCalendars": {
"items": {...}, // Map of supplemental version characters to
// supplemental calendar response results
"size": 2
},
"activeLists": {
"items": {...}, // Map of sequence numbers to active list response results
"size": 3
},
"calDate": "2014-06-20" // The date this calendar was active for
}
}
Supplemental/Floor calendar:
.. code-block:: javascript
{
"success": true,
"message": "",
"responseType": "calendar-floor", // "calendar-supplemental" if the response is a supplemental
"result": {
"year": 2014, // The year the calendar was released
"calendarNumber": 54, // Incremental identifier for calendars within a year
"version": "floor", // The supplemental version, "floor" or
// a single capital character
"calDate": "2014-06-20",
"releaseDateTime": "2014-06-20T02:01", // The date this supplemental was released
"entriesBySection": { // A listing of bills mapped to their floor status
"items": {
"THIRD_READING": { // List of bills on their third reading
"items": [
{ // Modified bill response (link below)
"basePrintNo": "A5625",
"session": 2013,
"printNo": "A5625A",
"billType": {
"chamber": "ASSEMBLY",
"desc": "Assembly",
"resolution": false
},
"title": "Extends the expiration of the New York state French and Indian war 250th anniversary commemoration commission until December 31, 2015",
"activeVersion": "A",
"year": 2013,
"publishedDateTime": "2013-03-04T14:32:46",
"substitutedBy": null,
"sponsor": {
"member": {
"memberId": 466,
"shortName": "ENGLEBRIGHT",
"sessionYear": 2013,
"fullName": "Steven Englebright",
"districtCode": 4
},
"budget": false,
"rules": false
},
"billCalNo": 1090, // The calendar number that ids this bill
// within all calendars
"sectionType": "THIRD_READING", // The floor status of this bill
"subBillInfo": { // Bill info response for a substituted bill
"basePrintNo": "S7605",
"session": 2013,
"printNo": "S7605",
"billType": {
"chamber": "SENATE",
"desc": "Senate",
"resolution": false
},
"title": "Extends the expiration of the New York state French and Indian war 250th anniversary commemoration commission until December 31, 2015",
"activeVersion": "",
"year": 2014,
"publishedDateTime": "2014-05-15T18:17:31",
"substitutedBy": null,
"sponsor": {
"member": {
"memberId": 385,
"shortName": "ESPAILLAT",
"sessionYear": 2013,
"fullName": "Adriano Espaillat",
"districtCode": 31
},
"budget": false,
"rules": false
}
},
"billHigh": false // Set to true if this is a high priority bill
},
...
],
"size": 284
},
"STARRED_ON_THIRD_READING": { // Another floor status. All statuses include:
// ORDER_OF_THE_FIRST_REPORT, ORDER_OF_THE_SECOND_REPORT,
// ORDER_OF_THE_SPECIAL_REPORT, THIRD_READING,
// THIRD_READING_FROM_SPECIAL_REPORT,
// STARRED_ON_THIRD_READING
"items": [...],
"size": 3
}
},
"size": 2
}
}
}
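To give a sense of how the ``entriesBySection`` structure is traversed, here is a minimal Python sketch. It assumes ``supplemental`` already holds a ``result`` object shaped like the sample above; the printed labels are illustrative only.
.. code-block:: python
def summarize_sections(supplemental):
    """Print each floor status with its bill count and entries."""
    for status, section in supplemental["entriesBySection"]["items"].items():
        print(f"{status}: {section['size']} bills")
        for entry in section["items"]:
            sub = entry.get("subBillInfo")  # when present, holds the substituted bill's info
            extra = f" (sub bill {sub['printNo']})" if sub else ""
            print(f"  cal no {entry['billCalNo']}: {entry['printNo']}{extra}")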
Active List:
.. code-block:: javascript
{
"success": true,
"message": "",
"responseType": "calendar-activelist",
"result": {
"year": 2014, // The year the calendar was released
"calendarNumber": 54, // Incremental identifier for calendars within a year
"sequenceNumber": 0, // Indicates publish sequence of active lists
"calDate": "2014-06-20", // The date this calendar was active
"releaseDateTime": "2014-06-20T04:28:48", // The date and time this active list was released
"notes": null, // Notes regarding the active list, pretty much always null
"entries": { // List of bills on this active list
"items": [
{ // Modified bill response (see above link)
"basePrintNo": "S4779",
"session": 2013,
"printNo": "S4779B",
"billType": {
"chamber": "SENATE",
"desc": "Senate",
"resolution": false
},
"title": "Relates to inheritance by children conceived after the death of a genetic parent",
"activeVersion": "B",
"year": 2013,
"publishedDateTime": "2013-04-23T15:04:37",
"substitutedBy": {
"basePrintNo": "A7461",
"session": 2013
},
"sponsor": {
"member": {
"memberId": 413,
"shortName": "BONACIC",
"sessionYear": 2013,
"fullName": "John J. Bonacic",
"districtCode": 42
},
"budget": false,
"rules": false
},
"billCalNo": 192 // The calendar number that ids this bill
// within all calendars
},
...
],
"size": 31
}
}
}
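Reading an active list follows the same pattern. The short sketch below, assuming ``active_list`` is the ``result`` object above, collects the calendar number and print number of each entry.
.. code-block:: python
def active_list_bills(active_list):
    """Return (billCalNo, printNo) pairs for each entry on the active list."""
    return [(entry["billCalNo"], entry["printNo"])
            for entry in active_list["entries"]["items"]]
# e.g. active_list_bills(result) -> [(192, 'S4779B'), ...]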
Get a listing of calendars
--------------------------
**Usage**
::
Full calendars:
/api/3/calendars/{year}
Supplemental/Floor calendars:
/api/3/calendars/{year}/supplemental
Active lists:
/api/3/calendars/{year}/activelist
.. _cal-list-params:
**Optional Params**
+-----------+---------+---------------------------------------------------------------------------------------------+
| Parameter | Values | Description |
+===========+=========+=============================================================================================+
| full | boolean | (default false) Set to true to see the full calendar responses instead of the summaries. |
+-----------+---------+---------------------------------------------------------------------------------------------+
| limit     | number  | (default 100) Limit the number of results                                                   |
+-----------+---------+---------------------------------------------------------------------------------------------+
| offset | number | (default 1) Start results from offset |
+-----------+---------+---------------------------------------------------------------------------------------------+
| order     | string  | (default 'ASC') Determines the order of the calendar responses. Responses are ordered by    |
| | | calendar number and then either sequenceNumber or version if they are active lists or |
| | | supplementals respectively. |
+-----------+---------+---------------------------------------------------------------------------------------------+
**Examples**
::
/api/3/calendars/2014?full=true (Get all calendar data from 2014)
/api/3/calendars/2014?limit=1&order=DESC (Get the latest calendar from 2014)
/api/3/calendars/2014/activelist?limit=5 (Get the first 5 active lists of 2014)
/api/3/calendars/2014/supplemental?limit=5&offset=5 (Get the second 5 supplementals of 2014)
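For longer listings, the limit/offset paging can be walked in a loop. The sketch below is one way to do this in Python; the host is a placeholder, and it assumes the listing uses the same paging envelope (``total``, ``offsetEnd``, ``result.items``) shown in the update responses further down.
.. code-block:: python
import requests
BASE_URL = "https://legislation.example.gov"  # placeholder host
def iter_active_lists(year, page_size=100):
    """Yield active list summaries for a year, one page at a time."""
    offset = 1
    while True:
        resp = requests.get(f"{BASE_URL}/api/3/calendars/{year}/activelist",
                            params={"limit": page_size, "offset": offset})
        resp.raise_for_status()
        body = resp.json()
        yield from body["result"]["items"]
        if body["offsetEnd"] >= body["total"]:
            break
        offset = body["offsetEnd"] + 1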
Search for calendars
--------------------
Read our :doc:`search API docs<search_api>` for info on how to construct search terms.
The calendar search index is composed of full calendar responses
(i.e. the JSON response returned when requesting a single calendar), so query and sort strings are based on that response
structure.
**Usage**
Search across all session years
::
(GET) /api/3/calendars/search?term=YOUR_TERM
Search within a year
::
(GET) /api/3/calendars/{year}/search?term=YOUR_TERM
**Required Params**
+-----------+--------------------+--------------------------------------------------------+
| Parameter | Values | Description |
+===========+====================+========================================================+
| term | string | :ref:`ElasticSearch query string<search-term>` |
+-----------+--------------------+--------------------------------------------------------+
**Optional Params**
+--------------+--------------------+---------------------------------------------------------------------------------+
| Parameter | Values | Description |
+==============+====================+=================================================================================+
| sort | string | :ref:`ElasticSearch sort string<search-sort>` |
+--------------+--------------------+---------------------------------------------------------------------------------+
Also takes all :ref:`calendar listing optional params<cal-list-params>`, with the exception of order. A usage sketch follows below.
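A search request simply adds the required ``term`` parameter (plus an optional ``sort``). The sketch below runs against the same placeholder host; the query and sort strings are illustrative only.
.. code-block:: python
import requests
BASE_URL = "https://legislation.example.gov"  # placeholder host
def search_calendars(term, year=None, sort=None, limit=25, offset=1):
    """Search calendars, optionally restricted to a single year."""
    path = f"/api/3/calendars/{year}/search" if year else "/api/3/calendars/search"
    params = {"term": term, "limit": limit, "offset": offset}
    if sort:
        params["sort"] = sort
    resp = requests.get(BASE_URL + path, params=params)
    resp.raise_for_status()
    return resp.json()
# Illustrative query: calendars active in June 2014
results = search_calendars("calDate:[2014-06-01 TO 2014-06-30]", year=2014)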
Get calendar updates
--------------------
To identify which calendars have received updates within a given time period, you can use the calendar updates API.
**Usage**
List of calendars updated within the past seven days
::
(GET) /api/3/calendars/updates
List of calendars updated after the given date/time
::
(GET) /api/3/calendars/updates/{fromDateTime}
List of calendars updated during the given date/time range
::
(GET) /api/3/calendars/updates/{fromDateTime}/{toDateTime}
.. note:: The 'fromDateTime' and 'toDateTime' parameters must be given in ISO 8601 date-time format.
For example, December 10, 2014, 1:30:02 PM should be entered as 2014-12-10T13:30:02.
The range is exclusive of fromDateTime and inclusive of toDateTime.
**Optional Params**
+-----------+--------------------+--------------------------------------------------------+
| Parameter | Values | Description |
+===========+====================+========================================================+
| detail | boolean | Set to true to see `detailed update digests`_ |
+-----------+--------------------+--------------------------------------------------------+
| order | string (asc|desc) | Order the results by update date/time |
+-----------+--------------------+--------------------------------------------------------+
**Examples**
Calendars that were updated between January 1st and January 20th of 2019
::
(GET) /api/3/calendars/updates/2019-01-01T00:00:00/2019-01-20T00:00:00
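Because the path segments are plain ISO 8601 date-times, they can be built directly from ``datetime`` objects, as in this sketch (placeholder host and helper name again):
.. code-block:: python
from datetime import datetime
import requests
BASE_URL = "https://legislation.example.gov"  # placeholder host
def calendar_updates(from_dt, to_dt=None, detail=False):
    """Fetch calendar update tokens for a date/time window."""
    path = f"/api/3/calendars/updates/{from_dt.isoformat(timespec='seconds')}"
    if to_dt is not None:
        path += f"/{to_dt.isoformat(timespec='seconds')}"
    resp = requests.get(BASE_URL + path, params={"detail": str(detail).lower()})
    resp.raise_for_status()
    return resp.json()["result"]["items"]
tokens = calendar_updates(datetime(2019, 1, 1), datetime(2019, 1, 20))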
.. _calendar-update-token-response:
**Response (detail=false)**
.. code-block:: javascript
{
"success" : true,
"message" : "",
"responseType" : "update-token list",
"total" : 4,
"offsetStart" : 1,
"offsetEnd" : 4,
"limit" : 100,
"result" : {
"items" : [
{
"id" : { // The year and calendar number of the updated calendar
"year" : 2019,
"calendarNumber" : 1
},
// The id of the reference that triggered the update
"sourceId" : "2019-01-14-15.55.39.563595_SENCAL_00001.XML-1-CALENDAR",
// The publish date time of the reference source
"sourceDateTime" : "2019-01-14T15:55:39.563595",
// The date and time that the reference was processed
"processedDateTime" : "2019-01-14T16:01:20.389704"
},
... (truncated)
],
"size" : 2
}
}
Get specific calendar updates
-----------------------------
**Usage**
Get updates for a calendar within a datetime range
::
(GET) /api/3/calendars/{year}/{calendarNumber}/updates/{fromDateTime}/{toDateTime}
Get all updates for a calendar
::
(GET) /api/3/calendars/{year}/{calendarNumber}/updates
**Optional Params**
+-----------+----------------------+---------------------------------------------------------------+
| Parameter | Values | Description |
+===========+======================+===============================================================+
| type      | (processed|published)| The type of calendar update (see below for explanation)      |
+-----------+----------------------+---------------------------------------------------------------+
| order | string (asc|desc) | Order the results by update date/time |
+-----------+----------------------+---------------------------------------------------------------+
| limit     | number               | (default 100) Limit the number of results                     |
+-----------+----------------------+---------------------------------------------------------------+
| offset | number | (default 1) Start results from offset |
+-----------+----------------------+---------------------------------------------------------------+
**Examples**
Get updates for calendar 54 of 2014 that occurred between 9 AM and 5 PM on June 20th, 2014
::
(GET) /api/3/calendars/2014/54/updates/2014-06-20T09:00:00/2014-06-20T17:00:00
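The per-calendar updates endpoint works the same way. Here is a short sketch that pulls published update digests for one calendar; as before, the host and helper name are placeholders.
.. code-block:: python
import requests
BASE_URL = "https://legislation.example.gov"  # placeholder host
def calendar_update_digests(year, calendar_number, update_type="published", order="asc"):
    """Fetch update digests for a single calendar."""
    url = f"{BASE_URL}/api/3/calendars/{year}/{calendar_number}/updates"
    resp = requests.get(url, params={"type": update_type, "order": order})
    resp.raise_for_status()
    return resp.json()["result"]["items"]
for digest in calendar_update_digests(2014, 54):
    print(digest["action"], digest["scope"], digest["processedDateTime"])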
.. _calendar-update-digest-response:
**Response (type=published)**
.. _`detailed update digests`:
.. code-block:: javascript
{
"success" : true,
"message" : "",
"responseType" : "update-digest list",
"total" : 3,
"offsetStart" : 1,
"offsetEnd" : 3,
"limit" : 0,
"result" : {
"items" : [
{
"id" : {
"year" : 2014,
"calendarNumber" : 54
},
"sourceId" : "SOBI.D140620.T153915.TXT-1-CALENDAR",
"sourceDateTime" : "2014-06-20T15:39:15",
"processedDateTime" : "2014-12-15T15:21:34.786472",
"action" : "INSERT", // The update action that was performed
"scope" : "Calendar Active List", // The type of sub calendar that was updated
"fields" : { // Updated fields
"publishedDateTime" : "2014-06-20 05:28:51",
"notes" : "",
"sequenceNo" : "0",
"createdDateTime" : "2014-12-15 15:21:34.786472",
"id" : "302",
"calendarDate" : "2014-06-20",
"releaseDateTime" : "2014-06-20 04:28:48"
}
},
... (truncated)
],
"size" : 3
}
}
.. warning:: By default the type is set to 'processed'. Because we periodically reprocess our data, this specific API call may not produce the exact result shown; however, the response you receive will follow the format in the example. | {
"pile_set_name": "Github"
} |
<?php
namespace Illuminate\Queue\Failed;
class NullFailedJobProvider implements FailedJobProviderInterface
{
/**
* Log a failed job into storage.
*
* @param string $connection
* @param string $queue
* @param string $payload
* @param \Exception $exception
* @return int|null
*/
public function log($connection, $queue, $payload, $exception)
{
//
}
/**
* Get a list of all of the failed jobs.
*
* @return array
*/
public function all()
{
return [];
}
/**
* Get a single failed job.
*
* @param mixed $id
* @return object|null
*/
public function find($id)
{
//
}
/**
* Delete a single failed job from storage.
*
* @param mixed $id
* @return bool
*/
public function forget($id)
{
return true;
}
/**
* Flush all of the failed jobs from storage.
*
* @return void
*/
public function flush()
{
//
}
}
| {
"pile_set_name": "Github"
} |
/*
* Tencent is pleased to support the open source community by making TKEStack
* available.
*
* Copyright (C) 2012-2020 Tencent. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at
*
* https://opensource.org/licenses/Apache-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
// Code generated by informer-gen. DO NOT EDIT.
package auth
import (
v1 "tkestack.io/tke/api/client/informers/externalversions/auth/v1"
internalinterfaces "tkestack.io/tke/api/client/informers/externalversions/internalinterfaces"
)
// Interface provides access to each of this group's versions.
type Interface interface {
// V1 provides access to shared informers for resources in V1.
V1() v1.Interface
}
type group struct {
factory internalinterfaces.SharedInformerFactory
namespace string
tweakListOptions internalinterfaces.TweakListOptionsFunc
}
// New returns a new Interface.
func New(f internalinterfaces.SharedInformerFactory, namespace string, tweakListOptions internalinterfaces.TweakListOptionsFunc) Interface {
return &group{factory: f, namespace: namespace, tweakListOptions: tweakListOptions}
}
// V1 returns a new v1.Interface.
func (g *group) V1() v1.Interface {
return v1.New(g.factory, g.namespace, g.tweakListOptions)
}
| {
"pile_set_name": "Github"
} |
/*! -*-c++-*-
@file FaceDetectorAndTrackerNN.h
@author David Hirvonen
@brief A nearest neighbor (noop) face tracking variant.
\copyright Copyright 2014-2016 Elucideye, Inc. All rights reserved.
\license{This project is released under the 3 Clause BSD License.}
*/
#include "drishti/face/FaceDetectorAndTrackerNN.h"
DRISHTI_FACE_NAMESPACE_BEGIN
// =============================
TrackerNN::TrackerNN()
= default;
TrackerNN::~TrackerNN()
= default;
std::vector<cv::Point2f> TrackerNN::getFeatures() const
{
std::vector<cv::Point2f> features;
return features;
}
void TrackerNN::initialize(const cv::Mat1b& image, const FaceModel& face)
{
m_face = face;
}
bool TrackerNN::update(const cv::Mat1b& image, FaceModel& face)
{
face = m_face;
return true;
}
DRISHTI_FACE_NAMESPACE_END
| {
"pile_set_name": "Github"
} |
{
"created_at": "2015-02-27T22:28:21.336740",
"description": "shadowsocks client for OSX and non-jailbroken iOS",
"fork": false,
"full_name": "shadowsocks/shadowsocks-iOS",
"language": "Objective-C",
"updated_at": "2015-03-10T07:02:11.957622"
} | {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="Windows-1252"?>
<VisualStudioProject
Name="HTTPTimeServer"
Version="9.00"
ProjectType="Visual C++"
ProjectGUID="{18A0143A-444A-38E3-838C-1ACFBE4EE18C}"
RootNamespace="HTTPTimeServer"
Keyword="Win32Proj">
<Platforms>
<Platform
Name="Win32"/>
</Platforms>
<ToolFiles/>
<Configurations>
<Configuration
Name="debug_shared|Win32"
OutputDirectory="obj\$(ConfigurationName)"
IntermediateDirectory="obj\$(ConfigurationName)"
ConfigurationType="1"
CharacterSet="2">
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCCLCompilerTool"
Optimization="0"
AdditionalIncludeDirectories=".\include;..\..\..\Foundation\include;..\..\..\XML\include;..\..\..\Util\include;..\..\..\Net\include"
PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;WINVER=0x0500;"
StringPooling="true"
MinimalRebuild="true"
BasicRuntimeChecks="3"
RuntimeLibrary="3"
BufferSecurityCheck="true"
TreatWChar_tAsBuiltInType="true"
ForceConformanceInForLoopScope="true"
RuntimeTypeInfo="true"
UsePrecompiledHeader="0"
WarningLevel="3"
Detect64BitPortabilityProblems="false"
DebugInformationFormat="3"
CompileAs="0"
DisableSpecificWarnings=""
AdditionalOptions=""/>
<Tool
Name="VCManagedResourceCompilerTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCLinkerTool"
AdditionalDependencies="ws2_32.lib iphlpapi.lib"
OutputFile="bin\HTTPTimeServerd.exe"
LinkIncremental="2"
AdditionalLibraryDirectories="..\..\..\lib"
SuppressStartupBanner="true"
GenerateDebugInformation="true"
ProgramDatabaseFile="bin\HTTPTimeServerd.pdb"
SubSystem="1"
TargetMachine="1"
AdditionalOptions=""/>
<Tool
Name="VCALinkTool"/>
<Tool
Name="VCManifestTool"/>
<Tool
Name="VCXDCMakeTool"/>
<Tool
Name="VCBscMakeTool"/>
<Tool
Name="VCFxCopTool"/>
<Tool
Name="VCAppVerifierTool"/>
<Tool
Name="VCPostBuildEventTool"/>
</Configuration>
<Configuration
Name="release_shared|Win32"
OutputDirectory="obj\$(ConfigurationName)"
IntermediateDirectory="obj\$(ConfigurationName)"
ConfigurationType="1"
CharacterSet="2">
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCCLCompilerTool"
Optimization="4"
InlineFunctionExpansion="1"
EnableIntrinsicFunctions="true"
FavorSizeOrSpeed="1"
OmitFramePointers="true"
AdditionalIncludeDirectories=".\include;..\..\..\Foundation\include;..\..\..\XML\include;..\..\..\Util\include;..\..\..\Net\include"
PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;WINVER=0x0500;"
StringPooling="true"
RuntimeLibrary="2"
BufferSecurityCheck="false"
TreatWChar_tAsBuiltInType="true"
ForceConformanceInForLoopScope="true"
RuntimeTypeInfo="true"
UsePrecompiledHeader="0"
WarningLevel="3"
Detect64BitPortabilityProblems="false"
DebugInformationFormat="0"
CompileAs="0"
DisableSpecificWarnings=""
AdditionalOptions=""/>
<Tool
Name="VCManagedResourceCompilerTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCLinkerTool"
AdditionalDependencies="ws2_32.lib iphlpapi.lib"
OutputFile="bin\HTTPTimeServer.exe"
LinkIncremental="1"
AdditionalLibraryDirectories="..\..\..\lib"
GenerateDebugInformation="false"
SubSystem="1"
OptimizeReferences="2"
EnableCOMDATFolding="2"
TargetMachine="1"
AdditionalOptions=""/>
<Tool
Name="VCALinkTool"/>
<Tool
Name="VCManifestTool"/>
<Tool
Name="VCXDCMakeTool"/>
<Tool
Name="VCBscMakeTool"/>
<Tool
Name="VCFxCopTool"/>
<Tool
Name="VCAppVerifierTool"/>
<Tool
Name="VCPostBuildEventTool"/>
</Configuration>
<Configuration
Name="debug_static_mt|Win32"
OutputDirectory="obj\$(ConfigurationName)"
IntermediateDirectory="obj\$(ConfigurationName)"
ConfigurationType="1"
CharacterSet="2">
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCCLCompilerTool"
Optimization="4"
AdditionalIncludeDirectories=".\include;..\..\..\Foundation\include;..\..\..\XML\include;..\..\..\Util\include;..\..\..\Net\include"
PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;WINVER=0x0500;POCO_STATIC;"
StringPooling="true"
MinimalRebuild="true"
BasicRuntimeChecks="3"
RuntimeLibrary="1"
BufferSecurityCheck="true"
TreatWChar_tAsBuiltInType="true"
ForceConformanceInForLoopScope="true"
RuntimeTypeInfo="true"
UsePrecompiledHeader="0"
WarningLevel="3"
Detect64BitPortabilityProblems="false"
DebugInformationFormat="3"
CompileAs="0"
DisableSpecificWarnings=""
AdditionalOptions=""/>
<Tool
Name="VCManagedResourceCompilerTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCLinkerTool"
AdditionalDependencies="iphlpapi.lib winmm.lib ws2_32.lib iphlpapi.lib"
OutputFile="bin\static_mt\HTTPTimeServerd.exe"
LinkIncremental="2"
AdditionalLibraryDirectories="..\..\..\lib"
SuppressStartupBanner="true"
GenerateDebugInformation="true"
ProgramDatabaseFile="bin\static_mt\HTTPTimeServerd.pdb"
SubSystem="1"
TargetMachine="1"
AdditionalOptions=""/>
<Tool
Name="VCALinkTool"/>
<Tool
Name="VCManifestTool"/>
<Tool
Name="VCXDCMakeTool"/>
<Tool
Name="VCBscMakeTool"/>
<Tool
Name="VCFxCopTool"/>
<Tool
Name="VCAppVerifierTool"/>
<Tool
Name="VCPostBuildEventTool"/>
</Configuration>
<Configuration
Name="release_static_mt|Win32"
OutputDirectory="obj\$(ConfigurationName)"
IntermediateDirectory="obj\$(ConfigurationName)"
ConfigurationType="1"
CharacterSet="2">
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCCLCompilerTool"
Optimization="4"
InlineFunctionExpansion="1"
EnableIntrinsicFunctions="true"
FavorSizeOrSpeed="1"
OmitFramePointers="true"
AdditionalIncludeDirectories=".\include;..\..\..\Foundation\include;..\..\..\XML\include;..\..\..\Util\include;..\..\..\Net\include"
PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;WINVER=0x0500;POCO_STATIC;"
StringPooling="true"
RuntimeLibrary="0"
BufferSecurityCheck="false"
TreatWChar_tAsBuiltInType="true"
ForceConformanceInForLoopScope="true"
RuntimeTypeInfo="true"
UsePrecompiledHeader="0"
WarningLevel="3"
Detect64BitPortabilityProblems="false"
DebugInformationFormat="0"
CompileAs="0"
DisableSpecificWarnings=""
AdditionalOptions=""/>
<Tool
Name="VCManagedResourceCompilerTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCLinkerTool"
AdditionalDependencies="iphlpapi.lib winmm.lib ws2_32.lib iphlpapi.lib"
OutputFile="bin\static_mt\HTTPTimeServer.exe"
LinkIncremental="1"
AdditionalLibraryDirectories="..\..\..\lib"
GenerateDebugInformation="false"
SubSystem="1"
OptimizeReferences="2"
EnableCOMDATFolding="2"
TargetMachine="1"
AdditionalOptions=""/>
<Tool
Name="VCALinkTool"/>
<Tool
Name="VCManifestTool"/>
<Tool
Name="VCXDCMakeTool"/>
<Tool
Name="VCBscMakeTool"/>
<Tool
Name="VCFxCopTool"/>
<Tool
Name="VCAppVerifierTool"/>
<Tool
Name="VCPostBuildEventTool"/>
</Configuration>
<Configuration
Name="debug_static_md|Win32"
OutputDirectory="obj\$(ConfigurationName)"
IntermediateDirectory="obj\$(ConfigurationName)"
ConfigurationType="1"
CharacterSet="2">
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCCLCompilerTool"
Optimization="4"
AdditionalIncludeDirectories=".\include;..\..\..\Foundation\include;..\..\..\XML\include;..\..\..\Util\include;..\..\..\Net\include"
PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;WINVER=0x0500;POCO_STATIC;"
StringPooling="true"
MinimalRebuild="true"
BasicRuntimeChecks="3"
RuntimeLibrary="3"
BufferSecurityCheck="true"
TreatWChar_tAsBuiltInType="true"
ForceConformanceInForLoopScope="true"
RuntimeTypeInfo="true"
UsePrecompiledHeader="0"
WarningLevel="3"
Detect64BitPortabilityProblems="false"
DebugInformationFormat="3"
CompileAs="0"
DisableSpecificWarnings=""
AdditionalOptions=""/>
<Tool
Name="VCManagedResourceCompilerTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCLinkerTool"
AdditionalDependencies="iphlpapi.lib winmm.lib ws2_32.lib iphlpapi.lib"
OutputFile="bin\static_md\HTTPTimeServerd.exe"
LinkIncremental="2"
AdditionalLibraryDirectories="..\..\..\lib"
SuppressStartupBanner="true"
GenerateDebugInformation="true"
ProgramDatabaseFile="bin\static_md\HTTPTimeServerd.pdb"
SubSystem="1"
TargetMachine="1"
AdditionalOptions=""/>
<Tool
Name="VCALinkTool"/>
<Tool
Name="VCManifestTool"/>
<Tool
Name="VCXDCMakeTool"/>
<Tool
Name="VCBscMakeTool"/>
<Tool
Name="VCFxCopTool"/>
<Tool
Name="VCAppVerifierTool"/>
<Tool
Name="VCPostBuildEventTool"/>
</Configuration>
<Configuration
Name="release_static_md|Win32"
OutputDirectory="obj\$(ConfigurationName)"
IntermediateDirectory="obj\$(ConfigurationName)"
ConfigurationType="1"
CharacterSet="2">
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCCLCompilerTool"
Optimization="4"
InlineFunctionExpansion="1"
EnableIntrinsicFunctions="true"
FavorSizeOrSpeed="1"
OmitFramePointers="true"
AdditionalIncludeDirectories=".\include;..\..\..\Foundation\include;..\..\..\XML\include;..\..\..\Util\include;..\..\..\Net\include"
PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;WINVER=0x0500;POCO_STATIC;"
StringPooling="true"
RuntimeLibrary="2"
BufferSecurityCheck="false"
TreatWChar_tAsBuiltInType="true"
ForceConformanceInForLoopScope="true"
RuntimeTypeInfo="true"
UsePrecompiledHeader="0"
WarningLevel="3"
Detect64BitPortabilityProblems="false"
DebugInformationFormat="0"
CompileAs="0"
DisableSpecificWarnings=""
AdditionalOptions=""/>
<Tool
Name="VCManagedResourceCompilerTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCLinkerTool"
AdditionalDependencies="iphlpapi.lib winmm.lib ws2_32.lib iphlpapi.lib"
OutputFile="bin\static_md\HTTPTimeServer.exe"
LinkIncremental="1"
AdditionalLibraryDirectories="..\..\..\lib"
GenerateDebugInformation="false"
SubSystem="1"
OptimizeReferences="2"
EnableCOMDATFolding="2"
TargetMachine="1"
AdditionalOptions=""/>
<Tool
Name="VCALinkTool"/>
<Tool
Name="VCManifestTool"/>
<Tool
Name="VCXDCMakeTool"/>
<Tool
Name="VCBscMakeTool"/>
<Tool
Name="VCFxCopTool"/>
<Tool
Name="VCAppVerifierTool"/>
<Tool
Name="VCPostBuildEventTool"/>
</Configuration>
</Configurations>
<References/>
<Files>
<Filter
Name="Page Files">
<File
RelativePath=".\src\TimeHandler.cpsp"/>
</Filter>
<Filter
Name="Source Files">
<File
RelativePath=".\src\HTTPTimeServerApp.cpp"/>
</Filter>
<Filter
Name="Generated Files">
<File
RelativePath=".\src\TimeHandler.cpp"/>
<File
RelativePath=".\src\TimeHandler.h"/>
</Filter>
</Files>
<Globals/>
</VisualStudioProject>
| {
"pile_set_name": "Github"
} |