text
stringlengths 2
100k
| meta
dict |
---|---|
package nme.media;
#if (!flash)
class SoundLoaderContext
{
public function new()
{
}
}
#else
typedef SoundLoaderContext = flash.media.SoundLoaderContext;
#end
| {
"pile_set_name": "Github"
} |
/**
* Select2 Georgian (Kartuli) translation.
*
* Author: Dimitri Kurashvili [email protected]
*/
(function ($) {
"use strict";
$.fn.select2.locales['ka'] = {
formatNoMatches: function () { return "แแแ แแแแซแแแแ"; },
formatInputTooShort: function (input, min) { var n = min - input.length; return "แแแฎแแแ แจแแแงแแแแแ แแแแแ " + n + " แกแแแแแแ"; },
formatInputTooLong: function (input, max) { var n = input.length - max; return "แแแฎแแแ แฌแแจแแแแ " + n + " แกแแแแแแ"; },
formatSelectionTooBig: function (limit) { return "แแฅแแแ แจแแแแซแแแแ แแฎแแแแ " + limit + " แฉแแแแฌแแ แแก แแแแแจแแแ"; },
formatLoadMore: function (pageNumber) { return "แจแแแแแแก แฉแแขแแแ แแแโฆ"; },
formatSearching: function () { return "แซแแแแโฆ"; }
};
$.extend($.fn.select2.defaults, $.fn.select2.locales['ka']);
})(jQuery);
| {
"pile_set_name": "Github"
} |
/*
* (C) Copyright 2003
* [email protected]
*
* SPDX-License-Identifier: GPL-2.0+
*/
#include <common.h>
#include <command.h>
#include <mach/au1x00.h>
#include <asm/mipsregs.h>
#include <asm/io.h>
DECLARE_GLOBAL_DATA_PTR;
int dram_init(void)
{
/* Sdram is setup by assembler code */
/* If memory could be changed, we should return the true value here */
gd->ram_size = 64 * 1024 * 1024;
return 0;
}
#define BCSR_PCMCIA_PC0DRVEN 0x0010
#define BCSR_PCMCIA_PC0RST 0x0080
/* In arch/mips/cpu/cpu.c */
void write_one_tlb( int index, u32 pagemask, u32 hi, u32 low0, u32 low1 );
int checkboard (void)
{
#if defined(CONFIG_IDE_PCMCIA) && 0
u16 status;
#endif
/* volatile u32 *pcmcia_bcsr = (u32*)(DB1000_BCSR_ADDR+0x10); */
volatile u32 *sys_counter = (volatile u32*)SYS_COUNTER_CNTRL;
u32 proc_id;
*sys_counter = 0x100; /* Enable 32 kHz oscillator for RTC/TOY */
proc_id = read_c0_prid();
switch (proc_id >> 24) {
case 0:
puts ("Board: Pb1000\n");
printf ("CPU: Au1000 396 MHz, id: 0x%02x, rev: 0x%02x\n",
(proc_id >> 8) & 0xFF, proc_id & 0xFF);
break;
case 1:
puts ("Board: Pb1500\n");
printf ("CPU: Au1500, id: 0x%02x, rev: 0x%02x\n",
(proc_id >> 8) & 0xFF, proc_id & 0xFF);
break;
case 2:
puts ("Board: Pb1100\n");
printf ("CPU: Au1100, id: 0x%02x, rev: 0x%02x\n",
(proc_id >> 8) & 0xFF, proc_id & 0xFF);
break;
default:
printf ("Unsupported cpu %d, proc_id=0x%x\n", proc_id >> 24, proc_id);
}
set_io_port_base(0);
#if defined(CONFIG_IDE_PCMCIA) && 0
/* Enable 3.3 V on slot 0 ( VCC )
No 5V */
status = 4;
*pcmcia_bcsr = status;
status |= BCSR_PCMCIA_PC0DRVEN;
*pcmcia_bcsr = status;
au_sync();
udelay(300*1000);
status |= BCSR_PCMCIA_PC0RST;
*pcmcia_bcsr = status;
au_sync();
udelay(100*1000);
/* PCMCIA is on a 36 bit physical address.
We need to map it into a 32 bit addresses */
#if 0
/* We dont need theese unless we run whole pcmcia package */
write_one_tlb(20, /* index */
0x01ffe000, /* Pagemask, 16 MB pages */
CONFIG_SYS_PCMCIA_IO_BASE, /* Hi */
0x3C000017, /* Lo0 */
0x3C200017); /* Lo1 */
write_one_tlb(21, /* index */
0x01ffe000, /* Pagemask, 16 MB pages */
CONFIG_SYS_PCMCIA_ATTR_BASE, /* Hi */
0x3D000017, /* Lo0 */
0x3D200017); /* Lo1 */
#endif /* 0 */
write_one_tlb(22, /* index */
0x01ffe000, /* Pagemask, 16 MB pages */
CONFIG_SYS_PCMCIA_MEM_ADDR, /* Hi */
0x3E000017, /* Lo0 */
0x3E200017); /* Lo1 */
#endif /* CONFIG_IDE_PCMCIA */
return 0;
}
| {
"pile_set_name": "Github"
} |
/******************************************************************************
*
* Module Name: exoparg6 - AML execution - opcodes with 6 arguments
*
*****************************************************************************/
/*
* Copyright (C) 2000 - 2007, R. Byron Moore
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions, and the following disclaimer,
* without modification.
* 2. Redistributions in binary form must reproduce at minimum a disclaimer
* substantially similar to the "NO WARRANTY" disclaimer below
* ("Disclaimer") and any redistribution must be conditioned upon
* including a substantially similar Disclaimer requirement for further
* binary redistribution.
* 3. Neither the names of the above-listed copyright holders nor the names
* of any contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* Alternatively, this software may be distributed under the terms of the
* GNU General Public License ("GPL") version 2 as published by the Free
* Software Foundation.
*
* NO WARRANTY
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDERS OR CONTRIBUTORS BE LIABLE FOR SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGES.
*/
#include <acpi/acpi.h>
#include <acpi/acinterp.h>
#include <acpi/acparser.h>
#include <acpi/amlcode.h>
#define _COMPONENT ACPI_EXECUTER
ACPI_MODULE_NAME("exoparg6")
/*!
* Naming convention for AML interpreter execution routines.
*
* The routines that begin execution of AML opcodes are named with a common
* convention based upon the number of arguments, the number of target operands,
* and whether or not a value is returned:
*
* AcpiExOpcode_xA_yT_zR
*
* Where:
*
* xA - ARGUMENTS: The number of arguments (input operands) that are
* required for this opcode type (1 through 6 args).
* yT - TARGETS: The number of targets (output operands) that are required
* for this opcode type (0, 1, or 2 targets).
* zR - RETURN VALUE: Indicates whether this opcode type returns a value
* as the function return (0 or 1).
*
* The AcpiExOpcode* functions are called via the Dispatcher component with
* fully resolved operands.
!*/
/* Local prototypes */
static u8
acpi_ex_do_match(u32 match_op,
union acpi_operand_object *package_obj,
union acpi_operand_object *match_obj);
/*******************************************************************************
*
* FUNCTION: acpi_ex_do_match
*
* PARAMETERS: match_op - The AML match operand
* package_obj - Object from the target package
* match_obj - Object to be matched
*
* RETURN: TRUE if the match is successful, FALSE otherwise
*
* DESCRIPTION: Implements the low-level match for the ASL Match operator.
* Package elements will be implicitly converted to the type of
* the match object (Integer/Buffer/String).
*
******************************************************************************/
static u8
acpi_ex_do_match(u32 match_op,
union acpi_operand_object *package_obj,
union acpi_operand_object *match_obj)
{
u8 logical_result = TRUE;
acpi_status status;
/*
* Note: Since the package_obj/match_obj ordering is opposite to that of
* the standard logical operators, we have to reverse them when we call
* do_logical_op in order to make the implicit conversion rules work
* correctly. However, this means we have to flip the entire equation
* also. A bit ugly perhaps, but overall, better than fussing the
* parameters around at runtime, over and over again.
*
* Below, P[i] refers to the package element, M refers to the Match object.
*/
switch (match_op) {
case MATCH_MTR:
/* Always true */
break;
case MATCH_MEQ:
/*
* True if equal: (P[i] == M)
* Change to: (M == P[i])
*/
status =
acpi_ex_do_logical_op(AML_LEQUAL_OP, match_obj, package_obj,
&logical_result);
if (ACPI_FAILURE(status)) {
return (FALSE);
}
break;
case MATCH_MLE:
/*
* True if less than or equal: (P[i] <= M) (P[i] not_greater than M)
* Change to: (M >= P[i]) (M not_less than P[i])
*/
status =
acpi_ex_do_logical_op(AML_LLESS_OP, match_obj, package_obj,
&logical_result);
if (ACPI_FAILURE(status)) {
return (FALSE);
}
logical_result = (u8) ! logical_result;
break;
case MATCH_MLT:
/*
* True if less than: (P[i] < M)
* Change to: (M > P[i])
*/
status =
acpi_ex_do_logical_op(AML_LGREATER_OP, match_obj,
package_obj, &logical_result);
if (ACPI_FAILURE(status)) {
return (FALSE);
}
break;
case MATCH_MGE:
/*
* True if greater than or equal: (P[i] >= M) (P[i] not_less than M)
* Change to: (M <= P[i]) (M not_greater than P[i])
*/
status =
acpi_ex_do_logical_op(AML_LGREATER_OP, match_obj,
package_obj, &logical_result);
if (ACPI_FAILURE(status)) {
return (FALSE);
}
logical_result = (u8) ! logical_result;
break;
case MATCH_MGT:
/*
* True if greater than: (P[i] > M)
* Change to: (M < P[i])
*/
status =
acpi_ex_do_logical_op(AML_LLESS_OP, match_obj, package_obj,
&logical_result);
if (ACPI_FAILURE(status)) {
return (FALSE);
}
break;
default:
/* Undefined */
return (FALSE);
}
return logical_result;
}
/*******************************************************************************
*
* FUNCTION: acpi_ex_opcode_6A_0T_1R
*
* PARAMETERS: walk_state - Current walk state
*
* RETURN: Status
*
* DESCRIPTION: Execute opcode with 6 arguments, no target, and a return value
*
******************************************************************************/
acpi_status acpi_ex_opcode_6A_0T_1R(struct acpi_walk_state * walk_state)
{
union acpi_operand_object **operand = &walk_state->operands[0];
union acpi_operand_object *return_desc = NULL;
acpi_status status = AE_OK;
acpi_integer index;
union acpi_operand_object *this_element;
ACPI_FUNCTION_TRACE_STR(ex_opcode_6A_0T_1R,
acpi_ps_get_opcode_name(walk_state->opcode));
switch (walk_state->opcode) {
case AML_MATCH_OP:
/*
* Match (search_pkg[0], match_op1[1], match_obj1[2],
* match_op2[3], match_obj2[4], start_index[5])
*/
/* Validate both Match Term Operators (MTR, MEQ, etc.) */
if ((operand[1]->integer.value > MAX_MATCH_OPERATOR) ||
(operand[3]->integer.value > MAX_MATCH_OPERATOR)) {
ACPI_ERROR((AE_INFO, "Match operator out of range"));
status = AE_AML_OPERAND_VALUE;
goto cleanup;
}
/* Get the package start_index, validate against the package length */
index = operand[5]->integer.value;
if (index >= operand[0]->package.count) {
ACPI_ERROR((AE_INFO,
"Index (%X%8.8X) beyond package end (%X)",
ACPI_FORMAT_UINT64(index),
operand[0]->package.count));
status = AE_AML_PACKAGE_LIMIT;
goto cleanup;
}
/* Create an integer for the return value */
return_desc = acpi_ut_create_internal_object(ACPI_TYPE_INTEGER);
if (!return_desc) {
status = AE_NO_MEMORY;
goto cleanup;
}
/* Default return value if no match found */
return_desc->integer.value = ACPI_INTEGER_MAX;
/*
* Examine each element until a match is found. Both match conditions
* must be satisfied for a match to occur. Within the loop,
* "continue" signifies that the current element does not match
* and the next should be examined.
*
* Upon finding a match, the loop will terminate via "break" at
* the bottom. If it terminates "normally", match_value will be
* ACPI_INTEGER_MAX (Ones) (its initial value) indicating that no
* match was found.
*/
for (; index < operand[0]->package.count; index++) {
/* Get the current package element */
this_element = operand[0]->package.elements[index];
/* Treat any uninitialized (NULL) elements as non-matching */
if (!this_element) {
continue;
}
/*
* Both match conditions must be satisfied. Execution of a continue
* (proceed to next iteration of enclosing for loop) signifies a
* non-match.
*/
if (!acpi_ex_do_match((u32) operand[1]->integer.value,
this_element, operand[2])) {
continue;
}
if (!acpi_ex_do_match((u32) operand[3]->integer.value,
this_element, operand[4])) {
continue;
}
/* Match found: Index is the return value */
return_desc->integer.value = index;
break;
}
break;
case AML_LOAD_TABLE_OP:
status = acpi_ex_load_table_op(walk_state, &return_desc);
break;
default:
ACPI_ERROR((AE_INFO, "Unknown AML opcode %X",
walk_state->opcode));
status = AE_AML_BAD_OPCODE;
goto cleanup;
}
walk_state->result_obj = return_desc;
cleanup:
/* Delete return object on error */
if (ACPI_FAILURE(status)) {
acpi_ut_remove_reference(return_desc);
}
return_ACPI_STATUS(status);
}
| {
"pile_set_name": "Github"
} |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
#cython: language_level=3
"""
The functions defined here allow one to determine the exact area of
overlap of a rectangle and a circle (written by Thomas Robitaille).
"""
import numpy as np
cimport numpy as np
__all__ = ['circular_overlap_grid']
cdef extern from "math.h":
double asin(double x)
double sin(double x)
double sqrt(double x)
DTYPE = np.float64
ctypedef np.float64_t DTYPE_t
# NOTE: Here we need to make sure we use cimport to import the C functions from
# core (since these were defined with cdef). This also requires the core.pxd
# file to exist with the function signatures.
from .core cimport area_arc, area_triangle, floor_sqrt
def circular_overlap_grid(double xmin, double xmax, double ymin, double ymax,
int nx, int ny, double r, int use_exact,
int subpixels):
"""
circular_overlap_grid(xmin, xmax, ymin, ymax, nx, ny, r,
use_exact, subpixels)
Area of overlap between a circle and a pixel grid. The circle is centered
on the origin.
Parameters
----------
xmin, xmax, ymin, ymax : float
Extent of the grid in the x and y direction.
nx, ny : int
Grid dimensions.
r : float
The radius of the circle.
use_exact : 0 or 1
If ``1`` calculates exact overlap, if ``0`` uses ``subpixel`` number
of subpixels to calculate the overlap.
subpixels : int
Each pixel resampled by this factor in each dimension, thus each
pixel is divided into ``subpixels ** 2`` subpixels.
Returns
-------
frac : `~numpy.ndarray` (float)
2-d array of shape (ny, nx) giving the fraction of the overlap.
"""
cdef unsigned int i, j
cdef double x, y, dx, dy, d, pixel_radius
cdef double bxmin, bxmax, bymin, bymax
cdef double pxmin, pxcen, pxmax, pymin, pycen, pymax
# Define output array
cdef np.ndarray[DTYPE_t, ndim=2] frac = np.zeros([ny, nx], dtype=DTYPE)
# Find the width of each element in x and y
dx = (xmax - xmin) / nx
dy = (ymax - ymin) / ny
# Find the radius of a single pixel
pixel_radius = 0.5 * sqrt(dx * dx + dy * dy)
# Define bounding box
bxmin = -r - 0.5 * dx
bxmax = +r + 0.5 * dx
bymin = -r - 0.5 * dy
bymax = +r + 0.5 * dy
for i in range(nx):
pxmin = xmin + i * dx # lower end of pixel
pxcen = pxmin + dx * 0.5
pxmax = pxmin + dx # upper end of pixel
if pxmax > bxmin and pxmin < bxmax:
for j in range(ny):
pymin = ymin + j * dy
pycen = pymin + dy * 0.5
pymax = pymin + dy
if pymax > bymin and pymin < bymax:
# Distance from circle center to pixel center.
d = sqrt(pxcen * pxcen + pycen * pycen)
# If pixel center is "well within" circle, count full
# pixel.
if d < r - pixel_radius:
frac[j, i] = 1.
# If pixel center is "close" to circle border, find
# overlap.
elif d < r + pixel_radius:
# Either do exact calculation or use subpixel
# sampling:
if use_exact:
frac[j, i] = circular_overlap_single_exact(
pxmin, pymin, pxmax, pymax, r) / (dx * dy)
else:
frac[j, i] = circular_overlap_single_subpixel(
pxmin, pymin, pxmax, pymax, r, subpixels)
# Otherwise, it is fully outside circle.
# No action needed.
return frac
# NOTE: The following two functions use cdef because they are not
# intended to be called from the Python code. Using def makes them
# callable from outside, but also slower. In any case, these aren't useful
# to call from outside because they only operate on a single pixel.
cdef double circular_overlap_single_subpixel(double x0, double y0,
double x1, double y1,
double r, int subpixels):
"""Return the fraction of overlap between a circle and a single pixel
with given extent, using a sub-pixel sampling method."""
cdef unsigned int i, j
cdef double x, y, dx, dy, r_squared
cdef double frac = 0. # Accumulator.
dx = (x1 - x0) / subpixels
dy = (y1 - y0) / subpixels
r_squared = r ** 2
x = x0 - 0.5 * dx
for i in range(subpixels):
x += dx
y = y0 - 0.5 * dy
for j in range(subpixels):
y += dy
if x * x + y * y < r_squared:
frac += 1.
return frac / (subpixels * subpixels)
cdef double circular_overlap_single_exact(double xmin, double ymin,
double xmax, double ymax,
double r):
"""
Area of overlap of a rectangle and a circle
"""
if 0. <= xmin:
if 0. <= ymin:
return circular_overlap_core(xmin, ymin, xmax, ymax, r)
elif 0. >= ymax:
return circular_overlap_core(-ymax, xmin, -ymin, xmax, r)
else:
return circular_overlap_single_exact(xmin, ymin, xmax, 0., r) \
+ circular_overlap_single_exact(xmin, 0., xmax, ymax, r)
elif 0. >= xmax:
if 0. <= ymin:
return circular_overlap_core(-xmax, ymin, -xmin, ymax, r)
elif 0. >= ymax:
return circular_overlap_core(-xmax, -ymax, -xmin, -ymin, r)
else:
return circular_overlap_single_exact(xmin, ymin, xmax, 0., r) \
+ circular_overlap_single_exact(xmin, 0., xmax, ymax, r)
else:
if 0. <= ymin:
return circular_overlap_single_exact(xmin, ymin, 0., ymax, r) \
+ circular_overlap_single_exact(0., ymin, xmax, ymax, r)
if 0. >= ymax:
return circular_overlap_single_exact(xmin, ymin, 0., ymax, r) \
+ circular_overlap_single_exact(0., ymin, xmax, ymax, r)
else:
return circular_overlap_single_exact(xmin, ymin, 0., 0., r) \
+ circular_overlap_single_exact(0., ymin, xmax, 0., r) \
+ circular_overlap_single_exact(xmin, 0., 0., ymax, r) \
+ circular_overlap_single_exact(0., 0., xmax, ymax, r)
cdef double circular_overlap_core(double xmin, double ymin, double xmax, double ymax,
double r):
"""
Assumes that the center of the circle is <= xmin,
ymin (can always modify input to conform to this).
"""
cdef double area, d1, d2, x1, x2, y1, y2
if xmin * xmin + ymin * ymin > r * r:
area = 0.
elif xmax * xmax + ymax * ymax < r * r:
area = (xmax - xmin) * (ymax - ymin)
else:
area = 0.
d1 = floor_sqrt(xmax * xmax + ymin * ymin)
d2 = floor_sqrt(xmin * xmin + ymax * ymax)
if d1 < r and d2 < r:
x1, y1 = floor_sqrt(r * r - ymax * ymax), ymax
x2, y2 = xmax, floor_sqrt(r * r - xmax * xmax)
area = ((xmax - xmin) * (ymax - ymin) -
area_triangle(x1, y1, x2, y2, xmax, ymax) +
area_arc(x1, y1, x2, y2, r))
elif d1 < r:
x1, y1 = xmin, floor_sqrt(r * r - xmin * xmin)
x2, y2 = xmax, floor_sqrt(r * r - xmax * xmax)
area = (area_arc(x1, y1, x2, y2, r) +
area_triangle(x1, y1, x1, ymin, xmax, ymin) +
area_triangle(x1, y1, x2, ymin, x2, y2))
elif d2 < r:
x1, y1 = floor_sqrt(r * r - ymin * ymin), ymin
x2, y2 = floor_sqrt(r * r - ymax * ymax), ymax
area = (area_arc(x1, y1, x2, y2, r) +
area_triangle(x1, y1, xmin, y1, xmin, ymax) +
area_triangle(x1, y1, xmin, y2, x2, y2))
else:
x1, y1 = floor_sqrt(r * r - ymin * ymin), ymin
x2, y2 = xmin, floor_sqrt(r * r - xmin * xmin)
area = (area_arc(x1, y1, x2, y2, r) +
area_triangle(x1, y1, x2, y2, xmin, ymin))
return area
| {
"pile_set_name": "Github"
} |
{
"version": 2,
"name": "Extruder 1",
"inherits": "fdmextruder",
"metadata": {
"machine": "Mark2_for_Ultimaker2",
"position": "0"
},
"overrides": {
"extruder_nr": {
"default_value": 0,
"maximum_value": "1"
},
"machine_nozzle_offset_x": { "default_value": 0.0 },
"machine_nozzle_offset_y": { "default_value": 0.0 }
}
}
| {
"pile_set_name": "Github"
} |
; RUN: llc -mtriple=thumb-eabi -mcpu=cortex-a8 %s -o - | FileCheck %s -check-prefix=ARMv7A
; RUN: llc -mtriple=thumb-eabi -mcpu=cortex-m3 %s -o - | FileCheck %s -check-prefix=ARMv7M
define i32 @test1(i32 %x) {
; ARMv7A: test1
; ARMv7A: uxtb16 r0, r0
; ARMv7M: test1
; ARMv7M: bic r0, r0, #-16711936
%tmp1 = and i32 %x, 16711935 ; <i32> [#uses=1]
ret i32 %tmp1
}
; PR7503
define i32 @test2(i32 %x) {
; ARMv7A: test2
; ARMv7A: uxtb16 r0, r0, ror #8
; ARMv7M: test2
; ARMv7M: mov.w r1, #16711935
; ARMv7M: and.w r0, r1, r0, lsr #8
%tmp1 = lshr i32 %x, 8 ; <i32> [#uses=1]
%tmp2 = and i32 %tmp1, 16711935 ; <i32> [#uses=1]
ret i32 %tmp2
}
define i32 @test3(i32 %x) {
; ARMv7A: test3
; ARMv7A: uxtb16 r0, r0, ror #8
; ARMv7M: test3
; ARMv7M: mov.w r1, #16711935
; ARMv7M: and.w r0, r1, r0, lsr #8
%tmp1 = lshr i32 %x, 8 ; <i32> [#uses=1]
%tmp2 = and i32 %tmp1, 16711935 ; <i32> [#uses=1]
ret i32 %tmp2
}
define i32 @test4(i32 %x) {
; ARMv7A: test4
; ARMv7A: uxtb16 r0, r0, ror #8
; ARMv7M: test4
; ARMv7M: mov.w r1, #16711935
; ARMv7M: and.w r0, r1, r0, lsr #8
%tmp1 = lshr i32 %x, 8 ; <i32> [#uses=1]
%tmp6 = and i32 %tmp1, 16711935 ; <i32> [#uses=1]
ret i32 %tmp6
}
define i32 @test5(i32 %x) {
; ARMv7A: test5
; ARMv7A: uxtb16 r0, r0, ror #8
; ARMv7M: test5
; ARMv7M: mov.w r1, #16711935
; ARMv7M: and.w r0, r1, r0, lsr #8
%tmp1 = lshr i32 %x, 8 ; <i32> [#uses=1]
%tmp2 = and i32 %tmp1, 16711935 ; <i32> [#uses=1]
ret i32 %tmp2
}
define i32 @test6(i32 %x) {
; ARMv7A: test6
; ARMv7A: uxtb16 r0, r0, ror #16
; ARMv7M: test6
; ARMv7M: mov.w r1, #16711935
; ARMv7M: and.w r0, r1, r0, ror #16
%tmp1 = lshr i32 %x, 16 ; <i32> [#uses=1]
%tmp2 = and i32 %tmp1, 255 ; <i32> [#uses=1]
%tmp4 = shl i32 %x, 16 ; <i32> [#uses=1]
%tmp5 = and i32 %tmp4, 16711680 ; <i32> [#uses=1]
%tmp6 = or i32 %tmp2, %tmp5 ; <i32> [#uses=1]
ret i32 %tmp6
}
define i32 @test7(i32 %x) {
; ARMv7A: test7
; ARMv7A: uxtb16 r0, r0, ror #16
; ARMv7M: test7
; ARMv7M: mov.w r1, #16711935
; ARMv7M: and.w r0, r1, r0, ror #16
%tmp1 = lshr i32 %x, 16 ; <i32> [#uses=1]
%tmp2 = and i32 %tmp1, 255 ; <i32> [#uses=1]
%tmp4 = shl i32 %x, 16 ; <i32> [#uses=1]
%tmp5 = and i32 %tmp4, 16711680 ; <i32> [#uses=1]
%tmp6 = or i32 %tmp2, %tmp5 ; <i32> [#uses=1]
ret i32 %tmp6
}
define i32 @test8(i32 %x) {
; ARMv7A: test8
; ARMv7A: uxtb16 r0, r0, ror #24
; ARMv7M: test8
; ARMv7M: mov.w r1, #16711935
; ARMv7M: and.w r0, r1, r0, ror #24
%tmp1 = shl i32 %x, 8 ; <i32> [#uses=1]
%tmp2 = and i32 %tmp1, 16711680 ; <i32> [#uses=1]
%tmp5 = lshr i32 %x, 24 ; <i32> [#uses=1]
%tmp6 = or i32 %tmp2, %tmp5 ; <i32> [#uses=1]
ret i32 %tmp6
}
define i32 @test9(i32 %x) {
; ARMv7A: test9
; ARMv7A: uxtb16 r0, r0, ror #24
; ARMv7M: test9
; ARMv7M: mov.w r1, #16711935
; ARMv7M: and.w r0, r1, r0, ror #24
%tmp1 = lshr i32 %x, 24 ; <i32> [#uses=1]
%tmp4 = shl i32 %x, 8 ; <i32> [#uses=1]
%tmp5 = and i32 %tmp4, 16711680 ; <i32> [#uses=1]
%tmp6 = or i32 %tmp5, %tmp1 ; <i32> [#uses=1]
ret i32 %tmp6
}
define i32 @test10(i32 %p0) {
; ARMv7A: test10
; ARMv7A: mov.w r1, #16253176
; ARMv7A: and.w r0, r1, r0, lsr #7
; ARMv7A: lsrs r1, r0, #5
; ARMv7A: uxtb16 r1, r1
; ARMv7A: orrs r0, r1
; ARMv7M: test10
; ARMv7M: mov.w r1, #16253176
; ARMv7M: and.w r0, r1, r0, lsr #7
; ARMv7M: mov.w r1, #458759
; ARMv7M: and.w r1, r1, r0, lsr #5
; ARMv7M: orrs r0, r1
%tmp1 = lshr i32 %p0, 7 ; <i32> [#uses=1]
%tmp2 = and i32 %tmp1, 16253176 ; <i32> [#uses=2]
%tmp4 = lshr i32 %tmp2, 5 ; <i32> [#uses=1]
%tmp5 = and i32 %tmp4, 458759 ; <i32> [#uses=1]
%tmp7 = or i32 %tmp5, %tmp2 ; <i32> [#uses=1]
ret i32 %tmp7
}
| {
"pile_set_name": "Github"
} |
/*! normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */
//
// 1. Set default font family to sans-serif.
// 2. Prevent iOS and IE text size adjust after device orientation change,
// without disabling user zoom.
//
html {
font-family: sans-serif; // 1
-ms-text-size-adjust: 100%; // 2
-webkit-text-size-adjust: 100%; // 2
}
//
// Remove default margin.
//
body {
margin: 0;
}
// HTML5 display definitions
// ==========================================================================
//
// Correct `block` display not defined for any HTML5 element in IE 8/9.
// Correct `block` display not defined for `details` or `summary` in IE 10/11
// and Firefox.
// Correct `block` display not defined for `main` in IE 11.
//
article,
aside,
details,
figcaption,
figure,
footer,
header,
hgroup,
main,
menu,
nav,
section,
summary {
display: block;
}
//
// 1. Correct `inline-block` display not defined in IE 8/9.
// 2. Normalize vertical alignment of `progress` in Chrome, Firefox, and Opera.
//
audio,
canvas,
progress,
video {
display: inline-block; // 1
vertical-align: baseline; // 2
}
//
// Prevent modern browsers from displaying `audio` without controls.
// Remove excess height in iOS 5 devices.
//
audio:not([controls]) {
display: none;
height: 0;
}
//
// Address `[hidden]` styling not present in IE 8/9/10.
// Hide the `template` element in IE 8/9/10/11, Safari, and Firefox < 22.
//
[hidden],
template {
display: none;
}
// Links
// ==========================================================================
//
// Remove the gray background color from active links in IE 10.
//
a {
background-color: transparent;
}
//
// Improve readability of focused elements when they are also in an
// active/hover state.
//
a:active,
a:hover {
outline: 0;
}
// Text-level semantics
// ==========================================================================
//
// Address styling not present in IE 8/9/10/11, Safari, and Chrome.
//
abbr[title] {
border-bottom: 1px dotted;
}
//
// Address style set to `bolder` in Firefox 4+, Safari, and Chrome.
//
b,
strong {
font-weight: bold;
}
//
// Address styling not present in Safari and Chrome.
//
dfn {
font-style: italic;
}
//
// Address variable `h1` font-size and margin within `section` and `article`
// contexts in Firefox 4+, Safari, and Chrome.
//
h1 {
font-size: 2em;
margin: 0.67em 0;
}
//
// Address styling not present in IE 8/9.
//
mark {
background: #ff0;
color: #000;
}
//
// Address inconsistent and variable font size in all browsers.
//
small {
font-size: 80%;
}
//
// Prevent `sub` and `sup` affecting `line-height` in all browsers.
//
sub,
sup {
font-size: 75%;
line-height: 0;
position: relative;
vertical-align: baseline;
}
sup {
top: -0.5em;
}
sub {
bottom: -0.25em;
}
// Embedded content
// ==========================================================================
//
// Remove border when inside `a` element in IE 8/9/10.
//
img {
border: 0;
}
//
// Correct overflow not hidden in IE 9/10/11.
//
svg:not(:root) {
overflow: hidden;
}
// Grouping content
// ==========================================================================
//
// Address margin not present in IE 8/9 and Safari.
//
figure {
margin: 1em 40px;
}
//
// Address differences between Firefox and other browsers.
//
hr {
box-sizing: content-box;
height: 0;
}
//
// Contain overflow in all browsers.
//
pre {
overflow: auto;
}
//
// Address odd `em`-unit font size rendering in all browsers.
//
code,
kbd,
pre,
samp {
font-family: monospace, monospace;
font-size: 1em;
}
// Forms
// ==========================================================================
//
// Known limitation: by default, Chrome and Safari on OS X allow very limited
// styling of `select`, unless a `border` property is set.
//
//
// 1. Correct color not being inherited.
// Known issue: affects color of disabled elements.
// 2. Correct font properties not being inherited.
// 3. Address margins set differently in Firefox 4+, Safari, and Chrome.
//
button,
input,
optgroup,
select,
textarea {
color: inherit; // 1
font: inherit; // 2
margin: 0; // 3
}
//
// Address `overflow` set to `hidden` in IE 8/9/10/11.
//
button {
overflow: visible;
}
//
// Address inconsistent `text-transform` inheritance for `button` and `select`.
// All other form control elements do not inherit `text-transform` values.
// Correct `button` style inheritance in Firefox, IE 8/9/10/11, and Opera.
// Correct `select` style inheritance in Firefox.
//
button,
select {
text-transform: none;
}
//
// 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio`
// and `video` controls.
// 2. Correct inability to style clickable `input` types in iOS.
// 3. Improve usability and consistency of cursor style between image-type
// `input` and others.
//
button,
html input[type="button"], // 1
input[type="reset"],
input[type="submit"] {
-webkit-appearance: button; // 2
cursor: pointer; // 3
}
//
// Re-set default cursor for disabled elements.
//
button[disabled],
html input[disabled] {
cursor: default;
}
//
// Remove inner padding and border in Firefox 4+.
//
button::-moz-focus-inner,
input::-moz-focus-inner {
border: 0;
padding: 0;
}
//
// Address Firefox 4+ setting `line-height` on `input` using `!important` in
// the UA stylesheet.
//
input {
line-height: normal;
}
//
// It's recommended that you don't attempt to style these elements.
// Firefox's implementation doesn't respect box-sizing, padding, or width.
//
// 1. Address box sizing set to `content-box` in IE 8/9/10.
// 2. Remove excess padding in IE 8/9/10.
//
input[type="checkbox"],
input[type="radio"] {
box-sizing: border-box; // 1
padding: 0; // 2
}
//
// Fix the cursor style for Chrome's increment/decrement buttons. For certain
// `font-size` values of the `input`, it causes the cursor style of the
// decrement button to change from `default` to `text`.
//
input[type="number"]::-webkit-inner-spin-button,
input[type="number"]::-webkit-outer-spin-button {
height: auto;
}
//
// 1. Address `appearance` set to `searchfield` in Safari and Chrome.
// 2. Address `box-sizing` set to `border-box` in Safari and Chrome.
//
input[type="search"] {
-webkit-appearance: textfield; // 1
box-sizing: content-box; //2
}
//
// Remove inner padding and search cancel button in Safari and Chrome on OS X.
// Safari (but not Chrome) clips the cancel button when the search input has
// padding (and `textfield` appearance).
//
input[type="search"]::-webkit-search-cancel-button,
input[type="search"]::-webkit-search-decoration {
-webkit-appearance: none;
}
//
// Define consistent border, margin, and padding.
//
fieldset {
border: 1px solid #c0c0c0;
margin: 0 2px;
padding: 0.35em 0.625em 0.75em;
}
//
// 1. Correct `color` not being inherited in IE 8/9/10/11.
// 2. Remove padding so people aren't caught out if they zero out fieldsets.
//
legend {
border: 0; // 1
padding: 0; // 2
}
//
// Remove default vertical scrollbar in IE 8/9/10/11.
//
textarea {
overflow: auto;
}
//
// Don't inherit the `font-weight` (applied by a rule above).
// NOTE: the default cannot safely be changed in Chrome and Safari on OS X.
//
optgroup {
font-weight: bold;
}
// Tables
// ==========================================================================
//
// Remove most spacing between table cells.
//
table {
border-collapse: collapse;
border-spacing: 0;
}
td,
th {
padding: 0;
}
| {
"pile_set_name": "Github"
} |
(** First step of a splitter refinement; indexed representation, and handle all rules with at most one nonterminal; leave a reflective goal *)
Require Import Coq.Strings.String.
Require Import Fiat.Common.List.ListFacts.
Require Import Fiat.ADTNotation.BuildADT Fiat.ADTNotation.BuildADTSig.
Require Import Fiat.ADT.ComputationalADT.
Require Import Fiat.ADTRefinement.
Require Import Fiat.ADTRefinement.BuildADTRefinements.HoneRepresentation.
Require Import Fiat.Parsers.ParserADTSpecification.
Require Import Fiat.Parsers.Refinement.IndexedAndAtMostOneNonTerminalReflective.
Require Import Fiat.Parsers.StringLike.Core.
Require Import Fiat.Parsers.ContextFreeGrammar.Precompute.
Require Import Fiat.Parsers.BaseTypes.
Require Import Fiat.Parsers.Splitters.RDPList.
Require Import Fiat.Parsers.ContextFreeGrammar.Carriers.
Require Import Fiat.Parsers.ContextFreeGrammar.PreNotations.
Local Open Scope string_scope.
(* TODO: find a better place for this *)
Instance match_item_Proper {Char A}
: Proper (eq ==> pointwise_relation _ eq ==> pointwise_relation _ eq ==> eq)
(fun (it : Core.item Char) T NT
=> match it return A with
| Core.Terminal t => T t
| Core.NonTerminal nt => NT nt
end).
Proof.
intros []; repeat intro; subst; auto.
Qed.
Instance option_rect_Proper {A P}
: Proper (pointwise_relation _ eq ==> eq ==> eq ==> eq)
(@option_rect A (fun _ => P)).
Proof.
intros ?????? []; repeat intro; subst; simpl; auto.
Qed.
Module opt0.
Definition fst {A B} := Eval compute in @fst A B.
Definition snd {A B} := Eval compute in @snd A B.
End opt0.
Global Arguments opt0.fst : simpl never.
Global Arguments opt0.snd : simpl never.
Module opt2.
Definition id {A} := Eval compute in @id A.
Definition fold_right {A B} := Eval compute in @List.fold_right A B.
Definition uniquize_sig {A} : { v : _ | v = @Operations.List.uniquize A }.
Proof.
eexists.
cbv [Operations.List.uniquize Equality.list_bin orb].
change @List.fold_right with @fold_right.
reflexivity.
Defined.
Definition uniquize {A} := Eval cbv [proj1_sig uniquize_sig] in proj1_sig (@uniquize_sig A).
Definition ret_cases_BoolDecR := Eval compute in ret_cases_BoolDecR.
End opt2.
Module opt3.
Definition fold_right {A B} := Eval compute in @List.fold_right A B.
End opt3.
Module opt.
Definition map {A B} := Eval compute in @List.map A B.
Definition flat_map {A B} := Eval compute in @List.flat_map A B.
Definition up_to := Eval compute in @Operations.List.up_to.
Definition length {A} := Eval compute in @List.length A.
Definition nth {A} := Eval compute in @List.nth A.
Definition id {A} := Eval compute in @id A.
Definition combine {A B} := Eval compute in @List.combine A B.
Definition string_beq := Eval compute in Equality.string_beq.
Definition first_index_default {A} := Eval compute in @Operations.List.first_index_default A.
Definition list_bin_eq := Eval compute in list_bin_eq.
Definition filter_out_eq := Eval compute in filter_out_eq.
Definition find {A} := Eval compute in @List.find A.
Definition leb := Eval compute in Compare_dec.leb.
Definition minus := Eval compute in minus.
Definition drop {A} := Eval compute in @Operations.List.drop A.
Definition andb := Eval compute in andb.
Definition nat_rect {A} := Eval compute in @nat_rect A.
Definition option_rect {A} := Eval compute in @option_rect A.
Definition has_only_terminals {Char} := Eval compute in @has_only_terminals Char.
Definition sumbool_of_bool := Eval compute in Sumbool.sumbool_of_bool.
Local Declare Reduction red_fp := cbv [FromAbstractInterpretationDefinitions.fixedpoint_by_abstract_interpretation Fix.aggregate_state Definitions.prestate Definitions.lattice_data Definitions.state FromAbstractInterpretation.fold_grammar initial_nonterminals_data rdp_list_predata rdp_list_initial_nonterminals_data pregrammar_productions FromAbstractInterpretationDefinitions.fold_constraints FromAbstractInterpretationDefinitions.fold_productions' FromAbstractInterpretationDefinitions.fold_production' FromAbstractInterpretationDefinitions.fold_item' of_nonterminal rdp_list_of_nonterminal default_of_nonterminal Lookup_idx FromAbstractInterpretationDefinitions.fold_constraints_Proper FromAbstractInterpretationDefinitions.fold_constraints_ext FromAbstractInterpretationDefinitions.fold_productions'_ext FromAbstractInterpretationDefinitions.fold_production'_ext FromAbstractInterpretationDefinitions.fold_item'_ext FromAbstractInterpretationDefinitions.fold_constraints_extR FromAbstractInterpretationDefinitions.fold_productions'_extR FromAbstractInterpretationDefinitions.fold_production'_extR FromAbstractInterpretationDefinitions.fold_item'_extR Fix.lookup_state FromAbstractInterpretationDefinitions.fold_constraints_Proper_state_beq FromAbstractInterpretationDefinitions.fold_constraints_ProperR].
Definition lookup_state' {G} compiled_productions :=
Eval red_fp in
@Fix.lookup_state
(@FromAbstractInterpretationDefinitions.fixedpoint_by_abstract_interpretation
Ascii.ascii nat FixedLengthLemmas.length_result_lattice
(@FixedLengthLemmas.length_result_aidata Ascii.ascii) G
compiled_productions).
Definition fold_grammar' pp unix
:= Eval red_fp in
let G := {| pregrammar_productions := pp ; nonterminals_unique := unix |} in
(@FromAbstractInterpretation.fold_grammar
Ascii.ascii nat
FixedLengthLemmas.length_result_lattice
(@FixedLengthLemmas.length_result_aidata Ascii.ascii) G).
Definition lookup_state : FMapPositive.PositiveMap.tree (Definitions.lattice_for nat) -> nat -> Definitions.lattice_for nat.
Proof.
let term := match (eval cbv [lookup_state'] in @lookup_state') with
| fun _ _ => ?term => term
end in
exact term.
Defined.
Definition fold_grammar (pp : list (string * Core.productions Ascii.ascii)) : list (opt.productions (Definitions.lattice_for nat)) -> FMapPositive.PositiveMap.t (Definitions.lattice_for nat).
Proof.
let term := match (eval cbv [fold_grammar'] in (@fold_grammar' pp)) with
| fun _ => ?term => term
end in
exact term.
Defined.
Definition collapse_length_result := Eval compute in FixedLengthLemmas.collapse_length_result.
Definition expanded_fallback_list'_body_sig {G} : { b : _ | b = @expanded_fallback_list'_body G }.
Proof.
eexists.
cbv [id
expanded_fallback_list'_body to_production_opt production_carrier_valid production_carrierT rdp_list_predata default_production_carrierT default_nonterminal_carrierT rdp_list_production_carrier_valid default_production_carrier_valid Lookup_idx FixedLengthLemmas.length_of_any FixedLengthLemmas.length_of_any nonterminals_listT rdp_list_nonterminals_listT nonterminals_length initial_nonterminals_data rdp_list_initial_nonterminals_data is_valid_nonterminal of_nonterminal remove_nonterminal ContextFreeGrammar.Core.Lookup rdp_list_of_nonterminal default_of_nonterminal grammar_of_pregrammar rdp_list_remove_nonterminal Lookup_string list_to_productions rdp_list_is_valid_nonterminal If_Then_Else
opt.compile_productions opt.compile_production opt.compile_item opt.compile_nonterminal opt.nonterminal_names FromAbstractInterpretationDefinitions.compile_item_data_of_abstract_interpretation opt.compile_grammar
pregrammar_nonterminals
opt.on_terminal FromAbstractInterpretationDefinitions.on_terminal FixedLengthLemmas.length_result_aidata].
change (@Fix.lookup_state ?v) with (@lookup_state).
change (FromAbstractInterpretation.fold_grammar G) with (fold_grammar (pregrammar_productions G)).
change @fst with @opt.fst.
change @snd with @opt.snd.
change @List.map with @map.
change @List.length with @length.
change Equality.string_beq with string_beq.
change @Operations.List.first_index_default with @first_index_default.
change RDPList.list_bin_eq with list_bin_eq.
change RDPList.filter_out_eq with filter_out_eq.
change @Operations.List.up_to with @up_to.
change @List.find with @find.
change @List.nth with @nth.
change Compare_dec.leb with leb.
change Datatypes.andb with andb.
change (?x - ?y) with (minus x y).
change @Operations.List.drop with @drop.
change @Datatypes.nat_rect with @nat_rect.
change @Datatypes.option_rect with @option_rect.
change @Sumbool.sumbool_of_bool with sumbool_of_bool.
change @FixedLengthLemmas.collapse_length_result with collapse_length_result.
change @IndexedAndAtMostOneNonTerminalReflective.has_only_terminals with @has_only_terminals.
reflexivity.
Defined.
Definition expanded_fallback_list'_body' (ps : list (String.string * Core.productions Ascii.ascii)) : default_production_carrierT -> ret_cases.
Proof.
let term := constr:(fun H : NoDupR _ _ => proj1_sig (@expanded_fallback_list'_body_sig {| pregrammar_productions := ps |})) in
let term := (eval cbv [proj1_sig expanded_fallback_list'_body_sig pregrammar_productions] in term) in
let term := match term with
| (fun _ => ?term) => term
end in
exact term.
Defined.
Definition expanded_fallback_list'_body (ps : list (String.string * Core.productions Ascii.ascii))
: FMapPositive.PositiveMap.t (Definitions.lattice_for nat) -> default_production_carrierT -> ret_cases.
Proof.
let term := (eval cbv [expanded_fallback_list'_body'] in (@expanded_fallback_list'_body' ps)) in
let fg := lazymatch term with context[fold_grammar ps ?v] => constr:(fold_grammar ps v) end in
let term := match (eval pattern fg in term) with
| ?term _ => term
end in
exact term.
Defined.
Definition ret_cases_to_comp_sig {HSLM G} : { r : _ | r = @ret_cases_to_comp HSLM G }.
Proof.
eexists.
cbv [ret_cases_to_comp to_production rdp_list_predata rdp_list_to_production default_to_production Lookup_idx].
change @Operations.List.drop with @drop.
change @snd with @opt.snd.
change @fst with @opt.fst.
change @List.nth with @nth.
change (?x - ?y) with (minus x y).
change @List.length with @length.
change @List.map with @map.
reflexivity.
Defined.
Definition ret_cases_to_comp {HSLM G}
:= Eval cbv [proj1_sig ret_cases_to_comp_sig] in proj1_sig (@ret_cases_to_comp_sig HSLM G).
Definition premap_expanded_fallback_list'_body (G : pregrammar' Ascii.ascii) : list (nat * (nat * nat)).
Proof.
let term := match (eval cbv [rindexed_spec rindexed_spec' default_production_carrierT default_nonterminal_carrierT expanded_fallback_list' forall_reachable_productions_if_eq] in (fun HSLM HSL => @rindexed_spec HSLM HSL G)) with
| context[List.map (fun x => (x, ?f x)) ?ls] => ls
end in
exact term.
Defined.
Definition Let_In {A B} (x : A) (f : A -> B) := let y := x in f y.
Definition map_expanded_fallback_list'_body (G : pregrammar' Ascii.ascii) fg : list ((nat * (nat * nat)) * ret_cases)
:= map (fun x => (x, @expanded_fallback_list'_body G fg x)) (premap_expanded_fallback_list'_body G).
Local Hint Immediate FromAbstractInterpretationDefinitions.compile_item_data_of_abstract_interpretation : typeclass_instances.
Definition expanded_fallback_list_body (G : pregrammar' Ascii.ascii) : list ((nat * (nat * nat)) * ret_cases)
:= Let_In (opt.compile_grammar G)
(fun compiled_productions
=> Let_In
(fold_grammar G compiled_productions)
(map_expanded_fallback_list'_body G)).
End opt.
Class opt_of {T} (term : T) := mk_opt_of : T.
(*Class do_idtac {T} (x : T) := dummy_idtac : True.
Local Hint Extern 0 (do_idtac ?msg) => idtac "<infomsg>" msg "</infomsg>"; exact I : typeclass_instances.
Local Ltac cidtac term := constr:(_ : do_idtac term).
Local Ltac opt_of_context term :=
match (eval cbv beta iota zeta in term) with
| context G[map snd (opt.id ?ls)]
=> let G' := context G[opt.id (opt.map opt.snd ls)] in
opt_of_context G'
| context G[List.length (opt.id ?ls)]
=> let G' := context G[opt.id (opt.length ls)] in
opt_of_context G'
| context G[minus (opt.id ?x) (opt.id ?y)]
=> let G' := context G[opt.id (opt.minus x y)] in
opt_of_context G'
| context G[Operations.List.up_to (opt.id ?n)]
=> let G' := context G[opt.id (opt.up_to n)] in
opt_of_context G'
| context G[S (opt.id ?n)]
=> let G' := context G[opt.id (S n)] in
opt_of_context G'
| context G[ret (opt.id ?n)]
=> let G' := context G[opt.id (ret n)] in
opt_of_context G'
| context G[pair (opt.id ?x) (opt.id ?y)]
=> let G' := context G[opt.id (pair x y)] in
opt_of_context G'
| context G[cons (opt.id ?x) (opt.id ?y)]
=> let G' := context G[opt.id (cons x y)] in
opt_of_context G'
| context G[Operations.List.uniquize (opt.id ?beq) (opt.id ?ls)]
=> let G' := context G[opt.id (opt.uniquize beq ls)] in
opt_of_context G'
| context G[nth (opt.id ?n) (opt.id ?ls) (opt.id ?d)]
=> let G' := context G[opt.id (opt.nth n ls d)] in
opt_of_context G'
| context G[List.combine (opt.id ?a) (opt.id ?b)]
=> let G' := context G[opt.id (opt.combine a b)] in
opt_of_context G'
| context G[map (opt.id ?f) (opt.id ?ls)]
=> let G' := context G[opt.id (opt.map f ls)] in
let G' := (eval cbv beta in G') in
opt_of_context G'
| context G[flat_map (opt.id ?f) (opt.id ?ls)]
=> let G' := context G[opt.id (opt.flat_map f ls)] in
let G' := (eval cbv beta in G') in
opt_of_context G'
| context G[opt.flat_map (opt.id ?f) ?ls]
=> let G' := context G[opt.flat_map f ls] in
opt_of_context G'
| context G[opt.map (opt.id ?f) ?ls]
=> let G' := context G[opt.map f ls] in
opt_of_context G'
| context G[fun x => opt.id (@?f x)]
=> let G' := context G[opt.id f] in
opt_of_context G'
| context G[flat_map ?f (opt.id ?ls)]
=> let f' := constr:(fun x => _ : opt_of (f (opt.id x))) in
let G' := context G[opt.id (opt.flat_map f' ls)] in
let G' := (eval cbv beta in G') in
opt_of_context G'
| context G[map ?f (opt.id ?ls)]
=> let f' := constr:(fun x => _ : opt_of (f (opt.id x))) in
let G' := context G[opt.id (opt.map f' ls)] in
let G' := (eval cbv beta in G') in
opt_of_context G'
| context G[fold_right ?f (opt.id ?d) (opt.id ?ls)]
=> let f' := constr:(fun x => _ : opt_of (f (opt.id x))) in
let G' := context G[opt.id (opt.fold_right f' d ls)] in
let G' := (eval cbv beta in G') in
opt_of_context G'
| ?term' => term'
end.*)
Class constr_eq {A} {B} (x : A) (y : B) := make_constr_eq : True.
Local Hint Extern 0 (constr_eq ?x ?y) => constr_eq x y; exact I : typeclass_instances.
Local Ltac opt_of term :=
let retv :=
lazymatch (eval cbv beta iota zeta in term) with
| List.map snd (opt.id ?ls)
=> constr:(opt.id (opt.map opt.snd ls))
| List.length (opt.id ?ls)
=> constr:(opt.id (opt.length ls))
| minus (opt.id ?x) (opt.id ?y)
=> constr:(opt.id (opt.minus x y))
| Operations.List.up_to (opt.id ?n)
=> constr:(opt.id (opt.up_to n))
| @fst ?A ?B (opt.id ?n)
=> constr:(opt.id (@opt.fst A B n))
| @snd ?A ?B (opt.id ?n)
=> constr:(opt.id (@opt.snd A B n))
| S (opt.id ?n)
=> constr:(opt.id (S n))
| Core.ret (opt.id ?n)
=> constr:(opt.id (Core.ret n))
| pair (opt.id ?x) (opt.id ?y)
=> constr:(opt.id (pair x y))
| cons (opt.id ?x) (opt.id ?y)
=> constr:(opt.id (cons x y))
| Operations.List.uniquize (opt2.id ?beq) (opt.id ?ls)
=> constr:(opt2.id (opt2.uniquize beq ls))
| Operations.List.uniquize (opt.id ?beq) (opt.id ?ls)
=> constr:(opt2.id (opt2.uniquize beq ls))
| List.nth (opt.id ?n) (opt.id ?ls) (opt.id ?d)
=> constr:(opt.id (opt.nth n ls d))
| List.combine (opt.id ?a) (opt.id ?b)
=> constr:(opt.id (opt.combine a b))
| List.map (fun x : ?T => opt.id ?f) (opt.id ?ls)
=> constr:(opt.id (opt.map (fun x : T => f) ls))
| List.map (opt.id ?f) (opt.id ?ls)
=> constr:(opt.id (opt.map f ls))
| List.flat_map (opt.id ?f) (opt.id ?ls)
=> constr:(opt.id (opt.flat_map f ls))
| opt.flat_map (opt.id ?f) ?ls
=> constr:(opt.flat_map f ls)
| opt.map (opt.id ?f) ?ls
=> constr:(opt.map f ls)
| fun x => opt.id (@?f x)
=> constr:(opt.id f)
| List.flat_map ?f (opt.id ?ls)
=> let f' := constr:(fun x => _ : opt_of (f (opt.id x))) in
let G' := constr:(opt.id (opt.flat_map f' ls)) in
(eval cbv beta in G')
| @List.map ?A ?B ?f (opt.id ?ls)
=> let f' := constr:(fun x => _ : opt_of (f (opt.id x))) in
let G' := constr:(opt.id (@opt.map A B f' ls)) in
let G' := (eval cbv beta in G') in
G'
| List.fold_right orb false (opt.id ?ls)
=> constr:(opt2.fold_right orb false ls)
| List.fold_right orb false (opt2.id ?ls)
=> constr:(opt2.fold_right orb false ls)
| List.fold_right ?f (opt.id ?d) (opt.id ?ls)
=> let f' := constr:(fun x => _ : opt_of (f (opt2.id x))) in
let G' := constr:(opt2.id (opt2.fold_right f' d ls)) in
(eval cbv beta in G')
| List.fold_right ?f (opt.id ?d) (opt2.id ?ls)
=> let f' := constr:(fun x => _ : opt_of (f (opt2.id x))) in
let G' := constr:(opt2.id (opt2.fold_right f' d ls)) in
(eval cbv beta in G')
| opt.id ?f ?x
=> constr:(opt.id (f x))
| opt2.id ?f ?x
=> constr:(opt2.id (f x))
| ?f (opt.id ?x)
=> let f' := opt_of f in
let term' := (eval cbv beta iota zeta in (f' (opt.id x))) in
match constr:(Set) with
| _
=> let dummy := constr:(_ : constr_eq term' (f (opt.id x))) in
term'
| _
=> opt_of term'
end
| ?f (opt2.id ?x)
=> let f' := opt_of f in
let term' := (eval cbv beta iota zeta in (f' (opt2.id x))) in
match constr:(Set) with
| _
=> let dummy := constr:(_ : constr_eq term' (f (opt2.id x))) in
term'
| _
=> opt_of term'
end
| ?f ?x => let f' := opt_of f in
let x' := opt_of x in
let term' := (eval cbv beta iota zeta in (f' x')) in
lazymatch x' with
| opt.id ?x''
=> opt_of term'
| _ => term'
end(*
match term' with
| f x => term'
| ?term'' => opt_of term''
end*)
| (fun x : ?T => ?f)
=> (eval cbv beta iota zeta in (fun x : T => _ : opt_of f))
| if ?b then ?x else ?y
=> let b' := opt_of b in
let x' := opt_of x in
let y' := opt_of y in
constr:(if b' then x' else y')
| ?term'
=> term'
end in
(*let term' := (eval cbv beta iota zeta in term) in
let dummy := match constr:(Set) with
| _ => let dummy := constr:(_ : constr_eq retv term') in constr:(Set)
| _ => cidtac retv
end in*)
retv.
Local Hint Extern 0 (opt_of ?term) => (let x := opt_of term in exact x) : typeclass_instances.
Section IndexedImpl_opt.
Context {HSLM : StringLikeMin Ascii.ascii} {HSL : StringLike Ascii.ascii} {HSI : StringIso Ascii.ascii}
{HSLP : StringLikeProperties Ascii.ascii} {HSIP : StringIsoProperties Ascii.ascii}
{HSEP : StringEqProperties Ascii.ascii}.
Context (G : pregrammar' Ascii.ascii).
Let predata := @rdp_list_predata _ G.
Local Existing Instance predata.
Definition opt_rindexed_spec_sig : { a : ADT (string_rep Ascii.ascii String default_production_carrierT) | a = rindexed_spec G }.
Proof.
eexists.
cbv [rindexed_spec rindexed_spec' default_production_carrierT default_nonterminal_carrierT expanded_fallback_list' forall_reachable_productions_if_eq].
simpl @production_carrierT.
cbv [default_production_carrierT default_nonterminal_carrierT].
lazymatch goal with
| [ |- context g[List.map (fun x => (x, expanded_fallback_list'_body x))?ls] ]
=> idtac;
let G' := context g[opt.id (opt.expanded_fallback_list_body G)] in
change G'
end.
change (@nil ?A) with (opt.id (@nil A)).
do 2 (idtac;
let G := match goal with |- ?G => G end in
let G' := opt_of G in
change G').
unfold ret_cases_to_comp.
reflexivity.
Defined.
Definition opt_rindexed_spec'
:= Eval cbv [proj1_sig opt_rindexed_spec_sig] in proj1_sig opt_rindexed_spec_sig.
Lemma opt_rindexed_spec'_correct
: opt_rindexed_spec' = rindexed_spec G.
Proof.
exact (proj2_sig opt_rindexed_spec_sig).
Qed.
Lemma FirstStep'
: refineADT (string_spec G HSL) opt_rindexed_spec'.
Proof.
rewrite opt_rindexed_spec'_correct.
apply FirstStep_preopt.
Qed.
Local Arguments opt.leb !_ !_.
Local Arguments opt.minus !_ !_.
Definition FirstStep0_sig
: { sp : _ & refineADT (string_spec G HSL) sp }.
Proof.
eexists.
eapply transitivityT; [ apply FirstStep' | ].
unfold opt_rindexed_spec'.
hone method "splits".
{
setoid_rewrite refineEquiv_pick_eq'.
simplify with monad laws.
simpl; subst_body; subst.
eapply refine_under_bind_both; [ | solve [ intros; higher_order_reflexivity ] ].
match goal with
| [ |- ?R ?x ?y ]
=> cut (x = y);
[ let H := fresh in
generalize y;
let y' := fresh in
intros y' H; subst y'; reflexivity
| ]
end.
etransitivity.
{ unfold opt.id, opt2.id.
repeat match goal with
| [ |- opt.length (opt.nth ?n ?ls nil) = _ ]
=> etransitivity;
[ symmetry;
eexact (List.map_nth opt.length ls nil n
: opt.nth _ (opt.map _ _) 0 = opt.length (opt.nth _ _ _))
| ]
| [ |- opt.map (fun x : ?T => opt.minus (opt.length (opt.snd x)) ?v) (pregrammar_productions G) = _ ]
=> transitivity (opt.map (fun x => opt.minus x v) (opt.map opt.length (opt.map opt.snd (pregrammar_productions G))));
[ change @opt.map with @List.map;
rewrite !List.map_map;
reflexivity
| reflexivity ]
| [ |- context[opt.length (opt.map ?f ?ls)] ]
=> replace (opt.length (opt.map f ls)) with (opt.length ls)
by (change @opt.length with @List.length;
change @opt.map with @List.map;
rewrite List.map_length;
reflexivity)
| [ |- context[opt.length (opt.up_to ?n)] ]
=> replace (opt.length (opt.up_to n)) with n
by (change @opt.length with @List.length;
change @opt.up_to with @Operations.List.up_to;
rewrite length_up_to; reflexivity)
| [ |- opt.map opt.length (opt.nth ?n ?ls nil) = _ ]
=> etransitivity;
[ symmetry;
eexact (List.map_nth (opt.map opt.length) ls nil n
: opt.nth _ (opt.map _ _) nil = opt.map opt.length (opt.nth _ _ _))
| ]
(*| [ |- opt.id _ = _ ] => apply f_equal*)
| [ |- ret _ = _ ] => apply f_equal
| [ |- fst _ = _ ] => apply f_equal
| [ |- snd _ = _ ] => apply f_equal
| [ |- opt.fst _ = _ ] => apply f_equal
| [ |- opt.snd _ = _ ] => apply f_equal
| [ |- S _ = _ ] => apply f_equal
| [ |- opt.collapse_length_result _ = _ ] => apply f_equal
| [ |- ret_length_less _ = _ ] => apply f_equal
| [ |- ret_nat _ = _ ] => apply f_equal
| [ |- ret_nat _ = _ ] => eapply (f_equal ret_nat)
| [ |- ret_pick _ = _ ] => eapply (f_equal ret_pick)
| [ |- opt.has_only_terminals _ = _ ] => apply f_equal
| [ |- opt.up_to _ = _ ] => apply f_equal
| [ |- cons _ _ = _ ] => apply f_equal2
| [ |- pair _ _ = _ ] => apply f_equal2
| [ |- cons _ _ = _ ] => eapply (f_equal2 cons)
| [ |- pair _ _ = _ ] => eapply (f_equal2 pair)
| [ |- orb _ _ = _ ] => apply f_equal2
| [ |- andb _ _ = _ ] => apply f_equal2
| [ |- opt.andb _ _ = _ ] => apply f_equal2
| [ |- opt.drop _ _ = _ ] => apply f_equal2
| [ |- opt.leb _ _ = _ ] => apply f_equal2
| [ |- opt.minus _ _ = _ ] => apply f_equal2
| [ |- opt.combine _ _ = _ ] => apply f_equal2
| [ |- opt2.ret_cases_BoolDecR _ _ = _ ] => apply f_equal2
| [ |- EqNat.beq_nat _ _ = _ ] => apply f_equal2
| [ |- opt.nth _ _ _ = _ ] => apply f_equal3
| [ |- 0 = _ ] => reflexivity
| [ |- opt.length (pregrammar_productions G) = _ ] => reflexivity
| [ |- opt.length ?x = _ ] => is_var x; reflexivity
| [ |- opt.map opt.length ?x = _ ] => is_var x; reflexivity
| [ |- nil = _ ] => reflexivity
| [ |- false = _ ] => reflexivity
| [ |- ret_dummy = _ ] => reflexivity
| [ |- invalid = _ ] => reflexivity
| [ |- ?x = _ ] => is_var x; reflexivity
| [ |- opt.map opt.snd (pregrammar_productions G) = _ ] => reflexivity
| [ |- opt.map opt.length (opt.map opt.snd (pregrammar_productions G)) = _ ] => reflexivity
| [ |- opt2.uniquize opt2.ret_cases_BoolDecR _ = _ ] => apply f_equal
| [ |- (If _ Then _ Else _) = _ ] => apply (f_equal3 If_Then_Else)
| [ |- (match _ with true => _ | false => _ end) = _ ]
=> apply (f_equal3 (fun (b : bool) A B => if b then A else B))
| [ |- match ?v with nil => _ | cons x xs => _ end = _ :> ?P ]
=> let T := type of v in
let A := match (eval hnf in T) with list ?A => A end in
etransitivity;
[ refine (@ListMorphisms.list_caset_Proper' A P _ _ _ _ _ _ _ _ _
: _ = match _ with nil => _ | cons x xs => _ end);
[ | intros ?? | ]
| reflexivity ]
| [ |- @opt2.fold_right ?A ?B _ _ _ = _ ]
=> refine (((_ : Proper (pointwise_relation _ _ ==> _ ==> _ ==> eq) (@List.fold_right A B)) : Proper _ (@opt2.fold_right A B)) _ _ _ _ _ _ _ _ _);
[ intros ?? | | ]
| [ |- @opt.map ?A ?B ?f ?v = _ ]
=> not constr_eq v (pregrammar_productions G);
let f' := head f in
not constr_eq f' (@opt.length);
refine (((_ : Proper (pointwise_relation _ _ ==> _ ==> eq) (@List.map A B)) : Proper _ (@opt.map A B)) _ _ _ _ _ _);
[ intro | ]
| [ |- @opt.flat_map ?A ?B _ ?v = _ ]
=> not constr_eq v (pregrammar_productions G);
refine (((_ : Proper (pointwise_relation _ _ ==> _ ==> eq) (@List.flat_map A B)) : Proper _ (@opt.flat_map A B)) _ _ _ _ _ _);
[ intro | ]
| [ |- match ?v with Core.Terminal t => _ | Core.NonTerminal nt => _ end = _ :> ?P ]
=> apply match_item_Proper; [ | intro | intro ]
| [ |- opt.option_rect _ _ _ _ = _ ]
=> eapply (option_rect_Proper : Proper _ (opt.option_rect _));
[ intro | | ]
| _ => progress cbv beta
end.
reflexivity.
reflexivity. }
change orb with Common.opt2.orb.
let d := match goal with d : (nat * (nat * nat))%type |- _ => d end in
change (fst d) with (Common.opt2.fst d);
change (snd d) with (Common.opt2.snd d);
change (fst (Common.opt2.snd d)) with (Common.opt2.fst (Common.opt2.snd d));
change (snd (Common.opt2.snd d)) with (Common.opt2.snd (Common.opt2.snd d)).
change EqNat.beq_nat with Common.opt2.beq_nat.
change andb with Common.opt2.andb.
reflexivity.
}
cbv beta.
apply reflexivityT.
Defined.
Definition opt_rindexed_spec0
:= Eval cbv [projT1 FirstStep0_sig] in projT1 FirstStep0_sig.
Lemma FirstStep0
: refineADT (string_spec G HSL) opt_rindexed_spec0.
Proof.
apply (projT2 FirstStep0_sig).
Qed.
Definition opt_rindexed_spec_method_default : String -> nat * (nat * nat) -> nat -> nat -> Comp (String * list nat).
Proof.
let c := (eval cbv [opt_rindexed_spec0] in opt_rindexed_spec0) in
let c := lazymatch c with
| context[fun r_n d d0 d1 => Bind (opt2.fold_right (@?f r_n d d0 d1) ?init ?ls) (fun a => @?retv r_n d d0 d1 a)]
=> (eval cbv beta in (fun r_n d d0 d1 => Bind (opt2.fold_right (f r_n d d0 d1) init ls) (fun a => retv r_n d d0 d1 a)))
end in
exact c.
Defined.
Section gen.
Context c
(Href : forall str d d0 d1, refine (opt_rindexed_spec_method_default str d d0 d1) (c str d d0 d1)).
Definition FirstStep_sig_gen
: { sp : _ & refineADT (string_spec G HSL) sp }.
Proof.
eexists.
eapply transitivityT; [ apply FirstStep0 | ].
unfold opt_rindexed_spec0.
hone method "splits".
{
setoid_rewrite refineEquiv_pick_eq'.
simplify with monad laws.
simpl; subst_body; subst.
cbv [opt_rindexed_spec_method_default] in Href.
apply Href.
}
cbv beta.
apply reflexivityT.
Defined.
Definition opt_rindexed_spec_gen
:= Eval cbv [projT1 FirstStep_sig_gen] in projT1 FirstStep_sig_gen.
Lemma FirstStep_gen
: refineADT (string_spec G HSL) opt_rindexed_spec_gen.
Proof.
apply (projT2 FirstStep_sig_gen).
Qed.
End gen.
Definition FirstStep_sig
: { sp : _ & refineADT (string_spec G HSL) sp }
:= FirstStep_sig_gen _ (fun _ _ _ _ => reflexivity _).
Definition opt_rindexed_spec
:= Eval cbv [projT1 FirstStep_sig FirstStep_sig_gen opt_rindexed_spec_method_default] in projT1 FirstStep_sig.
Lemma FirstStep
: refineADT (string_spec G HSL) opt_rindexed_spec.
Proof.
apply (projT2 FirstStep_sig).
Qed.
End IndexedImpl_opt.
Declare Reduction opt_red_FirstStep := cbv [opt_rindexed_spec opt.map opt.flat_map opt.up_to opt.length opt.nth opt.id opt.combine opt.expanded_fallback_list'_body opt.minus opt.drop opt.string_beq opt.first_index_default opt.list_bin_eq opt.filter_out_eq opt.find opt.leb opt.andb opt.nat_rect opt.option_rect opt.has_only_terminals opt.sumbool_of_bool opt.collapse_length_result opt.fst opt.snd].
Ltac opt_red_FirstStep :=
cbv [opt_rindexed_spec opt.map opt.flat_map opt.up_to opt.length opt.nth opt.id opt.combine opt.expanded_fallback_list'_body opt.minus opt.drop opt.string_beq opt.first_index_default opt.list_bin_eq opt.filter_out_eq opt.find opt.leb opt.andb opt.nat_rect opt.option_rect opt.has_only_terminals opt.sumbool_of_bool opt.collapse_length_result opt.fst opt.snd].
Section tower.
Context {A B}
(proj : A -> ret_cases)
(r_o : list nat -> Comp B)
(retv : Comp B)
(test : A -> bool)
(test_true : A -> Comp (list nat)).
Fixpoint make_tower base (ls : list A) new_comp old_comp
:= match ls with
| nil => refine (x0 <- new_comp base; r_o x0) retv
-> refine (x0 <- old_comp base; r_o x0) retv
| cons x xs
=> match proj x with
| ret_pick _
=> forall part_retv,
(test x -> refine (test_true x) part_retv)
-> make_tower
base
xs
(fun new_comp' => new_comp (If test x Then part_retv Else new_comp'))
(fun old_comp' => old_comp (If test x Then test_true x Else old_comp'))
| _
=> make_tower
base
xs
(fun new_comp' => new_comp (If test x Then test_true x Else new_comp'))
(fun old_comp' => old_comp (If test x Then test_true x Else old_comp'))
end
end.
Lemma refine_opt2_fold_right' base ls new_comp old_comp
(H : forall base base',
refine base' base
-> refine (x0 <- new_comp base; r_o x0) retv
-> refine (x0 <- old_comp base'; r_o x0) retv)
: make_tower base ls new_comp old_comp.
Proof.
revert base new_comp old_comp H; induction ls as [|x xs IHxs]; simpl; intros.
{ eapply H; [ | eassumption ].
reflexivity. }
{ destruct (proj x); simpl; intros;
apply IHxs; clear IHxs; try intros ?? H';
apply H;
edestruct test; specialize_by (exact eq_refl); simpl;
try setoid_rewrite_hyp'; reflexivity. }
Qed.
Lemma refine_opt2_fold_right base ls
: make_tower base ls (fun x => x) (fun x => x).
Proof.
apply refine_opt2_fold_right'.
intros.
setoid_rewrite_hyp; reflexivity.
Qed.
Fixpoint make_tower_no_unif base (ls : list A) new_comp concl
:= match ls with
| nil => refine (x0 <- new_comp base; r_o x0) retv
-> concl
| cons x xs
=> match proj x with
| ret_pick _
=> forall part_retv,
(test x -> refine (test_true x) part_retv)
-> make_tower_no_unif
base
xs
(fun new_comp' => new_comp (If test x Then part_retv Else new_comp'))
concl
| _
=> make_tower_no_unif
base
xs
(fun new_comp' => new_comp (If test x Then test_true x Else new_comp'))
concl
end
end.
Lemma make_tower_const base ls v v'
(H : refine v' v)
: make_tower base ls (fun _ => v) (fun _ => v').
Proof.
induction ls as [|x xs IHxs]; simpl.
{ rewrite H; intro; assumption. }
{ destruct (proj x); intros; assumption. }
Qed.
Lemma make_tower_no_unif_const base ls v v'
(H : refine v' v)
: make_tower_no_unif base ls (fun _ => v) (refine (x0 <- v'; r_o x0) retv).
Proof.
induction ls as [|x xs IHxs]; simpl.
{ rewrite H; intro; assumption. }
{ destruct (proj x); intros; assumption. }
Qed.
Lemma refine_opt2_fold_right_no_unif base ls
: make_tower_no_unif base ls (fun x => x) (refine (x0 <- opt2.fold_right (fun x else_case => If test x Then test_true x Else else_case) base ls; r_o x0) retv).
Proof.
pose proof (refine_opt2_fold_right base ls) as H.
induction ls as [|x xs IHxs].
{ exact H. }
{ simpl in *.
repeat match goal with
| _ => assumption
| [ |- context[If test ?x Then _ Else _] ] => destruct (test x) eqn:?
| _ => progress simpl in *
| [ |- context[match ?e with _ => _ end] ] => destruct e eqn:?
| _ => apply make_tower_const; reflexivity
| _ => apply make_tower_no_unif_const; first [ reflexivity | assumption ]
| _ => progress intros
| _ => progress specialize_by (exact eq_refl)
| _ => progress specialize_by assumption
| _ => solve [ eauto with nocore ]
end. }
Qed.
End tower.
Section step_tower.
Context {G' : pregrammar' Ascii.ascii}
{HSLM : StringLikeMin Ascii.ascii}
{HSL : StringLike Ascii.ascii}
{HSLP : StringLikeProperties Ascii.ascii}
{HSEP : StringEqProperties Ascii.ascii}.
Let A := (nat * (nat * nat) * ret_cases)%type.
Let B := (@StringLike.String Ascii.ascii HSLM * list nat)%type.
Let proj : A -> ret_cases := @opt0.snd _ _.
Let pre_r_o : String -> list nat -> Comp B
:= fun str => (fun x0 : list nat => ret (str, x0)).
Context (pre_retv : String -> nat * (nat * nat) -> nat -> nat -> Comp (String * list nat)).
Let test_test_true : String -> nat * (nat * nat) -> nat -> nat
-> (A -> bool) * (A -> Comp (list nat)).
Proof.
intros r_o d d0 d1.
lazymatch (eval cbv [opt_rindexed_spec_method_default] in (opt_rindexed_spec_method_default G' r_o d d0 d1)) with
| context[opt2.fold_right
(fun a a0 => If @?test a Then @?test_true a Else a0)
?base
?ls]
=> exact (test, test_true)
end.
Defined.
Let pre_test : String -> nat * (nat * nat) -> nat -> nat -> A -> bool
:= fun r_o d d0 d1 => let '(test, test_true) := test_test_true r_o d d0 d1 in test.
Let pre_test_true : String -> nat * (nat * nat) -> nat -> nat -> A -> Comp (list nat)
:= fun r_o d d0 d1 => let '(test, test_true) := test_test_true r_o d d0 d1 in test_true.
Lemma FirstStep_splits
(H : forall r_o d d0 d1,
refine (x0 <- opt2.fold_right (fun x else_case => If pre_test r_o d d0 d1 x Then pre_test_true r_o d d0 d1 x Else else_case) (ret nil) (opt.expanded_fallback_list_body G'); ret (r_o, x0)) (pre_retv r_o d d0 d1))
res
(Hres : refineADT (opt_rindexed_spec_gen pre_retv) (ComputationalADT.LiftcADT res))
: FullySharpened (@string_spec G' G' eq_refl HSLM HSL).
Proof.
eexists.
eapply transitivityT; [ | exact Hres ].
eapply transitivityT; [ apply (@FirstStep _ _ _ _) | ].
cbv [opt_rindexed_spec opt_rindexed_spec_gen].
hone method "splits".
2:apply reflexivityT.
{
setoid_rewrite General.refineEquiv_pick_eq'.
simplify with monad laws.
simpl; subst_body; subst.
move H at bottom.
cbv beta in H.
apply H.
}
Defined.
End step_tower.
| {
"pile_set_name": "Github"
} |
// SuperTux
// Copyright (C) 2015 Ingo Ruhnke <[email protected]>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
#ifndef HEADER_SUPERTUX_SUPERTUX_SECTOR_PARSER_HPP
#define HEADER_SUPERTUX_SUPERTUX_SECTOR_PARSER_HPP
#include <memory>
#include <string>
class GameObject;
class Level;
class ReaderMapping;
class Sector;
class SectorParser final
{
public:
static std::unique_ptr<Sector> from_reader(Level& level, const ReaderMapping& sector, bool editable);
static std::unique_ptr<Sector> from_reader_old_format(Level& level, const ReaderMapping& sector, bool editable);
static std::unique_ptr<Sector> from_nothing(Level& level);
private:
SectorParser(Sector& sector, bool editable);
void parse_old_format(const ReaderMapping& reader);
void parse(const ReaderMapping& sector);
void create_sector();
std::unique_ptr<GameObject> parse_object(const std::string& name_, const ReaderMapping& reader);
private:
Sector& m_sector;
bool m_editable;
private:
SectorParser(const SectorParser&) = delete;
SectorParser& operator=(const SectorParser&) = delete;
};
#endif
/* EOF */
| {
"pile_set_name": "Github"
} |
#include "float_math.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include "planetri.h"
/*----------------------------------------------------------------------
Copyright (c) 2004 Open Dynamics Framework Group
www.physicstools.org
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided
that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of conditions
and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
Neither the name of the Open Dynamics Framework Group nor the names of its contributors may
be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS' AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE INTEL OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-----------------------------------------------------------------------*/
// http://codesuppository.blogspot.com
//
// mailto: [email protected]
//
// http://www.amillionpixels.us
//
static inline float DistToPt(const float *p,const float *plane)
{
float x = p[0];
float y = p[1];
float z = p[2];
float d = x*plane[0] + y*plane[1] + z*plane[2] + plane[3];
return d;
}
static PlaneTriResult getSidePlane(const float *p,const float *plane,float epsilon)
{
float d = DistToPt(p,plane);
if ( (d+epsilon) > 0 )
return PTR_FRONT; // it is 'in front' within the provided epsilon value.
return PTR_BACK;
}
static void add(const float *p,float *dest,unsigned int tstride,unsigned int &pcount)
{
char *d = (char *) dest;
d = d + pcount*tstride;
dest = (float *) d;
dest[0] = p[0];
dest[1] = p[1];
dest[2] = p[2];
pcount++;
assert( pcount <= 4 );
}
// assumes that the points are on opposite sides of the plane!
static void intersect(const float *p1,const float *p2,float *split,const float *plane)
{
float dp1 = DistToPt(p1,plane);
float dir[3];
dir[0] = p2[0] - p1[0];
dir[1] = p2[1] - p1[1];
dir[2] = p2[2] - p1[2];
float dot1 = dir[0]*plane[0] + dir[1]*plane[1] + dir[2]*plane[2];
float dot2 = dp1 - plane[3];
float t = -(plane[3] + dot2 ) / dot1;
split[0] = (dir[0]*t)+p1[0];
split[1] = (dir[1]*t)+p1[1];
split[2] = (dir[2]*t)+p1[2];
}
PlaneTriResult planeTriIntersection(const float *plane, // the plane equation in Ax+By+Cz+D format
const float *triangle, // the source triangle.
unsigned int tstride, // stride in bytes of the input and output triangles
float epsilon, // the co-planer epsilon value.
float *front, // the triangle in front of the
unsigned int &fcount, // number of vertices in the 'front' triangle
float *back, // the triangle in back of the plane
unsigned int &bcount) // the number of vertices in the 'back' triangle.
{
fcount = 0;
bcount = 0;
const char *tsource = (const char *) triangle;
// get the three vertices of the triangle.
const float *p1 = (const float *) (tsource);
const float *p2 = (const float *) (tsource+tstride);
const float *p3 = (const float *) (tsource+tstride*2);
PlaneTriResult r1 = getSidePlane(p1,plane,epsilon); // compute the side of the plane each vertex is on
PlaneTriResult r2 = getSidePlane(p2,plane,epsilon);
PlaneTriResult r3 = getSidePlane(p3,plane,epsilon);
if ( r1 == r2 && r1 == r3 ) // if all three vertices are on the same side of the plane.
{
if ( r1 == PTR_FRONT ) // if all three are in front of the plane, then copy to the 'front' output triangle.
{
add(p1,front,tstride,fcount);
add(p2,front,tstride,fcount);
add(p3,front,tstride,fcount);
}
else
{
add(p1,back,tstride,bcount); // if all three are in 'abck' then copy to the 'back' output triangle.
add(p2,back,tstride,bcount);
add(p3,back,tstride,bcount);
}
return r1; // if all three points are on the same side of the plane return result
}
// ok.. we need to split the triangle at the plane.
// First test ray segment P1 to P2
if ( r1 == r2 ) // if these are both on the same side...
{
if ( r1 == PTR_FRONT )
{
add( p1, front, tstride, fcount );
add( p2, front, tstride, fcount );
}
else
{
add( p1, back, tstride, bcount );
add( p2, back, tstride, bcount );
}
}
else
{
float split[3]; // split the point
intersect(p1,p2,split,plane);
if ( r1 == PTR_FRONT )
{
add(p1, front, tstride, fcount );
add(split, front, tstride, fcount );
add(split, back, tstride, bcount );
add(p2, back, tstride, bcount );
}
else
{
add(p1, back, tstride, bcount );
add(split, back, tstride, bcount );
add(split, front, tstride, fcount );
add(p2, front, tstride, fcount );
}
}
// Next test ray segment P2 to P3
if ( r2 == r3 ) // if these are both on the same side...
{
if ( r3 == PTR_FRONT )
{
add( p3, front, tstride, fcount );
}
else
{
add( p3, back, tstride, bcount );
}
}
else
{
float split[3]; // split the point
intersect(p2,p3,split,plane);
if ( r3 == PTR_FRONT )
{
add(split, front, tstride, fcount );
add(split, back, tstride, bcount );
add(p3, front, tstride, fcount );
}
else
{
add(split, front, tstride, fcount );
add(split, back, tstride, bcount );
add(p3, back, tstride, bcount );
}
}
// Next test ray segment P3 to P1
if ( r3 != r1 ) // if these are both on the same side...
{
float split[3]; // split the point
intersect(p3,p1,split,plane);
if ( r1 == PTR_FRONT )
{
add(split, front, tstride, fcount );
add(split, back, tstride, bcount );
}
else
{
add(split, front, tstride, fcount );
add(split, back, tstride, bcount );
}
}
return PTR_SPLIT;
}
| {
"pile_set_name": "Github"
} |
-- in.test
--
-- execsql {SELECT a+ 100*(a BETWEEN 1 and 3) FROM t1 ORDER BY b}
SELECT a+ 100*(a BETWEEN 1 and 3) FROM t1 ORDER BY b | {
"pile_set_name": "Github"
} |
<action_hash>Calculando hash</action_hash>
<action_login>Fazendo Login</action_login>
<action_logout>Fazendo Logout</action_logout>
<action_noop>Checando sessรฃo</action_noop>
<action_search>Procurando legendas</action_search>
<int_all>Todos</int_all>
<int_bool_false>Nรฃo</int_bool_false>
<int_bool_true>Sim</int_bool_true>
<int_cancel>Cancelar</int_cancel>
<int_close>Fechar</int_close>
<int_config>Configuraรงรตes</int_config>
<int_configuration>Configuraรงรฃo</int_configuration>
<int_default_lang>Idioma das legendas</int_default_lang>
<int_descr>Baixe legendas em OpenSubtitles.org</int_descr>
<int_display_code>Mostrar cรณdigo do idioma no nome do arquivo</int_display_code>
<int_dowload_behav>O que fazer com as legendas</int_dowload_behav>
<int_dowload_load>Apenas carregar</int_dowload_load>
<int_dowload_manual>Download manual</int_dowload_manual>
<int_dowload_save>Carregar e salvar</int_dowload_save>
<int_dowload_sel>Baixar seleรงรฃo</int_dowload_sel>
<int_episode>Episรณdio (seriados)</int_episode>
<int_help>Ajuda</int_help>
<int_help_mess> Baixe legendas de <a href='http://www.opensubtitles.org/'>opensubtitles.org</a> e exiba-os enquanto assiste um vรญdeo.<br> <br> <b><u>Uso:</u></b><br> <br> VLSub รฉ destinado para usar enquanto vocรช assiste um vรญdeo, entรฃo abra-o primeiro (se nada for exibido, vocรช receberรก um link para baixar a legenda em seu navegador).<br> <br> Selecione o idioma de sua legenda e clique no botรฃo que corresponde a um dos dois mรฉtodos de pesquisa disponiblizados pelo VLSub:<br> <br> <b>Mรฉtodo 1: Busca por hash</b><br> รฉ recomendรกvel o uso deste mรฉtodo primeiro, pois ele realiza uma pesquisa baseada na assinatura do arquivo, sendo assim vocรช pode encontrar legendas sincronizadas com o vรญdeo.<br> <br> <b>Mรฉtodo 2: Busca por nome</b><br> Se vocรช nรฃo tiver sorte com o primeiro mรฉtodo, verifique se o nome estรก correto antes de clicar. Se vocรช procura legendas para seriados, vocรช pode tambรฉm fornecer o n&uatilde;mero da temporada e do episรณdio.<br> <br> <b>Baixando legendas</b><br> Selecione uma legenda na lista e clique em 'Download'.<br> O arquivo serรก colocado no mesmo diretรณrio que seu vรญdeo contendo o mesmo nome (extensรตes diferentes) e o VLC carregarรก o arquivo automaticamente na prรณxima vรชz que vocรช iniciar o vรญdeo.<br> <br> <b>/!\ Atenรงรฃo:</b> Legendas existentes serรฃo substituรญdas sem confirmaรงรฃo, entรฃo coloque-as em outro lugar se elas forem importantes.<br> <br> Encontre mais extensรตes para o VLC em <a href='http://addons.videolan.org'>addons.videolan.org</a>.</int_help_mess>
<int_int_lang>Idioma da interface</int_int_lang>
<int_ok>Ok</int_ok>
<int_remove_tag>Remover tags</int_remove_tag>
<int_research>Pesquisa</int_research>
<int_save>Salvar</int_save>
<int_search_hash>Procurar por hash</int_search_hash>
<int_searching_transl>Procurando traduรงรตes...</int_searching_transl>
<int_search_name>Procurar por nome</int_search_name>
<int_search_transl>Procure traduรงรตes</int_search_transl>
<int_season>Temporada (seriados)</int_season>
<int_show_conf>Mostrar configuraรงรตes</int_show_conf>
<int_show_help>Mostrar ajuda</int_show_help>
<int_title>Tรญtulo</int_title>
<int_vlsub_work_dir>VLSub diretรณrio de trabalho</int_vlsub_work_dir>
<mess_click_link>Clique para abrir o arquivo</mess_click_link>
<mess_complete>Pesquisa concluรญda</mess_complete>
<mess_dowload_link>Link para download</mess_dowload_link>
<mess_downloading>Baixando legenda</mess_downloading>
<mess_error>Erro</mess_error>
<mess_expired>Sessรฃo expirada, tentando novamente</mess_expired>
<mess_loaded>Legenda carregada</mess_loaded>
<mess_no_input>Por favor, utlize este mรฉtodo quando estiver tocando</mess_no_input>
<mess_no_res>Sem resultados</mess_no_res>
<mess_no_response>Servidor nรฃo responde</mess_no_response>
<mess_no_selection>Nenhuma legenda selecionada</mess_no_selection>
<mess_not_found>Arquivo nรฃo encontrado</mess_not_found>
<mess_not_found2>Arquivo nรฃo encontrado (caractere nรฃo permitido?)</mess_not_found2>
<mess_not_local>Este mรฉtodo funciona apenas para arquivos locais (por enquanto)</mess_not_local>
<mess_overloaded>Servidor sobrecarregado, tente novamente mais tarde</mess_overloaded>
<mess_res>encontrada(s)</mess_res>
<mess_save_fail>Nรฃo foi possรญvel salvar a legenda</mess_save_fail>
<mess_success>Sucesso</mess_success>
<mess_unauthorized>Solicitaรงรฃo nรฃo autorizada</mess_unauthorized>
| {
"pile_set_name": "Github"
} |
@include describe("The json-decode--null function") {
@include it("should properly decode null to null") {
@include should(expect(nth(_json-decode--null('null', 2), 2)), to(equal(null)));
}
@include it("should decode to null type") {
@include should(expect(type-of(nth(_json-decode--null('null', 2), 2))), to(equal('null')));
}
}
| {
"pile_set_name": "Github"
} |
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { AngularComponent } from './angular.component';
@NgModule({
imports: [CommonModule],
declarations: [AngularComponent],
providers: [],
})
export class AngularModule {}
| {
"pile_set_name": "Github"
} |
{
"package": "com.android.gpstest.osmdroid",
"verified": true,
"authors": [
"AndyBernie"
],
"last_update": {
"timestamp": 1573909761
},
"recommendation": "@unnecessary",
"behaviors": [
"@standard"
]
} | {
"pile_set_name": "Github"
} |
// (C) Copyright 2009-2011 Frederic Bron.
// Use, modification and distribution are subject to the
// Boost Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#include "test.hpp"
#include "check_integral_constant.hpp"
#ifdef TEST_STD
# include <type_traits>
#else
# include <boost/type_traits/has_not_equal_to.hpp>
#endif
#define BOOST_TT_TRAIT_NAME has_not_equal_to
#define BOOST_TT_TRAIT_OP !=
#include "has_binary_operators.hpp"
void specific() {
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const, void, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool &, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const &, void, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void, int &, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void, double, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void, void* const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const, void, int >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int*, void, int >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, void, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool, bool, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool, bool &, int const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool, int const, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool, int & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool, double &, int const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool, double const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool, int* const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool, int* &, bool const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool, int* &, int const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool, int* const &, int const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool, int* const &, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool, int* const &, int const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const, bool const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const, bool const, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const, int const, int >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const, int &, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const, double const, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const, double &, bool const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const, double const &, bool >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const, double const &, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const, void* const &, int >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const, void* const &, int const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const, int* >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const, int* const, int const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool &, bool, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool &, int const, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool &, int & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool &, int const &, int const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool &, void* &, bool const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool &, void* &, int const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool &, int*, int const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool &, int* const &, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const &, bool, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const &, bool &, bool >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const &, double const, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const &, double const, int const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const &, void* const, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const &, void* &, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const &, void* &, bool const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const &, void* const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< bool const &, void* const &, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int, bool, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int, bool const, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int, bool const, int >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int, bool &, bool >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int, bool &, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int, bool &, int const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int, double &, int >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int, void* const, int const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int, void* const &, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int, int* & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int, int* const &, int const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const, bool &, int >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const, bool &, int const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const, bool const &, bool const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const, int, bool const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const, int const, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const, double const, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const, double & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const, double &, bool const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const, void* const, int const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const, void* &, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const, int*, int const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const, int* &, bool const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const, int* &, int const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int &, bool &, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int &, bool const &, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int &, bool const &, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int &, int &, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int &, double, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int &, double, int >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int &, double const, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int &, double const, int const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int &, void* &, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int &, int* &, int const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const &, int const, int const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const &, int &, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const &, int const &, bool >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const &, double, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const &, double const, int const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const &, void* const, int const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const &, void* &, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const &, void* &, int const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const &, int* &, bool const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int const &, int* const &, bool const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double, bool const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double, bool const, int const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double, int, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double, int &, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double, double, bool >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double, double const, int const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double, void* const, int const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double, void* const &, bool const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const, bool &, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const, bool const &, int const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const, int, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const, int, int const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const, double, bool const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const, void*, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const, void* &, int const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const, int* const &, int const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double &, bool, bool const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double &, bool const &, int >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double &, double, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double &, double const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double &, double &, bool const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double &, double &, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double &, double const &, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double &, void* const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double &, int*, int const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const &, bool & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const &, int, int const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const &, int &, bool >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const &, int &, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const &, void*, bool const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const &, void* const, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const &, void* const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const &, int* const, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const &, int* &, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< double const &, int* &, bool const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void*, bool, bool const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void*, bool const, int >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void*, int, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void*, int const, bool const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void*, int const, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void*, int* const, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void*, int* const &, int >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const, bool >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const, int const, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const, double const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const, void* const, bool const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const, void* const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* &, bool const &, bool >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* &, int const, int >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* &, int const, int const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* &, double, bool const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* &, double, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* &, double const, bool >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* &, double const, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* &, void* const, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* &, int* const, bool >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* &, int* const, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* &, int* const &, bool const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* &, int* const &, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const &, bool, bool const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const &, bool const, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const &, bool const, bool const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const &, int >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const &, int, int >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const &, int const, bool const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const &, int &, bool >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const &, int &, bool const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const &, double const, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const &, void* &, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< void* const &, void* const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int*, int, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int*, int const, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int*, int const, bool const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int*, int &, int >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int*, double const &, bool >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int*, void*, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int*, int* const, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int*, int* &, int >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int*, int* &, int const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int*, int* const &, bool >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const, bool, int const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const, double const, bool >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const, double &, int >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const, double const &, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const, void* >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const, int* const >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const, int* &, int >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* &, double &, bool const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* &, void* const &, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* &, int*, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* &, int* const &, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, bool, bool const >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, bool, int >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, bool, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, int, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, int, int >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, int &, bool & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, int &, int const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, double const, int const & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, void*, bool >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, void* const &, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, int*, bool const & >::value), 1);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, int* const, void >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, int* &, int & >::value), 0);
BOOST_CHECK_INTEGRAL_CONSTANT((::boost::BOOST_TT_TRAIT_NAME< int* const &, int* const &, int const & >::value), 1);
}
TT_TEST_BEGIN(BOOST_TT_TRAIT_NAME)
common();
specific();
TT_TEST_END
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2000, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.nio.ch;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.net.StandardSocketOptions;
import java.nio.channels.IllegalBlockingModeException;
import java.nio.channels.NotYetBoundException;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
// Make a server-socket channel look like a server socket.
//
// The methods in this class are defined in exactly the same order as in
// java.net.ServerSocket so as to simplify tracking future changes to that
// class.
//
class ServerSocketAdaptor // package-private
extends ServerSocket
{
// The channel being adapted
private final ServerSocketChannelImpl ssc;
// Timeout "option" value for accepts
private volatile int timeout;
public static ServerSocket create(ServerSocketChannelImpl ssc) {
try {
return new ServerSocketAdaptor(ssc);
} catch (IOException x) {
throw new Error(x);
}
}
// ## super will create a useless impl
private ServerSocketAdaptor(ServerSocketChannelImpl ssc) throws IOException {
this.ssc = ssc;
}
public void bind(SocketAddress local) throws IOException {
bind(local, 50);
}
public void bind(SocketAddress local, int backlog) throws IOException {
if (local == null)
local = new InetSocketAddress(0);
try {
ssc.bind(local, backlog);
} catch (Exception x) {
Net.translateException(x);
}
}
public InetAddress getInetAddress() {
InetSocketAddress local = ssc.localAddress();
if (local == null) {
return null;
} else {
return Net.getRevealedLocalAddress(local).getAddress();
}
}
public int getLocalPort() {
InetSocketAddress local = ssc.localAddress();
if (local == null) {
return -1;
} else {
return local.getPort();
}
}
public Socket accept() throws IOException {
synchronized (ssc.blockingLock()) {
try {
if (!ssc.isBound())
throw new NotYetBoundException();
long to = this.timeout;
if (to == 0) {
// for compatibility reasons: accept connection if available
// when configured non-blocking
SocketChannel sc = ssc.accept();
if (sc == null && !ssc.isBlocking())
throw new IllegalBlockingModeException();
return sc.socket();
}
if (!ssc.isBlocking())
throw new IllegalBlockingModeException();
for (;;) {
long st = System.currentTimeMillis();
if (ssc.pollAccept(to))
return ssc.accept().socket();
to -= System.currentTimeMillis() - st;
if (to <= 0)
throw new SocketTimeoutException();
}
} catch (Exception x) {
Net.translateException(x);
assert false;
return null; // Never happens
}
}
}
public void close() throws IOException {
ssc.close();
}
public ServerSocketChannel getChannel() {
return ssc;
}
public boolean isBound() {
return ssc.isBound();
}
public boolean isClosed() {
return !ssc.isOpen();
}
public void setSoTimeout(int timeout) throws SocketException {
this.timeout = timeout;
}
public int getSoTimeout() throws SocketException {
return timeout;
}
public void setReuseAddress(boolean on) throws SocketException {
try {
ssc.setOption(StandardSocketOptions.SO_REUSEADDR, on);
} catch (IOException x) {
Net.translateToSocketException(x);
}
}
public boolean getReuseAddress() throws SocketException {
try {
return ssc.getOption(StandardSocketOptions.SO_REUSEADDR).booleanValue();
} catch (IOException x) {
Net.translateToSocketException(x);
return false; // Never happens
}
}
public String toString() {
if (!isBound())
return "ServerSocket[unbound]";
return "ServerSocket[addr=" + getInetAddress() +
",localport=" + getLocalPort() + "]";
}
public void setReceiveBufferSize(int size) throws SocketException {
// size 0 valid for ServerSocketChannel, invalid for ServerSocket
if (size <= 0)
throw new IllegalArgumentException("size cannot be 0 or negative");
try {
ssc.setOption(StandardSocketOptions.SO_RCVBUF, size);
} catch (IOException x) {
Net.translateToSocketException(x);
}
}
public int getReceiveBufferSize() throws SocketException {
try {
return ssc.getOption(StandardSocketOptions.SO_RCVBUF).intValue();
} catch (IOException x) {
Net.translateToSocketException(x);
return -1; // Never happens
}
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<browserconfig>
<msapplication>
<tile>
<square150x150logo src="images/favicons/mstile-150x150.png"/>
<square310x310logo src="images/favicons/mstile-310x310.png"/>
<TileColor>#2d89ef</TileColor>
</tile>
</msapplication>
</browserconfig>
| {
"pile_set_name": "Github"
} |
package easik.ui.datamanip;
//~--- JDK imports ------------------------------------------------------------
import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
/**
* JDialog set up to allow selection of records. The data being displayed is
* handed to this class in the form of a JTable. The use
*/
public class SelectDataDialog extends JDialog {
/**
*
*/
private static final long serialVersionUID = 1879087330825814111L;
/** */
boolean _ok = false;
/** */
private JTable guiTable;
/**
* Sets up a data selection dialog on a dialog instead of a frame.
*
* @param parent the parent JDialog of the modal dialog
* @param title The title of the dialog window
* @param table The data of the table to display
*/
public SelectDataDialog(final JDialog parent, final String title, final JTable table) {
super(parent, title, true);
init(table);
}
/**
* Sets up a data selection dialog.
*
* @param parent the parent frame of the modal dialog
* @param title The title of the dialog window
* @param table The data of the table to display
*/
public SelectDataDialog(final JFrame parent, final String title, final JTable table) {
super(parent, title, true);
init(table);
}
/**
*
*
* @param table
*/
private void init(final JTable table) {
guiTable = table;
guiTable.setDefaultEditor(Object.class, null); // make cells uneditable
setDefaultCloseOperation(DISPOSE_ON_CLOSE);
final JButton ok = new JButton("OK"), cancel = new JButton("Cancel");
getRootPane().setDefaultButton(ok);
ok.setActionCommand("ok");
cancel.setActionCommand("cancel");
final ButtonListener bl = new ButtonListener();
ok.addActionListener(bl);
cancel.addActionListener(bl);
final JPanel buttons = new JPanel();
buttons.add(ok);
buttons.add(cancel);
// make double click on table fire OK button
guiTable.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(final MouseEvent e) {
if ((e.getClickCount() > 1) && (e.getButton() == MouseEvent.BUTTON1)) // double
// click
{
ok.doClick();
}
}
@Override
public void mousePressed(final MouseEvent e) {
mouseClicked(e);
}
});
add(new JScrollPane(guiTable), BorderLayout.CENTER);
add(buttons, BorderLayout.SOUTH);
_ok = false;
this.pack();
this.setSize(this.getPreferredSize());
setLocationRelativeTo(getParent());
setVisible(true);
}
/**
* Returns true if the user accepted the options dialog (that is, clicked the OK
* button).
*
* @return true if the user clicked OK (and the fields verified successfully),
* false if the user cancelled or closed the dialog.
*/
public boolean isAccepted() {
return _ok;
}
/**
* Gets the rows selected from the data table. This information can then be used
* by DatabaseUtils to find selected primary IDs.
*
* @return An int array of row numbers selected in the display
*/
public int[] getSelectedPKs() {
final int[] selectedRows = guiTable.getSelectedRows();
final int[] selectedPKs = new int[selectedRows.length];
final int PKcolumn = 0; // convention for DatabaseUtil.getTable()
for (int i = 0; i < selectedRows.length; i++) {
selectedPKs[i] = Integer.parseInt((String) guiTable.getValueAt(selectedRows[i], PKcolumn));
}
return selectedPKs;
}
/**
*
*
* @version 12/09/12
* @author Christian Fiddick
*/
private class ButtonListener implements ActionListener {
// Fired when the user clicks OK or Cancel
/**
*
*
* @param e
*/
@Override
public void actionPerformed(final ActionEvent e) {
if ("ok".equals(e.getActionCommand())) {
_ok = true;
SelectDataDialog.this.dispose();
} else if ("cancel".equals(e.getActionCommand())) {
guiTable.setCellSelectionEnabled(false);
SelectDataDialog.this.dispose();
}
}
}
}
| {
"pile_set_name": "Github"
} |
from typing import Tuple
import bs4
from werkzeug.test import Client
def get_error_title(resp_data: str) -> Tuple[str, str]:
soup = bs4.BeautifulSoup(resp_data, "html.parser")
return soup.find("h1").string, soup.find("h2").string
def test_handle_404(web_client: Client) -> None:
resp = web_client.get("/asdf")
assert resp.status_code == 404
error_header, error_type = get_error_title(resp.data)
assert error_header == "Oh Noes!1!!"
assert error_type == "Error 404"
def test_handle_500() -> None:
from backend.web.main import app
def always_throw() -> str:
raise Exception("welp")
app.add_url_rule("/throw_500", view_func=always_throw)
client = app.test_client()
resp = client.get("/throw_500")
assert resp.status_code == 500
error_header, error_type = get_error_title(resp.data)
assert error_header == "Oh Noes!1!!"
assert error_type == "Error 500"
| {
"pile_set_name": "Github"
} |
---
title: BuildingBlockEntries.Parent Property (Word)
keywords: vbawd10.chm36242410
f1_keywords:
- vbawd10.chm36242410
ms.prod: word
api_name:
- Word.BuildingBlockEntries.Parent
ms.assetid: b24e4da5-1cc5-9359-1dbf-d28a24d6db22
ms.date: 06/08/2017
---
# BuildingBlockEntries.Parent Property (Word)
Returns an **Object** that represents the parent object of the specified **BuildingBlockEntries** object.
## Syntax
_expression_ . **Parent**
_expression_ A variable that represents a **[BuildingBlockEntries](buildingblockentries-object-word.md)** object.
## See also
#### Concepts
[BuildingBlockEntries Collection](buildingblockentries-object-word.md)
| {
"pile_set_name": "Github"
} |
<?php
/**
* Smarty Internal Plugin Compile Append
* Compiles the {append} tag
*
* @package Smarty
* @subpackage Compiler
* @author Uwe Tews
*/
/**
* Smarty Internal Plugin Compile Append Class
*
* @package Smarty
* @subpackage Compiler
*/
class Smarty_Internal_Compile_Append extends Smarty_Internal_Compile_Assign
{
/**
* Compiles code for the {append} tag
*
* @param array $args array with attributes from parser
* @param \Smarty_Internal_TemplateCompilerBase $compiler compiler object
* @param array $parameter array with compilation parameter
*
* @return string compiled code
*/
public function compile($args, Smarty_Internal_TemplateCompilerBase $compiler, $parameter)
{
// the following must be assigned at runtime because it will be overwritten in parent class
$this->required_attributes = array('var', 'value');
$this->shorttag_order = array('var', 'value');
$this->optional_attributes = array('scope', 'index');
// check and get attributes
$_attr = $this->getAttributes($compiler, $args);
// map to compile assign attributes
if (isset($_attr['index'])) {
$_params['smarty_internal_index'] = '[' . $_attr['index'] . ']';
unset($_attr['index']);
} else {
$_params['smarty_internal_index'] = '[]';
}
$_new_attr = array();
foreach ($_attr as $key => $value) {
$_new_attr[] = array($key => $value);
}
// call compile assign
return parent::compile($_new_attr, $compiler, $_params);
}
}
| {
"pile_set_name": "Github"
} |
<?php
/*
* $Id$
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This software consists of voluntary contributions made by many individuals
* and is licensed under the LGPL. For more information, see
* <http://www.phpdoctrine.org>.
*/
/**
* Listener for the Blameable behavior which automatically sets the created
* and updated by columns when a record is inserted and updated.
*
* @package Doctrine
* @subpackage Template
* @license http://www.opensource.org/licenses/lgpl-license.php LGPL
* @link www.phpdoctrine.org
* @since 1.2
* @version $Revision$
* @author Colin DeCarlo <[email protected]>
*/
class Doctrine_Template_Listener_DmBlameable extends Doctrine_Record_Listener
{
/**
* Array of timestampable options
*
* @var string
*/
protected $_options = array();
/**
* The default value of the blameVar if one isn't available
*
* @var string
*/
protected $_default = null;
/**
* __construct
*
* @param string $options
* @return void
*/
public function __construct(array $options)
{
$this->_options = $options;
}
/**
* Set the created and updated Blameable columns when a record is inserted
*
* @param Doctrine_Event $event
* @return void
*/
public function preInsert(Doctrine_Event $event)
{
if (!$this->_options['columns']['created']['disabled']) {
$createdName = $event->getInvoker()->getTable()->getFieldName($this->_options['columns']['created']['name']);
$modified = $event->getInvoker()->getModified();
if (!isset($modified[$createdName])) {
$event->getInvoker()->$createdName = $this->getUserIdentity();
}
}
if ( ! $this->_options['columns']['updated']['disabled'] && $this->_options['columns']['updated']['onInsert']) {
$updatedName = $event->getInvoker()->getTable()->getFieldName($this->_options['columns']['updated']['name']);
$modified = $event->getInvoker()->getModified();
if ( ! isset($modified[$updatedName])) {
$event->getInvoker()->$updatedName = $this->getUserIdentity();
}
}
}
/**
* Set updated Blameable column when a record is updated
*
* @param Doctrine_Event $evet
* @return void
*/
public function preUpdate(Doctrine_Event $event)
{
if ( ! $this->_options['columns']['updated']['disabled']) {
$updatedName = $event->getInvoker()->getTable()->getFieldName($this->_options['columns']['updated']['name']);
$modified = $event->getInvoker()->getModified();
if ( ! isset($modified[$updatedName])) {
$event->getInvoker()->$updatedName = $this->getUserIdentity();
}
}
}
/**
* Set the updated field for dql update queries
*
* @param Doctrine_Event $evet
* @return void
*/
public function preDqlUpdate(Doctrine_Event $event)
{
if ( ! $this->_options['columns']['updated']['disabled']) {
$params = $event->getParams();
$updatedName = $event->getInvoker()->getTable()->getFieldName($this->_options['columns']['updated']['name']);
$field = $params['alias'] . '.' . $updatedName;
$query = $event->getQuery();
if ( ! $query->contains($field)) {
$query->set($field, '?', $this->getUserIdentity());
}
}
}
/**
* Gets the users identity from the $blameVar index of either the $_SESSION
* or $GLOBALS array; OR use the default value
*
* @return void
*/
public function getUserIdentity()
{
$ident = null;
if(class_exists('sfContext', false) && sfContext::hasInstance() && $user = sfContext::getInstance()->getUser())
{
if($dmUserId = $user->getUserId())
{
$ident = $dmUserId;
}
}
return $ident;
}
}
| {
"pile_set_name": "Github"
} |
Topologies:
Sub-topology: 0
Source: KSTREAM-SOURCE-0000000000 (topics: [LEFT])
--> KSTREAM-TRANSFORMVALUES-0000000001
Processor: KSTREAM-TRANSFORMVALUES-0000000001 (stores: [])
--> LeftSourceKeyed-SelectKey
<-- KSTREAM-SOURCE-0000000000
Processor: LeftSourceKeyed-SelectKey (stores: [])
--> PrependAliasLeft
<-- KSTREAM-TRANSFORMVALUES-0000000001
Processor: PrependAliasLeft (stores: [])
--> Join-left-repartition-filter
<-- LeftSourceKeyed-SelectKey
Processor: Join-left-repartition-filter (stores: [])
--> Join-left-repartition-sink
<-- PrependAliasLeft
Sink: Join-left-repartition-sink (topic: Join-left-repartition)
<-- Join-left-repartition-filter
Sub-topology: 1
Source: KSTREAM-SOURCE-0000000004 (topics: [RIGHT])
--> KSTREAM-TRANSFORMVALUES-0000000005
Processor: KSTREAM-TRANSFORMVALUES-0000000005 (stores: [])
--> RightSourceKeyed-SelectKey
<-- KSTREAM-SOURCE-0000000004
Processor: RightSourceKeyed-SelectKey (stores: [])
--> PrependAliasRight
<-- KSTREAM-TRANSFORMVALUES-0000000005
Processor: PrependAliasRight (stores: [])
--> Join-right-repartition-filter
<-- RightSourceKeyed-SelectKey
Processor: Join-right-repartition-filter (stores: [])
--> Join-right-repartition-sink
<-- PrependAliasRight
Sink: Join-right-repartition-sink (topic: Join-right-repartition)
<-- Join-right-repartition-filter
Sub-topology: 2
Source: Join-left-repartition-source (topics: [Join-left-repartition])
--> Join-this-windowed
Source: Join-right-repartition-source (topics: [Join-right-repartition])
--> Join-other-windowed
Processor: Join-other-windowed (stores: [KSTREAM-JOINOTHER-0000000017-store])
--> Join-other-join
<-- Join-right-repartition-source
Processor: Join-this-windowed (stores: [KSTREAM-JOINTHIS-0000000016-store])
--> Join-this-join
<-- Join-left-repartition-source
Processor: Join-other-join (stores: [KSTREAM-JOINTHIS-0000000016-store])
--> Join-merge
<-- Join-other-windowed
Processor: Join-this-join (stores: [KSTREAM-JOINOTHER-0000000017-store])
--> Join-merge
<-- Join-this-windowed
Processor: Join-merge (stores: [])
--> Project
<-- Join-this-join, Join-other-join
Processor: Project (stores: [])
--> KSTREAM-SINK-0000000020
<-- Join-merge
Sink: KSTREAM-SINK-0000000020 (topic: OUTPUT)
<-- Project
| {
"pile_set_name": "Github"
} |
//
// DemoViewController.m
// TwProfile
//
// Created by Edgar on 5/1/13.
// Copyright (c) 2013 mx.com.hunk. All rights reserved.
//
#import "DemoViewController.h"
#import "ProfileViewController.h"
#import "ProfileScrollViewController.h"
#import "ProfileImagesViewController.h"
#import "ProfileImagesScrollViewController.h"
@interface DemoViewController ()
@end
@implementation DemoViewController
- (id)initWithStyle:(UITableViewStyle)style
{
self = [super initWithStyle:style];
if (self) {
// Custom initialization
self.title = @"Tweetbot Profile effect";
}
return self;
}
- (void)viewDidLoad
{
[super viewDidLoad];
// Uncomment the following line to preserve selection between presentations.
// self.clearsSelectionOnViewWillAppear = NO;
// Uncomment the following line to display an Edit button in the navigation bar for this view controller.
// self.navigationItem.rightBarButtonItem = self.editButtonItem;
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
#pragma mark - Table view data source
//- (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView
//{
// // Return the number of sections.
// return 0;
//}
- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section
{
// Return the number of rows in the section.
return 4;
}
- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath
{
static NSString *CellIdentifier = @"Cell";
UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:CellIdentifier];
if (!cell) {
cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:CellIdentifier];
}
// Configure the cell...
if (indexPath.row == 0) {
cell.textLabel.text = @"TwProfile with UITableView";
}else if(indexPath.row == 1){
cell.textLabel.text = @"TwProfile with UIScrollView";
}else if(indexPath.row == 2){
cell.textLabel.text = @"TwProfile img+ with UIScrollView";
}else{
cell.textLabel.text = @"TwProfile img+ with UIScrollView";
}
return cell;
}
/*
// Override to support conditional editing of the table view.
- (BOOL)tableView:(UITableView *)tableView canEditRowAtIndexPath:(NSIndexPath *)indexPath
{
// Return NO if you do not want the specified item to be editable.
return YES;
}
*/
/*
// Override to support editing the table view.
- (void)tableView:(UITableView *)tableView commitEditingStyle:(UITableViewCellEditingStyle)editingStyle forRowAtIndexPath:(NSIndexPath *)indexPath
{
if (editingStyle == UITableViewCellEditingStyleDelete) {
// Delete the row from the data source
[tableView deleteRowsAtIndexPaths:@[indexPath] withRowAnimation:UITableViewRowAnimationFade];
}
else if (editingStyle == UITableViewCellEditingStyleInsert) {
// Create a new instance of the appropriate class, insert it into the array, and add a new row to the table view
}
}
*/
/*
// Override to support rearranging the table view.
- (void)tableView:(UITableView *)tableView moveRowAtIndexPath:(NSIndexPath *)fromIndexPath toIndexPath:(NSIndexPath *)toIndexPath
{
}
*/
/*
// Override to support conditional rearranging of the table view.
- (BOOL)tableView:(UITableView *)tableView canMoveRowAtIndexPath:(NSIndexPath *)indexPath
{
// Return NO if you do not want the item to be re-orderable.
return YES;
}
*/
#pragma mark - Table view delegate
- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath
{
if (indexPath.row == 0) {
ProfileViewController *profileVC = [[ProfileViewController alloc] init];
[self.navigationController pushViewController:profileVC animated:YES];
}else if(indexPath.row == 1){
ProfileScrollViewController *profileScroll = [[ProfileScrollViewController alloc] init];
[self.navigationController pushViewController:profileScroll animated:YES];
}else if(indexPath.row == 2){
ProfileImagesViewController *proImgs = [[ProfileImagesViewController alloc] init];
[self.navigationController pushViewController:proImgs animated:YES];
}else{
ProfileImagesScrollViewController *proImgsSc = [[ProfileImagesScrollViewController alloc] init];
[self.navigationController pushViewController:proImgsSc animated:YES];
}
}
@end
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2002 - 2003
* NetGroup, Politecnico di Torino (Italy)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the Politecnico di Torino nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifndef __REMOTE_EXT_H__
#define __REMOTE_EXT_H__
// Definition for Microsoft Visual Studio
#if _MSC_VER > 1000
#pragma once
#endif
/*!
\file remote-ext.h
The goal of this file it to include most of the new definitions that should be
placed into the pcap.h file.
It includes all new definitions (structures and functions like pcap_open().
Some of the functions are not really a remote feature, but, right now,
they are placed here.
*/
// All this stuff is public
/*! \addtogroup remote_struct
\{
*/
/*!
\brief Defines the maximum buffer size in which address, port, interface names are kept.
In case the adapter name or such is larger than this value, it is truncated.
This is not used by the user; however it must be aware that an hostname / interface
name longer than this value will be truncated.
*/
#define PCAP_BUF_SIZE 1024
/*! \addtogroup remote_source_ID
\{
*/
/*!
\brief Internal representation of the type of source in use (file,
remote/local interface).
This indicates a file, i.e. the user want to open a capture from a local file.
*/
#define PCAP_SRC_FILE 2
/*!
\brief Internal representation of the type of source in use (file,
remote/local interface).
This indicates a local interface, i.e. the user want to open a capture from
a local interface. This does not involve the RPCAP protocol.
*/
#define PCAP_SRC_IFLOCAL 3
/*!
\brief Internal representation of the type of source in use (file,
remote/local interface).
This indicates a remote interface, i.e. the user want to open a capture from
an interface on a remote host. This does involve the RPCAP protocol.
*/
#define PCAP_SRC_IFREMOTE 4
/*!
\}
*/
/*! \addtogroup remote_source_string
The formats allowed by the pcap_open() are the following:
- file://path_and_filename [opens a local file]
- rpcap://devicename [opens the selected device devices available on the local host, without using the RPCAP protocol]
- rpcap://host/devicename [opens the selected device available on a remote host]
- rpcap://host:port/devicename [opens the selected device available on a remote host, using a non-standard port for RPCAP]
- adaptername [to open a local adapter; kept for compability, but it is strongly discouraged]
- (NULL) [to open the first local adapter; kept for compability, but it is strongly discouraged]
The formats allowed by the pcap_findalldevs_ex() are the following:
- file://folder/ [lists all the files in the given folder]
- rpcap:// [lists all local adapters]
- rpcap://host:port/ [lists the devices available on a remote host]
Referring to the 'host' and 'port' paramters, they can be either numeric or literal. Since
IPv6 is fully supported, these are the allowed formats:
- host (literal): e.g. host.foo.bar
- host (numeric IPv4): e.g. 10.11.12.13
- host (numeric IPv4, IPv6 style): e.g. [10.11.12.13]
- host (numeric IPv6): e.g. [1:2:3::4]
- port: can be either numeric (e.g. '80') or literal (e.g. 'http')
Here you find some allowed examples:
- rpcap://host.foo.bar/devicename [everything literal, no port number]
- rpcap://host.foo.bar:1234/devicename [everything literal, with port number]
- rpcap://10.11.12.13/devicename [IPv4 numeric, no port number]
- rpcap://10.11.12.13:1234/devicename [IPv4 numeric, with port number]
- rpcap://[10.11.12.13]:1234/devicename [IPv4 numeric with IPv6 format, with port number]
- rpcap://[1:2:3::4]/devicename [IPv6 numeric, no port number]
- rpcap://[1:2:3::4]:1234/devicename [IPv6 numeric, with port number]
- rpcap://[1:2:3::4]:http/devicename [IPv6 numeric, with literal port number]
\{
*/
/*!
\brief String that will be used to determine the type of source in use (file,
remote/local interface).
This string will be prepended to the interface name in order to create a string
that contains all the information required to open the source.
This string indicates that the user wants to open a capture from a local file.
*/
#define PCAP_SRC_FILE_STRING "file://"
/*!
\brief String that will be used to determine the type of source in use (file,
remote/local interface).
This string will be prepended to the interface name in order to create a string
that contains all the information required to open the source.
This string indicates that the user wants to open a capture from a network interface.
This string does not necessarily involve the use of the RPCAP protocol. If the
interface required resides on the local host, the RPCAP protocol is not involved
and the local functions are used.
*/
#define PCAP_SRC_IF_STRING "rpcap://"
/*!
\}
*/
/*!
\addtogroup remote_open_flags
\{
*/
/*!
\brief It defines if the adapter has to go in promiscuous mode.
It is '1' if you have to open the adapter in promiscuous mode, '0' otherwise.
Note that even if this parameter is false, the interface could well be in promiscuous
mode for some other reason (for example because another capture process with
promiscuous mode enabled is currently using that interface).
On on Linux systems with 2.2 or later kernels (that have the "any" device), this
flag does not work on the "any" device; if an argument of "any" is supplied,
the 'promisc' flag is ignored.
*/
#define PCAP_OPENFLAG_PROMISCUOUS 1
/*!
\brief It defines if the data trasfer (in case of a remote
capture) has to be done with UDP protocol.
If it is '1' if you want a UDP data connection, '0' if you want
a TCP data connection; control connection is always TCP-based.
A UDP connection is much lighter, but it does not guarantee that all
the captured packets arrive to the client workstation. Moreover,
it could be harmful in case of network congestion.
This flag is meaningless if the source is not a remote interface.
In that case, it is simply ignored.
*/
#define PCAP_OPENFLAG_DATATX_UDP 2
/*!
\brief It defines if the remote probe has to capture its own generated traffic.
In case the remote probe uses the same interface to capture traffic and to send
data back to the caller, the captured traffic includes the RPCAP traffic as well.
If this flag is turned on, the RPCAP traffic is excluded from the capture, so that
the trace returned back to the collector is does not include this traffic.
*/
#define PCAP_OPENFLAG_NOCAPTURE_RPCAP 4
/*!
\}
*/
/*!
\addtogroup remote_samp_methods
\{
*/
/*!
\brief No sampling has to be done on the current capture.
In this case, no sampling algorithms are applied to the current capture.
*/
#define PCAP_SAMP_NOSAMP 0
/*!
\brief It defines that only 1 out of N packets must be returned to the user.
In this case, the 'value' field of the 'pcap_samp' structure indicates the
number of packets (minus 1) that must be discarded before one packet got accepted.
In other words, if 'value = 10', the first packet is returned to the caller, while
the following 9 are discarded.
*/
#define PCAP_SAMP_1_EVERY_N 1
/*!
\brief It defines that we have to return 1 packet every N milliseconds.
In this case, the 'value' field of the 'pcap_samp' structure indicates the 'waiting
time' in milliseconds before one packet got accepted.
In other words, if 'value = 10', the first packet is returned to the caller; the next
returned one will be the first packet that arrives when 10ms have elapsed.
*/
#define PCAP_SAMP_FIRST_AFTER_N_MS 2
/*!
\}
*/
/*!
\addtogroup remote_auth_methods
\{
*/
/*!
\brief It defines the NULL authentication.
This value has to be used within the 'type' member of the pcap_rmtauth structure.
The 'NULL' authentication has to be equal to 'zero', so that old applications
can just put every field of struct pcap_rmtauth to zero, and it does work.
*/
#define RPCAP_RMTAUTH_NULL 0
/*!
\brief It defines the username/password authentication.
With this type of authentication, the RPCAP protocol will use the username/
password provided to authenticate the user on the remote machine. If the
authentication is successful (and the user has the right to open network devices)
the RPCAP connection will continue; otherwise it will be dropped.
This value has to be used within the 'type' member of the pcap_rmtauth structure.
*/
#define RPCAP_RMTAUTH_PWD 1
/*!
\}
*/
/*!
\brief This structure keeps the information needed to autheticate
the user on a remote machine.
The remote machine can either grant or refuse the access according
to the information provided.
In case the NULL authentication is required, both 'username' and
'password' can be NULL pointers.
This structure is meaningless if the source is not a remote interface;
in that case, the functions which requires such a structure can accept
a NULL pointer as well.
*/
struct pcap_rmtauth
{
/*!
\brief Type of the authentication required.
In order to provide maximum flexibility, we can support different types
of authentication based on the value of this 'type' variable. The currently
supported authentication methods are defined into the
\link remote_auth_methods Remote Authentication Methods Section\endlink.
*/
int type;
/*!
\brief Zero-terminated string containing the username that has to be
used on the remote machine for authentication.
This field is meaningless in case of the RPCAP_RMTAUTH_NULL authentication
and it can be NULL.
*/
char *username;
/*!
\brief Zero-terminated string containing the password that has to be
used on the remote machine for authentication.
This field is meaningless in case of the RPCAP_RMTAUTH_NULL authentication
and it can be NULL.
*/
char *password;
};
/*!
\brief This structure defines the information related to sampling.
In case the sampling is requested, the capturing device should read
only a subset of the packets coming from the source. The returned packets depend
on the sampling parameters.
\warning The sampling process is applied <strong>after</strong> the filtering process.
In other words, packets are filtered first, then the sampling process selects a
subset of the 'filtered' packets and it returns them to the caller.
*/
struct pcap_samp
{
/*!
Method used for sampling. Currently, the supported methods are listed in the
\link remote_samp_methods Sampling Methods Section\endlink.
*/
int method;
/*!
This value depends on the sampling method defined. For its meaning, please check
at the \link remote_samp_methods Sampling Methods Section\endlink.
*/
int value;
};
//! Maximum lenght of an host name (needed for the RPCAP active mode)
#define RPCAP_HOSTLIST_SIZE 1024
/*!
\}
*/ // end of public documentation
// Exported functions
/** \name New WinPcap functions
This section lists the new functions that are able to help considerably in writing
WinPcap programs because of their easiness of use.
*/
//\{
pcap_t *pcap_open(const char *source, int snaplen, int flags, int read_timeout, struct pcap_rmtauth *auth, char *errbuf);
int pcap_createsrcstr(char *source, int type, const char *host, const char *port, const char *name, char *errbuf);
int pcap_parsesrcstr(const char *source, int *type, char *host, char *port, char *name, char *errbuf);
int pcap_findalldevs_ex(char *source, struct pcap_rmtauth *auth, pcap_if_t **alldevs, char *errbuf);
struct pcap_samp *pcap_setsampling(pcap_t *p);
//\}
// End of new winpcap functions
/** \name Remote Capture functions
This section lists the functions that are specific for remote capture.
*/
//\{
SOCKET pcap_remoteact_accept(const char *address, const char *port, const char *hostlist, char *connectinghost, struct pcap_rmtauth *auth, char *errbuf);
int pcap_remoteact_list(char *hostlist, char sep, int size, char *errbuf);
int pcap_remoteact_close(const char *host, char *errbuf);
void pcap_remoteact_cleanup();
//\}
// End of remote capture functions
#endif
| {
"pile_set_name": "Github"
} |
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Comparing different copy schemes against spread initial literals.
// Benchmarks for large holey double arrays.
const largeHoleyArray = new Array(1e5);
for (var i = 0; i < 100; i++) {
largeHoleyArray[i] = i + 6.66;
}
for (var i = 5000; i < 5500; i++) {
largeHoleyArray[i] = i + 6.66;
}
// ----------------------------------------------------------------------------
// Benchmark: Spread
// ----------------------------------------------------------------------------
function SpreadLargeHoley() {
var newArr = [...largeHoleyArray];
// basic sanity check
if (newArr.length != largeHoleyArray.length) throw 666;
return newArr;
}
// ----------------------------------------------------------------------------
// Benchmark: ForLength
// ----------------------------------------------------------------------------
function ForLengthLargeHoley() {
var newArr = new Array(largeHoleyArray.length);
for (let i = 0; i < largeHoleyArray.length; i++) {
newArr[i] = largeHoleyArray[i];
}
// basic sanity check
if (newArr.length != largeHoleyArray.length) throw 666;
return newArr;
}
// ----------------------------------------------------------------------------
// Benchmark: ForLengthEmpty
// ----------------------------------------------------------------------------
function ForLengthEmptyLargeHoley() {
var newArr = [];
for (let i = 0; i < largeHoleyArray.length; i++) {
newArr[i] = largeHoleyArray[i];
}
// basic sanity check
if (newArr.length != largeHoleyArray.length) throw 666;
return newArr;
}
// ----------------------------------------------------------------------------
// Benchmark: Slice
// ----------------------------------------------------------------------------
function SliceLargeHoley() {
var newArr = largeHoleyArray.slice();
// basic sanity check
if (newArr.length != largeHoleyArray.length) throw 666;
return newArr;
}
// ----------------------------------------------------------------------------
// Benchmark: Slice0
// ----------------------------------------------------------------------------
function Slice0LargeHoley() {
var newArr = largeHoleyArray.slice(0);
// basic sanity check
if (newArr.length != largeHoleyArray.length) throw 666;
return newArr;
}
// ----------------------------------------------------------------------------
// Benchmark: ConcatReceive
// ----------------------------------------------------------------------------
function ConcatReceiveLargeHoley() {
var newArr = largeHoleyArray.concat();
// basic sanity check
if (newArr.length != largeHoleyArray.length) throw 666;
return newArr;
}
// ----------------------------------------------------------------------------
// Benchmark: ConcatArg
// ----------------------------------------------------------------------------
function ConcatArgLargeHoley() {
var newArr = [].concat(largeHoleyArray);
// basic sanity check
if (newArr.length != largeHoleyArray.length) throw 666;
return newArr;
}
// ----------------------------------------------------------------------------
// Benchmark: ForOfPush
// ----------------------------------------------------------------------------
function ForOfPushLargeHoley() {
var newArr = [];
for (let x of largeHoleyArray) {
newArr.push(x)
}
// basic sanity check
if (newArr.length != largeHoleyArray.length) throw 666;
return newArr;
}
// ----------------------------------------------------------------------------
// Benchmark: MapId
// ----------------------------------------------------------------------------
function MapIdLargeHoley() {
var newArr = largeHoleyArray.map(x => x);
// basic sanity check
if (newArr.length != largeHoleyArray.length) throw 666;
return newArr;
}
// ----------------------------------------------------------------------------
// Setup and Run
// ----------------------------------------------------------------------------
load('../base.js');
var success = true;
function PrintResult(name, result) {
print(name + '-ArrayLiteralInitialSpreadLargeDoubleHoley(Score): ' + result);
}
function PrintError(name, error) {
PrintResult('Error: ' + name, error);
success = false;
}
// Run the benchmark (5 x 100) iterations instead of 1 second.
function CreateBenchmark(name, f) {
new BenchmarkSuite(name, [1000], [ new Benchmark(name, false, false, 5, f) ]);
}
CreateBenchmark('Spread', SpreadLargeHoley);
CreateBenchmark('ForLength', ForLengthLargeHoley);
CreateBenchmark('ForLengthEmpty', ForLengthEmptyLargeHoley);
CreateBenchmark('Slice', SliceLargeHoley);
CreateBenchmark('Slice0', Slice0LargeHoley);
CreateBenchmark('ConcatReceive', ConcatReceiveLargeHoley);
CreateBenchmark('ConcatArg', ConcatArgLargeHoley);
BenchmarkSuite.config.doWarmup = true;
BenchmarkSuite.config.doDeterministic = true;
BenchmarkSuite.RunSuites({NotifyResult: PrintResult, NotifyError: PrintError});
| {
"pile_set_name": "Github"
} |
package p1.p2;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
public class ToolbarTestActivity extends AppCompatActivity {
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
}
}
| {
"pile_set_name": "Github"
} |
package plugin
import (
"encoding/json"
"io"
"io/ioutil"
"os"
"path/filepath"
"reflect"
"regexp"
"sort"
"strings"
"sync"
"github.com/Sirupsen/logrus"
"github.com/docker/distribution/reference"
"github.com/docker/docker/api/types"
"github.com/docker/docker/image"
"github.com/docker/docker/layer"
"github.com/docker/docker/libcontainerd"
"github.com/docker/docker/pkg/authorization"
"github.com/docker/docker/pkg/ioutils"
"github.com/docker/docker/pkg/mount"
"github.com/docker/docker/plugin/v2"
"github.com/docker/docker/registry"
"github.com/opencontainers/go-digest"
"github.com/pkg/errors"
)
const configFileName = "config.json"
const rootFSFileName = "rootfs"
var validFullID = regexp.MustCompile(`^([a-f0-9]{64})$`)
func (pm *Manager) restorePlugin(p *v2.Plugin) error {
if p.IsEnabled() {
return pm.restore(p)
}
return nil
}
type eventLogger func(id, name, action string)
// ManagerConfig defines configuration needed to start new manager.
type ManagerConfig struct {
Store *Store // remove
Executor libcontainerd.Remote
RegistryService registry.Service
LiveRestoreEnabled bool // TODO: remove
LogPluginEvent eventLogger
Root string
ExecRoot string
AuthzMiddleware *authorization.Middleware
}
// Manager controls the plugin subsystem.
type Manager struct {
config ManagerConfig
mu sync.RWMutex // protects cMap
muGC sync.RWMutex // protects blobstore deletions
cMap map[*v2.Plugin]*controller
containerdClient libcontainerd.Client
blobStore *basicBlobStore
}
// controller represents the manager's control on a plugin.
type controller struct {
restart bool
exitChan chan bool
timeoutInSecs int
}
// pluginRegistryService ensures that all resolved repositories
// are of the plugin class.
type pluginRegistryService struct {
registry.Service
}
func (s pluginRegistryService) ResolveRepository(name reference.Named) (repoInfo *registry.RepositoryInfo, err error) {
repoInfo, err = s.Service.ResolveRepository(name)
if repoInfo != nil {
repoInfo.Class = "plugin"
}
return
}
// NewManager returns a new plugin manager.
func NewManager(config ManagerConfig) (*Manager, error) {
if config.RegistryService != nil {
config.RegistryService = pluginRegistryService{config.RegistryService}
}
manager := &Manager{
config: config,
}
if err := os.MkdirAll(manager.config.Root, 0700); err != nil {
return nil, errors.Wrapf(err, "failed to mkdir %v", manager.config.Root)
}
if err := os.MkdirAll(manager.config.ExecRoot, 0700); err != nil {
return nil, errors.Wrapf(err, "failed to mkdir %v", manager.config.ExecRoot)
}
if err := os.MkdirAll(manager.tmpDir(), 0700); err != nil {
return nil, errors.Wrapf(err, "failed to mkdir %v", manager.tmpDir())
}
var err error
manager.containerdClient, err = config.Executor.Client(manager) // todo: move to another struct
if err != nil {
return nil, errors.Wrap(err, "failed to create containerd client")
}
manager.blobStore, err = newBasicBlobStore(filepath.Join(manager.config.Root, "storage/blobs"))
if err != nil {
return nil, err
}
manager.cMap = make(map[*v2.Plugin]*controller)
if err := manager.reload(); err != nil {
return nil, errors.Wrap(err, "failed to restore plugins")
}
return manager, nil
}
func (pm *Manager) tmpDir() string {
return filepath.Join(pm.config.Root, "tmp")
}
// StateChanged updates plugin internals using libcontainerd events.
func (pm *Manager) StateChanged(id string, e libcontainerd.StateInfo) error {
logrus.Debugf("plugin state changed %s %#v", id, e)
switch e.State {
case libcontainerd.StateExit:
p, err := pm.config.Store.GetV2Plugin(id)
if err != nil {
return err
}
os.RemoveAll(filepath.Join(pm.config.ExecRoot, id))
if p.PropagatedMount != "" {
if err := mount.Unmount(p.PropagatedMount); err != nil {
logrus.Warnf("Could not unmount %s: %v", p.PropagatedMount, err)
}
propRoot := filepath.Join(filepath.Dir(p.Rootfs), "propagated-mount")
if err := mount.Unmount(propRoot); err != nil {
logrus.Warn("Could not unmount %s: %v", propRoot, err)
}
}
pm.mu.RLock()
c := pm.cMap[p]
if c.exitChan != nil {
close(c.exitChan)
}
restart := c.restart
pm.mu.RUnlock()
if restart {
pm.enable(p, c, true)
}
}
return nil
}
func (pm *Manager) reload() error { // todo: restore
dir, err := ioutil.ReadDir(pm.config.Root)
if err != nil {
return errors.Wrapf(err, "failed to read %v", pm.config.Root)
}
plugins := make(map[string]*v2.Plugin)
for _, v := range dir {
if validFullID.MatchString(v.Name()) {
p, err := pm.loadPlugin(v.Name())
if err != nil {
return err
}
plugins[p.GetID()] = p
}
}
pm.config.Store.SetAll(plugins)
var wg sync.WaitGroup
wg.Add(len(plugins))
for _, p := range plugins {
c := &controller{} // todo: remove this
pm.cMap[p] = c
go func(p *v2.Plugin) {
defer wg.Done()
if err := pm.restorePlugin(p); err != nil {
logrus.Errorf("failed to restore plugin '%s': %s", p.Name(), err)
return
}
if p.Rootfs != "" {
p.Rootfs = filepath.Join(pm.config.Root, p.PluginObj.ID, "rootfs")
}
// We should only enable rootfs propagation for certain plugin types that need it.
for _, typ := range p.PluginObj.Config.Interface.Types {
if (typ.Capability == "volumedriver" || typ.Capability == "graphdriver") && typ.Prefix == "docker" && strings.HasPrefix(typ.Version, "1.") {
if p.PluginObj.Config.PropagatedMount != "" {
propRoot := filepath.Join(filepath.Dir(p.Rootfs), "propagated-mount")
// check if we need to migrate an older propagated mount from before
// these mounts were stored outside the plugin rootfs
if _, err := os.Stat(propRoot); os.IsNotExist(err) {
if _, err := os.Stat(p.PropagatedMount); err == nil {
// make sure nothing is mounted here
// don't care about errors
mount.Unmount(p.PropagatedMount)
if err := os.Rename(p.PropagatedMount, propRoot); err != nil {
logrus.WithError(err).WithField("dir", propRoot).Error("error migrating propagated mount storage")
}
if err := os.MkdirAll(p.PropagatedMount, 0755); err != nil {
logrus.WithError(err).WithField("dir", p.PropagatedMount).Error("error migrating propagated mount storage")
}
}
}
if err := os.MkdirAll(propRoot, 0755); err != nil {
logrus.Errorf("failed to create PropagatedMount directory at %s: %v", propRoot, err)
}
// TODO: sanitize PropagatedMount and prevent breakout
p.PropagatedMount = filepath.Join(p.Rootfs, p.PluginObj.Config.PropagatedMount)
if err := os.MkdirAll(p.PropagatedMount, 0755); err != nil {
logrus.Errorf("failed to create PropagatedMount directory at %s: %v", p.PropagatedMount, err)
return
}
}
}
}
pm.save(p)
requiresManualRestore := !pm.config.LiveRestoreEnabled && p.IsEnabled()
if requiresManualRestore {
// if liveRestore is not enabled, the plugin will be stopped now so we should enable it
if err := pm.enable(p, c, true); err != nil {
logrus.Errorf("failed to enable plugin '%s': %s", p.Name(), err)
}
}
}(p)
}
wg.Wait()
return nil
}
func (pm *Manager) loadPlugin(id string) (*v2.Plugin, error) {
p := filepath.Join(pm.config.Root, id, configFileName)
dt, err := ioutil.ReadFile(p)
if err != nil {
return nil, errors.Wrapf(err, "error reading %v", p)
}
var plugin v2.Plugin
if err := json.Unmarshal(dt, &plugin); err != nil {
return nil, errors.Wrapf(err, "error decoding %v", p)
}
return &plugin, nil
}
func (pm *Manager) save(p *v2.Plugin) error {
pluginJSON, err := json.Marshal(p)
if err != nil {
return errors.Wrap(err, "failed to marshal plugin json")
}
if err := ioutils.AtomicWriteFile(filepath.Join(pm.config.Root, p.GetID(), configFileName), pluginJSON, 0600); err != nil {
return errors.Wrap(err, "failed to write atomically plugin json")
}
return nil
}
// GC cleans up unrefrenced blobs. This is recommended to run in a goroutine
func (pm *Manager) GC() {
pm.muGC.Lock()
defer pm.muGC.Unlock()
whitelist := make(map[digest.Digest]struct{})
for _, p := range pm.config.Store.GetAll() {
whitelist[p.Config] = struct{}{}
for _, b := range p.Blobsums {
whitelist[b] = struct{}{}
}
}
pm.blobStore.gc(whitelist)
}
type logHook struct{ id string }
func (logHook) Levels() []logrus.Level {
return logrus.AllLevels
}
func (l logHook) Fire(entry *logrus.Entry) error {
entry.Data = logrus.Fields{"plugin": l.id}
return nil
}
func attachToLog(id string) func(libcontainerd.IOPipe) error {
return func(iop libcontainerd.IOPipe) error {
iop.Stdin.Close()
logger := logrus.New()
logger.Hooks.Add(logHook{id})
// TODO: cache writer per id
w := logger.Writer()
go func() {
io.Copy(w, iop.Stdout)
}()
go func() {
// TODO: update logrus and use logger.WriterLevel
io.Copy(w, iop.Stderr)
}()
return nil
}
}
func validatePrivileges(requiredPrivileges, privileges types.PluginPrivileges) error {
if !isEqual(requiredPrivileges, privileges, isEqualPrivilege) {
return errors.New("incorrect privileges")
}
return nil
}
func isEqual(arrOne, arrOther types.PluginPrivileges, compare func(x, y types.PluginPrivilege) bool) bool {
if len(arrOne) != len(arrOther) {
return false
}
sort.Sort(arrOne)
sort.Sort(arrOther)
for i := 1; i < arrOne.Len(); i++ {
if !compare(arrOne[i], arrOther[i]) {
return false
}
}
return true
}
func isEqualPrivilege(a, b types.PluginPrivilege) bool {
if a.Name != b.Name {
return false
}
return reflect.DeepEqual(a.Value, b.Value)
}
func configToRootFS(c []byte) (*image.RootFS, error) {
var pluginConfig types.PluginConfig
if err := json.Unmarshal(c, &pluginConfig); err != nil {
return nil, err
}
// validation for empty rootfs is in distribution code
if pluginConfig.Rootfs == nil {
return nil, nil
}
return rootFSFromPlugin(pluginConfig.Rootfs), nil
}
func rootFSFromPlugin(pluginfs *types.PluginConfigRootfs) *image.RootFS {
rootFS := image.RootFS{
Type: pluginfs.Type,
DiffIDs: make([]layer.DiffID, len(pluginfs.DiffIds)),
}
for i := range pluginfs.DiffIds {
rootFS.DiffIDs[i] = layer.DiffID(pluginfs.DiffIds[i])
}
return &rootFS
}
| {
"pile_set_name": "Github"
} |
{
"ver": "2.3.5",
"uuid": "3b564f95-7eed-4b13-aa43-f5db43dc1e86",
"type": "sprite",
"wrapMode": "clamp",
"filterMode": "bilinear",
"premultiplyAlpha": false,
"genMipmaps": false,
"packable": true,
"width": 58,
"height": 78,
"platformSettings": {},
"subMetas": {
"LEGO_txfd_spark2_2": {
"ver": "1.0.4",
"uuid": "80fb29e5-d9c1-4dd6-a73e-96870a445811",
"rawTextureUuid": "3b564f95-7eed-4b13-aa43-f5db43dc1e86",
"trimType": "auto",
"trimThreshold": 1,
"rotated": false,
"offsetX": 0,
"offsetY": 0,
"trimX": 0,
"trimY": 0,
"width": 58,
"height": 78,
"rawWidth": 58,
"rawHeight": 78,
"borderTop": 0,
"borderBottom": 0,
"borderLeft": 0,
"borderRight": 0,
"subMetas": {}
}
}
} | {
"pile_set_name": "Github"
} |
glabel func_809A1A08
/* 003F8 809A1A08 AFA40000 */ sw $a0, 0x0000($sp)
/* 003FC 809A1A0C AFA50004 */ sw $a1, 0x0004($sp)
/* 00400 809A1A10 84CF0056 */ lh $t7, 0x0056($a2) ## 00000056
/* 00404 809A1A14 84CE005C */ lh $t6, 0x005C($a2) ## 0000005C
/* 00408 809A1A18 01CF082A */ slt $at, $t6, $t7
/* 0040C 809A1A1C 10200011 */ beq $at, $zero, .L809A1A64
/* 00410 809A1A20 00000000 */ nop
/* 00414 809A1A24 84D80040 */ lh $t8, 0x0040($a2) ## 00000040
/* 00418 809A1A28 84D90052 */ lh $t9, 0x0052($a2) ## 00000052
/* 0041C 809A1A2C 03194021 */ addu $t0, $t8, $t9
/* 00420 809A1A30 A4C80040 */ sh $t0, 0x0040($a2) ## 00000040
/* 00424 809A1A34 84C90040 */ lh $t1, 0x0040($a2) ## 00000040
/* 00428 809A1A38 05230003 */ bgezl $t1, .L809A1A48
/* 0042C 809A1A3C 84CA004A */ lh $t2, 0x004A($a2) ## 0000004A
/* 00430 809A1A40 A4C00040 */ sh $zero, 0x0040($a2) ## 00000040
/* 00434 809A1A44 84CA004A */ lh $t2, 0x004A($a2) ## 0000004A
.L809A1A48:
/* 00438 809A1A48 84CB0054 */ lh $t3, 0x0054($a2) ## 00000054
/* 0043C 809A1A4C 014B6023 */ subu $t4, $t2, $t3
/* 00440 809A1A50 A4CC004A */ sh $t4, 0x004A($a2) ## 0000004A
/* 00444 809A1A54 84CD004A */ lh $t5, 0x004A($a2) ## 0000004A
/* 00448 809A1A58 05A10002 */ bgez $t5, .L809A1A64
/* 0044C 809A1A5C 00000000 */ nop
/* 00450 809A1A60 A4C0004A */ sh $zero, 0x004A($a2) ## 0000004A
.L809A1A64:
/* 00454 809A1A64 03E00008 */ jr $ra
/* 00458 809A1A68 00000000 */ nop
/* 0045C 809A1A6C 00000000 */ nop
| {
"pile_set_name": "Github"
} |
{
"name" : "SketchUp",
"version" : "2016",
"os" : "mac",
"mods_used" : ["COMMAND", "SHIFT"],
"default_context" : "Global Context",
"contexts" : {
"Global Context" : {
"A" : [
{"name":"2 Point Arc", "mods":[]},
{"name":"Select All", "mods":["COMMAND"]}
],
"B" : [
{"name":"Paint Bucket", "mods":[]}
],
"C" : [
{"name":"Circle", "mods":[]}
],
"DELETE" : [
{"name":"Delete", "mods":[]}
],
"E" : [
{"name":"Eraser", "mods":[]}
],
"F" : [
{"name":"Offset", "mods":[]}
],
"F1" : [
{"name":"Context Help", "mods":["SHIFT"]}
],
"G" : [
{"name":"Make Component", "mods":[]}
],
"H" : [
{"name":"Pan", "mods":[]}
],
"I" : [
{"name":"Image Igloo", "mods":[]}
],
"INSERT" : [
{"name":"Copy", "mods":["COMMAND"]},
{"name":"Paste", "mods":["SHIFT"]}
],
"K" : [
{"name":"Back Edges", "mods":[]}
],
"L" : [
{"name":"Line", "mods":[]}
],
"M" : [
{"name":"Move", "mods":[]}
],
"N" : [
{"name":"New", "mods":["COMMAND"]}
],
"O" : [
{"name":"Open", "mods":["COMMAND"]},
{"name":"Orbit", "mods":[]}
],
"P" : [
{"name":"Print", "mods":["COMMAND"]},
{"name":"Push/Pull", "mods":[]}
],
"Q" : [
{"name":"Rotate", "mods":[]}
],
"R" : [
{"name":"Rectangle", "mods":[]}
],
"S" : [
{"name":"Save", "mods":["COMMAND"]},
{"name":"Scale", "mods":[]}
],
"SPACE" : [
{"name":"Select", "mods":[]}
],
"T" : [
{"name":"Select None", "mods":["COMMAND"]},
{"name":"Tape Measure", "mods":[]}
],
"W" : [
{"name":"Zoom Window", "mods":["COMMAND", "SHIFT"]}
],
"X" : [
{"name":"Cut", "mods":["COMMAND"]}
],
"Y" : [
{"name":"Redo", "mods":["COMMAND"]}
],
"Z" : [
{"name":"Undo", "mods":["COMMAND"]},
{"name":"Zoom Extents", "mods":["SHIFT"]},
{"name":"Zoom", "mods":[]}
]
},
"Mode: Circle" : {
"SHIFT" : [
{"name":"Lock current inferences", "mods":[]}
]
},
"Mode: Eraser" : {
"COMMAND" : [
{"name":"Soften/Smooth", "mods":[]}
],
"SHIFT" : [
{"name":"Hide", "mods":[]}
]
},
"Mode: Follow me" : {
"ALT" : [
{"name":"Face perimeter as extrusion path", "mods":[]}
]
},
"Mode: Line" : {
"DOWN_ARROW" : [
{"name":"Parallel/Perpendicular", "mods":[]}
],
"LEFT_ARROW" : [
{"name":"On Green Axis", "mods":[]}
],
"RIGHT_ARROW" : [
{"name":"On Red Axis", "mods":[]}
],
"UP_ARROW" : [
{"name":"On Blue Axis", "mods":[]}
]
},
"Mode: Move" : {
"ALT" : [
{"name":"Auto-fold", "mods":[]}
],
"ASTERISK" : [
{"name":"External Copy Array", "mods":[]}
],
"COMMAND" : [
{"name":"Copy", "mods":[]}
],
"DOWN_ARROW" : [
{"name":"Parallel/Perpendicular", "mods":[]}
],
"LEFT_ARROW" : [
{"name":"On Green Axis", "mods":[]}
],
"NUMPAD_ASTERISK" : [
{"name":"External Copy Array", "mods":[]}
],
"NUMPAD_SLASH" : [
{"name":"Internal Copy Array", "mods":[]}
],
"RIGHT_ARROW" : [
{"name":"On Red Axis", "mods":[]}
],
"SHIFT" : [
{"name":"Lock Inference", "mods":[]}
],
"SLASH" : [
{"name":"Internal Copy Array", "mods":[]}
],
"UP_ARROW" : [
{"name":"On Blue Axis", "mods":[]}
],
"X" : [
{"name":"External Copy Array", "mods":[]}
]
},
"Mode: Orbit" : {
"COMMAND" : [
{"name":"(HOLD) Suspend gravity", "mods":[]}
],
"SHIFT" : [
{"name":"(HOLD) Activate Pan", "mods":[]}
]
},
"Mode: Paint Bucket" : {
"ALT" : [
{"name":"Sample material", "mods":[]}
],
"COMMAND" : [
{"name":"Paint all matching connected faces", "mods":[]}
],
"SHIFT" : [
{"name":"Paint all matching faces in the model", "mods":[]},
{"name":"Paint all matching faces on the same object", "mods":["COMMAND"]}
]
},
"Mode: Push/Pull" : {
"COMMAND" : [
{"name":"Push/Pull a copy of the face (leaving the original face in place)", "mods":[]}
]
},
"Mode: Rotate" : {
"COMMAND" : [
{"name":"Copy", "mods":[]}
]
},
"Mode: Rotated Rectangle" : {
"ALT" : [
{"name":"Lock drawing plane for first edge (after first click)", "mods":[]}
],
"SHIFT" : [
{"name":"Lock in current direction/plane", "mods":[]}
]
},
"Mode: Scale" : {
"COMMAND" : [
{"name":"About center", "mods":[]}
],
"SHIFT" : [
{"name":"Uniformly (donโt distort)", "mods":[]}
]
},
"Mode: Select" : {
"COMMAND" : [
{"name":"Add to selection", "mods":[]}
],
"SHIFT" : [
{"name":"Add/Subtract from selection", "mods":[]},
{"name":"Subtract from selection", "mods":["COMMAND"]}
]
},
"Mode: Tape Measure" : {
"COMMAND" : [
{"name":"Create guide or measure only", "mods":[]}
],
"DOWN_ARROW" : [
{"name":"Parallel/Perpendicular", "mods":[]}
],
"LEFT_ARROW" : [
{"name":"On Green Axis", "mods":[]}
],
"RIGHT_ARROW" : [
{"name":"On Red Axis", "mods":[]}
],
"UP_ARROW" : [
{"name":"On Blue Axis", "mods":[]}
]
},
"Mode: Zoom" : {
"SHIFT" : [
{"name":"(HOLD) and click-drag mouse to change Field of View", "mods":[]}
]
}
}
}
| {
"pile_set_name": "Github"
} |
# Python Security Project (PySec) and its related class files.
#
# PySec is a set of tools for secure application development under Linux
#
# Copyright 2014 PySec development team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- coding: ascii -*-
WRONG_OPEN_MODE = "modalita' d'apertura non conosciuta: %r"
WRONG_ONE_CHAR_STRING = "stringa da un carattere invalida: %r"
LOG_ALREADY_SET = "Log gia' presente"
LOG_NOT_STARTED = "Log non iniziato"
LOG_NEGATIVE_ACT_CODE = "codice negativo per l'operazione: %d"
LOG_CODE_PRESENT = "codice gia' presente: %d"
LOG_NAME_PRESENT = "nome gia' presente: %r"
LOG_ERR_NOT_FOUND = "errore %r inesistente"
CHECK_WRONG_SUBRULE_TYPE = "tipo della sottoregola errato: %r"
CHECK_WRONG_RULE_TYPE = "tipo della regola errato: %r"
ENT_NEGATIVE_BASE = "base negativa invalida: %d"
ENT_NEGATIVE_FREQ = "frequenza negativa invalida: %d"
ENT_WRONG_BYTE = "valore byte errato: %r"
EXPR_WRONG_VAR_NAME = "nome variabile invalido: %r"
LOAD_WRONG_FIELDS = "numero di campi errato alla riga %d"
LOAD_WRONG_LIB_NAME = "nome libreria errato alla riga %d"
LOAD_WRONG_VERSION_FORMAT = "formato della version errato alla riga %d"
LOAD_WRONG_HASH_FORMAT = "formato dell'hash invalido alla riga %d"
LOAD_DUP_LIB = "libreria gia' presente: %r %d.%d.%d"
LOAD_LIB_NOT_FOUND = "libreria %r non presente"
LOAD_LIB_VER_NOT_FOUND = "libreria %r %r non presente"
LOAD_INVALID_HASH = "modulo %r %r in %r non corrisponde all'hash %r"
STR_WRONG_BYTE = "valore byte errato: %r"
TIME_INVALID_TIME_FORMAT = "formato invalido: %r"
TIME_UNKNOWN_TIME_UNIT = "unita' temporale sconosciuta: %r"
TIME_NOT_NUMERIC_VALUE = "valore numerico invalido: %r"
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2017 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.gateway.platforms.vertx3.api.auth;
import io.apiman.gateway.platforms.vertx3.common.config.VertxEngineConfig;
import io.vertx.core.Vertx;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.web.Router;
import io.vertx.ext.web.handler.AuthHandler;
import java.util.Arrays;
import org.apache.commons.lang3.EnumUtils;
/**
* @author Marc Savy {@literal <[email protected]>}
*/
@SuppressWarnings("nls")
public class AuthFactory {
public static enum AuthType {
NONE, BASIC, KEYCLOAK;
public static AuthType getType(String name) {
return EnumUtils.getEnum(AuthType.class, name.toUpperCase());
}
public static String all() {
return Arrays.toString(AuthType.values());
}
}
/**
* Creates an auth handler of the type indicated in the `auth` section of config.
*
* @param vertx the vert.x instance
* @param router the vert.x web router to protect
* @param apimanConfig the apiman config
* @return an auth handler
*/
public static AuthHandler getAuth(Vertx vertx, Router router, VertxEngineConfig apimanConfig) {
String type = apimanConfig.getAuth().getString("type", "NONE");
JsonObject authConfig = apimanConfig.getAuth().getJsonObject("config", new JsonObject());
switch(AuthType.getType(type)) {
case BASIC:
return BasicAuth.create(authConfig);
case NONE:
return NoneAuth.create();
case KEYCLOAK:
return KeycloakOAuthFactory.create(vertx, router, apimanConfig, authConfig);
default:
return NoneAuth.create();
}
}
}
| {
"pile_set_name": "Github"
} |
๏ปฟ#pragma warning disable 1591
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace ASP
{
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using System.Web;
using System.Web.Helpers;
using System.Web.Mvc;
using System.Web.Mvc.Ajax;
using System.Web.Mvc.Html;
using System.Web.Routing;
using System.Web.Security;
using System.Web.UI;
using System.Web.WebPages;
#line 28 "..\..\Views\Tags\TagsTemplate.cshtml"
using BetterCms.Module.Root;
#line default
#line hidden
#line 29 "..\..\Views\Tags\TagsTemplate.cshtml"
using BetterCms.Module.Root.Content.Resources;
#line default
#line hidden
#line 30 "..\..\Views\Tags\TagsTemplate.cshtml"
using BetterCms.Module.Root.Mvc;
#line default
#line hidden
#line 31 "..\..\Views\Tags\TagsTemplate.cshtml"
using BetterCms.Module.Root.Mvc.Helpers;
#line default
#line hidden
[System.CodeDom.Compiler.GeneratedCodeAttribute("RazorGenerator", "2.0.0.0")]
[System.Web.WebPages.PageVirtualPathAttribute("~/Views/Tags/TagsTemplate.cshtml")]
public partial class _Views_Tags_TagsTemplate_cshtml : System.Web.Mvc.WebViewPage<BetterCms.Module.Root.ViewModels.Tags.TagsTemplateViewModel>
{
public _Views_Tags_TagsTemplate_cshtml()
{
}
public override void Execute()
{
WriteLiteral("\r\n");
#line 34 "..\..\Views\Tags\TagsTemplate.cshtml"
var canEdit = (ViewContext.Controller as CmsControllerBase).SecurityService.IsAuthorized(RootModuleConstants.UserRoles.EditContent);
#line default
#line hidden
WriteLiteral("\r\n\r\n<div");
WriteLiteral(" class=\"bcms-content-titles\"");
WriteLiteral(">");
#line 36 "..\..\Views\Tags\TagsTemplate.cshtml"
Write(RootGlobalization.TagsTemplate_AddTags_Title);
#line default
#line hidden
WriteLiteral("</div>\r\n");
#line 37 "..\..\Views\Tags\TagsTemplate.cshtml"
Write(Html.Tooltip(Model.TooltipDescription));
#line default
#line hidden
WriteLiteral("\r\n\r\n<div");
WriteLiteral(" class=\"bcms-field-wrapper\"");
WriteLiteral(" data-bind=\"visible: isExpanded()\"");
WriteLiteral(">\r\n <input");
WriteLiteral(" type=\"text\"");
WriteLiteral(" class=\"bcms-field-text\"");
WriteLiteral(@" data-bind=""
css: { 'bcms-input-validation-error': newItem.hasError() },
value: newItem,
valueUpdate: 'afterkeydown',
hasfocus: hasfocus,
autocompleteList: '',
enterPress: addItem,
escPress: clearItem""");
WriteLiteral(" />\r\n <!-- ko if: newItem.hasError() -->\r\n <span");
WriteLiteral(" class=\"bcms-field-validation-error\"");
WriteLiteral(">\r\n <span");
WriteLiteral(" data-bind=\"text: newItem.validationMessage()\"");
WriteLiteral("></span>\r\n </span>\r\n <!-- /ko -->\r\n</div>\r\n\r\n<div");
WriteLiteral(" class=\"bcms-single-tag-holder\"");
WriteLiteral(" data-bind=\"foreach: items()\"");
WriteLiteral(">\r\n <div");
WriteLiteral(" class=\"bcms-single-tag\"");
WriteLiteral(" data-bind=\"css: { \'bcms-single-tag-active\': isActive() }\"");
WriteLiteral(">\r\n <div");
WriteLiteral(" data-bind=\"text: name()\"");
WriteLiteral("></div>\r\n <div");
WriteLiteral(" class=\"bcms-single-tag-remove\"");
WriteLiteral(" data-bind=\"");
#line 58 "..\..\Views\Tags\TagsTemplate.cshtml"
Write(canEdit ? "click: remove" : string.Empty);
#line default
#line hidden
WriteLiteral("\"");
WriteLiteral(">");
#line 58 "..\..\Views\Tags\TagsTemplate.cshtml"
Write(RootGlobalization.Button_Remove);
#line default
#line hidden
WriteLiteral("</div>\r\n </div>\r\n <input");
WriteLiteral(" type=\"hidden\"");
WriteLiteral(" data-bind=\"attr: { name: getItemInputName($index()), value: name() }\"");
WriteLiteral(" />\r\n</div>\r\n");
}
}
}
#pragma warning restore 1591
| {
"pile_set_name": "Github"
} |
t db 0,1,2
mov al,t[1]
| {
"pile_set_name": "Github"
} |
SET citus.next_shard_id TO 1200000;
SET citus.next_placement_id TO 1200000;
-- ===================================================================
-- test end-to-end modification functionality
-- ===================================================================
CREATE TABLE researchers (
id bigint NOT NULL,
lab_id int NOT NULL,
name text NOT NULL
);
CREATE TABLE labs (
id bigint NOT NULL,
name text NOT NULL
);
SELECT master_create_distributed_table('researchers', 'lab_id', 'hash');
master_create_distributed_table
---------------------------------------------------------------------
(1 row)
SELECT master_create_worker_shards('researchers', 2, 2);
master_create_worker_shards
---------------------------------------------------------------------
(1 row)
SELECT master_create_distributed_table('labs', 'id', 'hash');
master_create_distributed_table
---------------------------------------------------------------------
(1 row)
SELECT master_create_worker_shards('labs', 1, 1);
master_create_worker_shards
---------------------------------------------------------------------
(1 row)
-- might be confusing to have two people in the same lab with the same name
CREATE UNIQUE INDEX avoid_name_confusion_idx ON researchers (lab_id, name);
-- add some data
INSERT INTO researchers VALUES (1, 1, 'Donald Knuth');
INSERT INTO researchers VALUES (2, 1, 'Niklaus Wirth');
INSERT INTO researchers VALUES (3, 2, 'Tony Hoare');
INSERT INTO researchers VALUES (4, 2, 'Kenneth Iverson');
-- replace a researcher, reusing their id in a multi-row INSERT
BEGIN;
DELETE FROM researchers WHERE lab_id = 1 AND id = 2;
INSERT INTO researchers VALUES (2, 1, 'John Backus'), (12, 1, 'Frances E. Allen');
COMMIT;
SELECT name FROM researchers WHERE lab_id = 1 AND id % 10 = 2;
name
---------------------------------------------------------------------
John Backus
Frances E. Allen
(2 rows)
-- and the other way around
BEGIN;
INSERT INTO researchers VALUES (14, 2, 'Alan Kay'), (15, 2, 'Barbara Liskov');
DELETE FROM researchers WHERE id = 14 AND lab_id = 2;
ROLLBACK;
-- should have rolled everything back
SELECT * FROM researchers WHERE id = 15 AND lab_id = 2;
id | lab_id | name
---------------------------------------------------------------------
(0 rows)
-- abort a modification
BEGIN;
DELETE FROM researchers WHERE lab_id = 1 AND id = 1;
ABORT;
SELECT name FROM researchers WHERE lab_id = 1 AND id = 1;
name
---------------------------------------------------------------------
Donald Knuth
(1 row)
-- trigger a unique constraint violation
BEGIN;
\set VERBOSITY TERSE
UPDATE researchers SET name = 'John Backus' WHERE id = 1 AND lab_id = 1;
ERROR: duplicate key value violates unique constraint "avoid_name_confusion_idx_1200000"
\set VERBOSITY DEFAULT
ABORT;
-- creating savepoints should work...
BEGIN;
INSERT INTO researchers VALUES (5, 3, 'Dennis Ritchie');
SAVEPOINT hire_thompson;
INSERT INTO researchers VALUES (6, 3, 'Ken Thompson');
COMMIT;
SELECT name FROM researchers WHERE lab_id = 3 AND id = 6;
name
---------------------------------------------------------------------
Ken Thompson
(1 row)
-- even if created by PL/pgSQL...
\set VERBOSITY terse
BEGIN;
DO $$
BEGIN
INSERT INTO researchers VALUES (10, 10, 'Edsger Dijkstra');
EXCEPTION
WHEN not_null_violation THEN
RAISE NOTICE 'caught not_null_violation';
END $$;
COMMIT;
-- rollback should also work
BEGIN;
INSERT INTO researchers VALUES (7, 4, 'Jim Gray');
SAVEPOINT hire_engelbart;
INSERT INTO researchers VALUES (8, 4, 'Douglas Engelbart');
ROLLBACK TO hire_engelbart;
COMMIT;
SELECT name FROM researchers WHERE lab_id = 4;
name
---------------------------------------------------------------------
Jim Gray
(1 row)
BEGIN;
DO $$
BEGIN
INSERT INTO researchers VALUES (11, 11, 'Whitfield Diffie');
INSERT INTO researchers VALUES (NULL, 10, 'Edsger Dijkstra');
EXCEPTION
WHEN not_null_violation THEN
RAISE NOTICE 'caught not_null_violation';
END $$;
NOTICE: caught not_null_violation
COMMIT;
\set VERBOSITY default
-- should be valid to edit labs after researchers...
BEGIN;
INSERT INTO researchers VALUES (8, 5, 'Douglas Engelbart');
INSERT INTO labs VALUES (5, 'Los Alamos');
COMMIT;
SELECT * FROM researchers, labs WHERE labs.id = researchers.lab_id AND researchers.lab_id = 5;
id | lab_id | name | id | name
---------------------------------------------------------------------
8 | 5 | Douglas Engelbart | 5 | Los Alamos
(1 row)
-- and the other way around is also allowed
BEGIN;
INSERT INTO labs VALUES (6, 'Bell Labs');
INSERT INTO researchers VALUES (9, 6, 'Leslie Lamport');
COMMIT;
-- we should be able to expand the transaction participants
BEGIN;
INSERT INTO labs VALUES (6, 'Bell Labs');
INSERT INTO researchers VALUES (9, 6, 'Leslie Lamport');
ERROR: duplicate key value violates unique constraint "avoid_name_confusion_idx_1200001"
DETAIL: Key (lab_id, name)=(6, Leslie Lamport) already exists.
CONTEXT: while executing command on localhost:xxxxx
ABORT;
-- SELECTs may occur after a modification: First check that selecting
-- from the modified node works.
BEGIN;
INSERT INTO labs VALUES (6, 'Bell Labs');
SELECT count(*) FROM researchers WHERE lab_id = 6;
count
---------------------------------------------------------------------
1
(1 row)
ABORT;
-- then check that SELECT going to new node still is fine
BEGIN;
UPDATE pg_dist_shard_placement AS sp SET shardstate = 3
FROM pg_dist_shard AS s
WHERE sp.shardid = s.shardid
AND sp.nodename = 'localhost'
AND sp.nodeport = :worker_1_port
AND s.logicalrelid = 'researchers'::regclass;
INSERT INTO labs VALUES (6, 'Bell Labs');
SELECT count(*) FROM researchers WHERE lab_id = 6;
count
---------------------------------------------------------------------
1
(1 row)
ABORT;
-- we can mix DDL and INSERT
BEGIN;
INSERT INTO labs VALUES (6, 'Bell Labs');
ALTER TABLE labs ADD COLUMN motto text;
ABORT;
-- whether it occurs first or second
BEGIN;
ALTER TABLE labs ADD COLUMN motto text;
INSERT INTO labs VALUES (6, 'Bell Labs');
ABORT;
-- this should work find with sequential DDL as well
BEGIN;
SET LOCAL citus.multi_shard_modify_mode TO 'sequential';
ALTER TABLE labs ADD COLUMN motto text;
INSERT INTO labs VALUES (6, 'Bell Labs');
ABORT;
-- but the DDL should correctly roll back
SELECT "Column", "Type", "Modifiers" FROM table_desc WHERE relid='public.labs'::regclass;
Column | Type | Modifiers
---------------------------------------------------------------------
id | bigint | not null
name | text | not null
(2 rows)
SELECT * FROM labs WHERE id = 6;
id | name
---------------------------------------------------------------------
6 | Bell Labs
(1 row)
-- COPY can happen after single row INSERT
BEGIN;
INSERT INTO labs VALUES (6, 'Bell Labs');
\copy labs from stdin delimiter ','
COMMIT;
-- COPY can be performed if multiple shards were modified over the same connection
BEGIN;
INSERT INTO researchers VALUES (2, 1, 'Knuth Donald');
INSERT INTO researchers VALUES (10, 6, 'Lamport Leslie');
\copy researchers from stdin delimiter ','
ROLLBACK;
-- COPY can be performed after a multi-row INSERT that uses one connection
BEGIN;
INSERT INTO researchers VALUES (2, 1, 'Knuth Donald'), (10, 6, 'Lamport Leslie');
\copy researchers from stdin delimiter ','
ROLLBACK;
-- after a COPY you can modify multiple shards, since they'll use different connections
BEGIN;
\copy researchers from stdin delimiter ','
INSERT INTO researchers VALUES (2, 1, 'Knuth Donald');
INSERT INTO researchers VALUES (10, 6, 'Lamport Leslie');
ROLLBACK;
-- after a COPY you can perform a multi-row INSERT
BEGIN;
\copy researchers from stdin delimiter ','
INSERT INTO researchers VALUES (2, 1, 'Knuth Donald'), (10, 6, 'Lamport Leslie');
ROLLBACK;
-- COPY can happen before single row INSERT
BEGIN;
\copy labs from stdin delimiter ','
SELECT name FROM labs WHERE id = 10;
name
---------------------------------------------------------------------
Weyland-Yutani
Weyland-Yutani
(2 rows)
INSERT INTO labs VALUES (6, 'Bell Labs');
COMMIT;
-- two consecutive COPYs in a transaction are allowed
BEGIN;
\copy labs from stdin delimiter ','
\copy labs from stdin delimiter ','
COMMIT;
SELECT name FROM labs WHERE id = 11 OR id = 12 ORDER BY id;
name
---------------------------------------------------------------------
Planet Express
fsociety
(2 rows)
-- 1pc failure test
SELECT recover_prepared_transactions();
recover_prepared_transactions
---------------------------------------------------------------------
0
(1 row)
-- copy with unique index violation
BEGIN;
\copy researchers FROM STDIN delimiter ','
\copy researchers FROM STDIN delimiter ','
ERROR: duplicate key value violates unique constraint "avoid_name_confusion_idx_1200001"
DETAIL: Key (lab_id, name)=(6, 'Bjarne Stroustrup') already exists.
COMMIT;
-- verify rollback
SELECT * FROM researchers WHERE lab_id = 6;
id | lab_id | name
---------------------------------------------------------------------
9 | 6 | Leslie Lamport
(1 row)
SELECT count(*) FROM pg_dist_transaction;
count
---------------------------------------------------------------------
0
(1 row)
-- 2pc failure and success tests
SET citus.multi_shard_commit_protocol TO '2pc';
SELECT recover_prepared_transactions();
recover_prepared_transactions
---------------------------------------------------------------------
0
(1 row)
-- copy with unique index violation
BEGIN;
\copy researchers FROM STDIN delimiter ','
\copy researchers FROM STDIN delimiter ','
ERROR: duplicate key value violates unique constraint "avoid_name_confusion_idx_1200001"
DETAIL: Key (lab_id, name)=(6, 'Bjarne Stroustrup') already exists.
COMMIT;
-- verify rollback
SELECT * FROM researchers WHERE lab_id = 6;
id | lab_id | name
---------------------------------------------------------------------
9 | 6 | Leslie Lamport
(1 row)
SELECT count(*) FROM pg_dist_transaction;
count
---------------------------------------------------------------------
0
(1 row)
BEGIN;
\copy researchers FROM STDIN delimiter ','
\copy researchers FROM STDIN delimiter ','
COMMIT;
-- verify success
SELECT * FROM researchers WHERE lab_id = 6;
id | lab_id | name
---------------------------------------------------------------------
9 | 6 | Leslie Lamport
17 | 6 | 'Bjarne Stroustrup'
18 | 6 | 'Dennis Ritchie'
(3 rows)
-- verify 2pc
SELECT count(*) FROM pg_dist_transaction;
count
---------------------------------------------------------------------
2
(1 row)
RESET citus.multi_shard_commit_protocol;
-- create a check function
SELECT * from run_command_on_workers('CREATE FUNCTION reject_large_id() RETURNS trigger AS $rli$
BEGIN
IF (NEW.id > 30) THEN
RAISE ''illegal value'';
END IF;
RETURN NEW;
END;
$rli$ LANGUAGE plpgsql;')
ORDER BY nodeport;
nodename | nodeport | success | result
---------------------------------------------------------------------
localhost | 57637 | t | CREATE FUNCTION
localhost | 57638 | t | CREATE FUNCTION
(2 rows)
-- register after insert trigger
SELECT * FROM run_command_on_placements('researchers', 'CREATE CONSTRAINT TRIGGER reject_large_researcher_id AFTER INSERT ON %s DEFERRABLE INITIALLY DEFERRED FOR EACH ROW EXECUTE FUNCTION reject_large_id()')
ORDER BY nodeport, shardid;
nodename | nodeport | shardid | success | result
---------------------------------------------------------------------
localhost | 57637 | 1200000 | t | CREATE TRIGGER
localhost | 57637 | 1200001 | t | CREATE TRIGGER
localhost | 57638 | 1200000 | t | CREATE TRIGGER
localhost | 57638 | 1200001 | t | CREATE TRIGGER
(4 rows)
-- hide postgresql version dependend messages for next test only
\set VERBOSITY terse
-- deferred check should abort the transaction
BEGIN;
SET LOCAL citus.multi_shard_commit_protocol TO '1pc';
DELETE FROM researchers WHERE lab_id = 6;
\copy researchers FROM STDIN delimiter ','
\copy researchers FROM STDIN delimiter ','
COMMIT;
WARNING: illegal value
WARNING: failed to commit transaction on localhost:xxxxx
WARNING: illegal value
WARNING: failed to commit transaction on localhost:xxxxx
WARNING: could not commit transaction for shard xxxxx on any active node
ERROR: could not commit transaction on any active node
\unset VERBOSITY
-- verify everyhing including delete is rolled back
SELECT * FROM researchers WHERE lab_id = 6;
id | lab_id | name
---------------------------------------------------------------------
9 | 6 | Leslie Lamport
17 | 6 | 'Bjarne Stroustrup'
18 | 6 | 'Dennis Ritchie'
(3 rows)
-- cleanup triggers and the function
SELECT * from run_command_on_placements('researchers', 'drop trigger reject_large_researcher_id on %s')
ORDER BY nodeport, shardid;
nodename | nodeport | shardid | success | result
---------------------------------------------------------------------
localhost | 57637 | 1200000 | t | DROP TRIGGER
localhost | 57637 | 1200001 | t | DROP TRIGGER
localhost | 57638 | 1200000 | t | DROP TRIGGER
localhost | 57638 | 1200001 | t | DROP TRIGGER
(4 rows)
SELECT * FROM run_command_on_workers('drop function reject_large_id()')
ORDER BY nodeport;
nodename | nodeport | success | result
---------------------------------------------------------------------
localhost | 57637 | t | DROP FUNCTION
localhost | 57638 | t | DROP FUNCTION
(2 rows)
-- ALTER and copy are compatible
BEGIN;
ALTER TABLE labs ADD COLUMN motto text;
\copy labs from stdin delimiter ','
ROLLBACK;
BEGIN;
\copy labs from stdin delimiter ','
ALTER TABLE labs ADD COLUMN motto text;
ABORT;
-- can perform parallel DDL even a connection is used for multiple shards
BEGIN;
SELECT lab_id FROM researchers WHERE lab_id = 1 AND id = 0;
lab_id
---------------------------------------------------------------------
(0 rows)
SELECT lab_id FROM researchers WHERE lab_id = 2 AND id = 0;
lab_id
---------------------------------------------------------------------
(0 rows)
ALTER TABLE researchers ADD COLUMN motto text;
ROLLBACK;
-- can perform sequential DDL once a connection is used for multiple shards
BEGIN;
SET LOCAL citus.multi_shard_modify_mode TO 'sequential';
SELECT lab_id FROM researchers WHERE lab_id = 1 AND id = 0;
lab_id
---------------------------------------------------------------------
(0 rows)
SELECT lab_id FROM researchers WHERE lab_id = 2 AND id = 0;
lab_id
---------------------------------------------------------------------
(0 rows)
ALTER TABLE researchers ADD COLUMN motto text;
ROLLBACK;
-- multi-shard operations can co-exist with DDL in a transactional way
BEGIN;
ALTER TABLE labs ADD COLUMN motto text;
DELETE FROM labs;
ALTER TABLE labs ADD COLUMN score float;
ROLLBACK;
-- should have rolled everything back
SELECT * FROM labs WHERE id = 12;
id | name
---------------------------------------------------------------------
12 | fsociety
(1 row)
-- now, for some special failures...
CREATE TABLE objects (
id bigint PRIMARY KEY,
name text NOT NULL
);
SELECT master_create_distributed_table('objects', 'id', 'hash');
master_create_distributed_table
---------------------------------------------------------------------
(1 row)
SELECT master_create_worker_shards('objects', 1, 2);
master_create_worker_shards
---------------------------------------------------------------------
(1 row)
-- test primary key violations
BEGIN;
INSERT INTO objects VALUES (1, 'apple');
INSERT INTO objects VALUES (1, 'orange');
ERROR: duplicate key value violates unique constraint "objects_pkey_1200003"
DETAIL: Key (id)=(X) already exists.
CONTEXT: while executing command on localhost:xxxxx
COMMIT;
-- data shouldn't have persisted...
SELECT * FROM objects WHERE id = 1;
id | name
---------------------------------------------------------------------
(0 rows)
-- and placements should still be healthy...
SELECT count(*)
FROM pg_dist_shard_placement AS sp,
pg_dist_shard AS s
WHERE sp.shardid = s.shardid
AND sp.shardstate = 1
AND s.logicalrelid = 'objects'::regclass;
count
---------------------------------------------------------------------
2
(1 row)
-- create trigger on one worker to reject certain values
\c - - - :worker_2_port
CREATE FUNCTION reject_bad() RETURNS trigger AS $rb$
BEGIN
IF (NEW.name = 'BAD') THEN
RAISE 'illegal value';
END IF;
RETURN NEW;
END;
$rb$ LANGUAGE plpgsql;
CREATE CONSTRAINT TRIGGER reject_bad
AFTER INSERT ON objects_1200003
DEFERRABLE INITIALLY IMMEDIATE
FOR EACH ROW EXECUTE FUNCTION reject_bad();
\c - - - :master_port
-- test partial failure; worker_1 succeeds, 2 fails
-- in this case, we expect the transaction to abort
\set VERBOSITY terse
BEGIN;
INSERT INTO objects VALUES (1, 'apple');
INSERT INTO objects VALUES (2, 'BAD');
ERROR: illegal value
COMMIT;
-- so the data should noy be persisted
SELECT * FROM objects WHERE id = 2;
id | name
---------------------------------------------------------------------
(0 rows)
SELECT * FROM labs WHERE id = 7;
id | name
---------------------------------------------------------------------
(0 rows)
-- and none of placements should be inactive
SELECT count(*)
FROM pg_dist_shard_placement AS sp,
pg_dist_shard AS s
WHERE sp.shardid = s.shardid
AND sp.nodename = 'localhost'
AND sp.nodeport = :worker_2_port
AND sp.shardstate = 3
AND s.logicalrelid = 'objects'::regclass;
count
---------------------------------------------------------------------
0
(1 row)
DELETE FROM objects;
-- there cannot be errors on different shards at different times
-- because the first failure will fail the whole transaction
\c - - - :worker_1_port
CREATE FUNCTION reject_bad() RETURNS trigger AS $rb$
BEGIN
IF (NEW.name = 'BAD') THEN
RAISE 'illegal value';
END IF;
RETURN NEW;
END;
$rb$ LANGUAGE plpgsql;
CREATE CONSTRAINT TRIGGER reject_bad
AFTER INSERT ON labs_1200002
DEFERRABLE INITIALLY IMMEDIATE
FOR EACH ROW EXECUTE FUNCTION reject_bad();
\c - - - :master_port
BEGIN;
INSERT INTO objects VALUES (1, 'apple');
INSERT INTO objects VALUES (2, 'BAD');
ERROR: illegal value
INSERT INTO labs VALUES (8, 'Aperture Science');
ERROR: current transaction is aborted, commands ignored until end of transaction block
INSERT INTO labs VALUES (2, 'BAD');
ERROR: current transaction is aborted, commands ignored until end of transaction block
COMMIT;
-- data should NOT be persisted
SELECT * FROM objects WHERE id = 1;
id | name
---------------------------------------------------------------------
(0 rows)
SELECT * FROM labs WHERE id = 8;
id | name
---------------------------------------------------------------------
(0 rows)
-- all placements should remain healthy
SELECT count(*)
FROM pg_dist_shard_placement AS sp,
pg_dist_shard AS s
WHERE sp.shardid = s.shardid
AND sp.shardstate = 1
AND (s.logicalrelid = 'objects'::regclass OR
s.logicalrelid = 'labs'::regclass);
count
---------------------------------------------------------------------
3
(1 row)
-- what if the failures happen at COMMIT time?
\c - - - :worker_2_port
DROP TRIGGER reject_bad ON objects_1200003;
CREATE CONSTRAINT TRIGGER reject_bad
AFTER INSERT ON objects_1200003
DEFERRABLE INITIALLY DEFERRED
FOR EACH ROW EXECUTE FUNCTION reject_bad();
\c - - - :master_port
-- should be the same story as before, just at COMMIT time
BEGIN;
INSERT INTO objects VALUES (1, 'apple');
INSERT INTO objects VALUES (2, 'BAD');
INSERT INTO labs VALUES (9, 'Umbrella Corporation');
COMMIT;
WARNING: illegal value
WARNING: failed to commit transaction on localhost:xxxxx
-- data should be persisted
SELECT * FROM objects WHERE id = 2;
id | name
---------------------------------------------------------------------
2 | BAD
(1 row)
-- but one placement should be bad
SELECT count(*)
FROM pg_dist_shard_placement AS sp,
pg_dist_shard AS s
WHERE sp.shardid = s.shardid
AND sp.nodename = 'localhost'
AND sp.nodeport = :worker_2_port
AND sp.shardstate = 3
AND s.logicalrelid = 'objects'::regclass;
count
---------------------------------------------------------------------
1
(1 row)
DELETE FROM objects;
-- mark shards as healthy again; delete all data
UPDATE pg_dist_shard_placement AS sp SET shardstate = 1
FROM pg_dist_shard AS s
WHERE sp.shardid = s.shardid
AND s.logicalrelid = 'objects'::regclass;
-- what if all nodes have failures at COMMIT time?
\c - - - :worker_1_port
DROP TRIGGER reject_bad ON labs_1200002;
CREATE CONSTRAINT TRIGGER reject_bad
AFTER INSERT ON labs_1200002
DEFERRABLE INITIALLY DEFERRED
FOR EACH ROW EXECUTE FUNCTION reject_bad();
\c - - - :master_port
BEGIN;
INSERT INTO objects VALUES (1, 'apple');
INSERT INTO objects VALUES (2, 'BAD');
INSERT INTO labs VALUES (8, 'Aperture Science');
INSERT INTO labs VALUES (9, 'BAD');
COMMIT;
WARNING: illegal value
WARNING: failed to commit transaction on localhost:xxxxx
WARNING: illegal value
WARNING: failed to commit transaction on localhost:xxxxx
WARNING: could not commit transaction for shard xxxxx on any active node
WARNING: could not commit transaction for shard xxxxx on any active node
ERROR: could not commit transaction on any active node
-- data should NOT be persisted
SELECT * FROM objects WHERE id = 1;
id | name
---------------------------------------------------------------------
(0 rows)
SELECT * FROM labs WHERE id = 8;
id | name
---------------------------------------------------------------------
(0 rows)
-- all placements should remain healthy
SELECT count(*)
FROM pg_dist_shard_placement AS sp,
pg_dist_shard AS s
WHERE sp.shardid = s.shardid
AND sp.shardstate = 1
AND (s.logicalrelid = 'objects'::regclass OR
s.logicalrelid = 'labs'::regclass);
count
---------------------------------------------------------------------
3
(1 row)
-- what if one shard (objects) succeeds but another (labs) completely fails?
\c - - - :worker_2_port
DROP TRIGGER reject_bad ON objects_1200003;
\c - - - :master_port
SET citus.next_shard_id TO 1200004;
BEGIN;
INSERT INTO objects VALUES (1, 'apple');
INSERT INTO labs VALUES (8, 'Aperture Science');
INSERT INTO labs VALUES (9, 'BAD');
COMMIT;
WARNING: illegal value
WARNING: failed to commit transaction on localhost:xxxxx
WARNING: could not commit transaction for shard xxxxx on any active node
\set VERBOSITY default
-- data to objects should be persisted, but labs should not...
SELECT * FROM objects WHERE id = 1;
id | name
---------------------------------------------------------------------
1 | apple
(1 row)
SELECT * FROM labs WHERE id = 8;
id | name
---------------------------------------------------------------------
(0 rows)
-- labs should be healthy, but one object placement shouldn't be
SELECT s.logicalrelid::regclass::text, sp.shardstate, count(*)
FROM pg_dist_shard_placement AS sp,
pg_dist_shard AS s
WHERE sp.shardid = s.shardid
AND (s.logicalrelid = 'objects'::regclass OR
s.logicalrelid = 'labs'::regclass)
GROUP BY s.logicalrelid, sp.shardstate
ORDER BY s.logicalrelid, sp.shardstate;
logicalrelid | shardstate | count
---------------------------------------------------------------------
labs | 1 | 1
objects | 1 | 1
objects | 3 | 1
(3 rows)
-- some append-partitioned tests for good measure
CREATE TABLE append_researchers ( LIKE researchers );
SELECT master_create_distributed_table('append_researchers', 'id', 'append');
master_create_distributed_table
---------------------------------------------------------------------
(1 row)
SET citus.shard_replication_factor TO 1;
SELECT master_create_empty_shard('append_researchers') AS new_shard_id
\gset
UPDATE pg_dist_shard SET shardminvalue = 0, shardmaxvalue = 500000
WHERE shardid = :new_shard_id;
SELECT master_create_empty_shard('append_researchers') AS new_shard_id
\gset
UPDATE pg_dist_shard SET shardminvalue = 500000, shardmaxvalue = 1000000
WHERE shardid = :new_shard_id;
SET citus.shard_replication_factor TO DEFAULT;
-- try single-shard INSERT
BEGIN;
INSERT INTO append_researchers VALUES (0, 0, 'John Backus');
COMMIT;
SELECT * FROM append_researchers WHERE id = 0;
id | lab_id | name
---------------------------------------------------------------------
0 | 0 | John Backus
(1 row)
-- try rollback
BEGIN;
DELETE FROM append_researchers WHERE id = 0;
ROLLBACK;
SELECT * FROM append_researchers WHERE id = 0;
id | lab_id | name
---------------------------------------------------------------------
0 | 0 | John Backus
(1 row)
-- try hitting shard on other node
BEGIN;
INSERT INTO append_researchers VALUES (1, 1, 'John McCarthy');
INSERT INTO append_researchers VALUES (500000, 500000, 'Tony Hoare');
ERROR: cannot run INSERT command which targets multiple shards
HINT: Make sure the value for partition column "id" falls into a single shard.
ROLLBACK;
SELECT * FROM append_researchers;
id | lab_id | name
---------------------------------------------------------------------
0 | 0 | John Backus
(1 row)
-- we use 2PC for reference tables by default
-- let's add some tests for them
CREATE TABLE reference_modifying_xacts (key int, value int);
SELECT create_reference_table('reference_modifying_xacts');
create_reference_table
---------------------------------------------------------------------
(1 row)
-- very basic test, ensure that INSERTs work
INSERT INTO reference_modifying_xacts VALUES (1, 1);
SELECT * FROM reference_modifying_xacts;
key | value
---------------------------------------------------------------------
1 | 1
(1 row)
-- now ensure that it works in a transaction as well
BEGIN;
INSERT INTO reference_modifying_xacts VALUES (2, 2);
SELECT * FROM reference_modifying_xacts;
key | value
---------------------------------------------------------------------
1 | 1
2 | 2
(2 rows)
COMMIT;
-- we should be able to see the insert outside of the transaction as well
SELECT * FROM reference_modifying_xacts;
key | value
---------------------------------------------------------------------
1 | 1
2 | 2
(2 rows)
-- rollback should also work
BEGIN;
INSERT INTO reference_modifying_xacts VALUES (3, 3);
SELECT * FROM reference_modifying_xacts;
key | value
---------------------------------------------------------------------
1 | 1
2 | 2
3 | 3
(3 rows)
ROLLBACK;
-- see that we've not inserted
SELECT * FROM reference_modifying_xacts;
key | value
---------------------------------------------------------------------
1 | 1
2 | 2
(2 rows)
-- lets fail on of the workers at before the commit time
\c - - - :worker_1_port
CREATE FUNCTION reject_bad_reference() RETURNS trigger AS $rb$
BEGIN
IF (NEW.key = 999) THEN
RAISE 'illegal value';
END IF;
RETURN NEW;
END;
$rb$ LANGUAGE plpgsql;
CREATE CONSTRAINT TRIGGER reject_bad_reference
AFTER INSERT ON reference_modifying_xacts_1200006
DEFERRABLE INITIALLY IMMEDIATE
FOR EACH ROW EXECUTE FUNCTION reject_bad_reference();
\c - - - :master_port
\set VERBOSITY terse
-- try without wrapping inside a transaction
INSERT INTO reference_modifying_xacts VALUES (999, 3);
ERROR: illegal value
-- same test within a transaction
BEGIN;
INSERT INTO reference_modifying_xacts VALUES (999, 3);
ERROR: illegal value
COMMIT;
-- lets fail one of the workers at COMMIT time
\c - - - :worker_1_port
DROP TRIGGER reject_bad_reference ON reference_modifying_xacts_1200006;
CREATE CONSTRAINT TRIGGER reject_bad_reference
AFTER INSERT ON reference_modifying_xacts_1200006
DEFERRABLE INITIALLY DEFERRED
FOR EACH ROW EXECUTE FUNCTION reject_bad_reference();
\c - - - :master_port
\set VERBOSITY terse
-- try without wrapping inside a transaction
INSERT INTO reference_modifying_xacts VALUES (999, 3);
ERROR: illegal value
-- same test within a transaction
BEGIN;
INSERT INTO reference_modifying_xacts VALUES (999, 3);
COMMIT;
ERROR: illegal value
-- all placements should be healthy
SELECT s.logicalrelid::regclass::text, sp.shardstate, count(*)
FROM pg_dist_shard_placement AS sp,
pg_dist_shard AS s
WHERE sp.shardid = s.shardid
AND s.logicalrelid = 'reference_modifying_xacts'::regclass
GROUP BY s.logicalrelid, sp.shardstate
ORDER BY s.logicalrelid, sp.shardstate;
logicalrelid | shardstate | count
---------------------------------------------------------------------
reference_modifying_xacts | 1 | 2
(1 row)
-- for the time-being drop the constraint
\c - - - :worker_1_port
DROP TRIGGER reject_bad_reference ON reference_modifying_xacts_1200006;
\c - - - :master_port
-- now create a hash distributed table and run tests
-- including both the reference table and the hash
-- distributed table
-- To prevent colocating a hash table with append table
DELETE FROM pg_dist_colocation WHERE colocationid = 100001;
SET citus.next_shard_id TO 1200007;
SET citus.shard_count = 4;
SET citus.shard_replication_factor = 1;
CREATE TABLE hash_modifying_xacts (key int, value int);
SELECT create_distributed_table('hash_modifying_xacts', 'key');
create_distributed_table
---------------------------------------------------------------------
(1 row)
-- let's try to expand the xact participants
BEGIN;
INSERT INTO hash_modifying_xacts VALUES (1, 1);
INSERT INTO reference_modifying_xacts VALUES (10, 10);
COMMIT;
-- it is allowed when turning off deadlock prevention
BEGIN;
INSERT INTO hash_modifying_xacts VALUES (1, 1);
INSERT INTO reference_modifying_xacts VALUES (10, 10);
ABORT;
BEGIN;
INSERT INTO hash_modifying_xacts VALUES (1, 1);
INSERT INTO hash_modifying_xacts VALUES (2, 2);
ABORT;
-- lets fail one of the workers before COMMIT time for the hash table
\c - - - :worker_1_port
CREATE FUNCTION reject_bad_hash() RETURNS trigger AS $rb$
BEGIN
IF (NEW.key = 997) THEN
RAISE 'illegal value';
END IF;
RETURN NEW;
END;
$rb$ LANGUAGE plpgsql;
CREATE CONSTRAINT TRIGGER reject_bad_hash
AFTER INSERT ON hash_modifying_xacts_1200007
DEFERRABLE INITIALLY IMMEDIATE
FOR EACH ROW EXECUTE FUNCTION reject_bad_hash();
\c - - - :master_port
\set VERBOSITY terse
-- the transaction as a whole should fail
BEGIN;
INSERT INTO reference_modifying_xacts VALUES (55, 10);
INSERT INTO hash_modifying_xacts VALUES (997, 1);
ERROR: illegal value
COMMIT;
-- ensure that the value didn't go into the reference table
SELECT * FROM reference_modifying_xacts WHERE key = 55;
key | value
---------------------------------------------------------------------
(0 rows)
-- now lets fail on of the workers for the hash distributed table table
-- when there is a reference table involved
\c - - - :worker_1_port
DROP TRIGGER reject_bad_hash ON hash_modifying_xacts_1200007;
-- the trigger is on execution time
CREATE CONSTRAINT TRIGGER reject_bad_hash
AFTER INSERT ON hash_modifying_xacts_1200007
DEFERRABLE INITIALLY DEFERRED
FOR EACH ROW EXECUTE FUNCTION reject_bad_hash();
\c - - - :master_port
\set VERBOSITY terse
-- the transaction as a whole should fail
BEGIN;
INSERT INTO reference_modifying_xacts VALUES (12, 12);
INSERT INTO hash_modifying_xacts VALUES (997, 1);
COMMIT;
ERROR: illegal value
-- ensure that the values didn't go into the reference table
SELECT * FROM reference_modifying_xacts WHERE key = 12;
key | value
---------------------------------------------------------------------
(0 rows)
-- all placements should be healthy
SELECT s.logicalrelid::regclass::text, sp.shardstate, count(*)
FROM pg_dist_shard_placement AS sp,
pg_dist_shard AS s
WHERE sp.shardid = s.shardid
AND (s.logicalrelid = 'reference_modifying_xacts'::regclass OR
s.logicalrelid = 'hash_modifying_xacts'::regclass)
GROUP BY s.logicalrelid, sp.shardstate
ORDER BY s.logicalrelid, sp.shardstate;
logicalrelid | shardstate | count
---------------------------------------------------------------------
reference_modifying_xacts | 1 | 2
hash_modifying_xacts | 1 | 4
(2 rows)
-- now, fail the insert on reference table
-- and ensure that hash distributed table's
-- change is rollbacked as well
\c - - - :worker_1_port
CREATE CONSTRAINT TRIGGER reject_bad_reference
AFTER INSERT ON reference_modifying_xacts_1200006
DEFERRABLE INITIALLY IMMEDIATE
FOR EACH ROW EXECUTE FUNCTION reject_bad_reference();
\c - - - :master_port
\set VERBOSITY terse
BEGIN;
-- to expand participant to include all worker nodes
INSERT INTO reference_modifying_xacts VALUES (66, 3);
INSERT INTO hash_modifying_xacts VALUES (80, 1);
INSERT INTO reference_modifying_xacts VALUES (999, 3);
ERROR: illegal value
COMMIT;
SELECT * FROM hash_modifying_xacts WHERE key = 80;
key | value
---------------------------------------------------------------------
(0 rows)
SELECT * FROM reference_modifying_xacts WHERE key = 66;
key | value
---------------------------------------------------------------------
(0 rows)
SELECT * FROM reference_modifying_xacts WHERE key = 999;
key | value
---------------------------------------------------------------------
(0 rows)
-- all placements should be healthy
SELECT s.logicalrelid::regclass::text, sp.shardstate, count(*)
FROM pg_dist_shard_placement AS sp,
pg_dist_shard AS s
WHERE sp.shardid = s.shardid
AND (s.logicalrelid = 'reference_modifying_xacts'::regclass OR
s.logicalrelid = 'hash_modifying_xacts'::regclass)
GROUP BY s.logicalrelid, sp.shardstate
ORDER BY s.logicalrelid, sp.shardstate;
logicalrelid | shardstate | count
---------------------------------------------------------------------
reference_modifying_xacts | 1 | 2
hash_modifying_xacts | 1 | 4
(2 rows)
-- now show that all modifications to reference
-- tables are done in 2PC
SELECT recover_prepared_transactions();
recover_prepared_transactions
---------------------------------------------------------------------
0
(1 row)
INSERT INTO reference_modifying_xacts VALUES (70, 70);
SELECT count(*) FROM pg_dist_transaction;
count
---------------------------------------------------------------------
2
(1 row)
-- reset the transactions table
SELECT recover_prepared_transactions();
recover_prepared_transactions
---------------------------------------------------------------------
0
(1 row)
BEGIN;
INSERT INTO reference_modifying_xacts VALUES (71, 71);
COMMIT;
SELECT count(*) FROM pg_dist_transaction;
count
---------------------------------------------------------------------
2
(1 row)
-- create a hash distributed tablw which spans all nodes
SET citus.shard_count = 4;
SET citus.shard_replication_factor = 2;
CREATE TABLE hash_modifying_xacts_second (key int, value int);
SELECT create_distributed_table('hash_modifying_xacts_second', 'key');
create_distributed_table
---------------------------------------------------------------------
(1 row)
-- reset the transactions table
SELECT recover_prepared_transactions();
recover_prepared_transactions
---------------------------------------------------------------------
0
(1 row)
BEGIN;
INSERT INTO hash_modifying_xacts_second VALUES (72, 1);
INSERT INTO reference_modifying_xacts VALUES (72, 3);
COMMIT;
SELECT count(*) FROM pg_dist_transaction;
count
---------------------------------------------------------------------
2
(1 row)
-- reset the transactions table
SELECT recover_prepared_transactions();
recover_prepared_transactions
---------------------------------------------------------------------
0
(1 row)
DELETE FROM reference_modifying_xacts;
SELECT count(*) FROM pg_dist_transaction;
count
---------------------------------------------------------------------
2
(1 row)
-- reset the transactions table
SELECT recover_prepared_transactions();
recover_prepared_transactions
---------------------------------------------------------------------
0
(1 row)
UPDATE reference_modifying_xacts SET key = 10;
SELECT count(*) FROM pg_dist_transaction;
count
---------------------------------------------------------------------
2
(1 row)
-- now to one more type of failure testing
-- in which we'll make the remote host unavailable
-- first create the new user on all nodes
CREATE USER test_user;
NOTICE: not propagating CREATE ROLE/USER commands to worker nodes
\c - - - :worker_1_port
CREATE USER test_user;
NOTICE: not propagating CREATE ROLE/USER commands to worker nodes
\c - - - :worker_2_port
CREATE USER test_user;
NOTICE: not propagating CREATE ROLE/USER commands to worker nodes
-- now connect back to the master with the new user
\c - test_user - :master_port
SET citus.next_shard_id TO 1200015;
CREATE TABLE reference_failure_test (key int, value int);
SELECT create_reference_table('reference_failure_test');
create_reference_table
---------------------------------------------------------------------
(1 row)
-- create a hash distributed table
SET citus.shard_count TO 4;
CREATE TABLE numbers_hash_failure_test(key int, value int);
SELECT create_distributed_table('numbers_hash_failure_test', 'key');
create_distributed_table
---------------------------------------------------------------------
(1 row)
-- ensure that the shard is created for this user
\c - test_user - :worker_1_port
\dt reference_failure_test_1200015
List of relations
Schema | Name | Type | Owner
---------------------------------------------------------------------
public | reference_failure_test_1200015 | table | test_user
(1 row)
-- now connect with the default user,
-- and rename the existing user
\c - :default_user - :worker_1_port
ALTER USER test_user RENAME TO test_user_new;
NOTICE: not propagating ALTER ROLE ... RENAME TO commands to worker nodes
-- connect back to master and query the reference table
\c - test_user - :master_port
-- should fail since the worker doesn't have test_user anymore
INSERT INTO reference_failure_test VALUES (1, '1');
ERROR: connection to the remote node localhost:xxxxx failed with the following error: FATAL: role "test_user" does not exist
-- the same as the above, but wrapped within a transaction
BEGIN;
INSERT INTO reference_failure_test VALUES (1, '1');
ERROR: connection to the remote node localhost:xxxxx failed with the following error: FATAL: role "test_user" does not exist
COMMIT;
BEGIN;
COPY reference_failure_test FROM STDIN WITH (FORMAT 'csv');
ERROR: connection to the remote node localhost:xxxxx failed with the following error: FATAL: role "test_user" does not exist
COMMIT;
-- show that no data go through the table and shard states are good
SET client_min_messages to 'ERROR';
SELECT * FROM reference_failure_test;
key | value
---------------------------------------------------------------------
(0 rows)
RESET client_min_messages;
-- all placements should be healthy
SELECT s.logicalrelid::regclass::text, sp.shardstate, count(*)
FROM pg_dist_shard_placement AS sp,
pg_dist_shard AS s
WHERE sp.shardid = s.shardid
AND s.logicalrelid = 'reference_failure_test'::regclass
GROUP BY s.logicalrelid, sp.shardstate
ORDER BY s.logicalrelid, sp.shardstate;
logicalrelid | shardstate | count
---------------------------------------------------------------------
reference_failure_test | 1 | 2
(1 row)
BEGIN;
COPY numbers_hash_failure_test FROM STDIN WITH (FORMAT 'csv');
WARNING: connection to the remote node localhost:xxxxx failed with the following error: FATAL: role "test_user" does not exist
WARNING: connection to the remote node localhost:xxxxx failed with the following error: FATAL: role "test_user" does not exist
-- some placements are invalid before abort
SELECT shardid, shardstate, nodename, nodeport
FROM pg_dist_shard_placement JOIN pg_dist_shard USING (shardid)
WHERE logicalrelid = 'numbers_hash_failure_test'::regclass
ORDER BY shardid, nodeport;
shardid | shardstate | nodename | nodeport
---------------------------------------------------------------------
1200016 | 3 | localhost | 57637
1200016 | 1 | localhost | 57638
1200017 | 1 | localhost | 57637
1200017 | 1 | localhost | 57638
1200018 | 1 | localhost | 57637
1200018 | 1 | localhost | 57638
1200019 | 3 | localhost | 57637
1200019 | 1 | localhost | 57638
(8 rows)
ABORT;
-- verify nothing is inserted
SELECT count(*) FROM numbers_hash_failure_test;
WARNING: connection to the remote node localhost:xxxxx failed with the following error: FATAL: role "test_user" does not exist
WARNING: connection to the remote node localhost:xxxxx failed with the following error: FATAL: role "test_user" does not exist
count
---------------------------------------------------------------------
0
(1 row)
-- all placements to be market valid
SELECT shardid, shardstate, nodename, nodeport
FROM pg_dist_shard_placement JOIN pg_dist_shard USING (shardid)
WHERE logicalrelid = 'numbers_hash_failure_test'::regclass
ORDER BY shardid, nodeport;
shardid | shardstate | nodename | nodeport
---------------------------------------------------------------------
1200016 | 1 | localhost | 57637
1200016 | 1 | localhost | 57638
1200017 | 1 | localhost | 57637
1200017 | 1 | localhost | 57638
1200018 | 1 | localhost | 57637
1200018 | 1 | localhost | 57638
1200019 | 1 | localhost | 57637
1200019 | 1 | localhost | 57638
(8 rows)
BEGIN;
COPY numbers_hash_failure_test FROM STDIN WITH (FORMAT 'csv');
WARNING: connection to the remote node localhost:xxxxx failed with the following error: FATAL: role "test_user" does not exist
WARNING: connection to the remote node localhost:xxxxx failed with the following error: FATAL: role "test_user" does not exist
-- check shard states before commit
SELECT shardid, shardstate, nodename, nodeport
FROM pg_dist_shard_placement JOIN pg_dist_shard USING (shardid)
WHERE logicalrelid = 'numbers_hash_failure_test'::regclass
ORDER BY shardid, nodeport;
shardid | shardstate | nodename | nodeport
---------------------------------------------------------------------
1200016 | 3 | localhost | 57637
1200016 | 1 | localhost | 57638
1200017 | 1 | localhost | 57637
1200017 | 1 | localhost | 57638
1200018 | 1 | localhost | 57637
1200018 | 1 | localhost | 57638
1200019 | 3 | localhost | 57637
1200019 | 1 | localhost | 57638
(8 rows)
COMMIT;
-- expect some placements to be market invalid after commit
SELECT shardid, shardstate, nodename, nodeport
FROM pg_dist_shard_placement JOIN pg_dist_shard USING (shardid)
WHERE logicalrelid = 'numbers_hash_failure_test'::regclass
ORDER BY shardid, nodeport;
shardid | shardstate | nodename | nodeport
---------------------------------------------------------------------
1200016 | 3 | localhost | 57637
1200016 | 1 | localhost | 57638
1200017 | 1 | localhost | 57637
1200017 | 1 | localhost | 57638
1200018 | 1 | localhost | 57637
1200018 | 1 | localhost | 57638
1200019 | 3 | localhost | 57637
1200019 | 1 | localhost | 57638
(8 rows)
-- verify data is inserted
SELECT count(*) FROM numbers_hash_failure_test;
WARNING: connection to the remote node localhost:xxxxx failed with the following error: FATAL: role "test_user" does not exist
WARNING: connection to the remote node localhost:xxxxx failed with the following error: FATAL: role "test_user" does not exist
count
---------------------------------------------------------------------
2
(1 row)
-- break the other node as well
\c - :default_user - :worker_2_port
ALTER USER test_user RENAME TO test_user_new;
NOTICE: not propagating ALTER ROLE ... RENAME TO commands to worker nodes
\c - test_user - :master_port
-- fails on all shard placements
INSERT INTO numbers_hash_failure_test VALUES (2,2);
ERROR: connection to the remote node localhost:xxxxx failed with the following error: FATAL: role "test_user" does not exist
-- connect back to the master with the proper user to continue the tests
\c - :default_user - :master_port
SET citus.next_shard_id TO 1200020;
SET citus.next_placement_id TO 1200033;
-- unbreak both nodes by renaming the user back to the original name
SELECT * FROM run_command_on_workers('ALTER USER test_user_new RENAME TO test_user');
nodename | nodeport | success | result
---------------------------------------------------------------------
localhost | 57637 | t | ALTER ROLE
localhost | 57638 | t | ALTER ROLE
(2 rows)
DROP TABLE reference_modifying_xacts, hash_modifying_xacts, hash_modifying_xacts_second,
reference_failure_test, numbers_hash_failure_test;
SELECT * FROM run_command_on_workers('DROP USER test_user');
nodename | nodeport | success | result
---------------------------------------------------------------------
localhost | 57637 | t | DROP ROLE
localhost | 57638 | t | DROP ROLE
(2 rows)
DROP USER test_user;
-- set up foreign keys to test transactions with co-located and reference tables
BEGIN;
SET LOCAL citus.shard_replication_factor TO 1;
SET LOCAL citus.shard_count TO 4;
CREATE TABLE usergroups (
gid int PRIMARY KEY,
name text
);
SELECT create_reference_table('usergroups');
create_reference_table
---------------------------------------------------------------------
(1 row)
CREATE TABLE itemgroups (
gid int PRIMARY KEY,
name text
);
SELECT create_reference_table('itemgroups');
create_reference_table
---------------------------------------------------------------------
(1 row)
CREATE TABLE users (
id int PRIMARY KEY,
name text,
user_group int
);
SELECT create_distributed_table('users', 'id');
create_distributed_table
---------------------------------------------------------------------
(1 row)
CREATE TABLE items (
user_id int REFERENCES users (id) ON DELETE CASCADE,
item_name text,
item_group int
);
SELECT create_distributed_table('items', 'user_id');
create_distributed_table
---------------------------------------------------------------------
(1 row)
-- Table to find values that live in different shards on the same node
SELECT id, shard_name('users', shardid), nodename, nodeport
FROM
pg_dist_shard_placement
JOIN
( SELECT id, get_shard_id_for_distribution_column('users', id) shardid FROM generate_series(1,10) id ) ids
USING (shardid)
ORDER BY
id;
id | shard_name | nodename | nodeport
---------------------------------------------------------------------
1 | users_1200022 | localhost | 57637
2 | users_1200025 | localhost | 57638
3 | users_1200023 | localhost | 57638
4 | users_1200023 | localhost | 57638
5 | users_1200022 | localhost | 57637
6 | users_1200024 | localhost | 57637
7 | users_1200023 | localhost | 57638
8 | users_1200022 | localhost | 57637
9 | users_1200025 | localhost | 57638
10 | users_1200022 | localhost | 57637
(10 rows)
END;
-- the INSERTs into items should see the users
BEGIN;
\COPY users FROM STDIN WITH CSV
INSERT INTO items VALUES (1, 'item-1');
INSERT INTO items VALUES (6, 'item-6');
END;
SELECT user_id FROM items ORDER BY user_id;
user_id
---------------------------------------------------------------------
1
6
(2 rows)
-- should be able to open multiple connections per node after INSERTing over one connection
BEGIN;
INSERT INTO users VALUES (2, 'burak');
INSERT INTO users VALUES (3, 'burak');
\COPY items FROM STDIN WITH CSV
ROLLBACK;
-- perform parallel DDL after a co-located table has been read over 1 connection
BEGIN;
SELECT id FROM users WHERE id = 1;
id
---------------------------------------------------------------------
1
(1 row)
SELECT id FROM users WHERE id = 6;
id
---------------------------------------------------------------------
6
(1 row)
ALTER TABLE items ADD COLUMN last_update timestamptz;
ROLLBACK;
-- can perform sequential DDL after a co-located table has been read over 1 connection
BEGIN;
SET LOCAL citus.multi_shard_modify_mode TO 'sequential';
SELECT id FROM users WHERE id = 1;
id
---------------------------------------------------------------------
1
(1 row)
SELECT id FROM users WHERE id = 6;
id
---------------------------------------------------------------------
6
(1 row)
ALTER TABLE items ADD COLUMN last_update timestamptz;
ROLLBACK;
-- and the other way around is also fine
BEGIN;
ALTER TABLE items ADD COLUMN last_update timestamptz;
SELECT id FROM users JOIN items ON (id = user_id) WHERE id = 1;
id
---------------------------------------------------------------------
1
(1 row)
SELECT id FROM users JOIN items ON (id = user_id) WHERE id = 6;
id
---------------------------------------------------------------------
6
(1 row)
END;
BEGIN;
-- establish multiple connections to a node
\COPY users FROM STDIN WITH CSV
-- now read from the reference table over each connection
SELECT user_id FROM items JOIN itemgroups ON (item_group = gid) WHERE user_id = 2;
user_id
---------------------------------------------------------------------
(0 rows)
SELECT user_id FROM items JOIN itemgroups ON (item_group = gid) WHERE user_id = 3;
user_id
---------------------------------------------------------------------
(0 rows)
-- perform a DDL command on the reference table errors
-- because the current implementation of COPY always opens one connection
-- per placement SELECTs have to use those connections for correctness
ALTER TABLE itemgroups ADD COLUMN last_update timestamptz;
ERROR: cannot perform DDL on placement xxxxx, which has been read over multiple connections
END;
BEGIN;
-- establish multiple connections to a node
\COPY users FROM STDIN WITH CSV
-- read from the reference table over each connection
SELECT user_id FROM items JOIN itemgroups ON (item_group = gid) WHERE user_id = 2;
user_id
---------------------------------------------------------------------
(0 rows)
SELECT user_id FROM items JOIN itemgroups ON (item_group = gid) WHERE user_id = 3;
user_id
---------------------------------------------------------------------
(0 rows)
-- perform a DDL command on a co-located reference table
ALTER TABLE usergroups ADD COLUMN last_update timestamptz;
ERROR: cannot perform DDL on placement xxxxx since a co-located placement has been read over multiple connections
END;
BEGIN;
-- make a modification over connection 1
INSERT INTO usergroups VALUES (0,'istanbul');
-- copy over connections 1 and 2
\COPY users FROM STDIN WITH CSV
-- cannot read modifications made over different connections
SELECT id FROM users JOIN usergroups ON (gid = user_group) WHERE id = 3;
ERROR: cannot perform query with placements that were modified over multiple connections
END;
-- make sure we can see cascading deletes
BEGIN;
DELETE FROM users;
SELECT user_id FROM items JOIN itemgroups ON (item_group = gid) WHERE user_id = 1;
user_id
---------------------------------------------------------------------
(0 rows)
SELECT user_id FROM items JOIN itemgroups ON (item_group = gid) WHERE user_id = 6;
user_id
---------------------------------------------------------------------
(0 rows)
END;
-- test visibility after COPY
INSERT INTO usergroups VALUES (2,'group');
BEGIN;
-- opens two separate connections to node
\COPY users FROM STDIN WITH CSV
-- Uses first connection, which wrote the row with id = 2
SELECT * FROM users JOIN usergroups ON (user_group = gid) WHERE id = 2;
id | name | user_group | gid | name
---------------------------------------------------------------------
2 | onder | 2 | 2 | group
(1 row)
-- Should use second connection, which wrote the row with id = 4
SELECT * FROM users JOIN usergroups ON (user_group = gid) WHERE id = 4;
id | name | user_group | gid | name
---------------------------------------------------------------------
4 | murat | 2 | 2 | group
(1 row)
END;
-- make sure functions that throw an error roll back propertly
CREATE FUNCTION insert_abort()
RETURNS bool
AS $BODY$
BEGIN
INSERT INTO labs VALUES (1001, 'Abort Labs');
UPDATE labs SET name = 'Rollback Labs' WHERE id = 1001;
RAISE 'do not insert';
END;
$BODY$ LANGUAGE plpgsql;
SELECT insert_abort();
ERROR: do not insert
SELECT name FROM labs WHERE id = 1001;
name
---------------------------------------------------------------------
(0 rows)
-- if function_opens_transaction-block is disabled the insert commits immediately
SET citus.function_opens_transaction_block TO off;
SELECT insert_abort();
ERROR: do not insert
SELECT name FROM labs WHERE id = 1001;
name
---------------------------------------------------------------------
Rollback Labs
(1 row)
RESET citus.function_opens_transaction_block;
DROP FUNCTION insert_abort();
DROP TABLE items, users, itemgroups, usergroups, researchers, labs;
| {
"pile_set_name": "Github"
} |
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
2C505DC5205E759500A96390 /* libpyWolf.system.macOS.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 2C505DC6205E759500A96390 /* libpyWolf.system.macOS.dylib */; };
2CB6D9B8205EFB1D00F8B4E9 /* libpyWolf.vulkan.macOS.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 2CB6D9B9205EFB1D00F8B4E9 /* libpyWolf.vulkan.macOS.dylib */; };
2CE07531201A83540078B239 /* pch.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2CE0752E201A83540078B239 /* pch.cpp */; };
2CE07532201A83540078B239 /* pch.h in Headers */ = {isa = PBXBuildFile; fileRef = 2CE0752F201A83540078B239 /* pch.h */; };
2CE07533201A83540078B239 /* main.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2CE07530201A83540078B239 /* main.cpp */; };
2CE0753D201A85630078B239 /* libboost_python-mt.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 2CE0753C201A85630078B239 /* libboost_python-mt.a */; };
2CE0753F201B12F50078B239 /* Python.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 2CE0753E201B12F50078B239 /* Python.framework */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
2C505DC6205E759500A96390 /* libpyWolf.system.macOS.dylib */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.dylib"; path = libpyWolf.system.macOS.dylib; sourceTree = BUILT_PRODUCTS_DIR; };
2CB6D9B9205EFB1D00F8B4E9 /* libpyWolf.vulkan.macOS.dylib */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.dylib"; path = libpyWolf.vulkan.macOS.dylib; sourceTree = BUILT_PRODUCTS_DIR; };
2CD617231E59DA3E006FC5DC /* pyWolf.so */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.dylib"; includeInIndex = 0; path = pyWolf.so; sourceTree = BUILT_PRODUCTS_DIR; };
2CE0752E201A83540078B239 /* pch.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = pch.cpp; path = ../../../src/pyWolf/pch.cpp; sourceTree = "<group>"; };
2CE0752F201A83540078B239 /* pch.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = pch.h; path = ../../../src/pyWolf/pch.h; sourceTree = "<group>"; };
2CE07530201A83540078B239 /* main.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = main.cpp; path = ../../../src/pyWolf/main.cpp; sourceTree = "<group>"; };
2CE07538201A836B0078B239 /* libwolf.content_pipeline.macOS.dylib */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.dylib"; path = libwolf.content_pipeline.macOS.dylib; sourceTree = BUILT_PRODUCTS_DIR; };
2CE07539201A836B0078B239 /* libwolf.media_core.macOS.dylib */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.dylib"; path = libwolf.media_core.macOS.dylib; sourceTree = BUILT_PRODUCTS_DIR; };
2CE0753A201A836B0078B239 /* libwolf.system.macOS.dylib */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.dylib"; path = libwolf.system.macOS.dylib; sourceTree = BUILT_PRODUCTS_DIR; };
2CE0753B201A836B0078B239 /* libwolf.vulkan.macOS.dylib */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.dylib"; path = libwolf.vulkan.macOS.dylib; sourceTree = BUILT_PRODUCTS_DIR; };
2CE0753C201A85630078B239 /* libboost_python-mt.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = "libboost_python-mt.a"; path = "../../../dependencies/boost/lib/osx/debug/libboost_python-mt.a"; sourceTree = "<group>"; };
2CE0753E201B12F50078B239 /* Python.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Python.framework; path = System/Library/Frameworks/Python.framework; sourceTree = SDKROOT; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
2CD617201E59DA3E006FC5DC /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
2CB6D9B8205EFB1D00F8B4E9 /* libpyWolf.vulkan.macOS.dylib in Frameworks */,
2C505DC5205E759500A96390 /* libpyWolf.system.macOS.dylib in Frameworks */,
2CE0753F201B12F50078B239 /* Python.framework in Frameworks */,
2CE0753D201A85630078B239 /* libboost_python-mt.a in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
2CD6171A1E59DA3E006FC5DC = {
isa = PBXGroup;
children = (
2CE07530201A83540078B239 /* main.cpp */,
2CE0752E201A83540078B239 /* pch.cpp */,
2CE0752F201A83540078B239 /* pch.h */,
2CD617241E59DA3E006FC5DC /* Products */,
2CFA170D1E674C850041A83F /* Frameworks */,
);
sourceTree = "<group>";
};
2CD617241E59DA3E006FC5DC /* Products */ = {
isa = PBXGroup;
children = (
2CD617231E59DA3E006FC5DC /* pyWolf.so */,
);
name = Products;
sourceTree = "<group>";
};
2CFA170D1E674C850041A83F /* Frameworks */ = {
isa = PBXGroup;
children = (
2CB6D9B9205EFB1D00F8B4E9 /* libpyWolf.vulkan.macOS.dylib */,
2C505DC6205E759500A96390 /* libpyWolf.system.macOS.dylib */,
2CE0753E201B12F50078B239 /* Python.framework */,
2CE0753C201A85630078B239 /* libboost_python-mt.a */,
2CE07538201A836B0078B239 /* libwolf.content_pipeline.macOS.dylib */,
2CE07539201A836B0078B239 /* libwolf.media_core.macOS.dylib */,
2CE0753A201A836B0078B239 /* libwolf.system.macOS.dylib */,
2CE0753B201A836B0078B239 /* libwolf.vulkan.macOS.dylib */,
);
name = Frameworks;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXHeadersBuildPhase section */
2CD617211E59DA3E006FC5DC /* Headers */ = {
isa = PBXHeadersBuildPhase;
buildActionMask = 2147483647;
files = (
2CE07532201A83540078B239 /* pch.h in Headers */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXHeadersBuildPhase section */
/* Begin PBXNativeTarget section */
2CD617221E59DA3E006FC5DC /* pyWolf */ = {
isa = PBXNativeTarget;
buildConfigurationList = 2CD6172E1E59DA3E006FC5DC /* Build configuration list for PBXNativeTarget "pyWolf" */;
buildPhases = (
2CD6171F1E59DA3E006FC5DC /* Sources */,
2CD617201E59DA3E006FC5DC /* Frameworks */,
2CD617211E59DA3E006FC5DC /* Headers */,
);
buildRules = (
);
dependencies = (
);
name = pyWolf;
productName = wolf.content_pipeline;
productReference = 2CD617231E59DA3E006FC5DC /* pyWolf.so */;
productType = "com.apple.product-type.library.dynamic";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
2CD6171B1E59DA3E006FC5DC /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 0900;
ORGANIZATIONNAME = WolfSource.io;
TargetAttributes = {
2CD617221E59DA3E006FC5DC = {
CreatedOnToolsVersion = 8.2.1;
ProvisioningStyle = Automatic;
};
};
};
buildConfigurationList = 2CD6171E1E59DA3E006FC5DC /* Build configuration list for PBXProject "pyWolf" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
en,
);
mainGroup = 2CD6171A1E59DA3E006FC5DC;
productRefGroup = 2CD617241E59DA3E006FC5DC /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
2CD617221E59DA3E006FC5DC /* pyWolf */,
);
};
/* End PBXProject section */
/* Begin PBXSourcesBuildPhase section */
2CD6171F1E59DA3E006FC5DC /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
2CE07533201A83540078B239 /* main.cpp in Sources */,
2CE07531201A83540078B239 /* pch.cpp in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin XCBuildConfiguration section */
2CD6172C1E59DA3E006FC5DC /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "c++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
CODE_SIGN_IDENTITY = "-";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"$(inherited)",
DEBUG,
MSGPACK_USE_CPP03,
MSGPACK_DISABLE_LEGACY_NIL,
__PYTHON__,
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
HEADER_SEARCH_PATHS = "";
MACOSX_DEPLOYMENT_TARGET = 10.13;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = macosx;
};
name = Debug;
};
2CD6172D1E59DA3E006FC5DC /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "c++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
CODE_SIGN_IDENTITY = "-";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_NO_COMMON_BLOCKS = YES;
"GCC_PREPROCESSOR_DEFINITIONS[arch=*]" = "";
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
HEADER_SEARCH_PATHS = "";
MACOSX_DEPLOYMENT_TARGET = 10.13;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = macosx;
};
name = Release;
};
2CD6172F1E59DA3E006FC5DC /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
EXECUTABLE_EXTENSION = so;
EXECUTABLE_PREFIX = "";
FRAMEWORK_SEARCH_PATHS = "";
"FRAMEWORK_SEARCH_PATHS[arch=*]" = /System/Library/Frameworks/Python.framework/Versions/2.7/lib;
GCC_ENABLE_CPP_EXCEPTIONS = YES;
GCC_ENABLE_CPP_RTTI = YES;
GCC_PRECOMPILE_PREFIX_HEADER = YES;
GCC_PREFIX_HEADER = "$(SRCROOT)/../../../src/pyWolf/pch.h";
GCC_PREPROCESSOR_DEFINITIONS = (
"$(inherited)",
"DEBUG=1",
BOOST_PYTHON_STATIC_LIB,
__VULKAN__,
);
GCC_SYMBOLS_PRIVATE_EXTERN = YES;
HEADER_SEARCH_PATHS = "";
"HEADER_SEARCH_PATHS[arch=*]" = (
"$(SRCROOT)/../../../dependencies/vulkan/macOS/macOS/include",
"$(SRCROOT)/../../../src/wolf.system",
"$(SRCROOT)/../../../src/wolf.content_pipeline",
"$(SRCROOT)/../../../src/wolf.render",
"$(SRCROOT)/../../../dependencies/boost/include/",
/System/Library/Frameworks/Python.framework/Versions/2.7/include/python2.7/,
);
INSTALL_PATH = /usr/local/lib;
LD_DYLIB_INSTALL_NAME = "$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)";
LD_RUNPATH_SEARCH_PATHS = "$(INSTALL_PATH)";
LIBRARY_SEARCH_PATHS = "";
"LIBRARY_SEARCH_PATHS[arch=*]" = (
"$(SRCROOT)/../../../dependencies/nanomsg/lib/osx/debug/",
"$(SRCROOT)/../../../dependencies/boost/lib/osx/debug",
);
MACH_O_TYPE = mh_dylib;
PRODUCT_NAME = "$(TARGET_NAME)";
SYSTEM_FRAMEWORK_SEARCH_PATHS = "";
};
name = Debug;
};
2CD617301E59DA3E006FC5DC /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
EXECUTABLE_EXTENSION = so;
EXECUTABLE_PREFIX = "";
FRAMEWORK_SEARCH_PATHS = "";
GCC_ENABLE_CPP_EXCEPTIONS = YES;
GCC_ENABLE_CPP_RTTI = YES;
GCC_PRECOMPILE_PREFIX_HEADER = YES;
GCC_PREFIX_HEADER = "$(SRCROOT)/../../../src/pyWolf/pch.h";
"GCC_PREPROCESSOR_DEFINITIONS[arch=*]" = "";
GCC_SYMBOLS_PRIVATE_EXTERN = YES;
HEADER_SEARCH_PATHS = "";
INSTALL_PATH = /usr/local/lib;
LD_DYLIB_INSTALL_NAME = "$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)";
LD_RUNPATH_SEARCH_PATHS = "$(INSTALL_PATH)";
LIBRARY_SEARCH_PATHS = "";
MACH_O_TYPE = mh_dylib;
PRODUCT_NAME = "$(TARGET_NAME)";
SYSTEM_FRAMEWORK_SEARCH_PATHS = "";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
2CD6171E1E59DA3E006FC5DC /* Build configuration list for PBXProject "pyWolf" */ = {
isa = XCConfigurationList;
buildConfigurations = (
2CD6172C1E59DA3E006FC5DC /* Debug */,
2CD6172D1E59DA3E006FC5DC /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
2CD6172E1E59DA3E006FC5DC /* Build configuration list for PBXNativeTarget "pyWolf" */ = {
isa = XCConfigurationList;
buildConfigurations = (
2CD6172F1E59DA3E006FC5DC /* Debug */,
2CD617301E59DA3E006FC5DC /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 2CD6171B1E59DA3E006FC5DC /* Project object */;
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#ifndef SHARE_RUNTIME_RELOCATOR_HPP
#define SHARE_RUNTIME_RELOCATOR_HPP
#include "interpreter/bytecodes.hpp"
#include "oops/method.hpp"
#include "utilities/bytes.hpp"
// This code has been converted from the 1.1E java virtual machine
// Thanks to the JavaTopics group for using the code
class ChangeItem;
// Callback object for code relocations
class RelocatorListener : public StackObj {
public:
RelocatorListener() {};
virtual void relocated(int bci, int delta, int new_method_size) = 0;
};
class Relocator : public ResourceObj {
public:
Relocator(const methodHandle& method, RelocatorListener* listener);
methodHandle insert_space_at(int bci, int space, u_char inst_buffer[], TRAPS);
// Callbacks from ChangeItem's
bool handle_code_changes();
bool handle_widen (int bci, int new_ilen, u_char inst_buffer[]); // handles general instructions
void push_jump_widen (int bci, int delta, int new_delta); // pushes jumps
bool handle_jump_widen (int bci, int delta); // handles jumps
bool handle_switch_pad (int bci, int old_pad, bool is_lookup_switch); // handles table and lookup switches
private:
unsigned char* _code_array;
int _code_array_length;
int _code_length;
unsigned char* _compressed_line_number_table;
int _compressed_line_number_table_size;
methodHandle _method;
u_char _overwrite[3]; // stores overwritten bytes for shrunken instructions
GrowableArray<ChangeItem*>* _changes;
unsigned char* code_array() const { return _code_array; }
void set_code_array(unsigned char* array) { _code_array = array; }
int code_length() const { return _code_length; }
void set_code_length(int length) { _code_length = length; }
int code_array_length() const { return _code_array_length; }
void set_code_array_length(int length) { _code_array_length = length; }
unsigned char* compressed_line_number_table() const { return _compressed_line_number_table; }
void set_compressed_line_number_table(unsigned char* table) { _compressed_line_number_table = table; }
int compressed_line_number_table_size() const { return _compressed_line_number_table_size; }
void set_compressed_line_number_table_size(int size) { _compressed_line_number_table_size = size; }
methodHandle method() const { return _method; }
void set_method(const methodHandle& method) { _method = method; }
// This will return a raw bytecode, which is possibly rewritten.
Bytecodes::Code code_at(int bci) const { return (Bytecodes::Code) code_array()[bci]; }
void code_at_put(int bci, Bytecodes::Code code) { code_array()[bci] = (char) code; }
// get and set signed integers in the code_array
inline int int_at(int bci) const { return Bytes::get_Java_u4(&code_array()[bci]); }
inline void int_at_put(int bci, int value) { Bytes::put_Java_u4(&code_array()[bci], value); }
// get and set signed shorts in the code_array
inline short short_at(int bci) const { return (short)Bytes::get_Java_u2(&code_array()[bci]); }
inline void short_at_put(int bci, short value) { Bytes::put_Java_u2((address) &code_array()[bci], value); }
// get the address of in the code_array
inline char* addr_at(int bci) const { return (char*) &code_array()[bci]; }
int instruction_length_at(int bci) { return Bytecodes::length_at(NULL, code_array() + bci); }
// Helper methods
int align(int n) const { return (n+3) & ~3; }
int code_slop_pct() const { return 25; }
bool is_opcode_lookupswitch(Bytecodes::Code bc);
// basic relocation methods
bool relocate_code (int bci, int ilen, int delta);
void change_jumps (int break_bci, int delta);
void change_jump (int bci, int offset, bool is_short, int break_bci, int delta);
void adjust_exception_table(int bci, int delta);
void adjust_line_no_table (int bci, int delta);
void adjust_local_var_table(int bci, int delta);
void adjust_stack_map_table(int bci, int delta);
int get_orig_switch_pad (int bci, bool is_lookup_switch);
int rc_instr_len (int bci);
bool expand_code_array (int delta);
// Callback support
RelocatorListener *_listener;
void notify(int bci, int delta, int new_code_length) {
if (_listener != NULL)
_listener->relocated(bci, delta, new_code_length);
}
};
#endif // SHARE_RUNTIME_RELOCATOR_HPP
| {
"pile_set_name": "Github"
} |
enum {OS_UNKNOWN, OS_WIN95, OS_WIN98, OS_WINME, OS_WINNT, OS_WIN2K, OS_WINXP, OS_WIN2K3};
char *Uptime(int startup=0);
int OSVersionCheck(void);
#ifndef NO_SYSINFO
inline unsigned __int64 GetCycleCount(void);
unsigned __int64 GetCPUSpeed(void);
char *sysinfo(char *sinfo, SOCKET sock);
#endif
#ifndef NO_NETINFO
char *netinfo(char *ninfo, char *host, SOCKET sock);
#endif
| {
"pile_set_name": "Github"
} |
// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
// PARTICULAR PURPOSE.
//
// Copyright (c) Microsoft Corporation. All rights reserved
#include "DisplayManager.h"
using namespace DirectX;
//
// Constructor NULLs out vars
//
DISPLAYMANAGER::DISPLAYMANAGER() : m_Device(nullptr),
m_DeviceContext(nullptr),
m_MoveSurf(nullptr),
m_VertexShader(nullptr),
m_PixelShader(nullptr),
m_InputLayout(nullptr),
m_RTV(nullptr),
m_SamplerLinear(nullptr),
m_DirtyVertexBufferAlloc(nullptr),
m_DirtyVertexBufferAllocSize(0)
{
}
//
// Destructor calls CleanRefs to destroy everything
//
DISPLAYMANAGER::~DISPLAYMANAGER()
{
CleanRefs();
if (m_DirtyVertexBufferAlloc)
{
delete [] m_DirtyVertexBufferAlloc;
m_DirtyVertexBufferAlloc = nullptr;
}
}
//
// Initialize D3D variables
//
void DISPLAYMANAGER::InitD3D(DX_RESOURCES* Data)
{
m_Device = Data->Device;
m_DeviceContext = Data->Context;
m_VertexShader = Data->VertexShader;
m_PixelShader = Data->PixelShader;
m_InputLayout = Data->InputLayout;
m_SamplerLinear = Data->SamplerLinear;
m_Device->AddRef();
m_DeviceContext->AddRef();
m_VertexShader->AddRef();
m_PixelShader->AddRef();
m_InputLayout->AddRef();
m_SamplerLinear->AddRef();
}
//
// Process a given frame and its metadata
//
DUPL_RETURN DISPLAYMANAGER::ProcessFrame(_In_ FRAME_DATA* Data, _Inout_ ID3D11Texture2D* SharedSurf, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc)
{
DUPL_RETURN Ret = DUPL_RETURN_SUCCESS;
// Process dirties and moves
if (Data->FrameInfo.TotalMetadataBufferSize)
{
D3D11_TEXTURE2D_DESC Desc;
Data->Frame->GetDesc(&Desc);
if (Data->MoveCount)
{
Ret = CopyMove(SharedSurf, reinterpret_cast<DXGI_OUTDUPL_MOVE_RECT*>(Data->MetaData), Data->MoveCount, OffsetX, OffsetY, DeskDesc, Desc.Width, Desc.Height);
if (Ret != DUPL_RETURN_SUCCESS)
{
return Ret;
}
}
if (Data->DirtyCount)
{
Ret = CopyDirty(Data->Frame, SharedSurf, reinterpret_cast<RECT*>(Data->MetaData + (Data->MoveCount * sizeof(DXGI_OUTDUPL_MOVE_RECT))), Data->DirtyCount, OffsetX, OffsetY, DeskDesc);
}
}
return Ret;
}
//
// Returns D3D device being used
//
ID3D11Device* DISPLAYMANAGER::GetDevice()
{
return m_Device;
}
//
// Set appropriate source and destination rects for move rects
//
void DISPLAYMANAGER::SetMoveRect(_Out_ RECT* SrcRect, _Out_ RECT* DestRect, _In_ DXGI_OUTPUT_DESC* DeskDesc, _In_ DXGI_OUTDUPL_MOVE_RECT* MoveRect, INT TexWidth, INT TexHeight)
{
switch (DeskDesc->Rotation)
{
case DXGI_MODE_ROTATION_UNSPECIFIED:
case DXGI_MODE_ROTATION_IDENTITY:
{
SrcRect->left = MoveRect->SourcePoint.x;
SrcRect->top = MoveRect->SourcePoint.y;
SrcRect->right = MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left;
SrcRect->bottom = MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top;
*DestRect = MoveRect->DestinationRect;
break;
}
case DXGI_MODE_ROTATION_ROTATE90:
{
SrcRect->left = TexHeight - (MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top);
SrcRect->top = MoveRect->SourcePoint.x;
SrcRect->right = TexHeight - MoveRect->SourcePoint.y;
SrcRect->bottom = MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left;
DestRect->left = TexHeight - MoveRect->DestinationRect.bottom;
DestRect->top = MoveRect->DestinationRect.left;
DestRect->right = TexHeight - MoveRect->DestinationRect.top;
DestRect->bottom = MoveRect->DestinationRect.right;
break;
}
case DXGI_MODE_ROTATION_ROTATE180:
{
SrcRect->left = TexWidth - (MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left);
SrcRect->top = TexHeight - (MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top);
SrcRect->right = TexWidth - MoveRect->SourcePoint.x;
SrcRect->bottom = TexHeight - MoveRect->SourcePoint.y;
DestRect->left = TexWidth - MoveRect->DestinationRect.right;
DestRect->top = TexHeight - MoveRect->DestinationRect.bottom;
DestRect->right = TexWidth - MoveRect->DestinationRect.left;
DestRect->bottom = TexHeight - MoveRect->DestinationRect.top;
break;
}
case DXGI_MODE_ROTATION_ROTATE270:
{
SrcRect->left = MoveRect->SourcePoint.x;
SrcRect->top = TexWidth - (MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left);
SrcRect->right = MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top;
SrcRect->bottom = TexWidth - MoveRect->SourcePoint.x;
DestRect->left = MoveRect->DestinationRect.top;
DestRect->top = TexWidth - MoveRect->DestinationRect.right;
DestRect->right = MoveRect->DestinationRect.bottom;
DestRect->bottom = TexWidth - MoveRect->DestinationRect.left;
break;
}
default:
{
RtlZeroMemory(DestRect, sizeof(RECT));
RtlZeroMemory(SrcRect, sizeof(RECT));
break;
}
}
}
//
// Copy move rectangles
//
DUPL_RETURN DISPLAYMANAGER::CopyMove(_Inout_ ID3D11Texture2D* SharedSurf, _In_reads_(MoveCount) DXGI_OUTDUPL_MOVE_RECT* MoveBuffer, UINT MoveCount, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc, INT TexWidth, INT TexHeight)
{
D3D11_TEXTURE2D_DESC FullDesc;
SharedSurf->GetDesc(&FullDesc);
// Make new intermediate surface to copy into for moving
if (!m_MoveSurf)
{
D3D11_TEXTURE2D_DESC MoveDesc;
MoveDesc = FullDesc;
MoveDesc.Width = DeskDesc->DesktopCoordinates.right - DeskDesc->DesktopCoordinates.left;
MoveDesc.Height = DeskDesc->DesktopCoordinates.bottom - DeskDesc->DesktopCoordinates.top;
MoveDesc.BindFlags = D3D11_BIND_RENDER_TARGET;
MoveDesc.MiscFlags = 0;
HRESULT hr = m_Device->CreateTexture2D(&MoveDesc, nullptr, &m_MoveSurf);
if (FAILED(hr))
{
return ProcessFailure(m_Device, L"Failed to create staging texture for move rects", L"Error", hr, SystemTransitionsExpectedErrors);
}
}
for (UINT i = 0; i < MoveCount; ++i)
{
RECT SrcRect;
RECT DestRect;
SetMoveRect(&SrcRect, &DestRect, DeskDesc, &(MoveBuffer[i]), TexWidth, TexHeight);
// Copy rect out of shared surface
D3D11_BOX Box;
Box.left = SrcRect.left + DeskDesc->DesktopCoordinates.left - OffsetX;
Box.top = SrcRect.top + DeskDesc->DesktopCoordinates.top - OffsetY;
Box.front = 0;
Box.right = SrcRect.right + DeskDesc->DesktopCoordinates.left - OffsetX;
Box.bottom = SrcRect.bottom + DeskDesc->DesktopCoordinates.top - OffsetY;
Box.back = 1;
m_DeviceContext->CopySubresourceRegion(m_MoveSurf, 0, SrcRect.left, SrcRect.top, 0, SharedSurf, 0, &Box);
// Copy back to shared surface
Box.left = SrcRect.left;
Box.top = SrcRect.top;
Box.front = 0;
Box.right = SrcRect.right;
Box.bottom = SrcRect.bottom;
Box.back = 1;
m_DeviceContext->CopySubresourceRegion(SharedSurf, 0, DestRect.left + DeskDesc->DesktopCoordinates.left - OffsetX, DestRect.top + DeskDesc->DesktopCoordinates.top - OffsetY, 0, m_MoveSurf, 0, &Box);
}
return DUPL_RETURN_SUCCESS;
}
//
// Sets up vertices for dirty rects for rotated desktops
//
#pragma warning(push)
#pragma warning(disable:__WARNING_USING_UNINIT_VAR) // false positives in SetDirtyVert due to tool bug
void DISPLAYMANAGER::SetDirtyVert(_Out_writes_(NUMVERTICES) VERTEX* Vertices, _In_ RECT* Dirty, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc, _In_ D3D11_TEXTURE2D_DESC* FullDesc, _In_ D3D11_TEXTURE2D_DESC* ThisDesc)
{
INT CenterX = FullDesc->Width / 2;
INT CenterY = FullDesc->Height / 2;
INT Width = DeskDesc->DesktopCoordinates.right - DeskDesc->DesktopCoordinates.left;
INT Height = DeskDesc->DesktopCoordinates.bottom - DeskDesc->DesktopCoordinates.top;
// Rotation compensated destination rect
RECT DestDirty = *Dirty;
// Set appropriate coordinates compensated for rotation
switch (DeskDesc->Rotation)
{
case DXGI_MODE_ROTATION_ROTATE90:
{
DestDirty.left = Width - Dirty->bottom;
DestDirty.top = Dirty->left;
DestDirty.right = Width - Dirty->top;
DestDirty.bottom = Dirty->right;
Vertices[0].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
Vertices[1].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
Vertices[2].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
Vertices[5].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
break;
}
case DXGI_MODE_ROTATION_ROTATE180:
{
DestDirty.left = Width - Dirty->right;
DestDirty.top = Height - Dirty->bottom;
DestDirty.right = Width - Dirty->left;
DestDirty.bottom = Height - Dirty->top;
Vertices[0].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
Vertices[1].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
Vertices[2].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
Vertices[5].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
break;
}
case DXGI_MODE_ROTATION_ROTATE270:
{
DestDirty.left = Dirty->top;
DestDirty.top = Height - Dirty->right;
DestDirty.right = Dirty->bottom;
DestDirty.bottom = Height - Dirty->left;
Vertices[0].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
Vertices[1].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
Vertices[2].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
Vertices[5].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
break;
}
default:
assert(false); // drop through
case DXGI_MODE_ROTATION_UNSPECIFIED:
case DXGI_MODE_ROTATION_IDENTITY:
{
Vertices[0].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
Vertices[1].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
Vertices[2].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
Vertices[5].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
break;
}
}
// Set positions
Vertices[0].Pos = XMFLOAT3((DestDirty.left + DeskDesc->DesktopCoordinates.left - OffsetX - CenterX) / static_cast<FLOAT>(CenterX),
-1 * (DestDirty.bottom + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
0.0f);
Vertices[1].Pos = XMFLOAT3((DestDirty.left + DeskDesc->DesktopCoordinates.left - OffsetX - CenterX) / static_cast<FLOAT>(CenterX),
-1 * (DestDirty.top + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
0.0f);
Vertices[2].Pos = XMFLOAT3((DestDirty.right + DeskDesc->DesktopCoordinates.left - OffsetX - CenterX) / static_cast<FLOAT>(CenterX),
-1 * (DestDirty.bottom + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
0.0f);
Vertices[3].Pos = Vertices[2].Pos;
Vertices[4].Pos = Vertices[1].Pos;
Vertices[5].Pos = XMFLOAT3((DestDirty.right + DeskDesc->DesktopCoordinates.left - OffsetX - CenterX) / static_cast<FLOAT>(CenterX),
-1 * (DestDirty.top + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
0.0f);
Vertices[3].TexCoord = Vertices[2].TexCoord;
Vertices[4].TexCoord = Vertices[1].TexCoord;
}
#pragma warning(pop) // re-enable __WARNING_USING_UNINIT_VAR
//
// Copies dirty rectangles
//
DUPL_RETURN DISPLAYMANAGER::CopyDirty(_In_ ID3D11Texture2D* SrcSurface, _Inout_ ID3D11Texture2D* SharedSurf, _In_reads_(DirtyCount) RECT* DirtyBuffer, UINT DirtyCount, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc)
{
HRESULT hr;
D3D11_TEXTURE2D_DESC FullDesc;
SharedSurf->GetDesc(&FullDesc);
D3D11_TEXTURE2D_DESC ThisDesc;
SrcSurface->GetDesc(&ThisDesc);
if (!m_RTV)
{
hr = m_Device->CreateRenderTargetView(SharedSurf, nullptr, &m_RTV);
if (FAILED(hr))
{
return ProcessFailure(m_Device, L"Failed to create render target view for dirty rects", L"Error", hr, SystemTransitionsExpectedErrors);
}
}
D3D11_SHADER_RESOURCE_VIEW_DESC ShaderDesc;
ShaderDesc.Format = ThisDesc.Format;
ShaderDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
ShaderDesc.Texture2D.MostDetailedMip = ThisDesc.MipLevels - 1;
ShaderDesc.Texture2D.MipLevels = ThisDesc.MipLevels;
// Create new shader resource view
ID3D11ShaderResourceView* ShaderResource = nullptr;
hr = m_Device->CreateShaderResourceView(SrcSurface, &ShaderDesc, &ShaderResource);
if (FAILED(hr))
{
return ProcessFailure(m_Device, L"Failed to create shader resource view for dirty rects", L"Error", hr, SystemTransitionsExpectedErrors);
}
FLOAT BlendFactor[4] = {0.f, 0.f, 0.f, 0.f};
m_DeviceContext->OMSetBlendState(nullptr, BlendFactor, 0xFFFFFFFF);
m_DeviceContext->OMSetRenderTargets(1, &m_RTV, nullptr);
m_DeviceContext->VSSetShader(m_VertexShader, nullptr, 0);
m_DeviceContext->PSSetShader(m_PixelShader, nullptr, 0);
m_DeviceContext->PSSetShaderResources(0, 1, &ShaderResource);
m_DeviceContext->PSSetSamplers(0, 1, &m_SamplerLinear);
m_DeviceContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
// Create space for vertices for the dirty rects if the current space isn't large enough
UINT BytesNeeded = sizeof(VERTEX) * NUMVERTICES * DirtyCount;
if (BytesNeeded > m_DirtyVertexBufferAllocSize)
{
if (m_DirtyVertexBufferAlloc)
{
delete [] m_DirtyVertexBufferAlloc;
}
m_DirtyVertexBufferAlloc = new (std::nothrow) BYTE[BytesNeeded];
if (!m_DirtyVertexBufferAlloc)
{
m_DirtyVertexBufferAllocSize = 0;
return ProcessFailure(nullptr, L"Failed to allocate memory for dirty vertex buffer.", L"Error", E_OUTOFMEMORY);
}
m_DirtyVertexBufferAllocSize = BytesNeeded;
}
// Fill them in
VERTEX* DirtyVertex = reinterpret_cast<VERTEX*>(m_DirtyVertexBufferAlloc);
for (UINT i = 0; i < DirtyCount; ++i, DirtyVertex += NUMVERTICES)
{
SetDirtyVert(DirtyVertex, &(DirtyBuffer[i]), OffsetX, OffsetY, DeskDesc, &FullDesc, &ThisDesc);
}
// Create vertex buffer
D3D11_BUFFER_DESC BufferDesc;
RtlZeroMemory(&BufferDesc, sizeof(BufferDesc));
BufferDesc.Usage = D3D11_USAGE_DEFAULT;
BufferDesc.ByteWidth = BytesNeeded;
BufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
BufferDesc.CPUAccessFlags = 0;
D3D11_SUBRESOURCE_DATA InitData;
RtlZeroMemory(&InitData, sizeof(InitData));
InitData.pSysMem = m_DirtyVertexBufferAlloc;
ID3D11Buffer* VertBuf = nullptr;
hr = m_Device->CreateBuffer(&BufferDesc, &InitData, &VertBuf);
if (FAILED(hr))
{
return ProcessFailure(m_Device, L"Failed to create vertex buffer in dirty rect processing", L"Error", hr, SystemTransitionsExpectedErrors);
}
UINT Stride = sizeof(VERTEX);
UINT Offset = 0;
m_DeviceContext->IASetVertexBuffers(0, 1, &VertBuf, &Stride, &Offset);
D3D11_VIEWPORT VP;
VP.Width = static_cast<FLOAT>(FullDesc.Width);
VP.Height = static_cast<FLOAT>(FullDesc.Height);
VP.MinDepth = 0.0f;
VP.MaxDepth = 1.0f;
VP.TopLeftX = 0.0f;
VP.TopLeftY = 0.0f;
m_DeviceContext->RSSetViewports(1, &VP);
m_DeviceContext->Draw(NUMVERTICES * DirtyCount, 0);
VertBuf->Release();
VertBuf = nullptr;
ShaderResource->Release();
ShaderResource = nullptr;
return DUPL_RETURN_SUCCESS;
}
//
// Clean all references
//
void DISPLAYMANAGER::CleanRefs()
{
if (m_DeviceContext)
{
m_DeviceContext->Release();
m_DeviceContext = nullptr;
}
if (m_Device)
{
m_Device->Release();
m_Device = nullptr;
}
if (m_MoveSurf)
{
m_MoveSurf->Release();
m_MoveSurf = nullptr;
}
if (m_VertexShader)
{
m_VertexShader->Release();
m_VertexShader = nullptr;
}
if (m_PixelShader)
{
m_PixelShader->Release();
m_PixelShader = nullptr;
}
if (m_InputLayout)
{
m_InputLayout->Release();
m_InputLayout = nullptr;
}
if (m_SamplerLinear)
{
m_SamplerLinear->Release();
m_SamplerLinear = nullptr;
}
if (m_RTV)
{
m_RTV->Release();
m_RTV = nullptr;
}
}
| {
"pile_set_name": "Github"
} |
---
http_interactions:
- request:
method: get
uri: http://ps.pndsn.com/v2/auth/grant/sub-key/sub-a-mock-key?channel=demo&pnsdk=PubNub-Ruby/4.1.0&r=0&signature=zIGN36bzCJDw8DgovQpIeNzB2L9koD9y7Sep7COTtR4=×tamp=1464192841&ttl=300&uuid=ruby-test-uuid-client-one&w=0
body:
encoding: UTF-8
string: ''
headers:
User-Agent:
- HTTPClient/1.0 (2.8.0, ruby 2.3.0 (2015-12-25))
Accept:
- "*/*"
Date:
- Wed, 25 May 2016 16:14:01 GMT
response:
status:
code: 200
message: OK
headers:
Date:
- Wed, 25 May 2016 16:14:01 GMT
Content-Type:
- text/javascript; charset=UTF-8
Content-Length:
- '200'
Connection:
- keep-alive
Access-Control-Allow-Origin:
- "*"
Access-Control-Allow-Methods:
- GET
Access-Control-Allow-Headers:
- Origin, X-Requested-With, Content-Type, Accept
Cache-Control:
- no-cache, no-store, must-revalidate
body:
encoding: UTF-8
string: '{"message":"Success","payload":{"level":"channel","subscribe_key":"sub-a-mock-key","ttl":1,"channels":{"demo":{"r":0,"w":0,"m":0}}},"service":"Access
Manager","status":200}'
http_version:
recorded_at: Wed, 25 May 2016 16:14:01 GMT
recorded_with: VCR 3.0.1
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2009, Swedish Institute of Computer Science.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the Institute nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* This file is part of the Contiki operating system.
*
*/
/**
* \file
* Header file for the communication power accounting module
* \author
* Adam Dunkels <[email protected]>
*/
/** \addtogroup sys
* @{ */
/**
* \defgroup compower Communication power accounting
* @{
*
* The compower module accumulates power consumption information and
* attributes it to communication activities. Examples of
* communication activities are packet transmission, packet reception,
* and idle listening.
*
*/
#ifndef COMPOWER_H_
#define COMPOWER_H_
/**
* \brief An activity record that contains power consumption information for a specific communication activity.
*
* This is a structure that holds power information about
* a communication activity. It is an opaque structure
* with no user-visible elements.
*/
struct compower_activity {
uint32_t listen, transmit;
};
/**
* \brief The default idle communication activity.
*
* This is the idle communication activity, to which all
* energy that is not possible to attribute to individual
* packets, is attributed. Examples include idle listening
* for incoming packets and MAC-level beacon
* transmissions.
*/
extern struct compower_activity compower_idle_activity;
/**
* \brief Initialize the communication power accounting module.
*
* This function initializes the communication power
* accounting module. The function is called by the system
* during boot-up.
*/
void compower_init(void);
/**
* \brief Accumulate power contumption for a communication activity
* \param a A pointer to an activity structure.
*
* This function accumulates power consumption information
* for a communication activity. The function typically is
* called by a power-saving MAC protocol when the radio is
* switched off, or when a packet is received or
* transmitted.
*
*/
void compower_accumulate(struct compower_activity *a);
/**
* \brief Clear power consumption information for a communication activity
* \param a A pointer to an activity structure.
*
* This function clears any power contumption information
* that has previously been accumulated in an activity
* structure.
*
*/
void compower_clear(struct compower_activity *a);
/**
* \brief Convert power contumption information to packet attributes
* \param a A pointer to an activity structure.
*
* This function converts accumulated power consumption
* information for a communication activity to packet
* attributes (see \ref packetbufattr "packet attributes").
*/
void compower_attrconv(struct compower_activity *a);
/**
* \brief Accumulate power contumption for a communication activity based on energy data in packet attributes
* \param a A pointer to an activity structure.
*
* This function accumulates power consumption information
* for a communication activity from packet attributes
* (see \ref packetbufattr "packet attributes").
*/
void compower_accumulate_attrs(struct compower_activity *a);
#endif /* COMPOWER_H_ */
/** @} */
/** @} */
| {
"pile_set_name": "Github"
} |
mat3 xll_constructMat3( mat4 m) {
return mat3( vec3( m[0]), vec3( m[1]), vec3( m[2]));
}
struct v2f_vertex_lit {
vec2 uv;
vec4 diff;
vec4 spec;
};
struct v2f_img {
vec4 pos;
vec2 uv;
};
struct appdata_img {
vec4 vertex;
vec2 texcoord;
};
struct v2f {
vec4 pos;
vec2 uv;
vec4 nz;
};
struct appdata {
vec4 vertex;
vec3 normal;
vec4 color;
vec4 texcoord;
};
uniform vec4 _ProjectionParams;
uniform vec4 _Scale;
uniform float _SquashAmount;
uniform vec4 _SquashPlaneNormal;
uniform mat4 _TerrainEngineBendTree;
vec4 Squash( in vec4 pos );
void TerrainAnimateTree( inout vec4 pos, in float alpha );
v2f vert( in appdata v );
vec4 Squash( in vec4 pos ) {
vec3 projectedVertex;
vec3 planePoint;
vec3 planeNormal;
projectedVertex = pos.xyz ;
planePoint = vec3( 0.000000, _SquashPlaneNormal.w , 0.000000);
planeNormal = _SquashPlaneNormal.xyz ;
projectedVertex += (dot( planeNormal, (planePoint - vec3( pos))) * planeNormal);
pos = vec4( mix( projectedVertex, pos.xyz , vec3( _SquashAmount)), 1.00000);
return pos;
}
void TerrainAnimateTree( inout vec4 pos, in float alpha ) {
vec3 bent;
pos.xyz *= _Scale.xyz ;
bent = ( _TerrainEngineBendTree * vec4( pos.xyz , 0.000000) ).xyz ;
pos.xyz = mix( pos.xyz , bent, vec3( alpha));
pos = Squash( pos);
}
v2f vert( in appdata v ) {
v2f o;
TerrainAnimateTree( v.vertex, v.color.w );
o.pos = ( gl_ModelViewProjectionMatrix * v.vertex );
o.uv = v.texcoord.xy ;
o.nz.xyz = ( xll_constructMat3( gl_ModelViewMatrixInverseTranspose) * v.normal );
o.nz.w = ( -(( gl_ModelViewMatrix * v.vertex ).z * _ProjectionParams.w ) );
return o;
}
void main() {
v2f xl_retval;
appdata xlt_v;
xlt_v.vertex = vec4( gl_Vertex);
xlt_v.normal = vec3( gl_Normal);
xlt_v.color = vec4( gl_Color);
xlt_v.texcoord = vec4( gl_MultiTexCoord0);
xl_retval = vert( xlt_v);
gl_Position = vec4( xl_retval.pos);
gl_TexCoord[0] = vec4( xl_retval.uv, 0.0, 0.0);
gl_TexCoord[1] = vec4( xl_retval.nz);
}
| {
"pile_set_name": "Github"
} |
//
// Copyright (c) 2009-2011 Artyom Beilis (Tonkikh)
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
#ifndef BOOST_LOCALE_DATE_TIME_HPP_INCLUDED
#define BOOST_LOCALE_DATE_TIME_HPP_INCLUDED
#include <boost/locale/config.hpp>
#ifdef BOOST_MSVC
# pragma warning(push)
# pragma warning(disable : 4275 4251 4231 4660)
#endif
#include <boost/locale/hold_ptr.hpp>
#include <boost/locale/date_time_facet.hpp>
#include <boost/locale/formatting.hpp>
#include <boost/locale/time_zone.hpp>
#include <locale>
#include <vector>
#include <stdexcept>
namespace boost {
namespace locale {
///
/// \defgroup date_time Date, Time, Timezone and Calendar manipulations
///
/// This module provides various calendar, timezone and date time services
///
/// @{
///
/// \brief This error is thrown in case of invalid state that occurred
///
class BOOST_SYMBOL_VISIBLE date_time_error : public std::runtime_error {
public:
///
/// Constructor of date_time_error class
///
date_time_error(std::string const &e) : std::runtime_error(e) {}
};
///
/// \brief This class represents a pair of period_type and the integer
/// values that describes its amount. For example 3 days or 4 years.
///
/// Usually obtained as product of period_type and integer or
/// my calling a representative functions
/// For example day()*3 == date_time_period(day(),3) == day(3)
///
struct date_time_period
{
period::period_type type; ///< The type of period, i.e. era, year, day etc.
int value; ///< The value the actual number of \a periods
///
/// Operator + returns copy of itself
///
date_time_period operator+() const { return *this; }
///
/// Operator -, switches the sign of period
///
date_time_period operator-() const { return date_time_period(type,-value); }
///
/// Constructor that creates date_time_period from period_type \a f and a value \a v -- default 1.
///
date_time_period(period::period_type f=period::period_type(),int v=1) : type(f), value(v) {}
};
namespace period {
///
/// Get period_type for: special invalid value, should not be used directly
///
inline period_type invalid(){ return period_type(marks::invalid); }
///
/// Get period_type for: Era i.e. AC, BC in Gregorian and Julian calendar, range [0,1]
///
inline period_type era(){ return period_type(marks::era); }
///
/// Get period_type for: Year, it is calendar specific, for example 2011 in Gregorian calendar.
///
inline period_type year(){ return period_type(marks::year); }
///
/// Get period_type for: Extended year for Gregorian/Julian calendars, where 1 BC == 0, 2 BC == -1.
///
inline period_type extended_year(){ return period_type(marks::extended_year); }
///
/// Get period_type for: The month of year, calendar specific, in Gregorian [0..11]
///
inline period_type month(){ return period_type(marks::month); }
///
/// Get period_type for: The day of month, calendar specific, in Gregorian [1..31]
///
inline period_type day(){ return period_type(marks::day); }
///
/// Get period_type for: The number of day in year, starting from 1, in Gregorian [1..366]
///
inline period_type day_of_year(){ return period_type(marks::day_of_year); }
///
/// Get period_type for: Day of week, Sunday=1, Monday=2,..., Saturday=7.
///
/// Note that updating this value respects local day of week, so for example,
/// If first day of week is Monday and the current day is Tuesday then setting
/// the value to Sunday (1) would forward the date by 5 days forward and not backward
/// by two days as it could be expected if the numbers were taken as is.
///
inline period_type day_of_week(){ return period_type(marks::day_of_week); }
///
/// Get period_type for: Original number of the day of the week in month. For example 1st Sunday,
/// 2nd Sunday, etc. in Gregorian [1..5]
///
inline period_type day_of_week_in_month(){ return period_type(marks::day_of_week_in_month); }
///
/// Get period_type for: Local day of week, for example in France Monday is 1, in US Sunday is 1, [1..7]
///
inline period_type day_of_week_local(){ return period_type(marks::day_of_week_local); }
///
/// Get period_type for: 24 clock hour [0..23]
///
inline period_type hour(){ return period_type(marks::hour); }
///
/// Get period_type for: 12 clock hour [0..11]
///
inline period_type hour_12(){ return period_type(marks::hour_12); }
///
/// Get period_type for: am or pm marker [0..1]
///
inline period_type am_pm(){ return period_type(marks::am_pm); }
///
/// Get period_type for: minute [0..59]
///
inline period_type minute(){ return period_type(marks::minute); }
///
/// Get period_type for: second [0..59]
///
inline period_type second(){ return period_type(marks::second); }
///
/// Get period_type for: The week number in the year
///
inline period_type week_of_year(){ return period_type(marks::week_of_year); }
///
/// Get period_type for: The week number within current month
///
inline period_type week_of_month(){ return period_type(marks::week_of_month); }
///
/// Get period_type for: First day of week, constant, for example Sunday in US = 1, Monday in France = 2
///
inline period_type first_day_of_week(){ return period_type(marks::first_day_of_week); }
///
/// Get date_time_period for: Era i.e. AC, BC in Gregorian and Julian calendar, range [0,1]
///
inline date_time_period era(int v) { return date_time_period(era(),v); }
///
/// Get date_time_period for: Year, it is calendar specific, for example 2011 in Gregorian calendar.
///
inline date_time_period year(int v) { return date_time_period(year(),v); }
///
/// Get date_time_period for: Extended year for Gregorian/Julian calendars, where 1 BC == 0, 2 BC == -1.
///
inline date_time_period extended_year(int v) { return date_time_period(extended_year(),v); }
///
/// Get date_time_period for: The month of year, calendar specific, in Gregorian [0..11]
///
inline date_time_period month(int v) { return date_time_period(month(),v); }
///
/// Get date_time_period for: The day of month, calendar specific, in Gregorian [1..31]
///
inline date_time_period day(int v) { return date_time_period(day(),v); }
///
/// Get date_time_period for: The number of day in year, starting from 1, in Gregorian [1..366]
///
inline date_time_period day_of_year(int v) { return date_time_period(day_of_year(),v); }
///
/// Get date_time_period for: Day of week, Sunday=1, Monday=2,..., Saturday=7.
///
/// Note that updating this value respects local day of week, so for example,
/// If first day of week is Monday and the current day is Tuesday then setting
/// the value to Sunday (1) would forward the date by 5 days forward and not backward
/// by two days as it could be expected if the numbers were taken as is.
///
inline date_time_period day_of_week(int v) { return date_time_period(day_of_week(),v); }
///
/// Get date_time_period for: Original number of the day of the week in month. For example 1st Sunday,
/// 2nd Sunday, etc. in Gregorian [1..5]
///
inline date_time_period day_of_week_in_month(int v) { return date_time_period(day_of_week_in_month(),v); }
///
/// Get date_time_period for: Local day of week, for example in France Monday is 1, in US Sunday is 1, [1..7]
///
inline date_time_period day_of_week_local(int v) { return date_time_period(day_of_week_local(),v); }
///
/// Get date_time_period for: 24 clock hour [0..23]
///
inline date_time_period hour(int v) { return date_time_period(hour(),v); }
///
/// Get date_time_period for: 12 clock hour [0..11]
///
inline date_time_period hour_12(int v) { return date_time_period(hour_12(),v); }
///
/// Get date_time_period for: am or pm marker [0..1]
///
inline date_time_period am_pm(int v) { return date_time_period(am_pm(),v); }
///
/// Get date_time_period for: minute [0..59]
///
inline date_time_period minute(int v) { return date_time_period(minute(),v); }
///
/// Get date_time_period for: second [0..59]
///
inline date_time_period second(int v) { return date_time_period(second(),v); }
///
/// Get date_time_period for: The week number in the year
///
inline date_time_period week_of_year(int v) { return date_time_period(week_of_year(),v); }
///
/// Get date_time_period for: The week number within current month
///
inline date_time_period week_of_month(int v) { return date_time_period(week_of_month(),v); }
///
/// Get date_time_period for: First day of week, constant, for example Sunday in US = 1, Monday in France = 2
///
inline date_time_period first_day_of_week(int v) { return date_time_period(first_day_of_week(),v); }
///
/// Get predefined constant for January
///
inline date_time_period january() { return date_time_period(month(),0); }
///
/// Get predefined constant for February
///
inline date_time_period february() { return date_time_period(month(),1); }
///
/// Get predefined constant for March
///
inline date_time_period march() { return date_time_period(month(),2); }
///
/// Get predefined constant for April
///
inline date_time_period april() { return date_time_period(month(),3); }
///
/// Get predefined constant for May
///
inline date_time_period may() { return date_time_period(month(),4); }
///
/// Get predefined constant for June
///
inline date_time_period june() { return date_time_period(month(),5); }
///
/// Get predefined constant for July
///
inline date_time_period july() { return date_time_period(month(),6); }
///
/// Get predefined constant for August
///
inline date_time_period august() { return date_time_period(month(),7); }
///
/// Get predefined constant for September
///
inline date_time_period september() { return date_time_period(month(),8); }
///
/// Get predefined constant for October
///
inline date_time_period october() { return date_time_period(month(),9); }
///
/// Get predefined constant for November
///
inline date_time_period november() { return date_time_period(month(),10); }
///
/// Get predefined constant for December
///
inline date_time_period december() { return date_time_period(month(),11); }
///
/// Get predefined constant for Sunday
///
inline date_time_period sunday() { return date_time_period(day_of_week(),1); }
///
/// Get predefined constant for Monday
///
inline date_time_period monday() { return date_time_period(day_of_week(),2); }
///
/// Get predefined constant for Tuesday
///
inline date_time_period tuesday() { return date_time_period(day_of_week(),3); }
///
/// Get predefined constant for Wednesday
///
inline date_time_period wednesday() { return date_time_period(day_of_week(),4); }
///
/// Get predefined constant for Thursday
///
inline date_time_period thursday() { return date_time_period(day_of_week(),5); }
///
/// Get predefined constant for Friday
///
inline date_time_period friday() { return date_time_period(day_of_week(),6); }
///
/// Get predefined constant for Saturday
///
inline date_time_period saturday() { return date_time_period(day_of_week(),7); }
///
/// Get predefined constant for AM (Ante Meridiem)
///
inline date_time_period am() { return date_time_period(am_pm(),0); }
///
/// Get predefined constant for PM (Post Meridiem)
///
inline date_time_period pm() { return date_time_period(am_pm(),1); }
///
/// convert period_type to date_time_period(f,1)
///
inline date_time_period operator+(period::period_type f)
{
return date_time_period(f);
}
///
/// convert period_type to date_time_period(f,-1)
///
inline date_time_period operator-(period::period_type f)
{
return date_time_period(f,-1);
}
///
/// Create date_time_period of type \a f with value \a v.
///
template<typename T>
date_time_period operator*(period::period_type f,T v)
{
return date_time_period(f,v);
}
///
/// Create date_time_period of type \a f with value \a v.
///
template<typename T>
date_time_period operator*(T v,period::period_type f)
{
return date_time_period(f,v);
}
///
/// Create date_time_period of type \a f with value \a v.
///
template<typename T>
date_time_period operator*(T v,date_time_period f)
{
return date_time_period(f.type,f.value*v);
}
///
/// Create date_time_period of type \a f with value \a v.
///
template<typename T>
date_time_period operator*(date_time_period f,T v)
{
return date_time_period(f.type,f.value*v);
}
} // period
///
/// \brief this class that represents a set of periods,
///
/// It is generally created by operations on periods:
/// 1995*year + 3*month + 1*day. Note: operations are not commutative.
///
class date_time_period_set {
public:
///
/// Default constructor - empty set
///
date_time_period_set()
{
}
///
/// Create a set of single period with value 1
///
date_time_period_set(period::period_type f)
{
basic_[0]=date_time_period(f);
}
///
/// Create a set of single period \a fl
///
date_time_period_set(date_time_period const &fl)
{
basic_[0]=fl;
}
///
/// Append date_time_period \a f to the set
///
void add(date_time_period f)
{
size_t n=size();
if(n < 4)
basic_[n]=f;
else
periods_.push_back(f);
}
///
/// Get number if items in list
///
size_t size() const
{
if(basic_[0].type == period::period_type())
return 0;
if(basic_[1].type == period::period_type())
return 1;
if(basic_[2].type == period::period_type())
return 2;
if(basic_[3].type == period::period_type())
return 3;
return 4+periods_.size();
}
///
/// Get item at position \a n the set, n should be in range [0,size)
///
date_time_period const &operator[](size_t n) const
{
if(n >= size())
throw std::out_of_range("Invalid index to date_time_period");
if(n < 4)
return basic_[n];
else
return periods_[n-4];
}
private:
date_time_period basic_[4];
std::vector<date_time_period> periods_;
};
///
/// Append two periods sets. Note this operator is not commutative
///
inline date_time_period_set operator+(date_time_period_set const &a,date_time_period_set const &b)
{
date_time_period_set s(a);
for(unsigned i=0;i<b.size();i++)
s.add(b[i]);
return s;
}
///
/// Append two period sets when all periods of set \b change their sign
///
inline date_time_period_set operator-(date_time_period_set const &a,date_time_period_set const &b)
{
date_time_period_set s(a);
for(unsigned i=0;i<b.size();i++)
s.add(-b[i]);
return s;
}
///
/// \brief this class provides an access to general calendar information.
///
/// This information is not connected to specific date but generic to locale, and timezone.
/// It is used in obtaining general information about calendar and is essential for creation of
/// date_time objects.
///
class BOOST_LOCALE_DECL calendar {
public:
///
/// Create calendar taking locale and timezone information from ios_base instance.
///
/// \note throws std::bad_cast if ios does not have a locale with installed \ref calendar_facet
/// facet installed
///
calendar(std::ios_base &ios);
///
/// Create calendar with locale \a l and time_zone \a zone
///
/// \note throws std::bad_cast if loc does not have \ref calendar_facet facet installed
///
calendar(std::locale const &l,std::string const &zone);
///
/// Create calendar with locale \a l and default timezone
///
/// \note throws std::bad_cast if loc does not have \ref calendar_facet facet installed
///
calendar(std::locale const &l);
///
/// Create calendar with default locale and timezone \a zone
///
/// \note throws std::bad_cast if global locale does not have \ref calendar_facet facet installed
///
calendar(std::string const &zone);
///
/// Create calendar with default locale and timezone
///
/// \note throws std::bad_cast if global locale does not have \ref calendar_facet facet installed
///
calendar();
~calendar();
///
/// copy calendar
///
calendar(calendar const &other);
///
/// assign calendar
///
calendar const &operator=(calendar const &other);
///
/// Get minimum value for period f, For example for period::day it is 1.
///
int minimum(period::period_type f) const;
///
/// Get greatest possible minimum value for period f, For example for period::day it is 1, but may be different for other calendars.
///
int greatest_minimum(period::period_type f) const;
///
/// Get maximum value for period f, For example for Gregorian calendar's maximum period::day it is 31.
///
int maximum(period::period_type f) const;
///
/// Get least maximum value for period f, For example for Gregorian calendar's maximum period::day it is 28.
///
int least_maximum(period::period_type f) const;
///
/// Get first day of week for specific calendar, for example for US it is 1 - Sunday for France it is 2 - Monday
int first_day_of_week() const;
///
/// get calendar's locale
///
std::locale get_locale() const;
///
/// get calendar's time zone
///
std::string get_time_zone() const;
///
/// Check if the calendar is Gregorian
///
bool is_gregorian() const;
///
/// Compare calendars for equivalence: i.e. calendar types, time zones etc.
///
bool operator==(calendar const &other) const;
///
/// Opposite of ==
///
bool operator!=(calendar const &other) const;
private:
friend class date_time;
std::locale locale_;
std::string tz_;
hold_ptr<abstract_calendar> impl_;
};
///
/// \brief this class represents a date time and allows to perform various operation according to the
/// locale settings.
///
/// This class allows to manipulate various aspects of dates and times easily using arithmetic operations with
/// periods.
///
/// General arithmetic functions:
///
/// - date_time + date_time_period_set = date_time: move time point forward by specific periods like date_time + month;
/// - date_time - date_time_period_set = date_time: move time point backward by specific periods like date_time - month;
/// - date_time << date_time_period_set = date_time: roll time point forward by specific periods with rolling to begin if overflows: like "2010-01-31" << 2* day == "2010-01-02" instead of "2010-02-02"
/// - date_time >> date_time_period_set = date_time: roll time point backward by specific periods with rolling to end if overflows: like "2010-01-02" >> 2* day == "2010-01-31" instead of "2009-12-30"
/// - date_time / period_type = int - current period value: like "2010-12-21" / month == 12. "2010-12-21" / year = 2010
/// - (date_time - date_time) / period_type = int: distance between dates in period_type. Like ("2010-12-01" - "2008-12-01") / month = 24.
///
/// You can also assign specific periods using assignment operator like:
/// some_time = year * 1995 that sets the year to 1995.
///
///
class BOOST_LOCALE_DECL date_time {
public:
///
/// Dafault constructor, uses default calendar initialized date_time object to current time.
///
/// \note throws std::bad_cast if the global locale does not have \ref calendar_facet facet installed
///
date_time();
///
/// copy date_time
///
date_time(date_time const &other);
///
/// copy date_time and change some fields according to the \a set
///
date_time(date_time const &other,date_time_period_set const &set);
///
/// assign the date_time
///
date_time const &operator=(date_time const &other);
~date_time();
///
/// Create a date_time object using POSIX time \a time and default calendar
///
/// \note throws std::bad_cast if the global locale does not have \ref calendar_facet facet installed
///
date_time(double time);
///
/// Create a date_time object using POSIX time \a time and calendar \a cal
///
date_time(double time,calendar const &cal);
///
/// Create a date_time object using calendar \a cal and initializes it to current time.
///
date_time(calendar const &cal);
///
/// Create a date_time object using default calendar and define values given in \a set
///
/// \note throws std::bad_cast if the global locale does not have \ref calendar_facet facet installed
///
date_time(date_time_period_set const &set);
///
/// Create a date_time object using calendar \a cal and define values given in \a set
///
date_time(date_time_period_set const &set,calendar const &cal);
///
/// assign values to various periods in set \a f
///
date_time const &operator=(date_time_period_set const &f);
///
/// set specific period \a f value to \a v
///
void set(period::period_type f,int v);
///
/// get specific period \a f value
///
int get(period::period_type f) const;
///
/// syntactic sugar for get(f)
///
int operator/(period::period_type f) const
{
return get(f);
}
///
/// add single period f to the current date_time
///
date_time operator+(period::period_type f) const
{
return *this+date_time_period(f);
}
///
/// subtract single period f from the current date_time
///
date_time operator-(period::period_type f) const
{
return *this-date_time_period(f);
}
///
/// add single period f to the current date_time
///
date_time const &operator+=(period::period_type f)
{
return *this+=date_time_period(f);
}
///
/// subtract single period f from the current date_time
///
date_time const &operator-=(period::period_type f)
{
return *this-=date_time_period(f);
}
///
/// roll forward a date by single period f.
///
date_time operator<<(period::period_type f) const
{
return *this<<date_time_period(f);
}
///
/// roll backward a date by single period f.
///
date_time operator>>(period::period_type f) const
{
return *this>>date_time_period(f);
}
///
/// roll forward a date by single period f.
///
date_time const &operator<<=(period::period_type f)
{
return *this<<=date_time_period(f);
}
///
/// roll backward a date by single period f.
///
date_time const &operator>>=(period::period_type f)
{
return *this>>=date_time_period(f);
}
///
/// add date_time_period to the current date_time
///
date_time operator+(date_time_period const &v) const;
///
/// subtract date_time_period from the current date_time
///
date_time operator-(date_time_period const &v) const;
///
/// add date_time_period to the current date_time
///
date_time const &operator+=(date_time_period const &v);
///
/// subtract date_time_period from the current date_time
///
date_time const &operator-=(date_time_period const &v);
///
/// roll current date_time forward by date_time_period v
///
date_time operator<<(date_time_period const &v) const;
///
/// roll current date_time backward by date_time_period v
///
date_time operator>>(date_time_period const &v) const ;
///
/// roll current date_time forward by date_time_period v
///
date_time const &operator<<=(date_time_period const &v);
///
/// roll current date_time backward by date_time_period v
///
date_time const &operator>>=(date_time_period const &v);
///
/// add date_time_period_set v to the current date_time
///
date_time operator+(date_time_period_set const &v) const;
///
/// subtract date_time_period_set v from the current date_time
///
date_time operator-(date_time_period_set const &v) const;
///
/// add date_time_period_set v to the current date_time
///
date_time const &operator+=(date_time_period_set const &v);
///
/// subtract date_time_period_set v from the current date_time
///
date_time const &operator-=(date_time_period_set const &v);
///
/// roll current date_time forward by date_time_period_set v
///
date_time operator<<(date_time_period_set const &v) const;
///
/// roll current date_time backward by date_time_period_set v
///
date_time operator>>(date_time_period_set const &v) const ;
///
/// roll current date_time forward by date_time_period_set v
///
date_time const &operator<<=(date_time_period_set const &v);
///
/// roll current date_time backward by date_time_period_set v
///
date_time const &operator>>=(date_time_period_set const &v);
///
/// Get POSIX time
///
/// The POSIX time is number of seconds since January 1st, 1970 00:00 UTC, ignoring leap seconds.
///
double time() const;
///
/// set POSIX time
///
/// The POSIX time is number of seconds since January 1st, 1970 00:00 UTC, ignoring leap seconds.
/// This time can be fetched from Operating system clock using C function time, gettimeofday and others.
///
void time(double v);
///
/// compare date_time in the timeline (ignores difference in calendar, timezone etc)
///
bool operator==(date_time const &other) const;
///
/// compare date_time in the timeline (ignores difference in calendar, timezone etc)
///
bool operator!=(date_time const &other) const;
///
/// compare date_time in the timeline (ignores difference in calendar, timezone etc)
///
bool operator<(date_time const &other) const;
///
/// compare date_time in the timeline (ignores difference in calendar, timezone etc)
///
bool operator>(date_time const &other) const;
///
/// compare date_time in the timeline (ignores difference in calendar, timezone etc)
///
bool operator<=(date_time const &other) const;
///
/// compare date_time in the timeline (ignores difference in calendar, timezone etc)
///
bool operator>=(date_time const &other) const;
///
/// swaps two dates - efficient, does not throw
///
void swap(date_time &other);
///
/// calculate the distance from this date_time to \a other in terms of perios \a f
///
int difference(date_time const &other,period::period_type f) const;
///
/// Get minimal possible value for *this time point for a period \a f.
///
int minimum(period::period_type f) const;
///
/// Get minimal possible value for *this time point for a period \a f. For example
/// in February maximum(day) may be 28 or 29, in January maximum(day)==31
///
int maximum(period::period_type f) const;
///
/// Check if *this time point is in daylight saving time
///
bool is_in_daylight_saving_time() const;
private:
hold_ptr<abstract_calendar> impl_;
};
///
/// Writes date_time \a t to output stream \a out.
///
/// This function uses locale, calendar and time zone of the target stream \a in.
///
/// For example:
/// \code
/// date_time now(time(0),hebrew_calendar)
/// cout << "Year: " << period::year(now) <<" Full Date:"<< now;
/// \endcode
///
/// The output may be Year:5770 Full Date:Jan 1, 2010
///
template<typename CharType>
std::basic_ostream<CharType> &operator<<(std::basic_ostream<CharType> &out,date_time const &t)
{
double time_point = t.time();
uint64_t display_flags = ios_info::get(out).display_flags();
if (
display_flags == flags::date
|| display_flags == flags::time
|| display_flags == flags::datetime
|| display_flags == flags::strftime
)
{
out << time_point;
}
else {
ios_info::get(out).display_flags(flags::datetime);
out << time_point;
ios_info::get(out).display_flags(display_flags);
}
return out;
}
///
/// Reads date_time \a t from output stream \a in
///
/// This function uses locale, calendar and time zone of the source stream \a in.
///
template<typename CharType>
std::basic_istream<CharType> &operator>>(std::basic_istream<CharType> &in,date_time &t)
{
double v;
uint64_t display_flags = ios_info::get(in).display_flags();
if (
display_flags == flags::date
|| display_flags == flags::time
|| display_flags == flags::datetime
|| display_flags == flags::strftime
)
{
in >> v;
}
else {
ios_info::get(in).display_flags(flags::datetime);
in >> v;
ios_info::get(in).display_flags(display_flags);
}
if(!in.fail())
t.time(v);
return in;
}
///
/// \brief This class represents a period: a pair of two date_time objects.
///
/// It is generally used as syntactic sugar to calculate difference between two dates.
///
/// Note: it stores references to the original objects, so it is not recommended to be used
/// outside of the equation you calculate the difference in.
///
class date_time_duration {
public:
///
/// Create an object were \a first represents earlier point on time line and \a second is later
/// point.
///
date_time_duration(date_time const &first,date_time const &second) :
s_(first),
e_(second)
{
}
///
/// find a difference in terms of period_type \a f
///
int get(period::period_type f) const
{
return start().difference(end(),f);
}
///
/// Syntactic sugar for get(f)
///
int operator / (period::period_type f) const
{
return start().difference(end(),f);
}
///
/// Get starting point
///
date_time const &start() const { return s_; }
///
/// Get ending point
///
date_time const &end() const { return e_; }
private:
date_time const &s_;
date_time const &e_;
};
///
/// Calculates the difference between two dates, the left operand is a later point on time line.
/// Returns date_time_duration object.
///
inline date_time_duration operator-(date_time const &later,date_time const &earlier)
{
return date_time_duration(earlier,later);
}
namespace period {
///
/// Extract from date_time numerical value of Era i.e. AC, BC in Gregorian and Julian calendar, range [0,1]
///
inline int era(date_time const &dt) { return dt.get(era()); }
///
/// Extract from date_time numerical value of Year, it is calendar specific, for example 2011 in Gregorian calendar.
///
inline int year(date_time const &dt) { return dt.get(year()); }
///
/// Extract from date_time numerical value of Extended year for Gregorian/Julian calendars, where 1 BC == 0, 2 BC == -1.
///
inline int extended_year(date_time const &dt) { return dt.get(extended_year()); }
///
/// Extract from date_time numerical value of The month of year, calendar specific, in Gregorian [0..11]
///
inline int month(date_time const &dt) { return dt.get(month()); }
///
/// Extract from date_time numerical value of The day of month, calendar specific, in Gregorian [1..31]
///
inline int day(date_time const &dt) { return dt.get(day()); }
///
/// Extract from date_time numerical value of The number of day in year, starting from 1, in Gregorian [1..366]
///
inline int day_of_year(date_time const &dt) { return dt.get(day_of_year()); }
///
/// Extract from date_time numerical value of Day of week, Sunday=1, Monday=2,..., Saturday=7.
///
/// Note that updating this value respects local day of week, so for example,
/// If first day of week is Monday and the current day is Tuesday then setting
/// the value to Sunday (1) would forward the date by 5 days forward and not backward
/// by two days as it could be expected if the numbers were taken as is.
///
inline int day_of_week(date_time const &dt) { return dt.get(day_of_week()); }
///
/// Extract from date_time numerical value of Original number of the day of the week in month. For example 1st Sunday,
/// 2nd Sunday, etc. in Gregorian [1..5]
///
inline int day_of_week_in_month(date_time const &dt) { return dt.get(day_of_week_in_month()); }
///
/// Extract from date_time numerical value of Local day of week, for example in France Monday is 1, in US Sunday is 1, [1..7]
///
inline int day_of_week_local(date_time const &dt) { return dt.get(day_of_week_local()); }
///
/// Extract from date_time numerical value of 24 clock hour [0..23]
///
inline int hour(date_time const &dt) { return dt.get(hour()); }
///
/// Extract from date_time numerical value of 12 clock hour [0..11]
///
inline int hour_12(date_time const &dt) { return dt.get(hour_12()); }
///
/// Extract from date_time numerical value of am or pm marker [0..1]
///
inline int am_pm(date_time const &dt) { return dt.get(am_pm()); }
///
/// Extract from date_time numerical value of minute [0..59]
///
inline int minute(date_time const &dt) { return dt.get(minute()); }
///
/// Extract from date_time numerical value of second [0..59]
///
inline int second(date_time const &dt) { return dt.get(second()); }
///
/// Extract from date_time numerical value of The week number in the year
///
inline int week_of_year(date_time const &dt) { return dt.get(week_of_year()); }
///
/// Extract from date_time numerical value of The week number within current month
///
inline int week_of_month(date_time const &dt) { return dt.get(week_of_month()); }
///
/// Extract from date_time numerical value of First day of week, constant, for example Sunday in US = 1, Monday in France = 2
///
inline int first_day_of_week(date_time const &dt) { return dt.get(first_day_of_week()); }
///
/// Extract from date_time_duration numerical value of duration in Era i.e. AC, BC in Gregorian and Julian calendar, range [0,1]
///
inline int era(date_time_duration const &dt) { return dt.get(era()); }
///
/// Extract from date_time_duration numerical value of duration in years
///
inline int year(date_time_duration const &dt) { return dt.get(year()); }
///
/// Extract from date_time_duration numerical value of duration in extended years (for Gregorian/Julian calendars, where 1 BC == 0, 2 BC == -1).
///
inline int extended_year(date_time_duration const &dt) { return dt.get(extended_year()); }
///
/// Extract from date_time_duration numerical value of duration in months
///
inline int month(date_time_duration const &dt) { return dt.get(month()); }
///
/// Extract from date_time_duration numerical value of duration in days of month
///
inline int day(date_time_duration const &dt) { return dt.get(day()); }
///
/// Extract from date_time_duration numerical value of duration in days of year
///
inline int day_of_year(date_time_duration const &dt) { return dt.get(day_of_year()); }
///
/// Extract from date_time_duration numerical value of duration in days of week
///
inline int day_of_week(date_time_duration const &dt) { return dt.get(day_of_week()); }
///
/// Extract from date_time_duration numerical value of duration in original number of the day of the week in month
///
inline int day_of_week_in_month(date_time_duration const &dt) { return dt.get(day_of_week_in_month()); }
///
/// Extract from date_time_duration numerical value of duration in local day of week
///
inline int day_of_week_local(date_time_duration const &dt) { return dt.get(day_of_week_local()); }
///
/// Extract from date_time_duration numerical value of duration in hours
///
inline int hour(date_time_duration const &dt) { return dt.get(hour()); }
///
/// Extract from date_time_duration numerical value of duration in 12 clock hours
///
inline int hour_12(date_time_duration const &dt) { return dt.get(hour_12()); }
///
/// Extract from date_time_duration numerical value of duration in am or pm markers
///
inline int am_pm(date_time_duration const &dt) { return dt.get(am_pm()); }
///
/// Extract from date_time_duration numerical value of duration in minutes
///
inline int minute(date_time_duration const &dt) { return dt.get(minute()); }
///
/// Extract from date_time_duration numerical value of duration in seconds
///
inline int second(date_time_duration const &dt) { return dt.get(second()); }
///
/// Extract from date_time_duration numerical value of duration in the week number in the year
///
inline int week_of_year(date_time_duration const &dt) { return dt.get(week_of_year()); }
///
/// Extract from date_time_duration numerical value of duration in The week number within current month
///
inline int week_of_month(date_time_duration const &dt) { return dt.get(week_of_month()); }
///
/// Extract from date_time_duration numerical value of duration in the first day of week
///
inline int first_day_of_week(date_time_duration const &dt) { return dt.get(first_day_of_week()); }
}
/// @}
} // locale
} // boost
#ifdef BOOST_MSVC
#pragma warning(pop)
#endif
#endif
///
/// \example calendar.cpp
///
/// Example of using date_time functions for generating calendar for current year.
///
// vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
| {
"pile_set_name": "Github"
} |
// underlying_type.hpp ---------------------------------------------------------//
// Copyright Beman Dawes, 2009
// Copyright (C) 2011-2012 Vicente J. Botet Escriba
// Copyright (C) 2012 Anthony Williams
// Copyright (C) 2014 Andrey Semashev
// Distributed under the Boost Software License, Version 1.0.
// See http://www.boost.org/LICENSE_1_0.txt
#ifndef BOOST_CORE_UNDERLYING_TYPE_HPP
#define BOOST_CORE_UNDERLYING_TYPE_HPP
#include <boost/config.hpp>
// GCC 4.7 and later seem to provide std::underlying_type
#if !defined(BOOST_NO_CXX11_HDR_TYPE_TRAITS) || (defined(BOOST_GCC) && BOOST_GCC >= 40700 && defined(__GXX_EXPERIMENTAL_CXX0X__))
#include <type_traits>
#define BOOST_DETAIL_HAS_STD_UNDERLYING_TYPE
#endif
#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
namespace boost {
namespace detail {
template< typename EnumType, typename Void = void >
struct underlying_type_impl;
#if defined(BOOST_NO_CXX11_SCOPED_ENUMS)
// Support for boost/core/scoped_enum.hpp
template< typename EnumType >
struct underlying_type_impl< EnumType, typename EnumType::is_boost_scoped_enum_tag >
{
/**
* The member typedef type names the underlying type of EnumType. It is EnumType::underlying_type when the EnumType is an emulated scoped enum,
*/
typedef typename EnumType::underlying_type type;
};
#endif
#if defined(BOOST_DETAIL_HAS_STD_UNDERLYING_TYPE)
template< typename EnumType, typename Void >
struct underlying_type_impl
{
typedef typename std::underlying_type< EnumType >::type type;
};
#endif
} // namespace detail
#if !defined(BOOST_NO_CXX11_SCOPED_ENUMS) && !defined(BOOST_DETAIL_HAS_STD_UNDERLYING_TYPE)
#define BOOST_NO_UNDERLYING_TYPE
#endif
/**
* Meta-function to get the underlying type of a scoped enum.
*
* Requires EnumType must be an enum type or the emulation of a scoped enum.
* If BOOST_NO_UNDERLYING_TYPE is defined, the implementation will not be able
* to deduce the underlying type of enums. The user is expected to specialize
* this trait in this case.
*/
template< typename EnumType >
struct underlying_type :
public detail::underlying_type_impl< EnumType >
{
};
} // namespace boost
#endif // BOOST_CORE_UNDERLYING_TYPE_HPP
| {
"pile_set_name": "Github"
} |
# encoding: utf-8
# This file is distributed under New Relic's license terms.
# See https://github.com/newrelic/newrelic-ruby-agent/blob/main/LICENSE for complete details.
require 'new_relic/coerce'
module NewRelic
class MetricData
# a NewRelic::MetricSpec object
attr_reader :metric_spec
# the actual statistics object
attr_accessor :stats
def initialize(metric_spec, stats)
@original_spec = nil
@metric_spec = metric_spec
self.stats = stats
end
def eql?(o)
(metric_spec.eql? o.metric_spec) && (stats.eql? o.stats)
end
def original_spec
@original_spec || @metric_spec
end
# assigns a new metric spec, and retains the old metric spec as
# @original_spec if it exists currently
def metric_spec= new_spec
@original_spec = @metric_spec if @metric_spec
@metric_spec = new_spec
end
def hash
metric_spec.hash ^ stats.hash
end
def to_json(*a)
%Q[{"metric_spec":#{metric_spec.to_json},"stats":{"total_exclusive_time":#{stats.total_exclusive_time},"min_call_time":#{stats.min_call_time},"call_count":#{stats.call_count},"sum_of_squares":#{stats.sum_of_squares},"total_call_time":#{stats.total_call_time},"max_call_time":#{stats.max_call_time}}}]
end
def to_s
"#{metric_spec.name}(#{metric_spec.scope}): #{stats}"
end
def inspect
"#<MetricData metric_spec:#{metric_spec.inspect}, stats:#{stats.inspect}>"
end
include NewRelic::Coerce
def to_collector_array(encoder=nil)
stat_key = { 'name' => metric_spec.name, 'scope' => metric_spec.scope }
[ stat_key,
[
int(stats.call_count, stat_key),
float(stats.total_call_time, stat_key),
float(stats.total_exclusive_time, stat_key),
float(stats.min_call_time, stat_key),
float(stats.max_call_time, stat_key),
float(stats.sum_of_squares, stat_key)
]
]
end
end
end
| {
"pile_set_name": "Github"
} |
๏ปฟ// Octopus MFS is an integrated suite for managing a Micro Finance Institution:
// clients, contracts, accounting, reporting and risk
// Copyright ยฉ 2006,2007 OCTO Technology & OXUS Development Network
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License along
// with this program; if not, write to the Free Software Foundation, Inc.,
// 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
//
// Website: http://www.opencbs.com
// Contact: [email protected]
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using OpenCBS.CoreDomain;
using OpenCBS.CoreDomain.Accounting;
using System.Data.SqlClient;
using OpenCBS.CoreDomain.EconomicActivities;
using OpenCBS.CoreDomain.Events;
using OpenCBS.Enums;
using OpenCBS.CoreDomain.Products;
using OpenCBS.Manager.Currencies;
using OpenCBS.Manager.Products;
using OpenCBS.CoreDomain.FundingLines;
namespace OpenCBS.Manager.Accounting
{
public class AccountingRuleManager : Manager
{
private AccountManager _accountManager;
private LoanProductManager _loanProductManager;
private SavingProductManager _savingProductManager;
private EconomicActivityManager _economicActivityManager;
private FundingLineManager _fundingLineManager;
private CurrencyManager _currencyManager;
private PaymentMethodManager _paymentMethodManager;
public AccountingRuleManager(User pUser) : base(pUser)
{
_accountManager = new AccountManager(pUser);
_loanProductManager = new LoanProductManager(pUser);
_savingProductManager = new SavingProductManager(pUser);
_economicActivityManager = new EconomicActivityManager(pUser);
_fundingLineManager = new FundingLineManager(pUser);
_currencyManager = new CurrencyManager(pUser);
_paymentMethodManager = new PaymentMethodManager(pUser);
}
public AccountingRuleManager(string pTestDB) : base(pTestDB)
{
_accountManager = new AccountManager(pTestDB);
_loanProductManager = new LoanProductManager(pTestDB);
_savingProductManager = new SavingProductManager(pTestDB);
_economicActivityManager = new EconomicActivityManager(pTestDB);
_fundingLineManager = new FundingLineManager(pTestDB);
_currencyManager = new CurrencyManager(pTestDB);
}
public int AddAccountingRule(IAccountingRule pRule)
{
const string sqlText = @"INSERT INTO [AccountingRules] (
[debit_account_number_id],
[credit_account_number_id],
[rule_type],
[booking_direction],
[event_type],
[event_attribute_id],
[order],
[description])
VALUES (@debit_account_number_id,
@credit_account_number_id,
@rule_type,
@booking_direction,
@event_type,
@event_attribute_id,
@order,
@description)
SELECT SCOPE_IDENTITY()";
using (SqlConnection conn = GetConnection())
{
using (OpenCbsCommand insert = new OpenCbsCommand(sqlText, conn))
{
insert.AddParam("@debit_account_number_id", pRule.DebitAccount.Id);
insert.AddParam("@credit_account_number_id", pRule.CreditAccount.Id);
insert.AddParam("@rule_type", pRule is ContractAccountingRule ? 'C' : 'F');
insert.AddParam("@booking_direction", (int)pRule.BookingDirection);
insert.AddParam("@event_type", pRule.EventType.EventCode);
insert.AddParam("@event_attribute_id", pRule.EventAttribute.Id);
insert.AddParam("@order", pRule.Order);
insert.AddParam("@description", pRule.Description);
pRule.Id = Convert.ToInt32(insert.ExecuteScalar());
}
}
if (pRule is ContractAccountingRule)
AddContractAccountingRule(pRule as ContractAccountingRule);
return pRule.Id;
}
private void AddContractAccountingRule(ContractAccountingRule rule)
{
const string sqlText = @"INSERT INTO
[ContractAccountingRules]
([id],
[product_type],
[loan_product_id],
[savings_product_id],
[client_type],
[activity_id],
[currency_id])
VALUES
(@id,
@productType,
@loanProductId,
@savingsProductId,
@clientType,
@activityId,
@currency_id)";
using (SqlConnection conn = GetConnection())
{
using (OpenCbsCommand cmd = new OpenCbsCommand(sqlText, conn))
{
SetAccountingRule(cmd, rule);
cmd.ExecuteNonQuery();
}
}
}
public void UpdateAccountingRule(IAccountingRule rule)
{
const string sqlText = @"UPDATE [AccountingRules]
SET [debit_account_number_id] = @debit_account_number_id,
[credit_account_number_id] = @credit_account_number_id,
[booking_direction] = @booking_direction,
[event_type] = @event_type,
[event_attribute_id] = @event_attribute_id,
[order] = @order,
[description] = @description
WHERE id = @id";
using (SqlConnection conn = GetConnection())
{
using (OpenCbsCommand cmd = new OpenCbsCommand(sqlText, conn))
{
cmd.AddParam("@id", rule.Id);
cmd.AddParam("@debit_account_number_id", rule.DebitAccount.Id);
cmd.AddParam("@credit_account_number_id", rule.CreditAccount.Id);
cmd.AddParam("@event_type", rule.EventType.EventCode);
cmd.AddParam("@event_attribute_id", rule.EventAttribute.Id);
cmd.AddParam("@booking_direction", (int)rule.BookingDirection);
cmd.AddParam("@order", rule.Order);
cmd.AddParam("@description", rule.Description);
cmd.ExecuteNonQuery();
}
}
if (rule is ContractAccountingRule)
UpdateContractAccountingRule(rule as ContractAccountingRule);
}
private void UpdateContractAccountingRule(ContractAccountingRule rule)
{
const string sqlText = @"UPDATE [ContractAccountingRules]
SET [product_type] = @productType,
[loan_product_id] = @loanProductId,
[savings_product_id] = @savingsProductId,
[client_type] = @clientType,
[activity_id] = @activityId,
[currency_id] = @currency_id
WHERE id = @id";
using (SqlConnection conn = GetConnection())
{
using (OpenCbsCommand cmd = new OpenCbsCommand(sqlText, conn))
{
SetAccountingRule(cmd, rule);
cmd.ExecuteNonQuery();
}
}
}
public void DeleteAccountingRule(IAccountingRule pRule)
{
const string sqlText = @"UPDATE [AccountingRules]
SET [deleted] = 1
WHERE id = @id";
using (SqlConnection conn = GetConnection())
{
using (OpenCbsCommand delete = new OpenCbsCommand(sqlText, conn))
{
delete.AddParam("@id", pRule.Id);
delete.ExecuteNonQuery();
}
}
}
public void DeleteAllAccountingRules()
{
const string sqlText1 = @"DELETE FROM [ContractAccountingRules]";
using (SqlConnection conn = GetConnection())
{
using (OpenCbsCommand delete = new OpenCbsCommand(sqlText1, conn))
{
delete.ExecuteNonQuery();
}
const string sqlText2 = @"DELETE FROM [AccountingRules]";
using (OpenCbsCommand delete = new OpenCbsCommand(sqlText2, conn))
{
delete.ExecuteNonQuery();
}
}
}
public IAccountingRule Select(int pId)
{
const string sqlText = @"SELECT rule_type
FROM [AccountingRules]
WHERE deleted = 0
AND id = @id";
IAccountingRule rule = new ContractAccountingRule();
using (SqlConnection conn = GetConnection())
{
using (OpenCbsCommand select = new OpenCbsCommand(sqlText, conn))
{
select.AddParam("@id", pId);
using (OpenCbsReader reader = select.ExecuteReader())
{
if (reader.Empty) return null;
reader.Read();
if (reader.GetChar("rule_type") == 'C')
rule = new ContractAccountingRule {Id = pId};
}
}
}
if (rule is ContractAccountingRule)
rule = SelectContractAccountingRule(rule.Id);
List<Account> accounts = _accountManager.SelectAllAccounts();
rule.DebitAccount = accounts.FirstOrDefault(item => item.Id == rule.DebitAccount.Id);
rule.CreditAccount = accounts.FirstOrDefault(item => item.Id == rule.CreditAccount.Id);
return rule;
}
private ContractAccountingRule SelectContractAccountingRule(int pId)
{
const string sqlText = @"SELECT AccountingRules.id,
AccountingRules.debit_account_number_id,
AccountingRules.credit_account_number_id,
AccountingRules.booking_direction,
AccountingRules.event_type,
AccountingRules.event_attribute_id,
AccountingRules.[order],
AccountingRules.[description] AS rule_description,
EventAttributes.name AS attribute_name,
EventTypes.description AS event_description,
ContractAccountingRules.product_type,
ContractAccountingRules.loan_product_id,
ContractAccountingRules.savings_product_id,
ContractAccountingRules.client_type,
ContractAccountingRules.activity_id,
ContractAccountingRules.currency_id
FROM AccountingRules
INNER JOIN EventAttributes ON EventAttributes.id = AccountingRules.event_attribute_id
INNER JOIN EventTypes ON AccountingRules.event_type = EventTypes.event_type
LEFT JOIN ContractAccountingRules ON AccountingRules.id = ContractAccountingRules.id
WHERE AccountingRules.id = @id";
ContractAccountingRule rule;
using (SqlConnection conn = GetConnection())
{
using (OpenCbsCommand select = new OpenCbsCommand(sqlText, conn))
{
select.AddParam("@id", pId);
using (OpenCbsReader reader = select.ExecuteReader())
{
if (reader.Empty) return null;
reader.Read();
rule = GetContractAccountingRule(reader);
}
}
}
if (rule.LoanProduct != null)
rule.LoanProduct = _loanProductManager.Select(rule.LoanProduct.Id);
if (rule.Currency != null)
rule.Currency = _currencyManager.SelectCurrencyById(rule.Currency.Id);
if (rule.Currency == null)
rule.Currency = null;
if (rule.SavingProduct != null)
rule.SavingProduct = _savingProductManager.SelectSavingProduct(rule.SavingProduct.Id);
if (rule.EconomicActivity != null)
rule.EconomicActivity = _economicActivityManager.SelectEconomicActivity(rule.EconomicActivity.Id);
if (rule.PaymentMethod.Id != 0)
rule.PaymentMethod = _paymentMethodManager.SelectPaymentMethodById(rule.PaymentMethod.Id);
return rule;
}
private FundingLineAccountingRule SelectFundingLineAccountingRule(int pId)
{
const string sqlText = @"SELECT AccountingRules.id,
AccountingRules.debit_account_number_id,
AccountingRules.credit_account_number_id,
AccountingRules.booking_direction,
FundingLineAccountingRules.funding_line_id
FROM AccountingRules
INNER JOIN FundingLineAccountingRules ON AccountingRules.id = FundingLineAccountingRules.id
WHERE AccountingRules.id = @id";
FundingLineAccountingRule rule;
using (SqlConnection conn = GetConnection())
{
using (OpenCbsCommand select = new OpenCbsCommand(sqlText, conn))
{
select.AddParam("@id", pId);
using (OpenCbsReader reader = select.ExecuteReader())
{
if (reader.Empty) return null;
reader.Read();
rule = _getFundingLineAccountingRule(reader);
}
}
}
if (rule.FundingLine != null)
rule.FundingLine = _fundingLineManager.SelectFundingLineById(rule.FundingLine.Id, false);
return rule;
}
public AccountingRuleCollection SelectAll()
{
const string sqlText = @"SELECT id, rule_type
FROM [AccountingRules]
WHERE deleted = 0";
AccountingRuleCollection rules = new AccountingRuleCollection();
using (SqlConnection conn = GetConnection())
{
using (OpenCbsCommand select = new OpenCbsCommand(sqlText, conn))
{
using (OpenCbsReader reader = select.ExecuteReader())
{
if (reader.Empty) return rules;
while (reader.Read())
{
if (reader.GetChar("rule_type") == 'C')
rules.Add(new ContractAccountingRule { Id = reader.GetInt("id") });
}
}
}
}
List<Account> accounts = _accountManager.SelectAllAccounts();
for (int i = 0; i < rules.Count; i++)
{
if (rules[i] is ContractAccountingRule)
{
rules[i] = SelectContractAccountingRule(rules[i].Id);
}
else
{
rules[i] = SelectFundingLineAccountingRule(rules[i].Id);
}
rules[i].DebitAccount = accounts.FirstOrDefault(item => item.Id == rules[i].DebitAccount.Id);
rules[i].CreditAccount = accounts.FirstOrDefault(item => item.Id == rules[i].CreditAccount.Id);
}
return rules;
}
public DataSet GetRuleCollectionDataset()
{
const string sqlText = @"SELECT
event_type,
event_attribute_id,
debit_account_number_id,
credit_account_number_id,
[order],
[description],
product_type,
loan_product_id,
savings_product_id,
client_type,
activity_id,
currency_id
FROM dbo.AccountingRules ar
INNER JOIN dbo.ContractAccountingRules cr ON ar.id = cr.id
WHERE deleted = 0";
using (SqlConnection conn = GetConnection())
{
using (SqlCommand select = new SqlCommand(sqlText, conn))
{
using (SqlDataAdapter adapter = new SqlDataAdapter(select))
{
DataSet dataSet = new DataSet();
adapter.Fill(dataSet);
return dataSet;
}
}
}
}
public AccountingRuleCollection SelectAllByEventType(string type)
{
const string sqlText = @"SELECT id, rule_type
FROM [AccountingRules]
WHERE deleted = 0
AND (event_type = @event_type OR @event_type = '')";
AccountingRuleCollection rules = new AccountingRuleCollection();
using (SqlConnection conn = GetConnection())
{
using (OpenCbsCommand select = new OpenCbsCommand(sqlText, conn))
{
select.AddParam("@event_type", type);
using (OpenCbsReader reader = select.ExecuteReader())
{
if (reader.Empty) return rules;
while (reader.Read())
{
if (reader.GetChar("rule_type") == 'C')
rules.Add(new ContractAccountingRule { Id = reader.GetInt("id") });
}
}
}
}
List<Account> accounts = _accountManager.SelectAllAccounts();
for (int i = 0; i < rules.Count; i++)
{
if (rules[i] is ContractAccountingRule)
{
rules[i] = SelectContractAccountingRule(rules[i].Id);
}
else
{
rules[i] = SelectFundingLineAccountingRule(rules[i].Id);
}
rules[i].DebitAccount = accounts.FirstOrDefault(item => item.Id == rules[i].DebitAccount.Id);
rules[i].CreditAccount = accounts.FirstOrDefault(item => item.Id == rules[i].CreditAccount.Id);
}
return rules;
}
private void SetAccountingRule(OpenCbsCommand cmd, ContractAccountingRule rule)
{
cmd.AddParam("@id", rule.Id);
cmd.AddParam("@productType", (int)rule.ProductType);
if (rule.LoanProduct != null)
cmd.AddParam("@loanProductId", rule.LoanProduct.Id);
else
cmd.AddParam("@loanProductId", null);
if (rule.Currency != null)
cmd.AddParam("@currency_id", rule.Currency.Id);
else
cmd.AddParam("@currency_id", null);
if (rule.SavingProduct != null)
cmd.AddParam("@savingsProductId", rule.SavingProduct.Id);
else
cmd.AddParam("@savingsProductId", null);
if (rule.ClientType == OClientTypes.Corporate)
cmd.AddParam("@clientType", 'C');
else if (rule.ClientType == OClientTypes.Group)
cmd.AddParam("@clientType", 'G');
else if (rule.ClientType == OClientTypes.Person)
cmd.AddParam("@clientType", 'I');
else if (rule.ClientType == OClientTypes.Village)
cmd.AddParam("@clientType", 'V');
else
cmd.AddParam("@clientType", '-');
if (rule.EconomicActivity != null)
cmd.AddParam("@activityId", rule.EconomicActivity.Id);
else
cmd.AddParam("@activityId", null);
}
private static ContractAccountingRule GetContractAccountingRule(OpenCbsReader reader)
{
ContractAccountingRule rule = new ContractAccountingRule();
rule.Id = reader.GetInt("id");
rule.EventType = new EventType
{
EventCode = reader.GetString("event_type"),
Description = reader.GetString("event_description")
};
rule.EventAttribute = new EventAttribute
{
Id = reader.GetInt("event_attribute_id"),
Name = reader.GetString("attribute_name")
};
rule.DebitAccount = new Account { Id = reader.GetInt("debit_account_number_id") };
rule.CreditAccount = new Account { Id = reader.GetInt("credit_account_number_id") };
rule.BookingDirection = (OBookingDirections)reader.GetSmallInt("booking_direction");
rule.Order = reader.GetInt("order");
rule.Description = reader.GetString("rule_description");
rule.ProductType = (OProductTypes)reader.GetSmallInt("product_type");
int? loanProductId = reader.GetNullInt("loan_product_id");
if (loanProductId.HasValue)
rule.LoanProduct = new LoanProduct { Id = loanProductId.Value };
int? currencyId = reader.GetNullInt("currency_id");
if (currencyId.HasValue)
rule.Currency = new Currency { Id = currencyId.Value };
int? savingsProductId = reader.GetNullInt("savings_product_id");
if (savingsProductId.HasValue)
rule.SavingProduct = new SavingsBookProduct { Id = savingsProductId.Value };
rule.ClientType = reader.GetChar("client_type").ConvertToClientType();
int? activityId = reader.GetNullInt("activity_id");
if (activityId.HasValue)
rule.EconomicActivity = new EconomicActivity { Id = activityId.Value };
return rule;
}
private FundingLineAccountingRule _getFundingLineAccountingRule(OpenCbsReader reader)
{
FundingLineAccountingRule rule = new FundingLineAccountingRule();
rule.Id = reader.GetInt("id");
rule.DebitAccount = new Account { Id = reader.GetInt("debit_account_number_id") };
rule.CreditAccount = new Account { Id = reader.GetInt("credit_account_number_id") };
rule.BookingDirection = (OBookingDirections)reader.GetSmallInt("booking_direction");
int? fundingLineId = reader.GetInt("funding_line_id");
if (fundingLineId.HasValue)
rule.FundingLine = new FundingLine { Id = fundingLineId.Value };
return rule;
}
}
}
| {
"pile_set_name": "Github"
} |
#pragma once
#include "coding/reader.hpp"
#include "base/base.hpp"
#include <cstddef>
#include <cstdint>
#include <memory>
#include <string>
// FileReader, cheap to copy, not thread safe.
// It is assumed that file is not modified during FireReader lifetime,
// because of caching and assumption that Size() is constant.
class FileReader : public ModelReader
{
public:
static uint32_t const kDefaultLogPageSize;
static uint32_t const kDefaultLogPageCount;
explicit FileReader(std::string const & fileName);
FileReader(std::string const & fileName, uint32_t logPageSize, uint32_t logPageCount);
// Reader overrides:
uint64_t Size() const override { return m_size; }
void Read(uint64_t pos, void * p, size_t size) const override;
std::unique_ptr<Reader> CreateSubReader(uint64_t pos, uint64_t size) const override;
FileReader SubReader(uint64_t pos, uint64_t size) const;
uint64_t GetOffset() const { return m_offset; }
protected:
// Used in special derived readers.
void SetOffsetAndSize(uint64_t offset, uint64_t size);
private:
class FileReaderData;
FileReader(FileReader const & reader, uint64_t offset, uint64_t size, uint32_t logPageSize,
uint32_t logPageCount);
// Throws an exception if a (pos, size) read would result in an out-of-bounds access.
void CheckPosAndSize(uint64_t pos, uint64_t size) const;
uint32_t m_logPageSize;
uint32_t m_logPageCount;
std::shared_ptr<FileReaderData> m_fileData;
uint64_t m_offset;
uint64_t m_size;
};
| {
"pile_set_name": "Github"
} |
--TEST--
Function snmp_set_oid_output_format
--CREDITS--
Olivier Doucet
--SKIPIF--
<?php
require_once(__DIR__.'/skipif.inc');
if (!function_exists('snmp_set_oid_output_format')) die('skip This function is only available if using NET_SNMP');
?>
--FILE--
<?php
require_once(__DIR__.'/snmp_include.inc');
echo "Checking error handling\n";
try {
var_dump(snmp_set_oid_output_format(123));
} catch (\ValueError $e) {
echo $e->getMessage() . \PHP_EOL;
}
echo "Checking working\n";
var_dump(snmp_set_oid_output_format(SNMP_OID_OUTPUT_FULL));
var_dump(snmp_set_oid_output_format(SNMP_OID_OUTPUT_NUMERIC));
?>
--EXPECT--
Checking error handling
snmp_set_oid_output_format(): Argument #1 ($oid_format) must be an SNMP_OID_OUTPUT_* constant
Checking working
bool(true)
bool(true)
| {
"pile_set_name": "Github"
} |
package com.fasterxml.jackson.databind.introspect;
import java.io.IOException;
import java.io.StringWriter;
import java.util.*;
import javax.xml.namespace.QName;
import com.fasterxml.jackson.annotation.*;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.annotation.*;
import com.fasterxml.jackson.databind.cfg.MapperConfig;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
public class TestJacksonAnnotationIntrospector
extends BaseMapTest
{
public static enum EnumExample {
VALUE1;
}
public static class JacksonExample
{
protected String attributeProperty;
protected String elementProperty;
protected List<String> wrappedElementProperty;
protected EnumExample enumProperty;
protected QName qname;
@JsonSerialize(using=QNameSerializer.class)
public QName getQname()
{
return qname;
}
@JsonDeserialize(using=QNameDeserializer.class)
public void setQname(QName qname)
{
this.qname = qname;
}
@JsonProperty("myattribute")
public String getAttributeProperty()
{
return attributeProperty;
}
@JsonProperty("myattribute")
public void setAttributeProperty(String attributeProperty)
{
this.attributeProperty = attributeProperty;
}
@JsonProperty("myelement")
public String getElementProperty()
{
return elementProperty;
}
@JsonProperty("myelement")
public void setElementProperty(String elementProperty)
{
this.elementProperty = elementProperty;
}
@JsonProperty("mywrapped")
public List<String> getWrappedElementProperty()
{
return wrappedElementProperty;
}
@JsonProperty("mywrapped")
public void setWrappedElementProperty(List<String> wrappedElementProperty)
{
this.wrappedElementProperty = wrappedElementProperty;
}
public EnumExample getEnumProperty()
{
return enumProperty;
}
public void setEnumProperty(EnumExample enumProperty)
{
this.enumProperty = enumProperty;
}
}
public static class QNameSerializer extends JsonSerializer<QName> {
@Override
public void serialize(QName value, JsonGenerator jgen, SerializerProvider provider)
throws IOException, JsonProcessingException
{
jgen.writeString(value.toString());
}
}
public static class QNameDeserializer extends StdDeserializer<QName>
{
public QNameDeserializer() { super(QName.class); }
@Override
public QName deserialize(JsonParser jp, DeserializationContext ctxt)
throws IOException, JsonProcessingException
{
return QName.valueOf(jp.readValueAs(String.class));
}
}
@JsonIgnoreType
static class IgnoredType { }
static class IgnoredSubType extends IgnoredType { }
// Test to ensure we can override enum settings
static class LcEnumIntrospector extends JacksonAnnotationIntrospector
{
private static final long serialVersionUID = 1L;
@Override
public String[] findEnumValues(MapperConfig<?> config,
Class<?> enumType, Enum<?>[] enumValues, String[] names) {
// kinda sorta wrong, but for testing's sake...
for (int i = 0, len = enumValues.length; i < len; ++i) {
names[i] = enumValues[i].name().toLowerCase();
}
return names;
}
}
/*
/**********************************************************
/* Unit tests
/**********************************************************
*/
/**
* tests getting serializer/deserializer instances.
*/
public void testSerializeDeserializeWithJaxbAnnotations() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.enable(SerializationFeature.INDENT_OUTPUT)
.build();
JacksonExample ex = new JacksonExample();
QName qname = new QName("urn:hi", "hello");
ex.setQname(qname);
ex.setAttributeProperty("attributeValue");
ex.setElementProperty("elementValue");
ex.setWrappedElementProperty(Arrays.asList("wrappedElementValue"));
ex.setEnumProperty(EnumExample.VALUE1);
StringWriter writer = new StringWriter();
mapper.writeValue(writer, ex);
writer.flush();
writer.close();
String json = writer.toString();
JacksonExample readEx = mapper.readValue(json, JacksonExample.class);
assertEquals(ex.qname, readEx.qname);
assertEquals(ex.attributeProperty, readEx.attributeProperty);
assertEquals(ex.elementProperty, readEx.elementProperty);
assertEquals(ex.wrappedElementProperty, readEx.wrappedElementProperty);
assertEquals(ex.enumProperty, readEx.enumProperty);
}
public void testEnumHandling() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.annotationIntrospector(new LcEnumIntrospector())
.build();
assertEquals("\"value1\"", mapper.writeValueAsString(EnumExample.VALUE1));
EnumExample result = mapper.readValue(quote("value1"), EnumExample.class);
assertEquals(EnumExample.VALUE1, result);
}
}
| {
"pile_set_name": "Github"
} |
// Range v3 library
//
// Copyright Eric Niebler 2014-present
//
// Use, modification and distribution is subject to the
// Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// Project home: https://github.com/ericniebler/range-v3
//
#define SET_UNION_6
#include "./set_union.hpp"
| {
"pile_set_name": "Github"
} |
#!/usr/bin/python -tt
import eucalyptus_admin.commands.bootstrap.deregisterservice
if __name__ == '__main__':
eucalyptus_admin.commands.bootstrap.deregisterservice.DeregisterService.run()
| {
"pile_set_name": "Github"
} |
---
title: Core.PrintfFormat<'Printer,'State,'Residue,'Result,'Tuple> Constructor (F#)
description: Core.PrintfFormat<'Printer,'State,'Residue,'Result,'Tuple> Constructor (F#)
keywords: visual f#, f#, functional programming
author: dend
manager: danielfe
ms.date: 05/16/2016
ms.topic: language-reference
ms.prod: visual-studio-dev14
ms.technology: devlang-fsharp
ms.assetid: 12f7a2a2-a772-4050-80c2-f88939ae8dbf
---
# Core.PrintfFormat<'Printer,'State,'Residue,'Result,'Tuple> Constructor (F#)
Construct a format string.
**Namespace/Module Path:** Microsoft.FSharp.Core
**Assembly:** FSharp.Core (in FSharp.Core.dll)
## Syntax
```fsharp
// Signature:
new PrintfFormat : string -> PrintfFormat<'Printer,'State,'Residue,'Result,'Tuple>
// Usage:
new PrintfFormat (value)
```
#### Parameters
*value*
Type: [string](https://msdn.microsoft.com/library/12b97856-ec80-4f70-a018-afb0753f755a)
The input string.
## Return Value
The created format string.
## Platforms
Windows 8, Windows 7, Windows Server 2012, Windows Server 2008 R2
## Version Information
**F# Core Library Versions**
Supported in: 2.0, 4.0, Portable
## See Also
[Core.PrintfFormat<'Printer,'State,'Residue,'Result,'Tuple> Class (F#)](Core.PrintfFormat%5B%27Printer%2C%27State%2C%27Residue%2C%27Result%2C%27Tuple%5D-Class-%5BFSharp%5D.md)
[Microsoft.FSharp.Core Namespace (F#)](Microsoft.FSharp.Core-Namespace-%5BFSharp%5D.md) | {
"pile_set_name": "Github"
} |
from stream_framework.tests.feeds.aggregated_feed.base import TestAggregatedFeed
from stream_framework.feeds.aggregated_feed.notification_feed import RedisNotificationFeed
class TestNotificationFeed(TestAggregatedFeed):
feed_cls = RedisNotificationFeed
def test_mark_all(self):
# start by adding one
self.test_feed.insert_activities(self.aggregated.activities)
self.test_feed.add_many_aggregated([self.aggregated])
assert len(self.test_feed[:10]) == 1
assert int(self.test_feed.count_unseen()) == 1
# TODO: don't know why this is broken
# assert int(self.test_feed.get_denormalized_count()) == 1
self.test_feed.mark_all()
assert int(self.test_feed.count_unseen()) == 0
assert int(self.test_feed.get_denormalized_count()) == 0
| {
"pile_set_name": "Github"
} |
class CFMessagePort {
}
var kCFMessagePortSuccess: Int32 { get }
var kCFMessagePortSendTimeout: Int32 { get }
var kCFMessagePortReceiveTimeout: Int32 { get }
var kCFMessagePortIsInvalid: Int32 { get }
var kCFMessagePortTransportError: Int32 { get }
var kCFMessagePortBecameInvalidError: Int32 { get }
struct CFMessagePortContext {
var version: CFIndex
var info: UnsafeMutablePointer<Void>
var retain: (@convention(c) (UnsafePointer<Void>) -> UnsafePointer<Void>)!
var release: (@convention(c) (UnsafePointer<Void>) -> Void)!
var copyDescription: (@convention(c) (UnsafePointer<Void>) -> Unmanaged<CFString>!)!
init()
init(version version: CFIndex, info info: UnsafeMutablePointer<Void>, retain retain: (@convention(c) (UnsafePointer<Void>) -> UnsafePointer<Void>)!, release release: (@convention(c) (UnsafePointer<Void>) -> Void)!, copyDescription copyDescription: (@convention(c) (UnsafePointer<Void>) -> Unmanaged<CFString>!)!)
}
typealias CFMessagePortCallBack = @convention(c) (CFMessagePort!, Int32, CFData!, UnsafeMutablePointer<Void>) -> Unmanaged<CFData>!
typealias CFMessagePortInvalidationCallBack = @convention(c) (CFMessagePort!, UnsafeMutablePointer<Void>) -> Void
func CFMessagePortGetTypeID() -> CFTypeID
func CFMessagePortCreateLocal(_ allocator: CFAllocator!, _ name: CFString!, _ callout: CFMessagePortCallBack!, _ context: UnsafeMutablePointer<CFMessagePortContext>, _ shouldFreeInfo: UnsafeMutablePointer<DarwinBoolean>) -> CFMessagePort!
func CFMessagePortCreateRemote(_ allocator: CFAllocator!, _ name: CFString!) -> CFMessagePort!
func CFMessagePortIsRemote(_ ms: CFMessagePort!) -> Bool
func CFMessagePortGetName(_ ms: CFMessagePort!) -> CFString!
func CFMessagePortSetName(_ ms: CFMessagePort!, _ newName: CFString!) -> Bool
func CFMessagePortGetContext(_ ms: CFMessagePort!, _ context: UnsafeMutablePointer<CFMessagePortContext>)
func CFMessagePortInvalidate(_ ms: CFMessagePort!)
func CFMessagePortIsValid(_ ms: CFMessagePort!) -> Bool
func CFMessagePortGetInvalidationCallBack(_ ms: CFMessagePort!) -> CFMessagePortInvalidationCallBack!
func CFMessagePortSetInvalidationCallBack(_ ms: CFMessagePort!, _ callout: CFMessagePortInvalidationCallBack!)
func CFMessagePortSendRequest(_ remote: CFMessagePort!, _ msgid: Int32, _ data: CFData!, _ sendTimeout: CFTimeInterval, _ rcvTimeout: CFTimeInterval, _ replyMode: CFString!, _ returnData: UnsafeMutablePointer<Unmanaged<CFData>?>) -> Int32
func CFMessagePortCreateRunLoopSource(_ allocator: CFAllocator!, _ local: CFMessagePort!, _ order: CFIndex) -> CFRunLoopSource!
@available(iOS 4.0, *)
func CFMessagePortSetDispatchQueue(_ ms: CFMessagePort!, _ queue: dispatch_queue_t!)
| {
"pile_set_name": "Github"
} |
[
{
"kind": "ObjectTypeDefinition",
"interfaces": [
{
"kind": "ImplementedInterface",
"interface": {
"kind": "NamedType",
"name": {
"kind": "Identifier",
"value": "IDateTime"
}
},
"loc": null
}
],
"fields": [
{
"kind": "FieldDefinition",
"name": {
"kind": "Identifier",
"value": "field"
},
"type": {
"kind": "NamedType",
"name": {
"kind": "Identifier",
"value": "String"
}
},
"description": null,
"directives": [],
"arguments": [
{
"kind": "ArgumentDefinition",
"name": {
"kind": "Identifier",
"value": "arg"
},
"type": {
"kind": "NamedType",
"name": {
"kind": "Identifier",
"value": "DateTime"
}
},
"description": null,
"directives": [],
"defaultValue": null
}
]
}
],
"name": {
"kind": "Identifier",
"value": "TString"
},
"description": null,
"directives": []
},
{
"kind": "InterfaceTypeDefinition",
"interfaces": [],
"fields": [
{
"kind": "FieldDefinition",
"name": {
"kind": "Identifier",
"value": "field"
},
"type": {
"kind": "NamedType",
"name": {
"kind": "Identifier",
"value": "String"
}
},
"description": null,
"directives": [],
"arguments": [
{
"kind": "ArgumentDefinition",
"name": {
"kind": "Identifier",
"value": "arg"
},
"type": {
"kind": "NamedType",
"name": {
"kind": "Identifier",
"value": "String"
}
},
"description": null,
"directives": [],
"defaultValue": null
}
]
}
],
"name": {
"kind": "Identifier",
"value": "IDateTime"
},
"description": null,
"directives": []
}
] | {
"pile_set_name": "Github"
} |
๏ปฟ/*************************************************************************************
Extended WPF Toolkit
Copyright (C) 2007-2013 Xceed Software Inc.
This program is provided to you under the terms of the Microsoft Public
License (Ms-PL) as published at http://wpftoolkit.codeplex.com/license
For more features, controls, and fast professional support,
pick up the Plus Edition at http://xceed.com/wpf_toolkit
Stay informed: follow @datagrid on Twitter or Like http://facebook.com/datagrids
***********************************************************************************/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Xceed.Utils.Wpf.DragDrop;
using System.Windows.Media;
using System.Windows.Shapes;
using System.Windows;
using System.Windows.Documents;
using System.Windows.Media.Imaging;
using System.Diagnostics;
using System.Windows.Controls;
namespace Xceed.Wpf.DataGrid.Views
{
internal class AnimatedDraggedElementAdorner : DraggedElementAdorner
{
#region Constructors
public AnimatedDraggedElementAdorner( UIElement adornedElement, AdornerLayer adornerLayer )
: this( adornedElement, adornerLayer, false )
{
}
public AnimatedDraggedElementAdorner( UIElement adornedElement, AdornerLayer adornerLayer, bool deepCopy )
: base( adornedElement, adornerLayer )
{
this.DeepCopy = deepCopy;
this.IsHitTestVisible = false;
}
#endregion
#region DeepCopy Property
public bool DeepCopy
{
get;
private set;
}
#endregion
#region Protected Methods
protected override Rectangle InitializeAdornedElementImage()
{
Rect adornedBounds = VisualTreeHelper.GetDescendantBounds( this.AdornedElement );
// Round the height and width of the bounds to reduce the
// blur effect caused by the RenderTargetBitmap.
// When drawing Text into RenderTargetBitmap, the ClearType
// reverts to grayscale causing a blur effect. If there is
// also an extrapolation, the blur effect will be worst.
int roundedHeight = ( int )Math.Round( adornedBounds.Height, MidpointRounding.ToEven );
int roundedWidth = ( int )Math.Round( adornedBounds.Width, MidpointRounding.ToEven );
VisualBrush brush = new VisualBrush( this.AdornedElement );
Rectangle rectangle = new Rectangle();
// Only if we have something to adorn
if( this.DeepCopy
&& ( ( roundedWidth > 0 ) && ( roundedHeight > 0 ) ) )
{
try
{
RenderTargetBitmap bitmap = new RenderTargetBitmap( roundedWidth,
roundedHeight,
96,
96,
PixelFormats.Pbgra32 );
DrawingVisual drawingVisual = new DrawingVisual();
using( DrawingContext context = drawingVisual.RenderOpen() )
{
Rect finalRect = new Rect( 0,
0,
roundedWidth,
roundedHeight );
context.DrawRectangle( brush, null, finalRect );
}
bitmap.Render( drawingVisual );
// Ensure to set the Height and Width
// values for the Fill does not resize the
// rectangle if it is larger. This also
// reduce the blur effect.
rectangle.Height = roundedHeight;
rectangle.Width = roundedWidth;
rectangle.UpdateLayout();
// Adding BitmapScallingMode using any other BitmapScalingMode cause some
// blur in the resulting Bitmap
RenderOptions.SetBitmapScalingMode( rectangle, BitmapScalingMode.NearestNeighbor );
rectangle.Fill = new ImageBrush( bitmap );
// Translate the Top Left corner of the rectangle that
// contains the AdornedElement
if( !adornedBounds.Size.IsEmpty )
{
rectangle.RenderTransform = new TranslateTransform( adornedBounds.X, adornedBounds.Y );
}
}
catch( Exception )
{
// Avoid any exception and use the brush itself
rectangle.Fill = brush;
}
}
else
{
rectangle.Fill = brush;
}
return rectangle;
}
protected override Size MeasureOverride( Size constraint )
{
this.AdornedElement.Measure( constraint );
// Ensure to return the DescendantBounds to take Margins and Padding
// into consideration
Rect bounds = VisualTreeHelper.GetDescendantBounds( this.AdornedElement );
return bounds.Size;
}
#endregion
}
}
| {
"pile_set_name": "Github"
} |
temple = {
name = "Temple"
conditions = {
unit_class = lumber_mill
}
}
| {
"pile_set_name": "Github"
} |
/////////////////////////////////////////////////////////////////////////////
// Name: wx/xrc/xh_listbk.h
// Purpose: XML resource handler for wxListbook
// Author: Vaclav Slavik
// Copyright: (c) 2000 Vaclav Slavik
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
#ifndef _WX_XH_LISTBK_H_
#define _WX_XH_LISTBK_H_
#include "wx/xrc/xmlres.h"
#if wxUSE_XRC && wxUSE_LISTBOOK
class WXDLLIMPEXP_FWD_CORE wxListbook;
class WXDLLIMPEXP_XRC wxListbookXmlHandler : public wxXmlResourceHandler
{
DECLARE_DYNAMIC_CLASS(wxListbookXmlHandler)
public:
wxListbookXmlHandler();
virtual wxObject *DoCreateResource();
virtual bool CanHandle(wxXmlNode *node);
private:
bool m_isInside;
wxListbook *m_listbook;
};
#endif // wxUSE_XRC && wxUSE_LISTBOOK
#endif // _WX_XH_LISTBK_H_
| {
"pile_set_name": "Github"
} |
'use strict';
// This file contains then/promise specific extensions that are only useful
// for node.js interop
var Promise = require('./core.js');
var asap = require('asap');
module.exports = Promise;
/* Static Functions */
Promise.denodeify = function (fn, argumentCount) {
if (
typeof argumentCount === 'number' && argumentCount !== Infinity
) {
return denodeifyWithCount(fn, argumentCount);
} else {
return denodeifyWithoutCount(fn);
}
}
var callbackFn = (
'function (err, res) {' +
'if (err) { rj(err); } else { rs(res); }' +
'}'
);
function denodeifyWithCount(fn, argumentCount) {
var args = [];
for (var i = 0; i < argumentCount; i++) {
args.push('a' + i);
}
var body = [
'return function (' + args.join(',') + ') {',
'var self = this;',
'return new Promise(function (rs, rj) {',
'var res = fn.call(',
['self'].concat(args).concat([callbackFn]).join(','),
');',
'if (res &&',
'(typeof res === "object" || typeof res === "function") &&',
'typeof res.then === "function"',
') {rs(res);}',
'});',
'};'
].join('');
return Function(['Promise', 'fn'], body)(Promise, fn);
}
function denodeifyWithoutCount(fn) {
var fnLength = Math.max(fn.length - 1, 3);
var args = [];
for (var i = 0; i < fnLength; i++) {
args.push('a' + i);
}
var body = [
'return function (' + args.join(',') + ') {',
'var self = this;',
'var args;',
'var argLength = arguments.length;',
'if (arguments.length > ' + fnLength + ') {',
'args = new Array(arguments.length + 1);',
'for (var i = 0; i < arguments.length; i++) {',
'args[i] = arguments[i];',
'}',
'}',
'return new Promise(function (rs, rj) {',
'var cb = ' + callbackFn + ';',
'var res;',
'switch (argLength) {',
args.concat(['extra']).map(function (_, index) {
return (
'case ' + (index) + ':' +
'res = fn.call(' + ['self'].concat(args.slice(0, index)).concat('cb').join(',') + ');' +
'break;'
);
}).join(''),
'default:',
'args[argLength] = cb;',
'res = fn.apply(self, args);',
'}',
'if (res &&',
'(typeof res === "object" || typeof res === "function") &&',
'typeof res.then === "function"',
') {rs(res);}',
'});',
'};'
].join('');
return Function(
['Promise', 'fn'],
body
)(Promise, fn);
}
Promise.nodeify = function (fn) {
return function () {
var args = Array.prototype.slice.call(arguments);
var callback =
typeof args[args.length - 1] === 'function' ? args.pop() : null;
var ctx = this;
try {
return fn.apply(this, arguments).nodeify(callback, ctx);
} catch (ex) {
if (callback === null || typeof callback == 'undefined') {
return new Promise(function (resolve, reject) {
reject(ex);
});
} else {
asap(function () {
callback.call(ctx, ex);
})
}
}
}
}
Promise.prototype.nodeify = function (callback, ctx) {
if (typeof callback != 'function') return this;
this.then(function (value) {
asap(function () {
callback.call(ctx, null, value);
});
}, function (err) {
asap(function () {
callback.call(ctx, err);
});
});
}
| {
"pile_set_name": "Github"
} |
# Authors
* Ascensio System SIA: <[email protected]>
| {
"pile_set_name": "Github"
} |
from __future__ import print_function, unicode_literals
import win32com.client
import codecs
FILE = 1
DRAFT = 4
def login(user, password):
app = win32com.client.Dispatch('NovellGroupWareSession')
account = app.MultiLogin(user, None, password, 1)
# 0 - promptIfNeeded
# 1 - noverPrompt
# 2 - allowPasswordPrompt
def incoming(account):
return account.MailBox.Messages.Find('(mail) and (box_type = incoming)')
def msg_atts(msg):
'att generator'
for att in msg.Attachments:
if att.ObjType == FILE:
fn = att.FileName
if not fn:
continue
elif fn == 'Mime.822':
# email from Thunderbird through smtp
continue
elif fn == 'Header':
# forwarded from Thunderbird through smtp
continue
yield att
return None
def att_save(att, fpath):
if att.AttachmentSize > 0:
att.Save(fpath)
else:
# GW-error workaround, cat > fpath
with open(fpath, 'wb'):
pass
def msg_move(msg, fromFolder, toFolder):
fromFolder.Messages.Move(msg, toFolder.Messages)
def msg_move2(msg, toFolder):
'move from Inbox'
inbox = msg.Parent.MailBox
folders = msg.EnclosingFolders
if inbox in folders:
msg_move(msg, inbox, toFolder)
elif not toFolder in folders:
toFolder.Messages.Add(msg)
class AttStream:
def __init__(self, att):
self.stream = att.Stream
self.size = att.AttachmentSize
def read(self, size = -1):
if size < 0:
size = self.size
data = self.stream.Read(size)
return str(data)
def close(self):
pass
def att_text(att, encoding):
fp = AttStream(att)
return fp.read().decode(encoding)
def att_reader(att, encoding):
'''
with att_reader(att, encoding) as fp:
do_something
'''
fp = AttStream(att)
return codecs.getreader(encoding)(fp)
def create_msg(folder):
return folder.Messages.Add('GW.MESSAGE.MAIL', DRAFT)
def add_recipients(msg, *addrL):
for addr in addrL:
msg.Recipients.Add(addr)
def add_file(msg, fpath, fn = None):
if fn:
msg.Attachments.Add(fpath, FILE, fn)
else:
msg.Attachments.Add(fpath, FILE)
| {
"pile_set_name": "Github"
} |
/**
* @file
* CSS File for Views Slideshow: ImageFlow
*/
.imageflow {
/* overflow: visible; */
}
| {
"pile_set_name": "Github"
} |
4
3
0 0
1 1
1 0
4
0 0
0 1
1 0
1 1
5
0 0
0 1
0 2
2 2
2 0
4
0 1
2 2
3 0
1 -1 | {
"pile_set_name": "Github"
} |
<?php
namespace oasis\names\specification\ubl\schema\xsd\CommonBasicComponents_2;
/**
* @xmlNamespace urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2
* @xmlType OrderableIndicatorType
* @xmlName OrderableIndicator
* @var oasis\names\specification\ubl\schema\xsd\CommonBasicComponents_2\OrderableIndicator
*/
class OrderableIndicator extends OrderableIndicatorType
{
} // end class OrderableIndicator
| {
"pile_set_name": "Github"
} |
package com.mashibing.service.impl;
import com.mashibing.bean.TblSendLog;
import com.mashibing.mapper.TblSendLogMapper;
import com.mashibing.service.base.TblSendLogService;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.springframework.stereotype.Service;
/**
* <p>
* ๅ้ๆฅๅฟ่กจ ๆๅกๅฎ็ฐ็ฑป
* </p>
*
* @author lian
* @since 2020-04-18
*/
@Service
public class TblSendLogServiceImpl extends ServiceImpl<TblSendLogMapper, TblSendLog> implements TblSendLogService {
}
| {
"pile_set_name": "Github"
} |
{
"@metadata": {
"authors": [
"Cgtdk",
"Christian List",
"EileenSanda",
"Laketown",
"Palnatoke",
"Simeondahl",
"Tehnix",
"Macofe",
"Peter Alberti"
]
},
"ooui-outline-control-move-down": "Flyt ned",
"ooui-outline-control-move-up": "Flyt op",
"ooui-toolbar-more": "Mere",
"ooui-toolgroup-expand": "Mere",
"ooui-toolgroup-collapse": "Fรฆrre",
"ooui-dialog-message-accept": "OK",
"ooui-dialog-message-reject": "Afbryd",
"ooui-dialog-process-error": "Noget gik galt",
"ooui-dialog-process-retry": "Prรธv igen",
"ooui-dialog-process-continue": "Fortsรฆt",
"ooui-selectfile-button-select": "Vรฆlg en fil"
}
| {
"pile_set_name": "Github"
} |
var config = {
type: Phaser.WEBGL,
width: 800,
height: 600,
parent: 'phaser-example',
pixelArt: true,
physics: {
default: 'arcade',
arcade: {
gravity: { y: 100 },
debug: false
}
},
scene: {
preload: preload,
create: create,
update: update
}
};
var controls;
var game = new Phaser.Game(config);
function preload ()
{
this.load.atlas('gems', 'assets/tests/columns/gems.png', 'assets/tests/columns/gems.json');
}
function create ()
{
this.physics.world.setBounds(0, 0, 800 * 8, 600 * 8);
var spriteBounds = Phaser.Geom.Rectangle.Inflate(Phaser.Geom.Rectangle.Clone(this.physics.world.bounds), -100, -100);
this.anims.create({ key: 'diamond', frames: this.anims.generateFrameNames('gems', { prefix: 'diamond_', end: 15, zeroPad: 4 }), repeat: -1 });
this.anims.create({ key: 'prism', frames: this.anims.generateFrameNames('gems', { prefix: 'prism_', end: 6, zeroPad: 4 }), repeat: -1 });
this.anims.create({ key: 'ruby', frames: this.anims.generateFrameNames('gems', { prefix: 'ruby_', end: 6, zeroPad: 4 }), repeat: -1 });
this.anims.create({ key: 'square', frames: this.anims.generateFrameNames('gems', { prefix: 'square_', end: 14, zeroPad: 4 }), repeat: -1 });
// Create loads of random sprites
var anims = [ 'diamond', 'prism', 'ruby', 'square' ];
for (var i = 0; i < 10000; i++)
{
var pos = Phaser.Geom.Rectangle.Random(spriteBounds);
var block = this.physics.add.sprite(pos.x, pos.y, 'gems');
block.setVelocity(Phaser.Math.Between(200, 400), Phaser.Math.Between(200, 400));
block.setBounce(1).setCollideWorldBounds(true);
if (Math.random() > 0.5)
{
block.body.velocity.x *= -1;
}
if (Math.random() > 0.5)
{
block.body.velocity.y *= -1;
}
block.play(Phaser.Math.RND.pick(anims));
}
var cursors = this.input.keyboard.createCursorKeys();
var controlConfig = {
camera: this.cameras.main,
left: cursors.left,
right: cursors.right,
up: cursors.up,
down: cursors.down,
zoomIn: this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.Q),
zoomOut: this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.E),
acceleration: 0.06,
drag: 0.0005,
maxSpeed: 1.0
};
controls = new Phaser.Cameras.Controls.SmoothedKeyControl(controlConfig);
this.add.text(0, 0, 'Use Cursors to scroll camera.\nQ / E to zoom in and out', { font: '18px Courier', fill: '#00ff00' });
}
function update (time, delta)
{
controls.update(delta);
}
| {
"pile_set_name": "Github"
} |
๏ปฟ//
// CSharpDocumentOptionsProvider.cs
//
// Author:
// Mike Krรผger <[email protected]>
//
// Copyright (c) 2017 Microsoft Corporation
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.ComponentModel.Composition;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Options;
using Roslyn.Utilities;
using MonoDevelop.CSharp.Formatting;
using MonoDevelop.Ide.Gui.Content;
using MonoDevelop.Ide.TypeSystem;
using MonoDevelop.Projects.Policies;
using Microsoft.VisualStudio.CodingConventions;
using System.IO;
using MonoDevelop.Core;
using System.Linq;
using MonoDevelop.Ide.Editor;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Runtime.CompilerServices;
namespace MonoDevelop.CSharp.OptionProvider
{
class CSharpDocumentOptionsProvider : IDocumentOptionsProvider
{
async Task<IDocumentOptions> IDocumentOptionsProvider.GetOptionsForDocumentAsync (Document document, CancellationToken cancellationToken)
{
var mdws = document.Project.Solution.Workspace as MonoDevelopWorkspace;
var project = mdws?.GetMonoProject (document.Project.Id);
var path = GetPath (document);
ICodingConventionContext conventions = null;
try {
if (path != null)
conventions = await EditorConfigService.GetEditorConfigContext (path, cancellationToken);
} catch (Exception e) {
LoggingService.LogError("Error while loading coding conventions.", e);
}
return new DocumentOptions (project?.Policies, conventions?.CurrentConventions);
}
static string GetPath(Document document)
{
if (document.FilePath != null)
return document.FilePath;
// The file might not actually have a path yet, if it's a file being proposed by a code action. We'll guess a file path to use
if (document.Name != null && document.Project.FilePath != null) {
return Path.Combine (Path.GetDirectoryName (document.Project.FilePath), document.Name);
}
// Really no idea where this is going, so bail
return null;
}
internal class DocumentOptions : IDocumentOptions
{
readonly static IEnumerable<string> types = Ide.IdeServices.DesktopService.GetMimeTypeInheritanceChain (CSharpFormatter.MimeType);
readonly PolicyBag policyBag;
CSharpFormattingPolicy policy;
CSharpFormattingPolicy Policy => policy ?? (policy = policyBag?.Get<CSharpFormattingPolicy> (types) ?? PolicyService.InvariantPolicies.Get<CSharpFormattingPolicy> (types));
TextStylePolicy textpolicy;
TextStylePolicy TextPolicy => textpolicy ?? (textpolicy = policyBag?.Get<TextStylePolicy> (types) ?? PolicyService.InvariantPolicies?.Get<TextStylePolicy> (types));
readonly ICodingConventionsSnapshot codingConventionsSnapshot;
private static readonly ConditionalWeakTable<IReadOnlyDictionary<string, object>, IReadOnlyDictionary<string, string>> s_convertedDictionaryCache =
new ConditionalWeakTable<IReadOnlyDictionary<string, object>, IReadOnlyDictionary<string, string>> ();
public DocumentOptions (PolicyBag policyBag, ICodingConventionsSnapshot codingConventionsSnapshot)
{
this.policyBag = policyBag;
this.codingConventionsSnapshot = codingConventionsSnapshot;
}
public bool TryGetDocumentOption (OptionKey option, out object value)
{
if (codingConventionsSnapshot != null) {
var editorConfigPersistence = option.Option.StorageLocations.OfType<IEditorConfigStorageLocation> ().SingleOrDefault ();
if (editorConfigPersistence != null) {
// Temporarly map our old Dictionary<string, object> to a Dictionary<string, string>. This can go away once we either
// eliminate the legacy editorconfig support, or we change IEditorConfigStorageLocation.TryGetOption to take
// some interface that lets us pass both the Dictionary<string, string> we get from the new system, and the
// Dictionary<string, object> from the old system.
//
// We cache this with a conditional weak table so we're able to maintain the assumptions in EditorConfigNamingStyleParser
// that the instance doesn't regularly change and thus can be used for further caching
var allRawConventions = s_convertedDictionaryCache.GetValue (
codingConventionsSnapshot.AllRawConventions,
d => ImmutableDictionary.CreateRange (d.Select (c => KeyValuePairUtil.Create (c.Key, c.Value.ToString ()))));
try {
if (editorConfigPersistence.TryGetOption (allRawConventions, option.Option.Type, out value))
return true;
} catch (Exception ex) {
LoggingService.LogError ("Error while getting editor config preferences.", ex);
}
}
}
if (option.Option == Microsoft.CodeAnalysis.Formatting.FormattingOptions.IndentationSize) {
value = TextPolicy.IndentWidth;
return true;
}
if (option.Option == Microsoft.CodeAnalysis.Formatting.FormattingOptions.NewLine) {
value = TextPolicy.GetEolMarker ();
return true;
}
if (option.Option == Microsoft.CodeAnalysis.Formatting.FormattingOptions.SmartIndent) {
value = Microsoft.CodeAnalysis.Formatting.FormattingOptions.IndentStyle.Smart;
return true;
}
if (option.Option == Microsoft.CodeAnalysis.Formatting.FormattingOptions.TabSize) {
value = TextPolicy.TabWidth;
return true;
}
if (option.Option == Microsoft.CodeAnalysis.Formatting.FormattingOptions.UseTabs) {
value = !TextPolicy.TabsToSpaces;
return true;
}
var result = Policy.OptionSet.GetOption (option);
value = result;
return true;
}
}
}
}
| {
"pile_set_name": "Github"
} |
/**
* This file is copyright 2017 State of the Netherlands (Ministry of Interior Affairs and Kingdom Relations).
* It is made available under the terms of the GNU Affero General Public License, version 3 as published by the Free Software Foundation.
* The project of which this file is part, may be found at https://github.com/MinBZK/operatieBRP.
*/
package nl.bzk.brp.preview.model;
public class Plaats {
private String code;
private String naam;
public Plaats() {
}
public Plaats(final String naam) {
this.naam = naam;
}
public String getCode() {
return code;
}
public void setCode(final String code) {
this.code = code;
}
public String getNaam() {
return naam;
}
public void setNaam(final String naam) {
this.naam = naam;
}
}
| {
"pile_set_name": "Github"
} |
//------------------------------------------------------------------------------
/*
This file is part of Beast: https://github.com/vinniefalco/Beast
Copyright 2013, Vinnie Falco <[email protected]>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#ifndef BEAST_UNIT_TEST_PRINT_H_INLCUDED
#define BEAST_UNIT_TEST_PRINT_H_INLCUDED
#include "amount.h"
#include "results.h"
#include "../streams/abstract_ostream.h"
#include "../streams/basic_std_ostream.h"
#include <iostream>
#include <string>
namespace beast {
namespace unit_test {
/** Write test results to the specified output stream. */
/** @{ */
inline
void
print (results const& r, abstract_ostream& stream)
{
for (auto const& s : r)
{
for (auto const& c : s)
{
stream <<
s.name() <<
(c.name().empty() ? "" : ("." + c.name()));
std::size_t i (1);
for (auto const& t : c.tests)
{
if (! t.pass)
stream <<
"#" << i <<
" failed: " << t.reason;
++i;
}
}
}
stream <<
amount (r.size(), "suite") << ", " <<
amount (r.cases(), "case") << ", " <<
amount (r.total(), "test") << " total, " <<
amount (r.failed(), "failure")
;
}
inline
void
print (results const& r, std::ostream& stream = std::cout)
{
auto s (make_std_ostream (stream));
print (r, s);
}
} // unit_test
} // beast
#endif
| {
"pile_set_name": "Github"
} |
// Copyright (c) 1997-2001 ETH Zurich (Switzerland).
// All rights reserved.
//
// This file is part of CGAL (www.cgal.org).
// You can redistribute it and/or modify it under the terms of the GNU
// General Public License as published by the Free Software Foundation,
// either version 3 of the License, or (at your option) any later version.
//
// Licensees holding a valid commercial license may use this file in
// accordance with the commercial license agreement provided with the software.
//
// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
//
// $URL$
// $Id$
//
//
// Author(s) : Kaspar Fischer <[email protected]>
#ifndef CGAL_CGAL_APPROX_MIN_ELL_D_H
#define CGAL_CGAL_APPROX_MIN_ELL_D_H
#include <CGAL/license/Bounding_volumes.h>
#include <cstdlib>
#include <cmath>
#include <vector>
#include <iostream>
#include <CGAL/Simple_cartesian.h>
#include <CGAL/Approximate_min_ellipsoid_d/Khachiyan_approximation.h>
namespace CGAL {
template<class Traits>
class Approximate_min_ellipsoid_d
// An instance of class Approximate_min_ellipsoid_d represents an
// eps-approximation of the smallest enclosing ellipsoid of the
// point set P in R^d, that is, an ellipsoid E satisfying
//
// (i) E contains P,
//
// (ii) the volume of E is at most (1+eps) times larger than the
// volume of the smallest enclosing ellipsoid of P.
{
public: // types:
typedef typename Traits::FT FT;
typedef typename Traits::Point Point;
typedef typename Traits::Cartesian_const_iterator Cartesian_const_iterator;
private:
typedef std::vector<Point> Point_list;
typedef typename Traits::Cartesian_const_iterator C_it;
protected: // member variables:
Traits tco; // traits class object
const Point_list P; // the input points in R^d
int d; // dimension of the input points
const double eps; // the desired epsilon
double e_eps; // (see below)
// We obtain our eps-approximating ellipsoid by embedding the input
// points P into R^{d+1} by mapping p to (p,1). Then we compute in
// R^{d+1} an (1+e_eps)-approximating centrally symmetric ellipsoid E for
// the embedded points from which the desired (1+eps)-approximating
// ellipsoid of the original points can be obtained by projection. The
// following variable E represents the e_eps-approximating centrally
// symmetric ellipsoid in R^{d+1}:
Khachiyan_approximation<true,Traits> *E;
// When the input points do not affinely span the whole space
// (i.e., if dim(aff(P)) < d), then the smallest enclosing
// ellipsoid of P has no volume in R^d and so the points are
// called "degnerate" (see is_degenerate()) below.
// As discussed below (before (*)), the centrally symmetric ellipsoid
// E':= sqrt{(1+a_eps)(d+1)} E contains (under exact arithmetic) the
// embedded points. (Here, a_eps is the value obtained by
// achieved_epsilon().) Denote by M the defining matrix of E; we
// then have
//
// p^T M p <= alpha,
//
// for all p in E and alpha = (1+a_eps)(d+1). Since this is equivalent
// to
//
// p^T M/alpha p <= 1, (***)
//
// we see that E' = sqrt{alpha} E has M/alpha as its defining
// matrix. Consequently, when we return M in the routines
// defining_matrix(), defining_vector(), and defining_scalar() below,
// these numbers need to be scaled (by the user) with the factor
// 1/alpha. (We do not perform the scaling ourselves because we cannot do
// it exactly in double arithmetic.)
public: // construction & destruction:
template<typename InputIterator>
Approximate_min_ellipsoid_d(double eps,
InputIterator first,InputIterator last,
const Traits& traits = Traits())
// Given a range [first,last) of n points P, constructs an
// (1+eps)-approximation of the smallest enclosing ellipsoid of P.
: tco(traits), P(first,last), eps(eps),
has_center(false), has_axes(false)
{
CGAL_APPEL_LOG("appel",
"Entering Approximate_min_ellpsoid_d." << std::endl);
// fetch ambient dimension:
d = tco.dimension(P[0]);
CGAL_APPEL_ASSERT(d >= 2 || eps >= 0.0);
// The ellipsoid E produced by Khachiyan's algorithm has the
// property that E':= sqrt{(1+e_eps) (d+1)} E contains all
// embedded points e_P and has volume bounded by
//
// vol(E') <= (1+e_eps)^{(d+1)/2} vol(E_emb*), (*)
//
// where E_emb* is the smallest centrally symmetric enclosing
// ellipsoid of the embedded points e_P. By requiring that (1+eps)
// <= (1+e_eps)^{(d+1)/2}, we get
//
// e_eps <= (1+eps)^{2/(d+1)} - 1 (**)
//
// and with this e_eps, we have vol(E') <= (1+eps) vol(E_emb*).
// An argument by Khachiyan (see "Rounding of polytopes in the
// real number model of computation", eq. (4.1)) then guarantees
// that the intersection E_int of E' with the hyperplane { (x,y)
// in R^{d+1} | y = 1} is an ellipsoid satisfying vol(E_int) <=
// (1+eps) vol(E*), where E* is the smallest enclosing ellipsoid
// of the original points P. According to (**) we thus set e_eps
// to (a lower bound on) (1+eps)^{2/(d+1)} - 1:
FPU_CW_t old = FPU_get_and_set_cw(CGAL_FE_TOWARDZERO); // round to zero
e_eps = std::exp(2.0/(d+1)*std::log(1.0+eps))-1.0;
FPU_set_cw(old); // restore
// rounding mode
// Find e (1+e_eps)-approximation for the embedded points. This
// only works when the points affinely span R^{d+1}.
E = new Khachiyan_approximation<true,Traits>(d, static_cast<int>(P.size()),tco);
const bool is_deg = !E->add(P.begin(),P.end(),e_eps);
// debugging:
CGAL_APPEL_ASSERT(is_deg == E->is_degenerate());
CGAL_APPEL_LOG("appel",
" Input points are " << (is_deg? "" : "not ") <<
"degnerate." << std::endl);
if (is_deg)
find_lower_dimensional_approximation();
CGAL_APPEL_LOG("appel",
"Leaving Approximate_min_ellipsoid_d." << std::endl);
}
~Approximate_min_ellipsoid_d()
{
// dispose of approximation:
if (E != static_cast<Khachiyan_approximation<true,Traits> *>(0))
delete E;
}
public: // access:
unsigned int number_of_points() const
// Returns the number of points, i.e., |P|.
{
return P.size();
}
bool is_empty() const
// Returns true iff the approximate ellipsoid is empty (which
// implies degeneracy). This is the case iff the number of input
// points was zero at construction time.
{
return P.size() == 0;
}
private: // access:
bool is_degenerate() const
// Returns true iff the approximate ellipsoid is degenerate, i.e.,
// iff the dimension of the affine hull of S doesn't match the
// dimension of the ambient space.
{
return E->is_degenerate();
}
public: // access:
// Here's how the routines defining_matrix(), defining_vector(),
// and defining_scalar() are implemented. From (***) we know that
// the ellipsoid E' = sqrt{alpha} E
//
// (a) encloses all embedded points (p,1), p in P,
// (b) has defining matrix M/alpha, i.e.,
//
// E' = { x | x^T M/alpha x <= 1 },
//
// where alpha = (1+a_eps)(d+1) with a_eps the return value
// of achieved_epsilon().
//
// The ellipsoid E* we actuallly want is the intersection of E' with
// the hyperplane { (y,z) in R^{d+1} | y = 1}. Writing
//
// [ M' m ] [ y ]
// M = [ m^T nu ] and x = [ 1 ] (*****)
//
// we thus obtain
//
// x^T M/alpha x = y^T y alpha/M + 2/alpha y^Tm + nu/alpha.
//
// It follows
//
// E* = { y | y^T M'/alpha y + 2/alpha y^Tm + (nu/alpha-1) <= 0 }. (****)
//
// This is what the routines defining_matrix(), defining_vector(),
// and defining_scalar() implement.
bool is_full_dimensional() const
// Returns !is_degenerate().
{
return !is_degenerate();
}
FT defining_matrix(int i,int j) const
// Returns the entry M(i,j) of the symmetric matrix M in the
// representation
//
// E* = { x | x^T M x + x^T m + mu <= 0 }
//
// of the computed approximation. More precisely, the routine does not
// return M(i,j) but the number (1+achieved_epsilon())*(d+1)*M(i,j).
//
// Precondition: !is_degenerate() && 0<=i<d && 0<=j<d
{
CGAL_APPEL_ASSERT(!is_degenerate() && 0<=i && i<d && 0<=j && j<d);
return E->matrix(i,j);
}
FT defining_vector(int i) const
// Returns the entry m(i) of the vector m in the representation
//
// E* = { x | x^T M x + x^T m + mu <= 0 }
//
// of the computed approximation. More precisely, the routine does not
// return m(i) but the number (1+achieved_epsilon())*(d+1)*m(i).
//
// Precondition: !is_degenerate() && 0<=i<d
{
CGAL_APPEL_ASSERT(!is_degenerate() && 0<=i && i<d);
return FT(2)*E->matrix(d,i); // Note: if FT is double, the
// multiplication by 2.0 is exact.
}
FT defining_scalar() const
// Returns the number mu in the representation
//
// E* = { x | x^T M x + x^T m + mu <= 0}
//
// of the computed approximation. More precisely, the routine does not
// return mu but the number (1+achieved_epsilon())*(d+1)*(mu+1).
//
// Precondition: !is_degenerate()
{
CGAL_APPEL_ASSERT(!is_degenerate());
return E->matrix(d,d);
}
double achieved_epsilon() const
// Returns the approximation ratio; more precisely, this returns a
// number r such that the computed ellipsoid is a (1+r)-approximation
// of MEL(P).
//
// Precondition: !is_degenerate()
{
CGAL_APPEL_ASSERT(!is_degenerate());
// From (*) we known that Khachian's algorithm produces a
// centrally-symmetric ellipsoid in R^{d+1} fulfilling
//
// vol(E') <= (1+k_eps)^{(d+1)/2} vol(E_emb*),
//
// where k_eps = E->exact_epsilon(). The projecting argument
// mentioned after (**) says that these approximation ratio also
// applies to the projected (d-dimensional) ellipsoids, and so the
// actual approximation ratio we obtain is
//
// ratio:= (1+k_eps)^{(d+1)/2}.
//
// So all we need to do is compute the smallest (let's say: a small)
// double number larger or equal to ratio.
//
// Todo: make the calculation below more stable, numerically?
const double k_eps = E->exact_epsilon();
FPU_CW_t old = FPU_get_and_set_cw(CGAL_FE_UPWARD); // round up
const double sum = 1.0 + k_eps;
double tmp = sum;
for (int i=0; i<d; ++i)
tmp *= sum;
const double eps = std::sqrt(tmp)-1.0;
FPU_set_cw(old); // restore
CGAL_APPEL_ASSERT(eps >= 0.0);
return eps;
}
Traits traits() const
{
return tco;
}
int dimension() const
{
return d;
}
public: // miscellaneous:
bool is_valid(bool verbose) const
// Returns true if and only if the computed ellipsoid is indeed an
// approximate ellipsoid, that is ... Todo.
{
return E->is_valid(verbose);
}
public: // miscellaneous 2D/3D support:
typedef std::vector<double>::const_iterator Center_coordinate_iterator;
typedef std::vector<double>::const_iterator Axes_lengths_iterator;
typedef std::vector<double>::const_iterator
Axes_direction_coordinate_iterator;
Center_coordinate_iterator center_cartesian_begin()
// Returns a STL random-access iterator pointing to the first of the d
// Cartesian coordinates of the computed ellipsoid's center. The center
// described in this way is a floating-point approximation to the
// ellipsoid's exact center; no guarantee is given w.r.t. the involved
// relative error.
//
// Precondition: !is_degenerate()
{
CGAL_APPEL_ASSERT(!is_degenerate());
if (!has_center)
compute_center();
return center_.begin();
}
Center_coordinate_iterator center_cartesian_end()
// Returns the past-the-end iterator corresponding to
// center_cartesian_begin().
//
// Precondition: !is_degenerate()
{
CGAL_APPEL_ASSERT(!is_degenerate());
if (!has_center)
compute_center();
return center_.end();
}
Axes_lengths_iterator axes_lengths_begin()
// Returns a STL random-access iterator to the first of the d lengths of
// the computed ellipsoid's axes. The d lengths are floating-point
// approximations to the exact axes-lengths of the computed ellipsoid; no
// guarantee is given w.r.t. the involved relative error. (See also method
// axes_direction_cartesian_begin().) The elements of the iterator are
// sorted descending.
//
// Precondition: !is_degenerate() && (d==2 || d==3)
{
CGAL_APPEL_ASSERT(!is_degenerate() && (d==2 || d==3));
if (!has_axes)
compute_axes_2_3();
return lengths_.begin();
}
Axes_lengths_iterator axes_lengths_end()
// Returns the past-the-end iterator corresponding to
// axes_lengths_begin().
//
// Precondition: !is_degenerate() && (d==2 || d==3)
{
CGAL_APPEL_ASSERT(!is_degenerate() && (d==2 || d==3));
if (!has_axes)
compute_axes_2_3();
return lengths_.end();
}
Axes_direction_coordinate_iterator axis_direction_cartesian_begin(int i)
// Returns a STL random-access iterator pointing to the first of the d
// Cartesian coordinates of the computed ellipsoid's i-th axis direction
// (i.e., unit vector in direction of the ellipsoid's i-th axis). The
// direction described by this iterator is a floating-point approximation
// to the exact axis direction of the computed ellipsoid; no guarantee is
// given w.r.t. the involved relative error. An approximation to the
// length of axis i is given by the i-th entry of axes_lengths_begin().
//
// Precondition: !is_degenerate() && (d==2 || d==3) && (0 <= i < d)
{
CGAL_APPEL_ASSERT(!is_degenerate() && (d==2 || d==3) &&
0 <= i && i < d);
if (!has_axes)
compute_axes_2_3();
return directions_[i].begin();
}
Axes_direction_coordinate_iterator axis_direction_cartesian_end(int i)
// Returns the past-the-end iterator corresponding to
// axis_direction_cartesian_begin().
//
// Precondition: !is_degenerate() && (d==2 || d==3) && (0 <= i < d)
{
CGAL_APPEL_ASSERT(!is_degenerate() && (d==2 || d==3) &&
0 <= i && i < d);
if (!has_axes)
compute_axes_2_3();
return directions_[i].end();
}
public: // internal members for 2D/3D axis/center computation:
bool has_center, has_axes; // true iff the center or axes-directions and
// -lengths, respectively, have already been
// computed
std::vector<double> center_;
std::vector<double> lengths_;
std::vector< std::vector<double> > directions_;
std::vector<double> mi; // contains M^{-1} (see (*****)
// above) iff has_center is true;
// mi[i+d*j] is entry (i,j) of
// M^{-1}
void compute_center();
void compute_axes_2_3();
void compute_axes_2(const double alpha, const double factor);
void compute_axes_3(const double alpha, const double factor);
public: // "debugging" routines:
void write_eps(const std::string& name);
// Writes the and the point set P to an EPS file. Returned is the
// id under which the file was stored (filename 'id.eps').
//
// Precondition: d==2 && !is_degenerate().
private: // internal routines:
void find_lower_dimensional_approximation(); // (does nothing right now)
};
}
#include <CGAL/Approximate_min_ellipsoid_d/Approximate_min_ellipsoid_d_impl.h>
#endif // CGAL_CGAL_APPROX_MIN_ELL_D_H
| {
"pile_set_name": "Github"
} |
<?php
/*
* Copyright (c) 2011-2015 Lp digital system
*
* This file is part of BackBee.
*
* BackBee is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* BackBee is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with BackBee. If not, see <http://www.gnu.org/licenses/>.
*
* @author Charles Rouillon <[email protected]>
*/
namespace BackBee\Security\Role;
use Symfony\Component\Security\Core\Role\Role as SymfonyRole;
/**
* @category BackBee
*
* @copyright Lp digital system
* @author c.rouillon <[email protected]>
*/
class Role extends SymfonyRole
{
}
| {
"pile_set_name": "Github"
} |
import numpy as np
import chainer
from chainer import cuda
import chainer.functions as F
from chainercv.links.model.faster_rcnn.utils.anchor_target_creator import AnchorTargetCreator
from utils.proposal_target_creator import ProposalTargetCreator
from chainer import computational_graph as c
from chainercv.links import PixelwiseSoftmaxClassifier
class MaskRCNNTrainChain(chainer.Chain):
def __init__(self, mask_rcnn, rpn_sigma=3., roi_sigma=1., gamma=1,
anchor_target_creator=AnchorTargetCreator(),
roi_size=7):
super(MaskRCNNTrainChain, self).__init__()
with self.init_scope():
self.mask_rcnn = mask_rcnn
self.rpn_sigma = rpn_sigma
self.roi_sigma = roi_sigma
self.anchor_target_creator = anchor_target_creator
self.proposal_target_creator = ProposalTargetCreator(roi_size=roi_size)
self.loc_normalize_mean = mask_rcnn.loc_normalize_mean
self.loc_normalize_std = mask_rcnn.loc_normalize_std
self.decayrate=0.99
self.avg_loss = None
self.gamma=gamma
def __call__(self, imgs, bboxes, labels, scale, masks):
if isinstance(bboxes, chainer.Variable):
bboxes = bboxes.data
if isinstance(labels, chainer.Variable):
labels = labels.data
if isinstance(scale, chainer.Variable):
scale = scale.data
if isinstance(masks, chainer.Variable):
masks = masks.data
scale = np.asscalar(cuda.to_cpu(scale[0]))
n = bboxes.shape[0]
#if n != 1:
# raise ValueError('only batch size 1 is supported')
_, _, H, W = imgs.shape
img_size = (H, W)
#Extractor (VGG) : img -> features
features = self.mask_rcnn.extractor(imgs)
#Region Proposal Network : features -> rpn_locs, rpn_scores, rois
rpn_loc_loss,rpn_cls_loss, roi_loc_loss, roi_cls_loss, mask_loss= 0,0,0,0,0
for i in range(n):
rpn_locs, rpn_scores, rois, roi_indices, anchor = self.mask_rcnn.rpn(
features[i:i+1], img_size, scale)
bbox, label, mask, rpn_score, rpn_loc, roi = \
bboxes[i], labels[i], masks[i], rpn_scores[0], rpn_locs[0], rois
mask[mask>1]=0
numdata = sum(label>=0)
label = label[0:numdata]
bbox = bbox[0:numdata]
mask = mask[0:numdata]
#proposal target : roi(proposed) , bbox(GT), label(GT) -> sample_roi, gt_roi_loc, gt_roi_label
#the targets are compared with the head output.
sample_roi, gt_roi_loc, gt_roi_label, gt_roi_mask = self.proposal_target_creator(
roi, bbox, label, mask, self.loc_normalize_mean, self.loc_normalize_std)
sample_roi_index = self.xp.zeros((len(sample_roi),), dtype=np.int32)
#Head Network : features, sample_roi -> roi_cls_loc, roi_score
roi_cls_loc, roi_score, roi_cls_mask = self.mask_rcnn.head(
features[i:i+1], sample_roi, sample_roi_index)
#RPN losses
gt_rpn_loc, gt_rpn_label = self.anchor_target_creator(bbox, anchor, img_size)
rpn_loc_loss += _fast_rcnn_loc_loss(rpn_loc, gt_rpn_loc, gt_rpn_label, self.rpn_sigma)
rpn_cls_loss += F.softmax_cross_entropy(rpn_score, gt_rpn_label)
#Head output losses
n_sample = roi_cls_loc.shape[0]
roi_cls_loc = roi_cls_loc.reshape((n_sample, -1, 4))
roi_loc = roi_cls_loc[self.xp.arange(n_sample), gt_roi_label]
roi_mask = roi_cls_mask[self.xp.arange(n_sample), gt_roi_label]
roi_loc_loss += _fast_rcnn_loc_loss(roi_loc, gt_roi_loc, gt_roi_label, self.roi_sigma)
roi_cls_loss += F.softmax_cross_entropy(roi_score, gt_roi_label)
#mask loss: average binary cross-entropy loss
mask_loss += F.sigmoid_cross_entropy(roi_mask[0:gt_roi_mask.shape[0]], gt_roi_mask)
#total loss
loss = rpn_loc_loss + rpn_cls_loss + roi_loc_loss + roi_cls_loss + self.gamma * mask_loss
loss /= n
#avg loss calculation
if self.avg_loss is None:
self.avg_loss = loss.data
else:
self.avg_loss = self.avg_loss * self.decayrate + loss.data*(1-self.decayrate)
chainer.reporter.report({'rpn_loc_loss':rpn_loc_loss/n,
'rpn_cls_loss':rpn_cls_loss/n,
'roi_loc_loss':roi_loc_loss/n,
'roi_cls_loss':roi_cls_loss/n,
'roi_mask_loss':self.gamma * mask_loss/n,
'avg_loss':self.avg_loss,
'loss':loss}, self)
return loss
def _smooth_l1_loss(x, t, in_weight, sigma):
sigma2 = sigma ** 2
diff = in_weight * (x - t)
abs_diff = F.absolute(diff)
flag = (abs_diff.data < (1. / sigma2)).astype(np.float32)
y = (flag * (sigma2 / 2.) * F.square(diff) +
(1 - flag) * (abs_diff - 0.5 / sigma2))
return F.sum(y)
def _fast_rcnn_loc_loss(pred_loc, gt_loc, gt_label, sigma):
xp = chainer.cuda.get_array_module(pred_loc)
in_weight = xp.zeros_like(gt_loc)
in_weight[gt_label > 0] = 1
loc_loss = _smooth_l1_loss(pred_loc, gt_loc, in_weight, sigma)
loc_loss /= xp.sum(gt_label >= 0)
return loc_loss
| {
"pile_set_name": "Github"
} |
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'target_defaults': {
'default_configuration': 'Baz',
'configurations': {
'Baz': {
'msvs_configuration_attributes': {
'OutputDirectory': '<(DEPTH)/foo',
'IntermediateDirectory': '$(OutDir)/bar',
},
},
},
},
}
| {
"pile_set_name": "Github"
} |
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>netcoreapp2.1</TargetFramework>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<DocumentationFile>bin\Debug\netcoreapp2.1\SecurityDemo.Authentication.JWT.xml</DocumentationFile>
</PropertyGroup>
<ItemGroup>
<Folder Include="wwwroot\" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.App" />
<PackageReference Include="Microsoft.VisualStudio.Web.CodeGeneration.Design" Version="2.1.1" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="4.0.1" />
</ItemGroup>
</Project>
| {
"pile_set_name": "Github"
} |
---
title: Reference documentation
notoc: true
---
This section includes the reference documentation for the Docker platform's
various APIs, CLIs, and file formats.
## File formats
| File format | Description |
|:--------------------------------------------------------------------|:----------------------------------------------------------------|
| [Dockerfile](/engine/reference/builder/) | Defines the contents and startup behavior of a single container |
| [Compose file](/compose/compose-file/) | Defines a multi-container application |
## Command-line interfaces (CLIs)
| CLI | Description |
|:--------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------|
| [Docker CLI](/engine/reference/commandline/cli/) | The main CLI for Docker, includes all `docker` commands |
| [Compose CLI](/compose/reference/overview/) | The CLI for Docker Compose, which allows you to build and run multi-container applications |
| [Daemon CLI (dockerd)](/engine/reference/commandline/dockerd/) | Persistent process that manages containers |
## Application programming interfaces (APIs)
| API | Description |
|:------------------------------------------------------|:---------------------------------------------------------------------------------------|
| [Engine API](/engine/api/) | The main API for Docker, provides programmatic access to a daemon |
| [Registry API](/registry/spec/api/) | Facilitates distribution of images to the engine |
| [Template API](app-template/api-reference)| Allows users to create new Docker applications by using a library of templates.|
## Drivers and specifications
| Driver | Description |
|:-------------------------------------------------------|:-----------------------------------------------------------------------------------|
| [Image specification](/registry/spec/manifest-v2-2/) | Describes the various components of a Docker image |
| [Registry token authentication](/registry/spec/auth/) | Outlines the Docker registry authentication scheme |
| [Registry storage drivers](/registry/storage-drivers/) | Enables support for given cloud providers when storing images with Registry | | {
"pile_set_name": "Github"
} |
<!doctype html>
<html lang="en">
<head>
<title>Code coverage report for socket.io-client/lib/url.js</title>
<meta charset="utf-8">
<link rel="stylesheet" href="../../prettify.css">
<style>
body, html {
margin:0; padding: 0;
}
body {
font-family: Helvetica Neue, Helvetica,Arial;
font-size: 10pt;
}
div.header, div.footer {
background: #eee;
padding: 1em;
}
div.header {
z-index: 100;
position: fixed;
top: 0;
border-bottom: 1px solid #666;
width: 100%;
}
div.footer {
border-top: 1px solid #666;
}
div.body {
margin-top: 10em;
}
div.meta {
font-size: 90%;
text-align: center;
}
h1, h2, h3 {
font-weight: normal;
}
h1 {
font-size: 12pt;
}
h2 {
font-size: 10pt;
}
pre {
font-family: Consolas, Menlo, Monaco, monospace;
margin: 0;
padding: 0;
line-height: 14px;
font-size: 14px;
-moz-tab-size: 2;
-o-tab-size: 2;
tab-size: 2;
}
div.path { font-size: 110%; }
div.path a:link, div.path a:visited { color: #000; }
table.coverage { border-collapse: collapse; margin:0; padding: 0 }
table.coverage td {
margin: 0;
padding: 0;
color: #111;
vertical-align: top;
}
table.coverage td.line-count {
width: 50px;
text-align: right;
padding-right: 5px;
}
table.coverage td.line-coverage {
color: #777 !important;
text-align: right;
border-left: 1px solid #666;
border-right: 1px solid #666;
}
table.coverage td.text {
}
table.coverage td span.cline-any {
display: inline-block;
padding: 0 5px;
width: 40px;
}
table.coverage td span.cline-neutral {
background: #eee;
}
table.coverage td span.cline-yes {
background: #b5d592;
color: #999;
}
table.coverage td span.cline-no {
background: #fc8c84;
}
.cstat-yes { color: #111; }
.cstat-no { background: #fc8c84; color: #111; }
.fstat-no { background: #ffc520; color: #111 !important; }
.cbranch-no { background: yellow !important; color: #111; }
.cstat-skip { background: #ddd; color: #111; }
.fstat-skip { background: #ddd; color: #111 !important; }
.cbranch-skip { background: #ddd !important; color: #111; }
.missing-if-branch {
display: inline-block;
margin-right: 10px;
position: relative;
padding: 0 4px;
background: black;
color: yellow;
}
.skip-if-branch {
display: none;
margin-right: 10px;
position: relative;
padding: 0 4px;
background: #ccc;
color: white;
}
.missing-if-branch .typ, .skip-if-branch .typ {
color: inherit !important;
}
.entity, .metric { font-weight: bold; }
.metric { display: inline-block; border: 1px solid #333; padding: 0.3em; background: white; }
.metric small { font-size: 80%; font-weight: normal; color: #666; }
div.coverage-summary table { border-collapse: collapse; margin: 3em; font-size: 110%; }
div.coverage-summary td, div.coverage-summary table th { margin: 0; padding: 0.25em 1em; border-top: 1px solid #666; border-bottom: 1px solid #666; }
div.coverage-summary th { text-align: left; border: 1px solid #666; background: #eee; font-weight: normal; }
div.coverage-summary th.file { border-right: none !important; }
div.coverage-summary th.pic { border-left: none !important; text-align: right; }
div.coverage-summary th.pct { border-right: none !important; }
div.coverage-summary th.abs { border-left: none !important; text-align: right; }
div.coverage-summary td.pct { text-align: right; border-left: 1px solid #666; }
div.coverage-summary td.abs { text-align: right; font-size: 90%; color: #444; border-right: 1px solid #666; }
div.coverage-summary td.file { text-align: right; border-left: 1px solid #666; white-space: nowrap; }
div.coverage-summary td.pic { min-width: 120px !important; }
div.coverage-summary a:link { text-decoration: none; color: #000; }
div.coverage-summary a:visited { text-decoration: none; color: #333; }
div.coverage-summary a:hover { text-decoration: underline; }
div.coverage-summary tfoot td { border-top: 1px solid #666; }
div.coverage-summary .yui3-datatable-sort-indicator, div.coverage-summary .dummy-sort-indicator {
height: 10px;
width: 7px;
display: inline-block;
margin-left: 0.5em;
}
div.coverage-summary .yui3-datatable-sort-indicator {
background: url("http://yui.yahooapis.com/3.6.0/build/datatable-sort/assets/skins/sam/sort-arrow-sprite.png") no-repeat scroll 0 0 transparent;
}
div.coverage-summary .yui3-datatable-sorted .yui3-datatable-sort-indicator {
background-position: 0 -20px;
}
div.coverage-summary .yui3-datatable-sorted-desc .yui3-datatable-sort-indicator {
background-position: 0 -10px;
}
.high { background: #b5d592 !important; }
.medium { background: #ffe87c !important; }
.low { background: #fc8c84 !important; }
span.cover-fill, span.cover-empty {
display:inline-block;
border:1px solid #444;
background: white;
height: 12px;
}
span.cover-fill {
background: #ccc;
border-right: 1px solid #444;
}
span.cover-empty {
background: white;
border-left: none;
}
span.cover-full {
border-right: none !important;
}
pre.prettyprint {
border: none !important;
padding: 0 !important;
margin: 0 !important;
}
.com { color: #999 !important; }
.ignore-none { color: #999; font-weight: normal; }
</style>
</head>
<body>
<div class="header high">
<h1>Code coverage report for <span class="entity">socket.io-client/lib/url.js</span></h1>
<h2>
Statements: <span class="metric">100% <small>(25 / 25)</small></span>
Branches: <span class="metric">96.15% <small>(25 / 26)</small></span>
Functions: <span class="metric">100% <small>(1 / 1)</small></span>
Lines: <span class="metric">100% <small>(24 / 24)</small></span>
Ignored: <span class="metric"><span class="ignore-none">none</span></span>
</h2>
<div class="path"><a href="../../index.html">All files</a> » <a href="index.html">socket.io-client/lib/</a> » url.js</div>
</div>
<div class="body">
<pre><table class="coverage">
<tr><td class="line-count">1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69</td><td class="line-coverage"><span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1</span>
<span class="cline-any cline-yes">1</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1</span>
<span class="cline-any cline-yes">20</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">20</span>
<span class="cline-any cline-yes">20</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">8</span>
<span class="cline-any cline-yes">8</span>
<span class="cline-any cline-yes">2</span>
<span class="cline-any cline-yes">1</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">8</span>
<span class="cline-any cline-yes">3</span>
<span class="cline-any cline-yes">3</span>
<span class="cline-any cline-yes">2</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">8</span>
<span class="cline-any cline-yes">8</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">8</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">8</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">8</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">8</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">8</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span></td><td class="text"><pre class="prettyprint lang-js">
/**
* Module dependencies.
*/
var parseuri = require('parseuri');
var debug = require('debug')('socket.io-client:url');
/**
* Module exports.
*/
module.exports = url;
/**
* URL parser.
*
* @param {String} url
* @param {Object} An object meant to mimic window.location.
* Defaults to window.location.
* @api public
*/
function url(uri, loc){
var obj = uri;
// default to window.location
var loc = loc || global.location;
if (null == uri) uri = loc.protocol + '//' + loc.hostname;
// relative path support
<span class="missing-if-branch" title="else path not taken" >E</span>if ('string' == typeof uri) {
if ('/' == uri.charAt(0)) {
if ('undefined' != typeof loc) {
uri = loc.hostname + uri;
}
}
if (!/^(https?|wss?):\/\//.test(uri)) {
debug('protocol-less url %s', uri);
if ('undefined' != typeof loc) {
uri = loc.protocol + '//' + uri;
} else {
uri = 'https://' + uri;
}
}
// parse
debug('parse %s', uri);
obj = parseuri(uri);
}
// make sure we treat `localhost:80` and `localhost` equally
if ((/(http|ws)/.test(obj.protocol) && 80 == obj.port) ||
(/(http|ws)s/.test(obj.protocol) && 443 == obj.port)) {
delete obj.port;
}
obj.path = obj.path || '/';
// define unique id
obj.id = obj.protocol + obj.host + (obj.port ? (':' + obj.port) : '');
// define href
obj.href = obj.protocol + '://' + obj.host + (obj.port ? (':' + obj.port) : '');
return obj;
}
</pre></td></tr>
</table></pre>
</div>
<div class="footer">
<div class="meta">Generated by <a href="http://istanbul-js.org/" target="_blank">istanbul</a> at Sat May 31 2014 14:57:59 GMT-0700 (PDT)</div>
</div>
<script src="../../prettify.js"></script>
<script src="http://yui.yahooapis.com/3.6.0/build/yui/yui-min.js"></script>
<script>
YUI().use('datatable', function (Y) {
var formatters = {
pct: function (o) {
o.className += o.record.get('classes')[o.column.key];
try {
return o.value.toFixed(2) + '%';
} catch (ex) { return o.value + '%'; }
},
html: function (o) {
o.className += o.record.get('classes')[o.column.key];
return o.record.get(o.column.key + '_html');
}
},
defaultFormatter = function (o) {
o.className += o.record.get('classes')[o.column.key];
return o.value;
};
function getColumns(theadNode) {
var colNodes = theadNode.all('tr th'),
cols = [],
col;
colNodes.each(function (colNode) {
col = {
key: colNode.getAttribute('data-col'),
label: colNode.get('innerHTML') || ' ',
sortable: !colNode.getAttribute('data-nosort'),
className: colNode.getAttribute('class'),
type: colNode.getAttribute('data-type'),
allowHTML: colNode.getAttribute('data-html') === 'true' || colNode.getAttribute('data-fmt') === 'html'
};
col.formatter = formatters[colNode.getAttribute('data-fmt')] || defaultFormatter;
cols.push(col);
});
return cols;
}
function getRowData(trNode, cols) {
var tdNodes = trNode.all('td'),
i,
row = { classes: {} },
node,
name;
for (i = 0; i < cols.length; i += 1) {
name = cols[i].key;
node = tdNodes.item(i);
row[name] = node.getAttribute('data-value') || node.get('innerHTML');
row[name + '_html'] = node.get('innerHTML');
row.classes[name] = node.getAttribute('class');
//Y.log('Name: ' + name + '; Value: ' + row[name]);
if (cols[i].type === 'number') { row[name] = row[name] * 1; }
}
//Y.log(row);
return row;
}
function getData(tbodyNode, cols) {
var data = [];
tbodyNode.all('tr').each(function (trNode) {
data.push(getRowData(trNode, cols));
});
return data;
}
function replaceTable(node) {
if (!node) { return; }
var cols = getColumns(node.one('thead')),
data = getData(node.one('tbody'), cols),
table,
parent = node.get('parentNode');
table = new Y.DataTable({
columns: cols,
data: data,
sortBy: 'file'
});
parent.set('innerHTML', '');
table.render(parent);
}
Y.on('domready', function () {
replaceTable(Y.one('div.coverage-summary table'));
if (typeof prettyPrint === 'function') {
prettyPrint();
}
});
});
</script>
</body>
</html>
| {
"pile_set_name": "Github"
} |
#ifndef __StaticBoundarySimulator_h__
#define __StaticBoundarySimulator_h__
#include "BoundarySimulator.h"
namespace SPH
{
class SimulatorBase;
class TriangleMesh;
class StaticBoundarySimulator : public BoundarySimulator
{
protected:
SimulatorBase *m_base;
void loadObj(const std::string &filename, TriangleMesh &mesh, const Vector3r &scale);
public:
StaticBoundarySimulator(SimulatorBase *base);
virtual ~StaticBoundarySimulator();
virtual void initBoundaryData();
};
}
#endif
| {
"pile_set_name": "Github"
} |
@page lsg-adding Adding to Existing JS Docs
@parent lsg.guides 1
@group lsg-adding-group-intro 0 Intro
@group lsg-adding-group-setup 1 Setup
@group lsg-adding-group-next-steps 2 Next Steps
This guide will:
* Give a designer-friendly explanation of what DocumentJS does
* Make sure you have everything you need installed
* Help you configure DocumentJS to add a Live Style Guide
* Explain how to use [tags](http://documentjs.com/docs/documentjs.tags.html) to write your Live Style Guide
You should start elsewhere if:
* You want to [create a Live Style Guide on a project that doesn't already use DocumentJS](/docs/lsg-quickstart.html)
* You still need to set up DocumentJS for [API documentation](http://documentjs.com/docs/index.html)
* You're just [trying to kill time](https://www.youtube.com/watch?v=6EneCIPJsog)
First, [a brief disclaimer](/docs/lsg-adding-disclaimer.html) | {
"pile_set_name": "Github"
} |
module("luci.controller.verysync", package.seeall)
function index()
if not nixio.fs.access("/etc/config/verysync") then
return
end
entry({"admin", "nas", "verysync"}, cbi("verysync"), _("Verysync"), 10).dependent = true
entry({"admin","nas","verysync","status"},call("act_status")).leaf=true
end
function act_status()
local e={}
e.running=luci.sys.call("pgrep verysync >/dev/null")==0
luci.http.prepare_content("application/json")
luci.http.write_json(e)
end
| {
"pile_set_name": "Github"
} |
๏ปฟ//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by AsyncGenerator.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using NHibernate;
using NHibernate.Engine;
namespace NHibernate.Context
{
using System.Threading.Tasks;
using System.Threading;
public partial class ThreadLocalSessionContext : ICurrentSessionContext
{
private static async Task CleanupAnyOrphanedSessionAsync(ISessionFactory factory, CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested();
ISession orphan = DoUnbind(factory, false);
if (orphan != null)
{
log.Warn("Already session bound on call to bind(); make sure you clean up your sessions!");
try
{
try
{
var transaction = orphan.GetCurrentTransaction();
if (transaction?.IsActive == true)
await (transaction.RollbackAsync(cancellationToken)).ConfigureAwait(false);
}
catch (OperationCanceledException) { throw; }
catch (Exception ex)
{
log.Debug(ex, "Unable to rollback transaction for orphaned session");
}
orphan.Close();
}
catch (OperationCanceledException) { throw; }
catch (Exception ex)
{
log.Debug(ex, "Unable to close orphaned session");
}
}
}
public static async Task BindAsync(ISession session, CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested();
ISessionFactory factory = session.SessionFactory;
await (CleanupAnyOrphanedSessionAsync(factory, cancellationToken)).ConfigureAwait(false);
DoBind(session, factory);
}
}
}
| {
"pile_set_name": "Github"
} |
/*
* arch/arm/mach-sa1100/include/mach/assabet.h
*
* Created 2000/06/05 by Nicolas Pitre <[email protected]>
*
* This file contains the hardware specific definitions for Assabet
* Only include this file from SA1100-specific files.
*
* 2000/05/23 John Dorsey <[email protected]>
* Definitions for Neponset added.
*/
#ifndef __ASM_ARCH_ASSABET_H
#define __ASM_ARCH_ASSABET_H
/* System Configuration Register flags */
#define ASSABET_SCR_SDRAM_LOW (1<<2) /* SDRAM size (low bit) */
#define ASSABET_SCR_SDRAM_HIGH (1<<3) /* SDRAM size (high bit) */
#define ASSABET_SCR_FLASH_LOW (1<<4) /* Flash size (low bit) */
#define ASSABET_SCR_FLASH_HIGH (1<<5) /* Flash size (high bit) */
#define ASSABET_SCR_GFX (1<<8) /* Graphics Accelerator (0 = present) */
#define ASSABET_SCR_SA1111 (1<<9) /* Neponset (0 = present) */
#define ASSABET_SCR_INIT -1
extern unsigned long SCR_value;
#ifdef CONFIG_ASSABET_NEPONSET
#define machine_has_neponset() ((SCR_value & ASSABET_SCR_SA1111) == 0)
#else
#define machine_has_neponset() (0)
#endif
/* Board Control Register */
#define ASSABET_BCR_BASE 0xf1000000
#define ASSABET_BCR (*(volatile unsigned int *)(ASSABET_BCR_BASE))
#define ASSABET_BCR_CF_PWR (1<<0) /* Compact Flash Power (1 = 3.3v, 0 = off) */
#define ASSABET_BCR_CF_RST (1<<1) /* Compact Flash Reset (1 = power up reset) */
#define ASSABET_BCR_GFX_RST (1<<1) /* Graphics Accelerator Reset (0 = hold reset) */
#define ASSABET_BCR_CODEC_RST (1<<2) /* 0 = Holds UCB1300, ADI7171, and UDA1341 in reset */
#define ASSABET_BCR_IRDA_FSEL (1<<3) /* IRDA Frequency select (0 = SIR, 1 = MIR/ FIR) */
#define ASSABET_BCR_IRDA_MD0 (1<<4) /* Range/Power select */
#define ASSABET_BCR_IRDA_MD1 (1<<5) /* Range/Power select */
#define ASSABET_BCR_STEREO_LB (1<<6) /* Stereo Loopback */
#define ASSABET_BCR_CF_BUS_OFF (1<<7) /* Compact Flash bus (0 = on, 1 = off (float)) */
#define ASSABET_BCR_AUDIO_ON (1<<8) /* Audio power on */
#define ASSABET_BCR_LIGHT_ON (1<<9) /* Backlight */
#define ASSABET_BCR_LCD_12RGB (1<<10) /* 0 = 16RGB, 1 = 12RGB */
#define ASSABET_BCR_LCD_ON (1<<11) /* LCD power on */
#define ASSABET_BCR_RS232EN (1<<12) /* RS232 transceiver enable */
#define ASSABET_BCR_LED_RED (1<<13) /* D9 (0 = on, 1 = off) */
#define ASSABET_BCR_LED_GREEN (1<<14) /* D8 (0 = on, 1 = off) */
#define ASSABET_BCR_VIB_ON (1<<15) /* Vibration motor (quiet alert) */
#define ASSABET_BCR_COM_DTR (1<<16) /* COMport Data Terminal Ready */
#define ASSABET_BCR_COM_RTS (1<<17) /* COMport Request To Send */
#define ASSABET_BCR_RAD_WU (1<<18) /* Radio wake up interrupt */
#define ASSABET_BCR_SMB_EN (1<<19) /* System management bus enable */
#define ASSABET_BCR_TV_IR_DEC (1<<20) /* TV IR Decode Enable (not implemented) */
#define ASSABET_BCR_QMUTE (1<<21) /* Quick Mute */
#define ASSABET_BCR_RAD_ON (1<<22) /* Radio Power On */
#define ASSABET_BCR_SPK_OFF (1<<23) /* 1 = Speaker amplifier power off */
#ifdef CONFIG_SA1100_ASSABET
extern void ASSABET_BCR_frob(unsigned int mask, unsigned int set);
#else
#define ASSABET_BCR_frob(x,y) do { } while (0)
#endif
#define ASSABET_BCR_set(x) ASSABET_BCR_frob((x), (x))
#define ASSABET_BCR_clear(x) ASSABET_BCR_frob((x), 0)
#define ASSABET_BSR_BASE 0xf1000000
#define ASSABET_BSR (*(volatile unsigned int*)(ASSABET_BSR_BASE))
#define ASSABET_BSR_RS232_VALID (1 << 24)
#define ASSABET_BSR_COM_DCD (1 << 25)
#define ASSABET_BSR_COM_CTS (1 << 26)
#define ASSABET_BSR_COM_DSR (1 << 27)
#define ASSABET_BSR_RAD_CTS (1 << 28)
#define ASSABET_BSR_RAD_DSR (1 << 29)
#define ASSABET_BSR_RAD_DCD (1 << 30)
#define ASSABET_BSR_RAD_RI (1 << 31)
/* GPIOs (bitmasks) for which the generic definition doesn't say much */
#define ASSABET_GPIO_RADIO_IRQ GPIO_GPIO (14) /* Radio interrupt request */
#define ASSABET_GPIO_PS_MODE_SYNC GPIO_GPIO (16) /* Power supply mode/sync */
#define ASSABET_GPIO_STEREO_64FS_CLK GPIO_GPIO (19) /* SSP UDA1341 clock input */
#define ASSABET_GPIO_GFX_IRQ GPIO_GPIO (24) /* Graphics IRQ */
#define ASSABET_GPIO_BATT_LOW GPIO_GPIO (26) /* Low battery */
#define ASSABET_GPIO_RCLK GPIO_GPIO (26) /* CCLK/2 */
/* These are gpiolib GPIO numbers, not bitmasks */
#define ASSABET_GPIO_CF_IRQ 21 /* CF IRQ */
#define ASSABET_GPIO_CF_CD 22 /* CF CD */
#define ASSABET_GPIO_CF_BVD2 24 /* CF BVD / IOSPKR */
#define ASSABET_GPIO_CF_BVD1 25 /* CF BVD / IOSTSCHG */
#endif
| {
"pile_set_name": "Github"
} |
<RCC>
<qresource prefix="/">
<file>right_arrow_disable.png</file>
<file>right_arrow.png</file>
<file>left_arrow_disable.png</file>
<file>left_arrow.png</file>
<file>popupCalendar.qml</file>
</qresource>
</RCC>
| {
"pile_set_name": "Github"
} |
### YamlMime:Landing
title: Mobile Development with C++ | Windows UWP, Android and iOS # < 60 chars
summary: Create native C++ apps for iOS, Android, and Windows devices with Visual Studio. # < 160 chars
metadata:
title: Mobile development with C++ documentation # Required; page title displayed in search results. Include the brand. < 60 chars.
description: Create native C++ apps for iOS, Android, and Windows devices with Visual Studio.
ms.topic: landing-page
author: corob-msft
ms.author: corob
ms.date: 05/26/2020
# linkListType: architecture | concept | deploy | download | get-started | how-to-guide | learn | overview | quickstart | reference | tutorial | video | whats-new
landingContent:
# Cards and links should be based on top customer tasks or top subjects
# Start card title with a verb
# Card (optional)
- title: Learn how to build cross-platform mobile apps
linkLists:
- linkListType: get-started
links:
- text: Install cross-platform mobile development with C++
url: install-visual-cpp-for-cross-platform-mobile-development.md
- linkListType: overview
links:
- text: Overview of cross-platform mobile development with C++
url: visual-cpp-for-cross-platform-mobile-development.md
- linkListType: tutorial
links:
- text: Build an OpenGL ES application on Android and iOS
url: build-an-opengl-es-application-on-android-and-ios.md
- text: Create an Android Native Activity App
url: create-an-android-native-activity-app.md
- linkListType: learn
links:
- text: Cross-platform mobile development examples
url: cross-platform-mobile-development-examples.md
# Card (optional)
- title: Cross-platform reference
linkLists:
- linkListType: reference
links:
- text: C++ cross-platform property page reference
url: cross-platform-prop-pages.md
- text: Universal Windows Platform (UWP) documentation
url: /cpp/cppcx/universal-windows-apps-cpp
- text: iOS API documentation
url: https://developer.apple.com/reference
- text: Android API documentation
url: https://developer.android.com/guide/index.html
| {
"pile_set_name": "Github"
} |
# Stubs for networkx.algorithms.assortativity (Python 3.5)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from networkx.algorithms.assortativity.connectivity import *
from networkx.algorithms.assortativity.correlation import *
from networkx.algorithms.assortativity.mixing import *
from networkx.algorithms.assortativity.neighbor_degree import *
from networkx.algorithms.assortativity.pairs import *
| {
"pile_set_name": "Github"
} |
# Make sure that connection_control plugin can be loaded
--source ../inc/have_connection_control_plugin.inc
# Save the initial number of concurrent sessions
--source include/count_sessions.inc
--echo #-----------------------------------------------------------------------
--echo # Setup
--echo # Install connection_control plugin
--source ../inc/install_connection_control_plugin.inc
CREATE USER no_privs@localhost IDENTIFIED BY 'abcd';
connect(conn_no_privs, localhost, no_privs, abcd,,,,);
connection default;
--echo #-----------------------------------------------------------------------
--echo # Case 1 : connection_control_failed_connections_threshold
SHOW GRANTS;
SET @saved_value = @@global.connection_control_failed_connections_threshold;
SELECT @saved_value;
SET @@global.connection_control_failed_connections_threshold = @saved_value;
--echo # 1.1 : Setting connection_control_failed_connections_threshold to valid
--echo # value
SET @@global.connection_control_failed_connections_threshold = 20;
SELECT @@global.connection_control_failed_connections_threshold;
SET @@global.connection_control_failed_connections_threshold = 2000;
SELECT @@global.connection_control_failed_connections_threshold;
SET @@global.connection_control_failed_connections_threshold = 2147483647;
SELECT @@global.connection_control_failed_connections_threshold;
SET @@global.connection_control_failed_connections_threshold = DEFAULT;
SELECT @@global.connection_control_failed_connections_threshold;
--echo # 1.2 : Setting connection_control_failed_connections_threshold to
--echo # invalid value
--error ER_WRONG_TYPE_FOR_VAR
SET @@global.connection_control_failed_connections_threshold = NULL;
SELECT @@global.connection_control_failed_connections_threshold;
--error ER_WRONG_TYPE_FOR_VAR
SET @@global.connection_control_failed_connections_threshold = `SELECT * FROM mysql.user`;
SELECT @@global.connection_control_failed_connections_threshold;
--error ER_WRONG_VALUE_FOR_VAR
SET @@global.connection_control_failed_connections_threshold = -20;
SELECT @@global.connection_control_failed_connections_threshold;
--error ER_WRONG_VALUE_FOR_VAR
SET @@global.connection_control_failed_connections_threshold = 9223372036854775808;
SELECT @@global.connection_control_failed_connections_threshold;
--error ER_WRONG_VALUE_FOR_VAR
SET @@global.connection_control_failed_connections_threshold = -9223372036854775808;
SELECT @@global.connection_control_failed_connections_threshold;
--echo # Switch to conn_no_privs
connection conn_no_privs;
--echo # 1.3 : Use no_privs@localhost to set
--echo # connection_control_failed_connections_threshold to valid value
--error ER_SPECIFIC_ACCESS_DENIED_ERROR
SET @@global.connection_control_failed_connections_threshold = 2147483647;
SELECT @@global.connection_control_failed_connections_threshold;
--error ER_SPECIFIC_ACCESS_DENIED_ERROR
SET @@global.connection_control_failed_connections_threshold = DEFAULT;
SELECT @@global.connection_control_failed_connections_threshold;
--echo # 1.4 : Use no_privs@localhost to set
--echo # connection_control_failed_connections_threshold to invalid value
--error ER_SPECIFIC_ACCESS_DENIED_ERROR
SET @@global.connection_control_failed_connections_threshold = NULL;
SELECT @@global.connection_control_failed_connections_threshold;
--error ER_SPECIFIC_ACCESS_DENIED_ERROR
SET @@global.connection_control_failed_connections_threshold = 9223372036854775808;
SELECT @@global.connection_control_failed_connections_threshold;
connection default;
SET @@global.connection_control_failed_connections_threshold = @saved_value;
SELECT @@global.connection_control_failed_connections_threshold;
--echo #-----------------------------------------------------------------------
--echo # Case 2 : connection_control_min_connection_delay
SET @saved_value= @@global.connection_control_min_connection_delay;
SELECT @saved_value;
--echo # 2.1 : Setting connection_control_min_connection_delay to valid
--echo # value
SET @@global.connection_control_min_connection_delay = 20000;
SELECT @@global.connection_control_min_connection_delay;
SET @@global.connection_control_min_connection_delay = 2000;
SELECT @@global.connection_control_min_connection_delay;
SET @@global.connection_control_min_connection_delay = 2147483647;
SELECT @@global.connection_control_min_connection_delay;
SET @@global.connection_control_min_connection_delay = DEFAULT;
SELECT @@global.connection_control_min_connection_delay;
--echo # 2.2 : Setting connection_control_min_connection_delay to
--echo # invalid value
--error ER_WRONG_TYPE_FOR_VAR
SET @@global.connection_control_min_connection_delay = NULL;
SELECT @@global.connection_control_min_connection_delay;
--error ER_WRONG_TYPE_FOR_VAR
SET @@global.connection_control_min_connection_delay = `SELECT * FROM mysql.user`;
SELECT @@global.connection_control_min_connection_delay;
--error ER_WRONG_VALUE_FOR_VAR
SET @@global.connection_control_min_connection_delay = -20;
SELECT @@global.connection_control_min_connection_delay;
--error ER_WRONG_VALUE_FOR_VAR
SET @@global.connection_control_min_connection_delay = 9223372036854775808;
SELECT @@global.connection_control_min_connection_delay;
--error ER_WRONG_VALUE_FOR_VAR
SET @@global.connection_control_min_connection_delay = -9223372036854775808;
SELECT @@global.connection_control_min_connection_delay;
--error ER_WRONG_VALUE_FOR_VAR
SET@@global.connection_control_min_connection_delay = 20;
SELECT @@global.connection_control_min_connection_delay;
--echo # Switch to conn_no_privs
connection conn_no_privs;
--echo # 2.3 : Use no_privs@localhost to set
--echo # connection_control_min_connection_delay to valid value
--error ER_SPECIFIC_ACCESS_DENIED_ERROR
SET @@global.connection_control_min_connection_delay = 2147483647;
SELECT @@global.connection_control_min_connection_delay;
--error ER_SPECIFIC_ACCESS_DENIED_ERROR
SET @@global.connection_control_min_connection_delay = DEFAULT;
SELECT @@global.connection_control_min_connection_delay;
--echo # 2.4 : Use no_privs@localhost to set
--echo # connection_control_min_connection_delay to invalid value
--error ER_SPECIFIC_ACCESS_DENIED_ERROR
SET @@global.connection_control_min_connection_delay = NULL;
SELECT @@global.connection_control_min_connection_delay;
--error ER_SPECIFIC_ACCESS_DENIED_ERROR
SET @@global.connection_control_min_connection_delay = 9223372036854775808;
SELECT @@global.connection_control_min_connection_delay;
--echo # Switch to default connection
connection default;
--echo # 2.5 : Setting connection_control_min_connection_delay to a value
--echo # greater than connection_control_max_connection_delay
SET @saved_max_delay= @@global.connection_control_max_connection_delay;
SET @@global.connection_control_max_connection_delay= 10000;
--error ER_WRONG_VALUE_FOR_VAR
SET @@global.connection_control_min_connection_delay= 11000;
SELECT @@global.connection_control_min_connection_delay;
SET @@global.connection_control_max_connection_delay= @saved_max_delay;
SET @@global.connection_control_min_connection_delay = @saved_value;
SELECT @@global.connection_control_min_connection_delay;
--echo #-----------------------------------------------------------------------
--echo # Case 3 : connection_control_max_connection_delay
SET @saved_value= @@global.connection_control_max_connection_delay;
SELECT @saved_value;
--echo # 3.1 : Setting connection_control_max_connection_delay to valid
--echo # value
SET @@global.connection_control_max_connection_delay = 20000;
SELECT @@global.connection_control_max_connection_delay;
SET @@global.connection_control_max_connection_delay = 2000;
SELECT @@global.connection_control_max_connection_delay;
SET @@global.connection_control_max_connection_delay = 2147483647;
SELECT @@global.connection_control_max_connection_delay;
SET @@global.connection_control_max_connection_delay = DEFAULT;
SELECT @@global.connection_control_max_connection_delay;
--echo # 3.2 : Setting connection_control_max_connection_delay to
--echo # invalid value
--error ER_WRONG_TYPE_FOR_VAR
SET @@global.connection_control_max_connection_delay = NULL;
SELECT @@global.connection_control_max_connection_delay;
--error ER_WRONG_TYPE_FOR_VAR
SET @@global.connection_control_max_connection_delay = `SELECT * FROM mysql.user`;
SELECT @@global.connection_control_max_connection_delay;
--error ER_WRONG_VALUE_FOR_VAR
SET @@global.connection_control_max_connection_delay = -20;
SELECT @@global.connection_control_max_connection_delay;
--error ER_WRONG_VALUE_FOR_VAR
SET @@global.connection_control_max_connection_delay = 9223372036854775808;
SELECT @@global.connection_control_max_connection_delay;
--error ER_WRONG_VALUE_FOR_VAR
SET @@global.connection_control_max_connection_delay = -9223372036854775808;
SELECT @@global.connection_control_max_connection_delay;
--error ER_WRONG_VALUE_FOR_VAR
SET @@global.connection_control_max_connection_delay = 20;
SELECT @@global.connection_control_max_connection_delay;
--echo # Switch to conn_no_privs
connection conn_no_privs;
--echo # 3.3 : Use no_privs@localhost to set
--echo # connection_control_max_connection_delay to valid value
--error ER_SPECIFIC_ACCESS_DENIED_ERROR
SET @@global.connection_control_max_connection_delay = 2147483647;
SELECT @@global.connection_control_max_connection_delay;
--error ER_SPECIFIC_ACCESS_DENIED_ERROR
SET @@global.connection_control_max_connection_delay = DEFAULT;
SELECT @@global.connection_control_max_connection_delay;
--echo # 3.4 : Use no_privs@localhost to set
--echo # connection_control_max_connection_delay to invalid value
--error ER_SPECIFIC_ACCESS_DENIED_ERROR
SET @@global.connection_control_max_connection_delay = NULL;
SELECT @@global.connection_control_max_connection_delay;
--error ER_SPECIFIC_ACCESS_DENIED_ERROR
SET @@global.connection_control_max_connection_delay = 9223372036854775808;
SELECT @@global.connection_control_max_connection_delay;
--echo # Switch to default connection
connection default;
--echo # 3.5 : Setting connection_control_min_connection_delay to a value
--echo # greater than connection_control_max_connection_delay
SET @saved_min_delay= @@global.connection_control_min_connection_delay;
SET @@global.connection_control_min_connection_delay= 11000;
--error ER_WRONG_VALUE_FOR_VAR
SET @@global.connection_control_max_connection_delay= 10000;
SELECT @@global.connection_control_max_connection_delay;
SET @@global.connection_control_min_connection_delay= @saved_min_delay;
SET @@global.connection_control_max_connection_delay = @saved_value;
SELECT @@global.connection_control_max_connection_delay;
--echo #-----------------------------------------------------------------------
--echo # Cleanup
disconnect conn_no_privs;
DROP USER no_privs@localhost;
--echo # Uninstall connection_control plugin
--source ../inc/uninstall_connection_control_plugin.inc
# Wait till all disconnects are completed.
--source include/wait_until_count_sessions.inc
--echo #-----------------------------------------------------------------------
| {
"pile_set_name": "Github"
} |
/***********************************************************************
* *
* This software is part of the ast package *
* Copyright (c) 1989-2011 AT&T Intellectual Property *
* and is licensed under the *
* Eclipse Public License, Version 1.0 *
* by AT&T Intellectual Property *
* *
* A copy of the License is available at *
* http://www.eclipse.org/org/documents/epl-v10.html *
* (with md5 checksum b35adb5213ca9657e911e9befb180842) *
* *
* Information and Software Systems Research *
* AT&T Research *
* Florham Park NJ *
* *
* Glenn Fowler <[email protected]> *
* *
***********************************************************************/
#pragma prototyped
/*
* Glenn Fowler
* AT&T Research
*
* expression library
*/
#include <exlib.h>
/*
* 0 terminate string and optionally vmstrdup() return value
*/
char*
exstash(Sfio_t* sp, Vmalloc_t* vp)
{
char* s;
return ((s = sfstruse(sp)) && (!vp || (s = vmstrdup(vp, s)))) ? s : exnospace();
}
| {
"pile_set_name": "Github"
} |
package meli
import (
"archive/tar"
"bufio"
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"sync"
"github.com/docker/docker/api/types"
"github.com/pkg/errors"
)
// PullDockerImage pulls a docker from a registry via docker daemon
func PullDockerImage(ctx context.Context, cli APIclient, dc *DockerContainer) error {
imageName := dc.ComposeService.Image
result, _ := AuthInfo.Load("dockerhub")
if strings.Contains(imageName, "quay") {
result, _ = AuthInfo.Load("quay")
}
GetRegistryAuth := result.(map[string]string)["RegistryAuth"]
imagePullResp, err := cli.ImagePull(
ctx,
imageName,
types.ImagePullOptions{RegistryAuth: GetRegistryAuth})
if err != nil {
return errors.Wrapf(err, "unable to pull image %v", imageName)
}
var imgProg imageProgress
scanner := bufio.NewScanner(imagePullResp)
for scanner.Scan() {
_ = json.Unmarshal(scanner.Bytes(), &imgProg)
fmt.Fprintln(dc.LogMedium, dc.ServiceName, "::", imgProg.Status, imgProg.Progress)
}
if err := scanner.Err(); err != nil {
fmt.Println(" :unable to log output for image", imageName, err)
}
imagePullResp.Close()
return nil
}
func walkFnClosure(src string, tw *tar.Writer, buf *bytes.Buffer) filepath.WalkFunc {
return func(path string, info os.FileInfo, err error) error {
if err != nil {
// todo: maybe we should return nil
return err
}
tarHeader, err := tar.FileInfoHeader(info, info.Name())
if err != nil {
return err
}
// update the name to correctly reflect the desired destination when untaring
// https://medium.com/@skdomino/taring-untaring-files-in-go-6b07cf56bc07
tarHeader.Name = strings.TrimPrefix(strings.Replace(path, src, "", -1), string(filepath.Separator))
if src == "." {
// see: issues/74
tarHeader.Name = strings.TrimPrefix(path, string(filepath.Separator))
}
err = tw.WriteHeader(tarHeader)
if err != nil {
return err
}
// return on directories since there will be no content to tar
if info.Mode().IsDir() {
return nil
}
// return on non-regular files since there will be no content to tar
if !info.Mode().IsRegular() {
// non regular files are like symlinks etc; https://golang.org/src/os/types.go?h=ModeSymlink#L49
return nil
}
// open files for taring
f, err := os.Open(path)
if err != nil {
return err
}
defer f.Close()
tr := io.TeeReader(f, tw)
_, err = poolReadFrom(tr)
if err != nil {
return err
}
return nil
}
}
// this is taken from io.util
var blackHolePool = sync.Pool{
New: func() interface{} {
// TODO: change this size accordingly
// we could find the size of the file we want to tar
// then pass that in as the size. That way we will
// always create a right sized slice and not have to incure cost of slice regrowth(if any)
b := make([]byte, 512)
return &b
},
}
// this is taken from io.util
func poolReadFrom(r io.Reader) (n int64, err error) {
bufp := blackHolePool.Get().(*[]byte)
// reset the buffer since it may contain data from a previous round
// see issues/118
for i := range *bufp {
(*bufp)[i] = 0
}
readSize := 0
for {
readSize, err = r.Read(*bufp)
n += int64(readSize)
if err != nil {
blackHolePool.Put(bufp)
if err == io.EOF {
return n, nil
}
return
}
}
}
// BuildDockerImage builds a docker image via docker daemon
func BuildDockerImage(ctx context.Context, cli APIclient, dc *DockerContainer) (string, error) {
// TODO: I dont like the way we are handling paths here.
// look at dirWithComposeFile in container.go
dockerFile := dc.ComposeService.Build.Dockerfile
if dockerFile == "" {
dockerFile = "Dockerfile"
}
dirWithComposeFile := filepath.Dir(dc.DockerComposeFile)
dirWithComposeFileAbs, err := filepath.Abs(dirWithComposeFile)
if err != nil {
return "", errors.Wrapf(err, "unable to get absolute path of %v", dirWithComposeFile)
}
userContext := filepath.Dir(dc.ComposeService.Build.Context + "/")
userContextAbs := filepath.Join(dirWithComposeFileAbs, userContext)
if filepath.IsAbs(userContext) {
// For user Contexts that are absolute paths,
// do NOT join them with anything. They should be used as is.
userContextAbs = userContext
}
if userContextAbs == "/" {
// ie: dc.ComposeService.Build.Context=="" because user didn't provide any
userContextAbs = dirWithComposeFile
}
dockerFilePath, err := filepath.Abs(
filepath.Join(userContextAbs, dockerFile))
if err != nil {
return "", errors.Wrapf(err, "unable to get path to Dockerfile %v", dockerFile)
}
dockerFileReader, err := os.Open(dockerFilePath)
if err != nil {
return "", errors.Wrapf(err, "unable to open Dockerfile %v", dockerFilePath)
}
readDockerFile, err := ioutil.ReadAll(dockerFileReader)
if err != nil {
return "", errors.Wrapf(err, "unable to read dockerfile %v", dockerFile)
}
imageName := "meli_" + strings.ToLower(dc.ServiceName)
splitDockerfile := strings.Split(string(readDockerFile), " ")
splitImageName := strings.Split(splitDockerfile[1], "\n")
imgFromDockerfile := splitImageName[0]
result, _ := AuthInfo.Load("dockerhub")
if strings.Contains(imgFromDockerfile, "quay") {
result, _ = AuthInfo.Load("quay")
}
authInfo := result.(map[string]string)
registryURL := authInfo["registryURL"]
username := authInfo["username"]
password := authInfo["password"]
AuthConfigs := make(map[string]types.AuthConfig)
AuthConfigs[registryURL] = types.AuthConfig{Username: username, Password: password}
buf := new(bytes.Buffer)
tw := tar.NewWriter(buf)
defer tw.Close()
/*
Context is either a path to a directory containing a Dockerfile, or a url to a git repository.
When the value supplied is a relative path, it is interpreted as relative to the location of the Compose file.
This directory is also the build context that is sent to the Docker daemon.
- https://docs.docker.com/compose/compose-file/#context
*/
UserProvidedContextPath := filepath.Dir(userContextAbs + "/")
err = filepath.Walk(UserProvidedContextPath, walkFnClosure(UserProvidedContextPath, tw, buf))
if err != nil {
return "", errors.Wrapf(err, "unable to walk user provided context path %v", UserProvidedContextPath)
}
dockerFileTarReader := bytes.NewReader(buf.Bytes())
imageBuildResponse, err := cli.ImageBuild(
ctx,
dockerFileTarReader,
types.ImageBuildOptions{
//PullParent: true,
//Squash: true, currently only supported in experimenta mode
Tags: []string{imageName},
Remove: true, //remove intermediary containers after build
NoCache: dc.Rebuild,
SuppressOutput: false,
Dockerfile: dockerFile,
Context: dockerFileTarReader,
AuthConfigs: AuthConfigs})
if err != nil {
return "", errors.Wrapf(err, "unable to build docker image %v for service %v", imageName, dc.ServiceName)
}
var imgProg imageProgress
scanner := bufio.NewScanner(imageBuildResponse.Body)
for scanner.Scan() {
_ = json.Unmarshal(scanner.Bytes(), &imgProg)
fmt.Fprint(
dc.LogMedium,
dc.ServiceName,
"::",
imgProg.Status,
imgProg.Progress,
imgProg.Stream)
}
if err := scanner.Err(); err != nil {
fmt.Println(" :unable to log output for image", imageName, err)
}
imageBuildResponse.Body.Close()
return imageName, nil
}
| {
"pile_set_name": "Github"
} |
// Type definitions for Angular JS 1.4+
// Project: http://angularjs.org
// Definitions by: Diego Vilar <http://github.com/diegovilar>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
/// <reference path="../jquery/jquery.d.ts" />
declare var angular: angular.IAngularStatic;
// Support for painless dependency injection
interface Function {
$inject?: string[];
}
// Collapse angular into ng
import ng = angular;
// Support AMD require
declare module 'angular' {
export = angular;
}
///////////////////////////////////////////////////////////////////////////////
// ng module (angular.js)
///////////////////////////////////////////////////////////////////////////////
declare module angular {
// not directly implemented, but ensures that constructed class implements $get
interface IServiceProviderClass {
new (...args: any[]): IServiceProvider;
}
interface IServiceProviderFactory {
(...args: any[]): IServiceProvider;
}
// All service providers extend this interface
interface IServiceProvider {
$get: any;
}
interface IAngularBootstrapConfig {
strictDi?: boolean;
debugInfoEnabled?: boolean;
}
///////////////////////////////////////////////////////////////////////////
// AngularStatic
// see http://docs.angularjs.org/api
///////////////////////////////////////////////////////////////////////////
interface IAngularStatic {
bind(context: any, fn: Function, ...args: any[]): Function;
/**
* Use this function to manually start up angular application.
*
* @param element DOM element which is the root of angular application.
* @param modules An array of modules to load into the application.
* Each item in the array should be the name of a predefined module or a (DI annotated)
* function that will be invoked by the injector as a config block.
* @param config an object for defining configuration options for the application. The following keys are supported:
* - `strictDi`: disable automatic function annotation for the application. This is meant to assist in finding bugs which break minified code.
*/
bootstrap(element: string|Element|JQuery|Document, modules?: (string|Function|any[])[], config?: IAngularBootstrapConfig): auto.IInjectorService;
/**
* Creates a deep copy of source, which should be an object or an array.
*
* - If no destination is supplied, a copy of the object or array is created.
* - If a destination is provided, all of its elements (for array) or properties (for objects) are deleted and then all elements/properties from the source are copied to it.
* - If source is not an object or array (inc. null and undefined), source is returned.
* - If source is identical to 'destination' an exception will be thrown.
*
* @param source The source that will be used to make a copy. Can be any type, including primitives, null, and undefined.
* @param destination Destination into which the source is copied. If provided, must be of the same type as source.
*/
copy<T>(source: T, destination?: T): T;
/**
* Wraps a raw DOM element or HTML string as a jQuery element.
*
* If jQuery is available, angular.element is an alias for the jQuery function. If jQuery is not available, angular.element delegates to Angular's built-in subset of jQuery, called "jQuery lite" or "jqLite."
*/
element: IAugmentedJQueryStatic;
equals(value1: any, value2: any): boolean;
extend(destination: any, ...sources: any[]): any;
/**
* Invokes the iterator function once for each item in obj collection, which can be either an object or an array. The iterator function is invoked with iterator(value, key), where value is the value of an object property or an array element and key is the object property key or array element index. Specifying a context for the function is optional.
*
* It is worth noting that .forEach does not iterate over inherited properties because it filters using the hasOwnProperty method.
*
* @param obj Object to iterate over.
* @param iterator Iterator function.
* @param context Object to become context (this) for the iterator function.
*/
forEach<T>(obj: T[], iterator: (value: T, key: number) => any, context?: any): any;
/**
* Invokes the iterator function once for each item in obj collection, which can be either an object or an array. The iterator function is invoked with iterator(value, key), where value is the value of an object property or an array element and key is the object property key or array element index. Specifying a context for the function is optional.
*
* It is worth noting that .forEach does not iterate over inherited properties because it filters using the hasOwnProperty method.
*
* @param obj Object to iterate over.
* @param iterator Iterator function.
* @param context Object to become context (this) for the iterator function.
*/
forEach<T>(obj: { [index: string]: T; }, iterator: (value: T, key: string) => any, context?: any): any;
/**
* Invokes the iterator function once for each item in obj collection, which can be either an object or an array. The iterator function is invoked with iterator(value, key), where value is the value of an object property or an array element and key is the object property key or array element index. Specifying a context for the function is optional.
*
* It is worth noting that .forEach does not iterate over inherited properties because it filters using the hasOwnProperty method.
*
* @param obj Object to iterate over.
* @param iterator Iterator function.
* @param context Object to become context (this) for the iterator function.
*/
forEach(obj: any, iterator: (value: any, key: any) => any, context?: any): any;
fromJson(json: string): any;
identity<T>(arg?: T): T;
injector(modules?: any[], strictDi?: boolean): auto.IInjectorService;
isArray(value: any): boolean;
isDate(value: any): boolean;
isDefined(value: any): boolean;
isElement(value: any): boolean;
isFunction(value: any): boolean;
isNumber(value: any): boolean;
isObject(value: any): boolean;
isString(value: any): boolean;
isUndefined(value: any): boolean;
lowercase(str: string): string;
/**
* Deeply extends the destination object dst by copying own enumerable properties from the src object(s) to dst. You can specify multiple src objects. If you want to preserve original objects, you can do so by passing an empty object as the target: var object = angular.merge({}, object1, object2).
*
* Unlike extend(), merge() recursively descends into object properties of source objects, performing a deep copy.
*
* @param dst Destination object.
* @param src Source object(s).
*/
merge(dst: any, ...src: any[]): any;
/**
* The angular.module is a global place for creating, registering and retrieving Angular modules. All modules (angular core or 3rd party) that should be available to an application must be registered using this mechanism.
*
* When passed two or more arguments, a new module is created. If passed only one argument, an existing module (the name passed as the first argument to module) is retrieved.
*
* @param name The name of the module to create or retrieve.
* @param requires The names of modules this module depends on. If specified then new module is being created. If unspecified then the module is being retrieved for further configuration.
* @param configFn Optional configuration function for the module.
*/
module(
name: string,
requires?: string[],
configFn?: Function): IModule;
noop(...args: any[]): void;
reloadWithDebugInfo(): void;
toJson(obj: any, pretty?: boolean): string;
uppercase(str: string): string;
version: {
full: string;
major: number;
minor: number;
dot: number;
codeName: string;
};
/**
* If window.name contains prefix NG_DEFER_BOOTSTRAP! when angular.bootstrap is called, the bootstrap process will be paused until angular.resumeBootstrap() is called.
* @param extraModules An optional array of modules that should be added to the original list of modules that the app was about to be bootstrapped with.
*/
resumeBootstrap?(extraModules?: string[]): ng.auto.IInjectorService;
}
///////////////////////////////////////////////////////////////////////////
// Module
// see http://docs.angularjs.org/api/angular.Module
///////////////////////////////////////////////////////////////////////////
interface IModule {
animation(name: string, animationFactory: Function): IModule;
animation(name: string, inlineAnnotatedFunction: any[]): IModule;
animation(object: Object): IModule;
/**
* Use this method to register a component.
*
* @param name The name of the component.
* @param options A definition object passed into the component.
*/
component(name: string, options: IComponentOptions): IModule;
/**
* Use this method to register work which needs to be performed on module loading.
*
* @param configFn Execute this function on module load. Useful for service configuration.
*/
config(configFn: Function): IModule;
/**
* Use this method to register work which needs to be performed on module loading.
*
* @param inlineAnnotatedFunction Execute this function on module load. Useful for service configuration.
*/
config(inlineAnnotatedFunction: any[]): IModule;
config(object: Object): IModule;
/**
* Register a constant service, such as a string, a number, an array, an object or a function, with the $injector. Unlike value it can be injected into a module configuration function (see config) and it cannot be overridden by an Angular decorator.
*
* @param name The name of the constant.
* @param value The constant value.
*/
constant(name: string, value: any): IModule;
constant(object: Object): IModule;
/**
* The $controller service is used by Angular to create new controllers.
*
* This provider allows controller registration via the register method.
*
* @param name Controller name, or an object map of controllers where the keys are the names and the values are the constructors.
* @param controllerConstructor Controller constructor fn (optionally decorated with DI annotations in the array notation).
*/
controller(name: string, controllerConstructor: Function): IModule;
/**
* The $controller service is used by Angular to create new controllers.
*
* This provider allows controller registration via the register method.
*
* @param name Controller name, or an object map of controllers where the keys are the names and the values are the constructors.
* @param controllerConstructor Controller constructor fn (optionally decorated with DI annotations in the array notation).
*/
controller(name: string, inlineAnnotatedConstructor: any[]): IModule;
controller(object: Object): IModule;
/**
* Register a new directive with the compiler.
*
* @param name Name of the directive in camel-case (i.e. ngBind which will match as ng-bind)
* @param directiveFactory An injectable directive factory function.
*/
directive(name: string, directiveFactory: IDirectiveFactory): IModule;
/**
* Register a new directive with the compiler.
*
* @param name Name of the directive in camel-case (i.e. ngBind which will match as ng-bind)
* @param directiveFactory An injectable directive factory function.
*/
directive(name: string, inlineAnnotatedFunction: any[]): IModule;
directive(object: Object): IModule;
/**
* Register a service factory, which will be called to return the service instance. This is short for registering a service where its provider consists of only a $get property, which is the given service factory function. You should use $provide.factory(getFn) if you do not need to configure your service in a provider.
*
* @param name The name of the instance.
* @param $getFn The $getFn for the instance creation. Internally this is a short hand for $provide.provider(name, {$get: $getFn}).
*/
factory(name: string, $getFn: Function): IModule;
/**
* Register a service factory, which will be called to return the service instance. This is short for registering a service where its provider consists of only a $get property, which is the given service factory function. You should use $provide.factory(getFn) if you do not need to configure your service in a provider.
*
* @param name The name of the instance.
* @param inlineAnnotatedFunction The $getFn for the instance creation. Internally this is a short hand for $provide.provider(name, {$get: $getFn}).
*/
factory(name: string, inlineAnnotatedFunction: any[]): IModule;
factory(object: Object): IModule;
filter(name: string, filterFactoryFunction: Function): IModule;
filter(name: string, inlineAnnotatedFunction: any[]): IModule;
filter(object: Object): IModule;
provider(name: string, serviceProviderFactory: IServiceProviderFactory): IModule;
provider(name: string, serviceProviderConstructor: IServiceProviderClass): IModule;
provider(name: string, inlineAnnotatedConstructor: any[]): IModule;
provider(name: string, providerObject: IServiceProvider): IModule;
provider(object: Object): IModule;
/**
* Run blocks are the closest thing in Angular to the main method. A run block is the code which needs to run to kickstart the application. It is executed after all of the service have been configured and the injector has been created. Run blocks typically contain code which is hard to unit-test, and for this reason should be declared in isolated modules, so that they can be ignored in the unit-tests.
*/
run(initializationFunction: Function): IModule;
/**
* Run blocks are the closest thing in Angular to the main method. A run block is the code which needs to run to kickstart the application. It is executed after all of the service have been configured and the injector has been created. Run blocks typically contain code which is hard to unit-test, and for this reason should be declared in isolated modules, so that they can be ignored in the unit-tests.
*/
run(inlineAnnotatedFunction: any[]): IModule;
/**
* Register a service constructor, which will be invoked with new to create the service instance. This is short for registering a service where its provider's $get property is a factory function that returns an instance instantiated by the injector from the service constructor function.
*
* @param name The name of the instance.
* @param serviceConstructor An injectable class (constructor function) that will be instantiated.
*/
service(name: string, serviceConstructor: Function): IModule;
/**
* Register a service constructor, which will be invoked with new to create the service instance. This is short for registering a service where its provider's $get property is a factory function that returns an instance instantiated by the injector from the service constructor function.
*
* @param name The name of the instance.
* @param inlineAnnotatedConstructor An injectable class (constructor function) that will be instantiated.
*/
service(name: string, inlineAnnotatedConstructor: any[]): IModule;
service(object: Object): IModule;
/**
* Register a value service with the $injector, such as a string, a number, an array, an object or a function. This is short for registering a service where its provider's $get property is a factory function that takes no arguments and returns the value service.
Value services are similar to constant services, except that they cannot be injected into a module configuration function (see config) but they can be overridden by an Angular decorator.
*
* @param name The name of the instance.
* @param value The value.
*/
value(name: string, value: any): IModule;
value(object: Object): IModule;
/**
* Register a service decorator with the $injector. A service decorator intercepts the creation of a service, allowing it to override or modify the behaviour of the service. The object returned by the decorator may be the original service, or a new service object which replaces or wraps and delegates to the original service.
* @param name The name of the service to decorate
* @param decorator This function will be invoked when the service needs to be instantiated and should return the decorated service instance. The function is called using the injector.invoke method and is therefore fully injectable. Local injection arguments: $delegate - The original service instance, which can be monkey patched, configured, decorated or delegated to.
*/
decorator(name:string, decoratorConstructor: Function): IModule;
decorator(name:string, inlineAnnotatedConstructor: any[]): IModule;
// Properties
name: string;
requires: string[];
}
///////////////////////////////////////////////////////////////////////////
// Attributes
// see http://docs.angularjs.org/api/ng.$compile.directive.Attributes
///////////////////////////////////////////////////////////////////////////
interface IAttributes {
/**
* this is necessary to be able to access the scoped attributes. it's not very elegant
* because you have to use attrs['foo'] instead of attrs.foo but I don't know of a better way
* this should really be limited to return string but it creates this problem: http://stackoverflow.com/q/17201854/165656
*/
[name: string]: any;
/**
* Converts an attribute name (e.g. dash/colon/underscore-delimited string, optionally prefixed with x- or data-) to its normalized, camelCase form.
*
* Also there is special case for Moz prefix starting with upper case letter.
*
* For further information check out the guide on @see https://docs.angularjs.org/guide/directive#matching-directives
*/
$normalize(name: string): string;
/**
* Adds the CSS class value specified by the classVal parameter to the
* element. If animations are enabled then an animation will be triggered
* for the class addition.
*/
$addClass(classVal: string): void;
/**
* Removes the CSS class value specified by the classVal parameter from the
* element. If animations are enabled then an animation will be triggered for
* the class removal.
*/
$removeClass(classVal: string): void;
/**
* Set DOM element attribute value.
*/
$set(key: string, value: any): void;
/**
* Observes an interpolated attribute.
* The observer function will be invoked once during the next $digest
* following compilation. The observer is then invoked whenever the
* interpolated value changes.
*/
$observe<T>(name: string, fn: (value?: T) => any): Function;
/**
* A map of DOM element attribute names to the normalized name. This is needed
* to do reverse lookup from normalized name back to actual name.
*/
$attr: Object;
}
/**
* form.FormController - type in module ng
* see https://docs.angularjs.org/api/ng/type/form.FormController
*/
interface IFormController {
/**
* Indexer which should return ng.INgModelController for most properties but cannot because of "All named properties must be assignable to string indexer type" constraint - see https://github.com/Microsoft/TypeScript/issues/272
*/
[name: string]: any;
$pristine: boolean;
$dirty: boolean;
$valid: boolean;
$invalid: boolean;
$submitted: boolean;
$error: any;
$addControl(control: INgModelController): void;
$removeControl(control: INgModelController): void;
$setValidity(validationErrorKey: string, isValid: boolean, control: INgModelController): void;
$setDirty(): void;
$setPristine(): void;
$commitViewValue(): void;
$rollbackViewValue(): void;
$setSubmitted(): void;
$setUntouched(): void;
}
///////////////////////////////////////////////////////////////////////////
// NgModelController
// see http://docs.angularjs.org/api/ng.directive:ngModel.NgModelController
///////////////////////////////////////////////////////////////////////////
interface INgModelController {
$render(): void;
$setValidity(validationErrorKey: string, isValid: boolean): void;
// Documentation states viewValue and modelValue to be a string but other
// types do work and it's common to use them.
$setViewValue(value: any, trigger?: string): void;
$setPristine(): void;
$setDirty(): void;
$validate(): void;
$setTouched(): void;
$setUntouched(): void;
$rollbackViewValue(): void;
$commitViewValue(): void;
$isEmpty(value: any): boolean;
$viewValue: any;
$modelValue: any;
$parsers: IModelParser[];
$formatters: IModelFormatter[];
$viewChangeListeners: IModelViewChangeListener[];
$error: any;
$name: string;
$touched: boolean;
$untouched: boolean;
$validators: IModelValidators;
$asyncValidators: IAsyncModelValidators;
$pending: any;
$pristine: boolean;
$dirty: boolean;
$valid: boolean;
$invalid: boolean;
}
interface IModelValidators {
/**
* viewValue is any because it can be an object that is called in the view like $viewValue.name:$viewValue.subName
*/
[index: string]: (modelValue: any, viewValue: any) => boolean;
}
interface IAsyncModelValidators {
[index: string]: (modelValue: any, viewValue: any) => IPromise<any>;
}
interface IModelParser {
(value: any): any;
}
interface IModelFormatter {
(value: any): any;
}
interface IModelViewChangeListener {
(): void;
}
/**
* $rootScope - $rootScopeProvider - service in module ng
* see https://docs.angularjs.org/api/ng/type/$rootScope.Scope and https://docs.angularjs.org/api/ng/service/$rootScope
*/
interface IRootScopeService {
[index: string]: any;
$apply(): any;
$apply(exp: string): any;
$apply(exp: (scope: IScope) => any): any;
$applyAsync(): any;
$applyAsync(exp: string): any;
$applyAsync(exp: (scope: IScope) => any): any;
/**
* Dispatches an event name downwards to all child scopes (and their children) notifying the registered $rootScope.Scope listeners.
*
* The event life cycle starts at the scope on which $broadcast was called. All listeners listening for name event on this scope get notified. Afterwards, the event propagates to all direct and indirect scopes of the current scope and calls all registered listeners along the way. The event cannot be canceled.
*
* Any exception emitted from the listeners will be passed onto the $exceptionHandler service.
*
* @param name Event name to broadcast.
* @param args Optional one or more arguments which will be passed onto the event listeners.
*/
$broadcast(name: string, ...args: any[]): IAngularEvent;
$destroy(): void;
$digest(): void;
/**
* Dispatches an event name upwards through the scope hierarchy notifying the registered $rootScope.Scope listeners.
*
* The event life cycle starts at the scope on which $emit was called. All listeners listening for name event on this scope get notified. Afterwards, the event traverses upwards toward the root scope and calls all registered listeners along the way. The event will stop propagating if one of the listeners cancels it.
*
* Any exception emitted from the listeners will be passed onto the $exceptionHandler service.
*
* @param name Event name to emit.
* @param args Optional one or more arguments which will be passed onto the event listeners.
*/
$emit(name: string, ...args: any[]): IAngularEvent;
$eval(): any;
$eval(expression: string, locals?: Object): any;
$eval(expression: (scope: IScope) => any, locals?: Object): any;
$evalAsync(): void;
$evalAsync(expression: string): void;
$evalAsync(expression: (scope: IScope) => any): void;
// Defaults to false by the implementation checking strategy
$new(isolate?: boolean, parent?: IScope): IScope;
/**
* Listens on events of a given type. See $emit for discussion of event life cycle.
*
* The event listener function format is: function(event, args...).
*
* @param name Event name to listen on.
* @param listener Function to call when the event is emitted.
*/
$on(name: string, listener: (event: IAngularEvent, ...args: any[]) => any): Function;
$watch(watchExpression: string, listener?: string, objectEquality?: boolean): Function;
$watch<T>(watchExpression: string, listener?: (newValue: T, oldValue: T, scope: IScope) => any, objectEquality?: boolean): Function;
$watch(watchExpression: (scope: IScope) => any, listener?: string, objectEquality?: boolean): Function;
$watch<T>(watchExpression: (scope: IScope) => T, listener?: (newValue: T, oldValue: T, scope: IScope) => any, objectEquality?: boolean): Function;
$watchCollection<T>(watchExpression: string, listener: (newValue: T, oldValue: T, scope: IScope) => any): Function;
$watchCollection<T>(watchExpression: (scope: IScope) => T, listener: (newValue: T, oldValue: T, scope: IScope) => any): Function;
$watchGroup(watchExpressions: any[], listener: (newValue: any, oldValue: any, scope: IScope) => any): Function;
$watchGroup(watchExpressions: { (scope: IScope): any }[], listener: (newValue: any, oldValue: any, scope: IScope) => any): Function;
$parent: IScope;
$root: IRootScopeService;
$id: number;
// Hidden members
$$isolateBindings: any;
$$phase: any;
}
interface IScope extends IRootScopeService { }
/**
* $scope for ngRepeat directive.
* see https://docs.angularjs.org/api/ng/directive/ngRepeat
*/
interface IRepeatScope extends IScope {
/**
* iterator offset of the repeated element (0..length-1).
*/
$index: number;
/**
* true if the repeated element is first in the iterator.
*/
$first: boolean;
/**
* true if the repeated element is between the first and last in the iterator.
*/
$middle: boolean;
/**
* true if the repeated element is last in the iterator.
*/
$last: boolean;
/**
* true if the iterator position $index is even (otherwise false).
*/
$even: boolean;
/**
* true if the iterator position $index is odd (otherwise false).
*/
$odd: boolean;
}
interface IAngularEvent {
/**
* the scope on which the event was $emit-ed or $broadcast-ed.
*/
targetScope: IScope;
/**
* the scope that is currently handling the event. Once the event propagates through the scope hierarchy, this property is set to null.
*/
currentScope: IScope;
/**
* name of the event.
*/
name: string;
/**
* calling stopPropagation function will cancel further event propagation (available only for events that were $emit-ed).
*/
stopPropagation?: Function;
/**
* calling preventDefault sets defaultPrevented flag to true.
*/
preventDefault: Function;
/**
* true if preventDefault was called.
*/
defaultPrevented: boolean;
}
///////////////////////////////////////////////////////////////////////////
// WindowService
// see http://docs.angularjs.org/api/ng.$window
///////////////////////////////////////////////////////////////////////////
interface IWindowService extends Window {
[key: string]: any;
}
///////////////////////////////////////////////////////////////////////////
// BrowserService
// TODO undocumented, so we need to get it from the source code
///////////////////////////////////////////////////////////////////////////
interface IBrowserService {
defer: angular.ITimeoutService;
[key: string]: any;
}
///////////////////////////////////////////////////////////////////////////
// TimeoutService
// see http://docs.angularjs.org/api/ng.$timeout
///////////////////////////////////////////////////////////////////////////
interface ITimeoutService {
(delay?: number, invokeApply?: boolean): IPromise<void>;
<T>(fn: (...args: any[]) => T, delay?: number, invokeApply?: boolean, ...args: any[]): IPromise<T>;
cancel(promise?: IPromise<any>): boolean;
}
///////////////////////////////////////////////////////////////////////////
// IntervalService
// see http://docs.angularjs.org/api/ng.$interval
///////////////////////////////////////////////////////////////////////////
interface IIntervalService {
(func: Function, delay: number, count?: number, invokeApply?: boolean, ...args: any[]): IPromise<any>;
cancel(promise: IPromise<any>): boolean;
}
///////////////////////////////////////////////////////////////////////////
// AnimateProvider
// see http://docs.angularjs.org/api/ng/provider/$animateProvider
///////////////////////////////////////////////////////////////////////////
interface IAnimateProvider {
/**
* Registers a new injectable animation factory function.
*
* @param name The name of the animation.
* @param factory The factory function that will be executed to return the animation object.
*/
register(name: string, factory: () => IAnimateCallbackObject): void;
/**
* Gets and/or sets the CSS class expression that is checked when performing an animation.
*
* @param expression The className expression which will be checked against all animations.
* @returns The current CSS className expression value. If null then there is no expression value.
*/
classNameFilter(expression?: RegExp): RegExp;
}
/**
* The animation object which contains callback functions for each event that is expected to be animated.
*/
interface IAnimateCallbackObject {
eventFn(element: Node, doneFn: () => void): Function;
}
/**
* $filter - $filterProvider - service in module ng
*
* Filters are used for formatting data displayed to the user.
*
* see https://docs.angularjs.org/api/ng/service/$filter
*/
interface IFilterService {
(name: 'filter'): IFilterFilter;
(name: 'currency'): IFilterCurrency;
(name: 'number'): IFilterNumber;
(name: 'date'): IFilterDate;
(name: 'json'): IFilterJson;
(name: 'lowercase'): IFilterLowercase;
(name: 'uppercase'): IFilterUppercase;
(name: 'limitTo'): IFilterLimitTo;
(name: 'orderBy'): IFilterOrderBy;
/**
* Usage:
* $filter(name);
*
* @param name Name of the filter function to retrieve
*/
<T>(name: string): T;
}
interface IFilterFilter {
<T>(array: T[], expression: string | IFilterFilterPatternObject | IFilterFilterPredicateFunc<T>, comparator?: IFilterFilterComparatorFunc<T>|boolean): T[];
}
interface IFilterFilterPatternObject {
[name: string]: any;
}
interface IFilterFilterPredicateFunc<T> {
(value: T, index: number, array: T[]): boolean;
}
interface IFilterFilterComparatorFunc<T> {
(actual: T, expected: T): boolean;
}
interface IFilterCurrency {
/**
* Formats a number as a currency (ie $1,234.56). When no currency symbol is provided, default symbol for current locale is used.
* @param amount Input to filter.
* @param symbol Currency symbol or identifier to be displayed.
* @param fractionSize Number of decimal places to round the amount to, defaults to default max fraction size for current locale
* @return Formatted number
*/
(amount: number, symbol?: string, fractionSize?: number): string;
}
interface IFilterNumber {
/**
* Formats a number as text.
* @param number Number to format.
* @param fractionSize Number of decimal places to round the number to. If this is not provided then the fraction size is computed from the current locale's number formatting pattern. In the case of the default locale, it will be 3.
* @return Number rounded to decimalPlaces and places a โ,โ after each third digit.
*/
(value: number|string, fractionSize?: number|string): string;
}
interface IFilterDate {
/**
* Formats date to a string based on the requested format.
*
* @param date Date to format either as Date object, milliseconds (string or number) or various ISO 8601 datetime string formats (e.g. yyyy-MM-ddTHH:mm:ss.sssZ and its shorter versions like yyyy-MM-ddTHH:mmZ, yyyy-MM-dd or yyyyMMddTHHmmssZ). If no timezone is specified in the string input, the time is considered to be in the local timezone.
* @param format Formatting rules (see Description). If not specified, mediumDate is used.
* @param timezone Timezone to be used for formatting. It understands UTC/GMT and the continental US time zone abbreviations, but for general use, use a time zone offset, for example, '+0430' (4 hours, 30 minutes east of the Greenwich meridian) If not specified, the timezone of the browser will be used.
* @return Formatted string or the input if input is not recognized as date/millis.
*/
(date: Date | number | string, format?: string, timezone?: string): string;
}
interface IFilterJson {
/**
* Allows you to convert a JavaScript object into JSON string.
* @param object Any JavaScript object (including arrays and primitive types) to filter.
* @param spacing The number of spaces to use per indentation, defaults to 2.
* @return JSON string.
*/
(object: any, spacing?: number): string;
}
interface IFilterLowercase {
/**
* Converts string to lowercase.
*/
(value: string): string;
}
interface IFilterUppercase {
/**
* Converts string to uppercase.
*/
(value: string): string;
}
interface IFilterLimitTo {
/**
* Creates a new array containing only a specified number of elements. The elements are taken from either the beginning or the end of the source array, string or number, as specified by the value and sign (positive or negative) of limit.
* @param input Source array to be limited.
* @param limit The length of the returned array. If the limit number is positive, limit number of items from the beginning of the source array/string are copied. If the number is negative, limit number of items from the end of the source array are copied. The limit will be trimmed if it exceeds array.length. If limit is undefined, the input will be returned unchanged.
* @param begin Index at which to begin limitation. As a negative index, begin indicates an offset from the end of input. Defaults to 0.
* @return A new sub-array of length limit or less if input array had less than limit elements.
*/
<T>(input: T[], limit: string|number, begin?: string|number): T[];
/**
* Creates a new string containing only a specified number of elements. The elements are taken from either the beginning or the end of the source string or number, as specified by the value and sign (positive or negative) of limit. If a number is used as input, it is converted to a string.
* @param input Source string or number to be limited.
* @param limit The length of the returned string. If the limit number is positive, limit number of items from the beginning of the source string are copied. If the number is negative, limit number of items from the end of the source string are copied. The limit will be trimmed if it exceeds input.length. If limit is undefined, the input will be returned unchanged.
* @param begin Index at which to begin limitation. As a negative index, begin indicates an offset from the end of input. Defaults to 0.
* @return A new substring of length limit or less if input had less than limit elements.
*/
(input: string|number, limit: string|number, begin?: string|number): string;
}
interface IFilterOrderBy {
/**
* Orders a specified array by the expression predicate. It is ordered alphabetically for strings and numerically for numbers. Note: if you notice numbers are not being sorted as expected, make sure they are actually being saved as numbers and not strings.
* @param array The array to sort.
* @param expression A predicate to be used by the comparator to determine the order of elements.
* @param reverse Reverse the order of the array.
* @return Reverse the order of the array.
*/
<T>(array: T[], expression: string|((value: T) => any)|(((value: T) => any)|string)[], reverse?: boolean): T[];
}
/**
* $filterProvider - $filter - provider in module ng
*
* Filters are just functions which transform input to an output. However filters need to be Dependency Injected. To achieve this a filter definition consists of a factory function which is annotated with dependencies and is responsible for creating a filter function.
*
* see https://docs.angularjs.org/api/ng/provider/$filterProvider
*/
interface IFilterProvider extends IServiceProvider {
/**
* register(name);
*
* @param name Name of the filter function, or an object map of filters where the keys are the filter names and the values are the filter factories. Note: Filter names must be valid angular Expressions identifiers, such as uppercase or orderBy. Names with special characters, such as hyphens and dots, are not allowed. If you wish to namespace your filters, then you can use capitalization (myappSubsectionFilterx) or underscores (myapp_subsection_filterx).
*/
register(name: string | {}): IServiceProvider;
}
///////////////////////////////////////////////////////////////////////////
// LocaleService
// see http://docs.angularjs.org/api/ng.$locale
///////////////////////////////////////////////////////////////////////////
interface ILocaleService {
id: string;
// These are not documented
// Check angular's i18n files for exemples
NUMBER_FORMATS: ILocaleNumberFormatDescriptor;
DATETIME_FORMATS: ILocaleDateTimeFormatDescriptor;
pluralCat: (num: any) => string;
}
interface ILocaleNumberFormatDescriptor {
DECIMAL_SEP: string;
GROUP_SEP: string;
PATTERNS: ILocaleNumberPatternDescriptor[];
CURRENCY_SYM: string;
}
interface ILocaleNumberPatternDescriptor {
minInt: number;
minFrac: number;
maxFrac: number;
posPre: string;
posSuf: string;
negPre: string;
negSuf: string;
gSize: number;
lgSize: number;
}
interface ILocaleDateTimeFormatDescriptor {
MONTH: string[];
SHORTMONTH: string[];
DAY: string[];
SHORTDAY: string[];
AMPMS: string[];
medium: string;
short: string;
fullDate: string;
longDate: string;
mediumDate: string;
shortDate: string;
mediumTime: string;
shortTime: string;
}
///////////////////////////////////////////////////////////////////////////
// LogService
// see http://docs.angularjs.org/api/ng.$log
// see http://docs.angularjs.org/api/ng.$logProvider
///////////////////////////////////////////////////////////////////////////
interface ILogService {
debug: ILogCall;
error: ILogCall;
info: ILogCall;
log: ILogCall;
warn: ILogCall;
}
interface ILogProvider extends IServiceProvider {
debugEnabled(): boolean;
debugEnabled(enabled: boolean): ILogProvider;
}
// We define this as separate interface so we can reopen it later for
// the ngMock module.
interface ILogCall {
(...args: any[]): void;
}
///////////////////////////////////////////////////////////////////////////
// ParseService
// see http://docs.angularjs.org/api/ng.$parse
// see http://docs.angularjs.org/api/ng.$parseProvider
///////////////////////////////////////////////////////////////////////////
interface IParseService {
(expression: string): ICompiledExpression;
}
interface IParseProvider {
logPromiseWarnings(): boolean;
logPromiseWarnings(value: boolean): IParseProvider;
unwrapPromises(): boolean;
unwrapPromises(value: boolean): IParseProvider;
}
interface ICompiledExpression {
(context: any, locals?: any): any;
literal: boolean;
constant: boolean;
// If value is not provided, undefined is gonna be used since the implementation
// does not check the parameter. Let's force a value for consistency. If consumer
// whants to undefine it, pass the undefined value explicitly.
assign(context: any, value: any): any;
}
/**
* $location - $locationProvider - service in module ng
* see https://docs.angularjs.org/api/ng/service/$location
*/
interface ILocationService {
absUrl(): string;
hash(): string;
hash(newHash: string): ILocationService;
host(): string;
/**
* Return path of current url
*/
path(): string;
/**
* Change path when called with parameter and return $location.
* Note: Path should always begin with forward slash (/), this method will add the forward slash if it is missing.
*
* @param path New path
*/
path(path: string): ILocationService;
port(): number;
protocol(): string;
replace(): ILocationService;
/**
* Return search part (as object) of current url
*/
search(): any;
/**
* Change search part when called with parameter and return $location.
*
* @param search When called with a single argument the method acts as a setter, setting the search component of $location to the specified value.
*
* If the argument is a hash object containing an array of values, these values will be encoded as duplicate search parameters in the url.
*/
search(search: any): ILocationService;
/**
* Change search part when called with parameter and return $location.
*
* @param search New search params
* @param paramValue If search is a string or a Number, then paramValue will override only a single search property. If paramValue is null, the property specified via the first argument will be deleted. If paramValue is an array, it will override the property of the search component of $location specified via the first argument. If paramValue is true, the property specified via the first argument will be added with no value nor trailing equal sign.
*/
search(search: string, paramValue: string|number|string[]|boolean): ILocationService;
state(): any;
state(state: any): ILocationService;
url(): string;
url(url: string): ILocationService;
}
interface ILocationProvider extends IServiceProvider {
hashPrefix(): string;
hashPrefix(prefix: string): ILocationProvider;
html5Mode(): boolean;
// Documentation states that parameter is string, but
// implementation tests it as boolean, which makes more sense
// since this is a toggler
html5Mode(active: boolean): ILocationProvider;
html5Mode(mode: { enabled?: boolean; requireBase?: boolean; rewriteLinks?: boolean; }): ILocationProvider;
}
///////////////////////////////////////////////////////////////////////////
// DocumentService
// see http://docs.angularjs.org/api/ng.$document
///////////////////////////////////////////////////////////////////////////
interface IDocumentService extends IAugmentedJQuery {}
///////////////////////////////////////////////////////////////////////////
// ExceptionHandlerService
// see http://docs.angularjs.org/api/ng.$exceptionHandler
///////////////////////////////////////////////////////////////////////////
interface IExceptionHandlerService {
(exception: Error, cause?: string): void;
}
///////////////////////////////////////////////////////////////////////////
// RootElementService
// see http://docs.angularjs.org/api/ng.$rootElement
///////////////////////////////////////////////////////////////////////////
interface IRootElementService extends JQuery {}
interface IQResolveReject<T> {
(): void;
(value: T): void;
}
/**
* $q - service in module ng
* A promise/deferred implementation inspired by Kris Kowal's Q.
* See http://docs.angularjs.org/api/ng/service/$q
*/
interface IQService {
new <T>(resolver: (resolve: IQResolveReject<T>) => any): IPromise<T>;
new <T>(resolver: (resolve: IQResolveReject<T>, reject: IQResolveReject<any>) => any): IPromise<T>;
<T>(resolver: (resolve: IQResolveReject<T>) => any): IPromise<T>;
<T>(resolver: (resolve: IQResolveReject<T>, reject: IQResolveReject<any>) => any): IPromise<T>;
/**
* Combines multiple promises into a single promise that is resolved when all of the input promises are resolved.
*
* Returns a single promise that will be resolved with an array of values, each value corresponding to the promise at the same index in the promises array. If any of the promises is resolved with a rejection, this resulting promise will be rejected with the same rejection value.
*
* @param promises An array of promises.
*/
all<T>(promises: IPromise<any>[]): IPromise<T[]>;
/**
* Combines multiple promises into a single promise that is resolved when all of the input promises are resolved.
*
* Returns a single promise that will be resolved with a hash of values, each value corresponding to the promise at the same key in the promises hash. If any of the promises is resolved with a rejection, this resulting promise will be rejected with the same rejection value.
*
* @param promises A hash of promises.
*/
all(promises: { [id: string]: IPromise<any>; }): IPromise<{ [id: string]: any; }>;
all<T extends {}>(promises: { [id: string]: IPromise<any>; }): IPromise<T>;
/**
* Creates a Deferred object which represents a task which will finish in the future.
*/
defer<T>(): IDeferred<T>;
/**
* Creates a promise that is resolved as rejected with the specified reason. This api should be used to forward rejection in a chain of promises. If you are dealing with the last promise in a promise chain, you don't need to worry about it.
*
* When comparing deferreds/promises to the familiar behavior of try/catch/throw, think of reject as the throw keyword in JavaScript. This also means that if you "catch" an error via a promise error callback and you want to forward the error to the promise derived from the current promise, you have to "rethrow" the error by returning a rejection constructed via reject.
*
* @param reason Constant, message, exception or an object representing the rejection reason.
*/
reject(reason?: any): IPromise<any>;
/**
* Wraps an object that might be a value or a (3rd party) then-able promise into a $q promise. This is useful when you are dealing with an object that might or might not be a promise, or if the promise comes from a source that can't be trusted.
*
* @param value Value or a promise
*/
resolve<T>(value: IPromise<T>|T): IPromise<T>;
/**
* Wraps an object that might be a value or a (3rd party) then-able promise into a $q promise. This is useful when you are dealing with an object that might or might not be a promise, or if the promise comes from a source that can't be trusted.
*/
resolve(): IPromise<void>;
/**
* Wraps an object that might be a value or a (3rd party) then-able promise into a $q promise. This is useful when you are dealing with an object that might or might not be a promise, or if the promise comes from a source that can't be trusted.
*
* @param value Value or a promise
*/
when<T>(value: IPromise<T>|T): IPromise<T>;
/**
* Wraps an object that might be a value or a (3rd party) then-able promise into a $q promise. This is useful when you are dealing with an object that might or might not be a promise, or if the promise comes from a source that can't be trusted.
*/
when(): IPromise<void>;
}
interface IPromise<T> {
/**
* Regardless of when the promise was or will be resolved or rejected, then calls one of the success or error callbacks asynchronously as soon as the result is available. The callbacks are called with a single argument: the result or rejection reason. Additionally, the notify callback may be called zero or more times to provide a progress indication, before the promise is resolved or rejected.
* The successCallBack may return IPromise<void> for when a $q.reject() needs to be returned
* This method returns a new promise which is resolved or rejected via the return value of the successCallback, errorCallback. It also notifies via the return value of the notifyCallback method. The promise can not be resolved or rejected from the notifyCallback method.
*/
then<TResult>(successCallback: (promiseValue: T) => IPromise<TResult>|TResult, errorCallback?: (reason: any) => any, notifyCallback?: (state: any) => any): IPromise<TResult>;
/**
* Shorthand for promise.then(null, errorCallback)
*/
catch<TResult>(onRejected: (reason: any) => IPromise<TResult>|TResult): IPromise<TResult>;
/**
* Allows you to observe either the fulfillment or rejection of a promise, but to do so without modifying the final value. This is useful to release resources or do some clean-up that needs to be done whether the promise was rejected or resolved. See the full specification for more information.
*
* Because finally is a reserved word in JavaScript and reserved keywords are not supported as property names by ES3, you'll need to invoke the method like promise['finally'](callback) to make your code IE8 and Android 2.x compatible.
*/
finally(finallyCallback: () => any): IPromise<T>;
}
interface IDeferred<T> {
resolve(value?: T|IPromise<T>): void;
reject(reason?: any): void;
notify(state?: any): void;
promise: IPromise<T>;
}
///////////////////////////////////////////////////////////////////////////
// AnchorScrollService
// see http://docs.angularjs.org/api/ng.$anchorScroll
///////////////////////////////////////////////////////////////////////////
interface IAnchorScrollService {
(): void;
(hash: string): void;
yOffset: any;
}
interface IAnchorScrollProvider extends IServiceProvider {
disableAutoScrolling(): void;
}
/**
* $cacheFactory - service in module ng
*
* Factory that constructs Cache objects and gives access to them.
*
* see https://docs.angularjs.org/api/ng/service/$cacheFactory
*/
interface ICacheFactoryService {
/**
* Factory that constructs Cache objects and gives access to them.
*
* @param cacheId Name or id of the newly created cache.
* @param optionsMap Options object that specifies the cache behavior. Properties:
*
* capacity โ turns the cache into LRU cache.
*/
(cacheId: string, optionsMap?: { capacity?: number; }): ICacheObject;
/**
* Get information about all the caches that have been created.
* @returns key-value map of cacheId to the result of calling cache#info
*/
info(): any;
/**
* Get access to a cache object by the cacheId used when it was created.
*
* @param cacheId Name or id of a cache to access.
*/
get(cacheId: string): ICacheObject;
}
/**
* $cacheFactory.Cache - type in module ng
*
* A cache object used to store and retrieve data, primarily used by $http and the script directive to cache templates and other data.
*
* see https://docs.angularjs.org/api/ng/type/$cacheFactory.Cache
*/
interface ICacheObject {
/**
* Retrieve information regarding a particular Cache.
*/
info(): {
/**
* the id of the cache instance
*/
id: string;
/**
* the number of entries kept in the cache instance
*/
size: number;
//...: any additional properties from the options object when creating the cache.
};
/**
* Inserts a named entry into the Cache object to be retrieved later, and incrementing the size of the cache if the key was not already present in the cache. If behaving like an LRU cache, it will also remove stale entries from the set.
*
* It will not insert undefined values into the cache.
*
* @param key the key under which the cached data is stored.
* @param value the value to store alongside the key. If it is undefined, the key will not be stored.
*/
put<T>(key: string, value?: T): T;
/**
* Retrieves named data stored in the Cache object.
*
* @param key the key of the data to be retrieved
*/
get<T>(key: string): T;
/**
* Removes an entry from the Cache object.
*
* @param key the key of the entry to be removed
*/
remove(key: string): void;
/**
* Clears the cache object of any entries.
*/
removeAll(): void;
/**
* Destroys the Cache object entirely, removing it from the $cacheFactory set.
*/
destroy(): void;
}
///////////////////////////////////////////////////////////////////////////
// CompileService
// see http://docs.angularjs.org/api/ng.$compile
// see http://docs.angularjs.org/api/ng.$compileProvider
///////////////////////////////////////////////////////////////////////////
interface ICompileService {
(element: string, transclude?: ITranscludeFunction, maxPriority?: number): ITemplateLinkingFunction;
(element: Element, transclude?: ITranscludeFunction, maxPriority?: number): ITemplateLinkingFunction;
(element: JQuery, transclude?: ITranscludeFunction, maxPriority?: number): ITemplateLinkingFunction;
}
interface ICompileProvider extends IServiceProvider {
directive(name: string, directiveFactory: Function): ICompileProvider;
// Undocumented, but it is there...
directive(directivesMap: any): ICompileProvider;
aHrefSanitizationWhitelist(): RegExp;
aHrefSanitizationWhitelist(regexp: RegExp): ICompileProvider;
imgSrcSanitizationWhitelist(): RegExp;
imgSrcSanitizationWhitelist(regexp: RegExp): ICompileProvider;
debugInfoEnabled(enabled?: boolean): any;
}
interface ICloneAttachFunction {
// Let's hint but not force cloneAttachFn's signature
(clonedElement?: JQuery, scope?: IScope): any;
}
// This corresponds to the "publicLinkFn" returned by $compile.
interface ITemplateLinkingFunction {
(scope: IScope, cloneAttachFn?: ICloneAttachFunction): IAugmentedJQuery;
}
// This corresponds to $transclude (and also the transclude function passed to link).
interface ITranscludeFunction {
// If the scope is provided, then the cloneAttachFn must be as well.
(scope: IScope, cloneAttachFn: ICloneAttachFunction): IAugmentedJQuery;
// If one argument is provided, then it's assumed to be the cloneAttachFn.
(cloneAttachFn?: ICloneAttachFunction): IAugmentedJQuery;
}
///////////////////////////////////////////////////////////////////////////
// ControllerService
// see http://docs.angularjs.org/api/ng.$controller
// see http://docs.angularjs.org/api/ng.$controllerProvider
///////////////////////////////////////////////////////////////////////////
interface IControllerService {
// Although the documentation doesn't state this, locals are optional
<T>(controllerConstructor: new (...args: any[]) => T, locals?: any, bindToController?: any): T;
<T>(controllerConstructor: Function, locals?: any, bindToController?: any): T;
<T>(controllerName: string, locals?: any, bindToController?: any): T;
}
interface IControllerProvider extends IServiceProvider {
register(name: string, controllerConstructor: Function): void;
register(name: string, dependencyAnnotatedConstructor: any[]): void;
allowGlobals(): void;
}
/**
* HttpService
* see http://docs.angularjs.org/api/ng/service/$http
*/
interface IHttpService {
/**
* Object describing the request to be made and how it should be processed.
*/
<T>(config: IRequestConfig): IHttpPromise<T>;
/**
* Shortcut method to perform GET request.
*
* @param url Relative or absolute URL specifying the destination of the request
* @param config Optional configuration object
*/
get<T>(url: string, config?: IRequestShortcutConfig): IHttpPromise<T>;
/**
* Shortcut method to perform DELETE request.
*
* @param url Relative or absolute URL specifying the destination of the request
* @param config Optional configuration object
*/
delete<T>(url: string, config?: IRequestShortcutConfig): IHttpPromise<T>;
/**
* Shortcut method to perform HEAD request.
*
* @param url Relative or absolute URL specifying the destination of the request
* @param config Optional configuration object
*/
head<T>(url: string, config?: IRequestShortcutConfig): IHttpPromise<T>;
/**
* Shortcut method to perform JSONP request.
*
* @param url Relative or absolute URL specifying the destination of the request
* @param config Optional configuration object
*/
jsonp<T>(url: string, config?: IRequestShortcutConfig): IHttpPromise<T>;
/**
* Shortcut method to perform POST request.
*
* @param url Relative or absolute URL specifying the destination of the request
* @param data Request content
* @param config Optional configuration object
*/
post<T>(url: string, data: any, config?: IRequestShortcutConfig): IHttpPromise<T>;
/**
* Shortcut method to perform PUT request.
*
* @param url Relative or absolute URL specifying the destination of the request
* @param data Request content
* @param config Optional configuration object
*/
put<T>(url: string, data: any, config?: IRequestShortcutConfig): IHttpPromise<T>;
/**
* Shortcut method to perform PATCH request.
*
* @param url Relative or absolute URL specifying the destination of the request
* @param data Request content
* @param config Optional configuration object
*/
patch<T>(url: string, data: any, config?: IRequestShortcutConfig): IHttpPromise<T>;
/**
* Runtime equivalent of the $httpProvider.defaults property. Allows configuration of default headers, withCredentials as well as request and response transformations.
*/
defaults: IHttpProviderDefaults;
/**
* Array of config objects for currently pending requests. This is primarily meant to be used for debugging purposes.
*/
pendingRequests: IRequestConfig[];
}
/**
* Object describing the request to be made and how it should be processed.
* see http://docs.angularjs.org/api/ng/service/$http#usage
*/
interface IRequestShortcutConfig extends IHttpProviderDefaults {
/**
* {Object.<string|Object>}
* Map of strings or objects which will be turned to ?key1=value1&key2=value2 after the url. If the value is not a string, it will be JSONified.
*/
params?: any;
/**
* {string|Object}
* Data to be sent as the request message data.
*/
data?: any;
/**
* Timeout in milliseconds, or promise that should abort the request when resolved.
*/
timeout?: number|IPromise<any>;
/**
* See [XMLHttpRequest.responseType]https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest#xmlhttprequest-responsetype
*/
responseType?: string;
}
/**
* Object describing the request to be made and how it should be processed.
* see http://docs.angularjs.org/api/ng/service/$http#usage
*/
interface IRequestConfig extends IRequestShortcutConfig {
/**
* HTTP method (e.g. 'GET', 'POST', etc)
*/
method: string;
/**
* Absolute or relative URL of the resource that is being requested.
*/
url: string;
}
interface IHttpHeadersGetter {
(): { [name: string]: string; };
(headerName: string): string;
}
interface IHttpPromiseCallback<T> {
(data: T, status: number, headers: IHttpHeadersGetter, config: IRequestConfig): void;
}
interface IHttpPromiseCallbackArg<T> {
data?: T;
status?: number;
headers?: IHttpHeadersGetter;
config?: IRequestConfig;
statusText?: string;
}
interface IHttpPromise<T> extends IPromise<IHttpPromiseCallbackArg<T>> {
success(callback: IHttpPromiseCallback<T>): IHttpPromise<T>;
error(callback: IHttpPromiseCallback<any>): IHttpPromise<T>;
}
// See the jsdoc for transformData() at https://github.com/angular/angular.js/blob/master/src/ng/http.js#L228
interface IHttpRequestTransformer {
(data: any, headersGetter: IHttpHeadersGetter): any;
}
// The definition of fields are the same as IHttpPromiseCallbackArg
interface IHttpResponseTransformer {
(data: any, headersGetter: IHttpHeadersGetter, status: number): any;
}
interface IHttpRequestConfigHeaders {
[requestType: string]: string|(() => string);
common?: string|(() => string);
get?: string|(() => string);
post?: string|(() => string);
put?: string|(() => string);
patch?: string|(() => string);
}
/**
* Object that controls the defaults for $http provider. Not all fields of IRequestShortcutConfig can be configured
* via defaults and the docs do not say which. The following is based on the inspection of the source code.
* https://docs.angularjs.org/api/ng/service/$http#defaults
* https://docs.angularjs.org/api/ng/service/$http#usage
* https://docs.angularjs.org/api/ng/provider/$httpProvider The properties section
*/
interface IHttpProviderDefaults {
/**
* {boolean|Cache}
* If true, a default $http cache will be used to cache the GET request, otherwise if a cache instance built with $cacheFactory, this cache will be used for caching.
*/
cache?: any;
/**
* Transform function or an array of such functions. The transform function takes the http request body and
* headers and returns its transformed (typically serialized) version.
* @see {@link https://docs.angularjs.org/api/ng/service/$http#transforming-requests-and-responses}
*/
transformRequest?: IHttpRequestTransformer |IHttpRequestTransformer[];
/**
* Transform function or an array of such functions. The transform function takes the http response body and
* headers and returns its transformed (typically deserialized) version.
*/
transformResponse?: IHttpResponseTransformer | IHttpResponseTransformer[];
/**
* Map of strings or functions which return strings representing HTTP headers to send to the server. If the
* return value of a function is null, the header will not be sent.
* The key of the map is the request verb in lower case. The "common" key applies to all requests.
* @see {@link https://docs.angularjs.org/api/ng/service/$http#setting-http-headers}
*/
headers?: IHttpRequestConfigHeaders;
/** Name of HTTP header to populate with the XSRF token. */
xsrfHeaderName?: string;
/** Name of cookie containing the XSRF token. */
xsrfCookieName?: string;
/**
* whether to to set the withCredentials flag on the XHR object. See [requests with credentials]https://developer.mozilla.org/en/http_access_control#section_5 for more information.
*/
withCredentials?: boolean;
/**
* A function used to the prepare string representation of request parameters (specified as an object). If
* specified as string, it is interpreted as a function registered with the $injector. Defaults to
* $httpParamSerializer.
*/
paramSerializer?: string | ((obj: any) => string);
}
interface IHttpInterceptor {
request?: (config: IRequestConfig) => IRequestConfig|IPromise<IRequestConfig>;
requestError?: (rejection: any) => any;
response?: <T>(response: IHttpPromiseCallbackArg<T>) => IPromise<T>|T;
responseError?: (rejection: any) => any;
}
interface IHttpInterceptorFactory {
(...args: any[]): IHttpInterceptor;
}
interface IHttpProvider extends IServiceProvider {
defaults: IHttpProviderDefaults;
/**
* Register service factories (names or implementations) for interceptors which are called before and after
* each request.
*/
interceptors: (string|IHttpInterceptorFactory|(string|IHttpInterceptorFactory)[])[];
useApplyAsync(): boolean;
useApplyAsync(value: boolean): IHttpProvider;
/**
*
* @param {boolean=} value If true, `$http` will return a normal promise without the `success` and `error` methods.
* @returns {boolean|Object} If a value is specified, returns the $httpProvider for chaining.
* otherwise, returns the current configured value.
*/
useLegacyPromiseExtensions(value:boolean) : boolean | IHttpProvider;
}
///////////////////////////////////////////////////////////////////////////
// HttpBackendService
// see http://docs.angularjs.org/api/ng.$httpBackend
// You should never need to use this service directly.
///////////////////////////////////////////////////////////////////////////
interface IHttpBackendService {
// XXX Perhaps define callback signature in the future
(method: string, url: string, post?: any, callback?: Function, headers?: any, timeout?: number, withCredentials?: boolean): void;
}
///////////////////////////////////////////////////////////////////////////
// InterpolateService
// see http://docs.angularjs.org/api/ng.$interpolate
// see http://docs.angularjs.org/api/ng.$interpolateProvider
///////////////////////////////////////////////////////////////////////////
interface IInterpolateService {
(text: string, mustHaveExpression?: boolean, trustedContext?: string, allOrNothing?: boolean): IInterpolationFunction;
endSymbol(): string;
startSymbol(): string;
}
interface IInterpolationFunction {
(context: any): string;
}
interface IInterpolateProvider extends IServiceProvider {
startSymbol(): string;
startSymbol(value: string): IInterpolateProvider;
endSymbol(): string;
endSymbol(value: string): IInterpolateProvider;
}
///////////////////////////////////////////////////////////////////////////
// TemplateCacheService
// see http://docs.angularjs.org/api/ng.$templateCache
///////////////////////////////////////////////////////////////////////////
interface ITemplateCacheService extends ICacheObject {}
///////////////////////////////////////////////////////////////////////////
// SCEService
// see http://docs.angularjs.org/api/ng.$sce
///////////////////////////////////////////////////////////////////////////
interface ISCEService {
getTrusted(type: string, mayBeTrusted: any): any;
getTrustedCss(value: any): any;
getTrustedHtml(value: any): any;
getTrustedJs(value: any): any;
getTrustedResourceUrl(value: any): any;
getTrustedUrl(value: any): any;
parse(type: string, expression: string): (context: any, locals: any) => any;
parseAsCss(expression: string): (context: any, locals: any) => any;
parseAsHtml(expression: string): (context: any, locals: any) => any;
parseAsJs(expression: string): (context: any, locals: any) => any;
parseAsResourceUrl(expression: string): (context: any, locals: any) => any;
parseAsUrl(expression: string): (context: any, locals: any) => any;
trustAs(type: string, value: any): any;
trustAsHtml(value: any): any;
trustAsJs(value: any): any;
trustAsResourceUrl(value: any): any;
trustAsUrl(value: any): any;
isEnabled(): boolean;
}
///////////////////////////////////////////////////////////////////////////
// SCEProvider
// see http://docs.angularjs.org/api/ng.$sceProvider
///////////////////////////////////////////////////////////////////////////
interface ISCEProvider extends IServiceProvider {
enabled(value: boolean): void;
}
///////////////////////////////////////////////////////////////////////////
// SCEDelegateService
// see http://docs.angularjs.org/api/ng.$sceDelegate
///////////////////////////////////////////////////////////////////////////
interface ISCEDelegateService {
getTrusted(type: string, mayBeTrusted: any): any;
trustAs(type: string, value: any): any;
valueOf(value: any): any;
}
///////////////////////////////////////////////////////////////////////////
// SCEDelegateProvider
// see http://docs.angularjs.org/api/ng.$sceDelegateProvider
///////////////////////////////////////////////////////////////////////////
interface ISCEDelegateProvider extends IServiceProvider {
resourceUrlBlacklist(blacklist: any[]): void;
resourceUrlWhitelist(whitelist: any[]): void;
resourceUrlBlacklist(): any[];
resourceUrlWhitelist(): any[];
}
/**
* $templateRequest service
* see http://docs.angularjs.org/api/ng/service/$templateRequest
*/
interface ITemplateRequestService {
/**
* Downloads a template using $http and, upon success, stores the
* contents inside of $templateCache.
*
* If the HTTP request fails or the response data of the HTTP request is
* empty then a $compile error will be thrown (unless
* {ignoreRequestError} is set to true).
*
* @param tpl The template URL.
* @param ignoreRequestError Whether or not to ignore the exception
* when the request fails or the template is
* empty.
*
* @return A promise whose value is the template content.
*/
(tpl: string, ignoreRequestError?: boolean): IPromise<string>;
/**
* total amount of pending template requests being downloaded.
* @type {number}
*/
totalPendingRequests: number;
}
///////////////////////////////////////////////////////////////////////////
// Component
// see http://angularjs.blogspot.com.br/2015/11/angularjs-15-beta2-and-14-releases.html
// and http://toddmotto.com/exploring-the-angular-1-5-component-method/
///////////////////////////////////////////////////////////////////////////
/**
* Runtime representation a type that a Component or other object is instances of.
*
* An example of a `Type` is `MyCustomComponent` class, which in JavaScript is be represented by
* the `MyCustomComponent` constructor function.
*/
interface Type extends Function {
}
/**
* `RouteDefinition` defines a route within a {@link RouteConfig} decorator.
*
* Supported keys:
* - `path` or `aux` (requires exactly one of these)
* - `component`, `loader`, `redirectTo` (requires exactly one of these)
* - `name` or `as` (optional) (requires exactly one of these)
* - `data` (optional)
*
* See also {@link Route}, {@link AsyncRoute}, {@link AuxRoute}, and {@link Redirect}.
*/
interface RouteDefinition {
path?: string;
aux?: string;
component?: Type | ComponentDefinition | string;
loader?: Function;
redirectTo?: any[];
as?: string;
name?: string;
data?: any;
useAsDefault?: boolean;
}
/**
* Represents either a component type (`type` is `component`) or a loader function
* (`type` is `loader`).
*
* See also {@link RouteDefinition}.
*/
interface ComponentDefinition {
type: string;
loader?: Function;
component?: Type;
}
/**
* Component definition object (a simplified directive definition object)
*/
interface IComponentOptions {
/**
* Controller constructor function that should be associated with newly created scope or the name of a registered
* controller if passed as a string. Empty function by default.
*/
controller?: any;
/**
* An identifier name for a reference to the controller. If present, the controller will be published to scope under
* the controllerAs name. If not present, this will default to be the same as the component name.
*/
controllerAs?: string;
/**
* html template as a string or a function that returns an html template as a string which should be used as the
* contents of this component. Empty string by default.
* If template is a function, then it is injected with the following locals:
* $element - Current element
* $attrs - Current attributes object for the element
*/
template?: string | Function;
/**
* path or function that returns a path to an html template that should be used as the contents of this component.
* If templateUrl is a function, then it is injected with the following locals:
* $element - Current element
* $attrs - Current attributes object for the element
*/
templateUrl?: string | Function;
/**
* Define DOM attribute binding to component properties. Component properties are always bound to the component
* controller and not to the scope.
*/
bindings?: any;
/**
* Whether transclusion is enabled. Enabled by default.
*/
transclude?: boolean;
require? : Object;
$canActivate?: () => boolean;
$routeConfig?: RouteDefinition[];
}
interface IComponentTemplateFn {
( $element?: IAugmentedJQuery, $attrs?: IAttributes ): string;
}
///////////////////////////////////////////////////////////////////////////
// Directive
// see http://docs.angularjs.org/api/ng.$compileProvider#directive
// and http://docs.angularjs.org/guide/directive
///////////////////////////////////////////////////////////////////////////
interface IDirectiveFactory {
(...args: any[]): IDirective;
}
interface IDirectiveLinkFn {
(
scope: IScope,
instanceElement: IAugmentedJQuery,
instanceAttributes: IAttributes,
controller: {},
transclude: ITranscludeFunction
): void;
}
interface IDirectivePrePost {
pre?: IDirectiveLinkFn;
post?: IDirectiveLinkFn;
}
interface IDirectiveCompileFn {
(
templateElement: IAugmentedJQuery,
templateAttributes: IAttributes,
transclude: ITranscludeFunction
): IDirectivePrePost;
}
interface IDirective {
compile?: IDirectiveCompileFn;
controller?: any;
controllerAs?: string;
bindToController?: boolean|Object;
link?: IDirectiveLinkFn | IDirectivePrePost;
name?: string;
priority?: number;
replace?: boolean;
require? : any;
restrict?: string;
scope?: any;
template?: string | Function;
templateNamespace?: string;
templateUrl?: string | Function;
terminal?: boolean;
transclude?: any;
}
/**
* angular.element
* when calling angular.element, angular returns a jQuery object,
* augmented with additional methods like e.g. scope.
* see: http://docs.angularjs.org/api/angular.element
*/
interface IAugmentedJQueryStatic extends JQueryStatic {
(selector: string, context?: any): IAugmentedJQuery;
(element: Element): IAugmentedJQuery;
(object: {}): IAugmentedJQuery;
(elementArray: Element[]): IAugmentedJQuery;
(object: JQuery): IAugmentedJQuery;
(func: Function): IAugmentedJQuery;
(array: any[]): IAugmentedJQuery;
(): IAugmentedJQuery;
}
interface IAugmentedJQuery extends JQuery {
// TODO: events, how to define?
//$destroy
find(selector: string): IAugmentedJQuery;
find(element: any): IAugmentedJQuery;
find(obj: JQuery): IAugmentedJQuery;
controller(): any;
controller(name: string): any;
injector(): any;
scope(): IScope;
isolateScope(): IScope;
inheritedData(key: string, value: any): JQuery;
inheritedData(obj: { [key: string]: any; }): JQuery;
inheritedData(key?: string): any;
}
///////////////////////////////////////////////////////////////////////////
// AUTO module (angular.js)
///////////////////////////////////////////////////////////////////////////
export module auto {
///////////////////////////////////////////////////////////////////////
// InjectorService
// see http://docs.angularjs.org/api/AUTO.$injector
///////////////////////////////////////////////////////////////////////
interface IInjectorService {
annotate(fn: Function, strictDi?: boolean): string[];
annotate(inlineAnnotatedFunction: any[]): string[];
get<T>(name: string, caller?: string): T;
has(name: string): boolean;
instantiate<T>(typeConstructor: Function, locals?: any): T;
invoke(inlineAnnotatedFunction: any[]): any;
invoke(func: Function, context?: any, locals?: any): any;
strictDi: boolean;
}
///////////////////////////////////////////////////////////////////////
// ProvideService
// see http://docs.angularjs.org/api/AUTO.$provide
///////////////////////////////////////////////////////////////////////
interface IProvideService {
// Documentation says it returns the registered instance, but actual
// implementation does not return anything.
// constant(name: string, value: any): any;
/**
* Register a constant service, such as a string, a number, an array, an object or a function, with the $injector. Unlike value it can be injected into a module configuration function (see config) and it cannot be overridden by an Angular decorator.
*
* @param name The name of the constant.
* @param value The constant value.
*/
constant(name: string, value: any): void;
/**
* Register a service decorator with the $injector. A service decorator intercepts the creation of a service, allowing it to override or modify the behaviour of the service. The object returned by the decorator may be the original service, or a new service object which replaces or wraps and delegates to the original service.
*
* @param name The name of the service to decorate.
* @param decorator This function will be invoked when the service needs to be instantiated and should return the decorated service instance. The function is called using the injector.invoke method and is therefore fully injectable. Local injection arguments:
*
* $delegate - The original service instance, which can be monkey patched, configured, decorated or delegated to.
*/
decorator(name: string, decorator: Function): void;
/**
* Register a service decorator with the $injector. A service decorator intercepts the creation of a service, allowing it to override or modify the behaviour of the service. The object returned by the decorator may be the original service, or a new service object which replaces or wraps and delegates to the original service.
*
* @param name The name of the service to decorate.
* @param inlineAnnotatedFunction This function will be invoked when the service needs to be instantiated and should return the decorated service instance. The function is called using the injector.invoke method and is therefore fully injectable. Local injection arguments:
*
* $delegate - The original service instance, which can be monkey patched, configured, decorated or delegated to.
*/
decorator(name: string, inlineAnnotatedFunction: any[]): void;
factory(name: string, serviceFactoryFunction: Function): IServiceProvider;
factory(name: string, inlineAnnotatedFunction: any[]): IServiceProvider;
provider(name: string, provider: IServiceProvider): IServiceProvider;
provider(name: string, serviceProviderConstructor: Function): IServiceProvider;
service(name: string, constructor: Function): IServiceProvider;
service(name: string, inlineAnnotatedFunction: any[]): IServiceProvider;
value(name: string, value: any): IServiceProvider;
}
}
}
| {
"pile_set_name": "Github"
} |
sheet.fx.getRemoteCell = function(sheet, address){
var identifier = $(sheet).attr('data-calx-identifier');
if(!identifier || typeof(calx.sheetRegistry[identifier]) == 'undefined'){
return false;
}
return calx.sheetRegistry[identifier].getCell(address);
};
| {
"pile_set_name": "Github"
} |
import { IBounding } from '../interfaces';
const normalizeCssColor = require('normalize-css-color');
export interface IRGBA { r: number; g: number; b: number; a: number; }
export function safeToLower(input: string | any): string | any {
if (typeof input === 'string') {
return input.toLowerCase();
}
return input;
}
/**
* Rounds a number to a float with default 4 digits after the comma
*
* @param num number
* @param round number -> Digits after the comma
*/
export function round(num: number, round: number = 4) {
return parseFloat(num.toFixed(round));
}
/**
* Check if the values of an array are equal
*
* @param array any
*/
export function arrayContentEquals(array: any[]) {
if (!array.length) {
return true;
}
return !array.filter((element) => {
return element !== array[0];
}).length;
}
export function boundingClientRectToBounding(bcr: ClientRect | DOMRect): IBounding {
return {
height: Math.round(bcr.height),
width: Math.round(bcr.width),
x: Math.round(bcr.left),
y: Math.round(bcr.top),
};
}
export function cssToRGBA(input: string | any): IRGBA {
const nullableColor = normalizeCssColor(safeToLower(input));
const colorInt = nullableColor === null ? 0x00000000 : nullableColor;
return normalizeCssColor.rgba(colorInt);
}
export function parseBorderRadius(borderRadius, width, height) {
const matches = borderRadius.match(/^([0-9.]+)(.+)$/);
// Sketch uses 'px' units for border radius, so we need to convert % to px
if (matches && matches[2] === '%') {
const baseVal = Math.max(width, height);
const percentageApplied = baseVal * (parseInt(matches[1], 10) / 100);
return Math.round(percentageApplied);
}
return parseInt(borderRadius, 10);
}
export function calcPadding(padding: string, bcr: IBounding): IBounding {
const spaces = padding.split(' ');
const bounding = { ...bcr, x: 0, y: 0 };
switch (spaces.length) {
case 1:
bounding.x = parseInt(spaces[0], 10);
bounding.y = parseInt(spaces[0], 10);
bounding.height -= parseInt(spaces[0], 10) * 2;
bounding.width -= parseInt(spaces[0], 10) * 2; break;
case 2:
bounding.x = parseInt(spaces[1], 10);
bounding.y = parseInt(spaces[0], 10);
bounding.height -= parseInt(spaces[0], 10) * 2;
bounding.width -= parseInt(spaces[1], 10) * 2; break;
case 3:
bounding.x = parseInt(spaces[1], 10);
bounding.y = parseInt(spaces[0], 10);
bounding.height -= parseInt(spaces[0], 10) + parseInt(spaces[2], 10);
bounding.width -= parseInt(spaces[1], 10) * 2; break;
case 4:
bounding.x = parseInt(spaces[3], 10);
bounding.y = parseInt(spaces[0], 10);
bounding.height -= parseInt(spaces[0], 10) + parseInt(spaces[2], 10);
bounding.width -= parseInt(spaces[1], 10) + parseInt(spaces[3], 10); break;
}
return bounding;
}
/**
* Compares two maps if the key value pares are equal
*
* @param map1 Map<any, any>
* @param map2 Map<any, any>
* @returns boolean
*/
export function compareMaps(map1: Map<any, any>, map2: Map<any, any>): boolean {
if (map1.size !== map2.size) {
return false;
}
let testVal;
for (const [key, val] of map1) {
testVal = map2.get(key);
// in cases of an undefined value, make sure the key
// actually exists on the object so there are no false positives
if (testVal !== val || (testVal === undefined && !map2.has(key))) {
return false;
}
}
return true;
}
| {
"pile_set_name": "Github"
} |
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2006-2008 Benoit Jacob <[email protected]>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#ifndef EIGEN_SWAP_H
#define EIGEN_SWAP_H
namespace Eigen {
/** \class SwapWrapper
* \ingroup Core_Module
*
* \internal
*
* \brief Internal helper class for swapping two expressions
*/
namespace internal {
template<typename ExpressionType>
struct traits<SwapWrapper<ExpressionType> > : traits<ExpressionType> {};
}
template<typename ExpressionType> class SwapWrapper
: public internal::dense_xpr_base<SwapWrapper<ExpressionType> >::type
{
public:
typedef typename internal::dense_xpr_base<SwapWrapper>::type Base;
EIGEN_DENSE_PUBLIC_INTERFACE(SwapWrapper)
typedef typename internal::packet_traits<Scalar>::type Packet;
inline SwapWrapper(ExpressionType& xpr) : m_expression(xpr) {}
inline Index rows() const { return m_expression.rows(); }
inline Index cols() const { return m_expression.cols(); }
inline Index outerStride() const { return m_expression.outerStride(); }
inline Index innerStride() const { return m_expression.innerStride(); }
typedef typename internal::conditional<
internal::is_lvalue<ExpressionType>::value,
Scalar,
const Scalar
>::type ScalarWithConstIfNotLvalue;
inline ScalarWithConstIfNotLvalue* data() { return m_expression.data(); }
inline const Scalar* data() const { return m_expression.data(); }
inline Scalar& coeffRef(Index rowId, Index colId)
{
return m_expression.const_cast_derived().coeffRef(rowId, colId);
}
inline Scalar& coeffRef(Index index)
{
return m_expression.const_cast_derived().coeffRef(index);
}
inline Scalar& coeffRef(Index rowId, Index colId) const
{
return m_expression.coeffRef(rowId, colId);
}
inline Scalar& coeffRef(Index index) const
{
return m_expression.coeffRef(index);
}
template<typename OtherDerived>
void copyCoeff(Index rowId, Index colId, const DenseBase<OtherDerived>& other)
{
OtherDerived& _other = other.const_cast_derived();
eigen_internal_assert(rowId >= 0 && rowId < rows()
&& colId >= 0 && colId < cols());
Scalar tmp = m_expression.coeff(rowId, colId);
m_expression.coeffRef(rowId, colId) = _other.coeff(rowId, colId);
_other.coeffRef(rowId, colId) = tmp;
}
template<typename OtherDerived>
void copyCoeff(Index index, const DenseBase<OtherDerived>& other)
{
OtherDerived& _other = other.const_cast_derived();
eigen_internal_assert(index >= 0 && index < m_expression.size());
Scalar tmp = m_expression.coeff(index);
m_expression.coeffRef(index) = _other.coeff(index);
_other.coeffRef(index) = tmp;
}
template<typename OtherDerived, int StoreMode, int LoadMode>
void copyPacket(Index rowId, Index colId, const DenseBase<OtherDerived>& other)
{
OtherDerived& _other = other.const_cast_derived();
eigen_internal_assert(rowId >= 0 && rowId < rows()
&& colId >= 0 && colId < cols());
Packet tmp = m_expression.template packet<StoreMode>(rowId, colId);
m_expression.template writePacket<StoreMode>(rowId, colId,
_other.template packet<LoadMode>(rowId, colId)
);
_other.template writePacket<LoadMode>(rowId, colId, tmp);
}
template<typename OtherDerived, int StoreMode, int LoadMode>
void copyPacket(Index index, const DenseBase<OtherDerived>& other)
{
OtherDerived& _other = other.const_cast_derived();
eigen_internal_assert(index >= 0 && index < m_expression.size());
Packet tmp = m_expression.template packet<StoreMode>(index);
m_expression.template writePacket<StoreMode>(index,
_other.template packet<LoadMode>(index)
);
_other.template writePacket<LoadMode>(index, tmp);
}
ExpressionType& expression() const { return m_expression; }
protected:
ExpressionType& m_expression;
};
} // end namespace Eigen
#endif // EIGEN_SWAP_H
| {
"pile_set_name": "Github"
} |
//
// NSTimer+YYAdd.h
// YYCategories <https://github.com/ibireme/YYCategories>
//
// Created by ibireme on 14/15/11.
// Copyright (c) 2015 ibireme.
//
// This source code is licensed under the MIT-style license found in the
// LICENSE file in the root directory of this source tree.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/**
Provides extensions for `NSTimer`.
*/
@interface NSTimer (YYAdd)
/**
Creates and returns a new NSTimer object and schedules it on the current run
loop in the default mode.
@discussion After seconds seconds have elapsed, the timer fires,
sending the message aSelector to target.
@param seconds The number of seconds between firings of the timer. If seconds
is less than or equal to 0.0, this method chooses the
nonnegative value of 0.1 milliseconds instead.
@param block The block to invoke when the timer fires. The timer maintains
a strong reference to the block until it (the timer) is invalidated.
@param repeats If YES, the timer will repeatedly reschedule itself until
invalidated. If NO, the timer will be invalidated after it fires.
@return A new NSTimer object, configured according to the specified parameters.
*/
+ (NSTimer *)scheduledTimerWithTimeInterval:(NSTimeInterval)seconds block:(void (^)(NSTimer *timer))block repeats:(BOOL)repeats;
/**
Creates and returns a new NSTimer object initialized with the specified block.
@discussion You must add the new timer to a run loop, using addTimer:forMode:.
Then, after seconds have elapsed, the timer fires, invoking
block. (If the timer is configured to repeat, there is no need
to subsequently re-add the timer to the run loop.)
@param seconds The number of seconds between firings of the timer. If seconds
is less than or equal to 0.0, this method chooses the
nonnegative value of 0.1 milliseconds instead.
@param block The block to invoke when the timer fires. The timer instructs
the block to maintain a strong reference to its arguments.
@param repeats If YES, the timer will repeatedly reschedule itself until
invalidated. If NO, the timer will be invalidated after it fires.
@return A new NSTimer object, configured according to the specified parameters.
*/
+ (NSTimer *)timerWithTimeInterval:(NSTimeInterval)seconds block:(void (^)(NSTimer *timer))block repeats:(BOOL)repeats;
@end
NS_ASSUME_NONNULL_END
| {
"pile_set_name": "Github"
} |
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1989, 1991 Free Software Foundation, Inc.
59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Library General Public License instead.) You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.
To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.
We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.
Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.
Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.
The precise terms and conditions for copying, distribution and
modification follow.
GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.
1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.
You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) You must cause the modified files to carry prominent notices
stating that you changed the files and the date of any change.
b) You must cause any work that you distribute or publish, that in
whole or in part contains or is derived from the Program or any
part thereof, to be licensed as a whole at no charge to all third
parties under the terms of this License.
c) If the modified program normally reads commands interactively
when run, you must cause it, when started running for such
interactive use in the most ordinary way, to print or display an
announcement including an appropriate copyright notice and a
notice that there is no warranty (or else, saying that you provide
a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this
License. (Exception: if the Program itself is interactive but
does not normally print such an announcement, your work based on
the Program is not required to print an announcement.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.
In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:
a) Accompany it with the complete corresponding machine-readable
source code, which must be distributed under the terms of Sections
1 and 2 above on a medium customarily used for software interchange; or,
b) Accompany it with a written offer, valid for at least three
years, to give any third party, for a charge no more than your
cost of physically performing source distribution, a complete
machine-readable copy of the corresponding source code, to be
distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,
c) Accompany it with the information you received as to the offer
to distribute corresponding source code. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form with such
an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.
If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.
4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.
5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.
6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.
7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.
If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.
9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.
10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.
NO WARRANTY
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS
------------
PCRE LICENCE
------------
PCRE is a library of functions to support regular expressions whose syntax
and semantics are as close as possible to those of the Perl 5 language.
Release 6 of PCRE is distributed under the terms of the "BSD" licence, as
specified below. The documentation for PCRE, supplied in the "doc"
directory, is distributed under the same terms as the software itself.
The basic library functions are written in C and are freestanding. Also
included in the distribution is a set of C++ wrapper functions.
THE BASIC LIBRARY FUNCTIONS
---------------------------
Written by: Philip Hazel
Email local part: ph10
Email domain: cam.ac.uk
University of Cambridge Computing Service,
Cambridge, England. Phone: +44 1223 334714.
Copyright (c) 1997-2006 University of Cambridge
All rights reserved.
THE C++ WRAPPER FUNCTIONS
-------------------------
Contributed by: Google Inc.
Copyright (c) 2006, Google Inc.
All rights reserved.
THE "BSD" LICENCE
-----------------
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the University of Cambridge nor the name of Google
Inc. nor the names of their contributors may be used to endorse or
promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
End
| {
"pile_set_name": "Github"
} |
/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
#ifndef _LINUX_KDEV_T_H
#define _LINUX_KDEV_T_H
/*
Some programs want their definitions of MAJOR and MINOR and MKDEV
from the kernel sources. These must be the externally visible ones.
*/
#define MAJOR(dev) ((dev)>>8)
#define MINOR(dev) ((dev) & 0xff)
#define MKDEV(ma,mi) ((ma)<<8 | (mi))
#endif /* _LINUX_KDEV_T_H */
| {
"pile_set_name": "Github"
} |
// Copyright (C) 2016 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.acceptance.api.config;
import static com.google.common.truth.Truth.assertWithMessage;
import static com.google.gerrit.acceptance.AssertUtil.assertPrefs;
import com.google.gerrit.acceptance.AbstractDaemonTest;
import com.google.gerrit.acceptance.NoHttpd;
import com.google.gerrit.extensions.client.DiffPreferencesInfo;
import org.junit.Test;
@NoHttpd
public class DiffPreferencesIT extends AbstractDaemonTest {
@Test
public void getDiffPreferences() throws Exception {
DiffPreferencesInfo result = gApi.config().server().getDefaultDiffPreferences();
assertPrefs(result, DiffPreferencesInfo.defaults());
}
@Test
public void setDiffPreferences() throws Exception {
int newLineLength = DiffPreferencesInfo.defaults().lineLength + 10;
DiffPreferencesInfo update = new DiffPreferencesInfo();
update.lineLength = newLineLength;
DiffPreferencesInfo result = gApi.config().server().setDefaultDiffPreferences(update);
assertWithMessage("lineLength").that(result.lineLength).isEqualTo(newLineLength);
result = gApi.config().server().getDefaultDiffPreferences();
DiffPreferencesInfo expected = DiffPreferencesInfo.defaults();
expected.lineLength = newLineLength;
assertPrefs(result, expected);
}
}
| {
"pile_set_name": "Github"
} |
"""A file that wraps c++ functions used in kernels submodule of grakel"""
# Author: Ioannis Siglidis <[email protected]>
# License: BSD 3 clause"
import numpy as np
import cython
from functools import reduce as freduce
from itertools import combinations
from collections import defaultdict
from numpy import floor, sqrt
cimport numpy as np
from libc.string cimport const_char
from libc.stdlib cimport malloc, free
from grakel.kernels._c_functions.header cimport ArashPartov, sm_core_init
def APHash(word):
"""C++ wrapped implementation of Arash Partov Hashing."""
bs = word.encode('UTF-8')
cdef int length = len(word);
cdef const_char* string = bs;
return ArashPartov(string, length)
def sm_kernel(x, y, kv, ke, k):
"""Calculate the weighted product graph and calculate the sm_kernel.
For a definition of the weighted product graph see
:cite:`Kriege2012SubgraphMK` (p.5, Definition 5).
Parameters
----------
x, y : tuples, size=4
A tuple corresponding to the number of verices,
the edge dictionarie starting from vertices of
index zero, the labels for nodes, the labels for edges.
kv : function
A kernel for vertex labels.
ke : function
A kernel for edge labels.
k : int
The upper bound for the maximum size of subgraphs.
Returns
-------
tv_np : np.array
An array holding values for all clique sizes from 0 to k.
"""
nx, Ex, Lx, Lex = x
ny, Ey, Ly, Ley = y
# Costs for vertices
cv_l = list()
if kv is not None:
# Calculate valid vertices
Vp = list()
# calculate product graph vertex set
nv = 0
for i in range(nx):
for j in range(ny):
value = kv(Lx[i], Ly[j])
if(value != .0):
# add to vertex set
Vp.append((i, j))
# initialise an empty set for neighbors
cv_l.append(value)
nv += 1
else:
Vp = [(i, j) for i in range(nx) for j in range(ny)]
nv = nx*ny
cv_l = iter(1. for _ in range(nv))
# Initialise c arrays.
cdef int *enum
cdef double *cv
cdef double **ce
enum = <int *>malloc(nv*cython.sizeof(int))
ce = <double **>malloc(nv*cython.sizeof(cython.p_double))
cv = <double *>malloc(nv*cython.sizeof(cython.double))
for (i, v) in enumerate(cv_l):
enum[i] = i
cv[i] = v
ce[i] = <double *>malloc(nv*cython.sizeof(cython.double))
with cython.boundscheck(False):
# calculate product graph valid edges
if ke is not None:
for (i, v) in enumerate(Vp):
for (j, w) in enumerate(Vp):
if i == j:
ce[j][i] = .0
break
if v[0] == w[0] or v[1] == w[1]:
value = .0
else:
ea, eb = (v[0], w[0]), (v[1], w[1])
conda, condb = ea not in Ex, eb not in Ey
if conda and condb:
# d-edge
value = -1.
elif conda or condb:
value = .0
else:
# possible c-edge
try:
lea = Lex[ea]
leb = Ley[eb]
except KeyError as key_error:
raise KeyError(key_error +
'\nEdge labels must exist for '
'all edges.')
value = ke(lea, leb)
ce[j][i] = ce[i][j] = value
else:
for (i, v) in enumerate(Vp):
for (j, w) in enumerate(Vp):
if i == j:
ce[j][i] = .0
break
if v[0] == w[0] or v[1] == w[1]:
value = .0
else:
ea, eb = (v[0], w[0]), (v[1], w[1])
conda, condb = ea not in Ex, eb not in Ey
if conda and condb:
# d-edge
value = -1.
elif conda or condb:
value = .0
else:
value = 1.
ce[j][i] = ce[i][j] = value
# Initialize values
cdef np.ndarray[double, ndim=1] tv_np = np.zeros(shape=(k + 1))
cdef double *tv = &tv_np[0]
try:
# Run the core function
sm_core_init(1, enum, nv, k, cv, ce, tv)
tv_np.reshape((k+1, 1))
return tv_np
finally:
# Deallocate memory
free(enum)
free(cv)
for i in range(nv):
free(ce[i])
free(ce)
def k_to_ij_triangular(k, dim):
i = int(dim - 1 - floor(sqrt(-8*k + 4*(dim+1)*dim-7)/2.0 - 0.5))
j = int(k + i - (dim+1)*(dim)//2 + (dim-i+1)*(dim-i)//2)
return (i, j)
def k_to_ij_rectangular(k, dim):
i = k % dim
j = k // dim
return (i, j)
# ConSubg from:
# Karakashian, Shant Kirakos et al. โAn Algorithm for Generating All Connected Subgraphs with k Vertices of a Graph.โ (2013).
def ConSubg(G, k, symmetric):
# G: dict of sets
l = set()
if symmetric:
sG = G
for u in G.keys():
l |= CombinationsWithV(u, k, sG)
sGP = dict()
for v in sG.keys():
if u != v:
sGP[v] = sG[v] - {u}
sG = sGP
else:
for u in G.keys():
l |= CombinationsWithV(u, k, G)
return l
def CombinationsWithV(u, k, G_init):
l = list()
tree = defaultdict(set)
treeL = {0: u}
MarkN = dict()
def CombinationTree(u, k, G):
root = u
l = [set() for i in range(k)]
l[0].add(u)
MarkV = dict()
def BuildTree(nt, depth, k):
# globals l, MarkN, MarkV, tree
l[depth] = set(l[depth-1])
for v in G[treeL[nt]]:
if v != nt and v not in l[depth]:
ntp = len(treeL)
treeL[ntp] = v
tree[nt].add(ntp)
l[depth].add(v)
if not MarkV.get(v, False):
MarkN[ntp], MarkV[v] = True, True
else:
MarkN[ntp] = False
if depth + 1 <= k-1:
BuildTree(ntp, depth + 1, k)
BuildTree(0, 1, k)
def unionProduct(S1, S2):
# globals tree, MarkN
# print("To compare", S1, S2)
if not len(S1):
return set()
elif not len(S2):
return {S1}
else:
return {s1 | s2 for s1 in S1 for s2 in S2 for s1p, s2p in [({treeL[i] for i in s1}, {treeL[i] for i in s2})] if not len(s1p & {treeL[i] for i in s2}) and (any(MarkN[j] for j in s2) or all(not len({treeL[j] for j in tree[i]} & s2p) for i in s1))}
# Memoization
CFM = dict()
def CombinationsFromTree(root, k):
# Globals tree
t = root
lnodesets = set()
if k == 1:
return {frozenset({t})}
for i in range(1, min(len(tree[t]), k - 1) + 1):
for NodeComb in combinations(tree[t], i):
for string in compositions(k - 1, i):
fail = False
S = list()
for pos in range(i):
stRoot = NodeComb[pos]
size = string[pos]
m = CFM.get((stRoot, size), None)
if m is None:
m = CFM[stRoot, size] = CombinationsFromTree(stRoot, size)
S.append(m)
if not len(S[-1]):
fail = True
break
if fail:
continue
for combProduct in freduce(unionProduct, S):
lnodesets.add(frozenset(combProduct | {t}))
return lnodesets
CombinationTree(u, k, G_init)
return {frozenset({treeL[f] for f in fs}) for fs in CombinationsFromTree(0, k)}
def compositions(n, k):
if n < 0 or k < 0:
return
elif k == 0:
if n == 0:
yield []
return
elif k == 1:
yield [n]
return
else:
for i in range(1, n):
for comp in compositions(n-i, k-1):
yield [i] + comp
| {
"pile_set_name": "Github"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.