text
stringlengths 2
100k
| meta
dict |
---|---|
#ifndef BOOST_SMART_PTR_DETAIL_SP_COUNTED_BASE_VACPP_PPC_HPP_INCLUDED
#define BOOST_SMART_PTR_DETAIL_SP_COUNTED_BASE_VACPP_PPC_HPP_INCLUDED
//
// detail/sp_counted_base_vacpp_ppc.hpp - xlC(vacpp) on POWER
// based on: detail/sp_counted_base_w32.hpp
//
// Copyright (c) 2001, 2002, 2003 Peter Dimov and Multi Media Ltd.
// Copyright 2004-2005 Peter Dimov
// Copyright 2006 Michael van der Westhuizen
// Copyright 2012 IBM Corp.
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
//
// Lock-free algorithm by Alexander Terekhov
//
// Thanks to Ben Hitchings for the #weak + (#shared != 0)
// formulation
//
#include <boost/detail/sp_typeinfo.hpp>
extern "builtin" void __lwsync(void);
extern "builtin" void __isync(void);
extern "builtin" int __fetch_and_add(volatile int* addr, int val);
extern "builtin" int __compare_and_swap(volatile int*, int*, int);
namespace boost
{
namespace detail
{
// Atomically perform ++*pw.
// __lwsync() orders all prior storage accesses before the update and
// __isync() prevents later instructions from completing ahead of it,
// giving release/acquire-style fencing around the fetch-and-add on POWER.
inline void atomic_increment( int *pw )
{
    // ++*pw;
    __lwsync();
    __fetch_and_add(pw, 1);
    __isync();
}
// Atomically perform --*pw and return the *new* (decremented) value.
// __fetch_and_add returns the value held *before* the update, hence the
// "originalValue - 1" on return. Fencing as in atomic_increment above.
inline int atomic_decrement( int *pw )
{
    // return --*pw;
    __lwsync();
    int originalValue = __fetch_and_add(pw, -1);
    __isync();
    return (originalValue - 1);
}
// Atomically increment *pw only if it is non-zero.
// Returns 0 when *pw was already 0 (the object has expired and must not be
// revived), otherwise the incremented value.
// NOTE: the loop relies on the xlC builtin __compare_and_swap writing the
// currently observed value back through its second argument on failure, so
// v is refreshed on every unsuccessful CAS iteration — confirm against the
// compiler's builtin documentation if porting.
inline int atomic_conditional_increment( int *pw )
{
    // if( *pw != 0 ) ++*pw;
    // return *pw;
    __lwsync();
    int v = *const_cast<volatile int*>(pw);  // volatile read of the current count
    for (;;)
    // loop until state is known
    {
        if (v == 0) return 0;
        if (__compare_and_swap(pw, &v, v + 1))
        {
            __isync(); return (v + 1);
        }
    }
}
// Shared reference-count control block for boost::shared_ptr/boost::weak_ptr,
// specialised for xlC (VisualAge C++) on POWER.
// use_count_ counts owning (shared) references; weak_count_ counts weak
// references plus one while any shared reference exists (Ben Hitchings'
// formulation noted in the file header).
class sp_counted_base
{
private:

    // non-copyable: each control block's identity must be unique
    sp_counted_base( sp_counted_base const & );
    sp_counted_base & operator= ( sp_counted_base const & );

    int use_count_;        // #shared
    int weak_count_;       // #weak + (#shared != 0)
    char pad[64] __attribute__((__aligned__(64)));
    // pad to prevent false sharing

public:

    // a freshly created control block carries one shared and one weak owner
    sp_counted_base(): use_count_( 1 ), weak_count_( 1 )
    {
    }

    virtual ~sp_counted_base() // nothrow
    {
    }

    // dispose() is called when use_count_ drops to zero, to release
    // the resources managed by *this.
    virtual void dispose() = 0; // nothrow

    // destroy() is called when weak_count_ drops to zero.
    virtual void destroy() // nothrow
    {
        delete this;
    }

    virtual void * get_deleter( sp_typeinfo const & ti ) = 0;
    virtual void * get_untyped_deleter() = 0;

    void add_ref_copy()
    {
        atomic_increment( &use_count_ );
    }

    // conditional increment: only succeeds while the object is still alive
    bool add_ref_lock() // true on success
    {
        return atomic_conditional_increment( &use_count_ ) != 0;
    }

    void release() // nothrow
    {
        if( atomic_decrement( &use_count_ ) == 0 )
        {
            dispose();
            // drop the weak reference implicitly held by the shared count
            weak_release();
        }
    }

    void weak_add_ref() // nothrow
    {
        atomic_increment( &weak_count_ );
    }

    void weak_release() // nothrow
    {
        if( atomic_decrement( &weak_count_ ) == 0 )
        {
            destroy();
        }
    }

    long use_count() const // nothrow
    {
        // unsynchronised volatile read: callers get a snapshot only
        return *const_cast<volatile int*>(&use_count_);
    }
};
} // namespace detail
} // namespace boost
#endif // #ifndef BOOST_SMART_PTR_DETAIL_SP_COUNTED_BASE_VACPP_PPC_HPP_INCLUDED
| {
"pile_set_name": "Github"
} |
/*-
* #%L
* JSQLParser library
* %%
* Copyright (C) 2004 - 2020 JSQLParser
* %%
* Dual licensed under GNU LGPL 2.1 or Apache License 2.0
* #L%
*/
package net.sf.jsqlparser.util;
import java.lang.reflect.Array;
import java.lang.reflect.InvocationTargetException;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.lang3.RandomStringUtils;
/**
* An utility to get a random value for any type given - see
* {@link #getRandomValueForType(Class)}
*
* @author gitmotte
* @see #pushObjects(List)
*/
/**
 * A utility to get a random value for any type given - see
 * {@link #getRandomValueForType(Class)}.
 *
 * @author gitmotte
 * @see #pushObjects(List)
 */
public class RandomUtils {

    private static final Logger LOG = Logger.getLogger(RandomUtils.class.getName());
    private static final Random RANDOM = new Random();

    /**
     * Per-thread registry of sample objects keyed by their concrete class, every
     * implemented interface and every superclass (except {@link Object}).
     * {@code withInitial} guarantees a non-null map, fixing the
     * {@code NullPointerException} previously thrown by
     * {@link #getRandomValueForType(Class)} when {@link #pushObjects(List)} had
     * never been called on the current thread.
     */
    private static final ThreadLocal<Map<Class<?>, Object>> OBJECTS =
            ThreadLocal.withInitial(HashMap::new);

    /**
     * Register models (for use within method {@link #getRandomValueForType(Class)}).
     * Replaces any objects previously registered on the current thread.
     *
     * @param obj sample instances, each registered under its own class, its
     *            implemented interfaces and its superclasses
     */
    public static void pushObjects(List<Object> obj) {
        Map<Class<?>, Object> m = new HashMap<>();
        OBJECTS.set(m);
        obj.stream().forEach(o -> {
            m.put(o.getClass(), o);
            for (Class<?> iface : o.getClass().getInterfaces()) {
                // register object with its implemented interfaces
                // if we need an object for interface requested, an instance is available
                m.put(iface, o);
            }
            Class<?> cls = o.getClass();
            while ((cls = cls.getSuperclass()) != null) {
                if (!Object.class.equals(cls)) {
                    // register object with its parent classes
                    // if we need an object for parent class requested, an instance is available
                    m.put(cls, o);
                }
            }
        });
    }

    /**
     * @param <T>  the requested type
     * @param type the type to produce a value for (boxed or primitive scalar,
     *             String, date/time, array, collection, enum or a model class
     *             registered via {@link #pushObjects(List)})
     * @return a random non-<code>null</code> value for given type or
     *         <code>null</code> if not supported.
     */
    public static <T> T getRandomValueForType(Class<T> type) {
        Object value = null;
        if (Integer.class.equals(type) || int.class.equals(type)) {
            value = RANDOM.nextInt();
        } else if (Long.class.equals(type) || long.class.equals(type)) {
            value = RANDOM.nextLong();
        } else if (Boolean.class.equals(type) || boolean.class.equals(type)) {
            value = RANDOM.nextBoolean();
        } else if (Float.class.equals(type) || float.class.equals(type)) {
            value = RANDOM.nextFloat();
        } else if (Double.class.equals(type) || double.class.equals(type)) {
            value = RANDOM.nextDouble();
        } else if (Byte.class.equals(type) || byte.class.equals(type)) {
            byte[] b = new byte[1];
            RANDOM.nextBytes(b);
            value = b[0];
        } else if (Short.class.equals(type) || short.class.equals(type)) {
            value = (short) RANDOM.nextInt(15);
        } else if (Character.class.equals(type) || char.class.equals(type)) {
            // bug fix: the boxed Character type was previously not recognised here
            value = RandomStringUtils.random(1).toCharArray()[0];
        } else if (Time.class.equals(type)) {
            value = new Time(randomNonNegativeLong());
        } else if (Timestamp.class.equals(type)) {
            value = new Timestamp(randomNonNegativeLong());
        } else if (Date.class.equals(type)) {
            value = new Date(randomNonNegativeLong());
        } else {
            int size = RANDOM.nextInt(10);
            if (String.class.equals(type)) {
                value = RandomStringUtils.random(size);
            } else if (Collection.class.equals(type) || List.class.equals(type)) {
                value = new ArrayList<>();
            } else if (Set.class.equals(type)) {
                value = new HashSet<>();
            } else if (type.isArray()) {
                // NOTE(review): primitive component types (e.g. int[].class) would
                // fail this Object[] cast, as in the original implementation.
                Object[] a = (Object[]) Array.newInstance(type.getComponentType(), size);
                for (int i = 0; i < size; i++) {
                    a[i] = getRandomValueForType(type.getComponentType());
                }
                value = a;
            } else if (Map.class.equals(type)) {
                value = new HashMap<>();
            } else if (LocalDateTime.class.equals(type)) {
                value = LocalDateTime.now();
            } else {
                // try to get an object from the registered test-objects
                value = OBJECTS.get().get(type);
                if (value == null) {
                    if (type.isEnum()) {
                        @SuppressWarnings("unchecked")
                        EnumSet<?> enums = EnumSet.allOf(type.asSubclass(Enum.class));
                        value = new ArrayList<>(enums).get(RANDOM.nextInt(enums.size()));
                    } else {
                        try {
                            value = type.getConstructor().newInstance();
                        } catch (InstantiationException | IllegalAccessException | IllegalArgumentException
                                | InvocationTargetException | NoSuchMethodException | SecurityException e) {
                            // cannot get default instance with empty constructor;
                            // attach the exception to the log record (was dropped before)
                            LOG.log(Level.WARNING,
                                    "cannot get default instance with reflection for type " + type, e);
                        }
                    }
                }
            }
        }
        if (type.isPrimitive()) {
            // bug fix: Class.cast always throws ClassCastException for primitive
            // class tokens (e.g. int.class) even though the boxed value is valid,
            // so return the boxed value directly.
            @SuppressWarnings("unchecked")
            T boxed = (T) value;
            return boxed;
        }
        return type.cast(value);
    }

    /**
     * @return a uniformly distributed non-negative long. Unlike the previous
     *         {@code Math.abs(RANDOM.nextLong())}, this cannot yield a negative
     *         result when the raw value is {@code Long.MIN_VALUE}.
     */
    private static long randomNonNegativeLong() {
        return RANDOM.nextLong() & Long.MAX_VALUE;
    }
}
| {
"pile_set_name": "Github"
} |
{{- /*
Ingress template: rendered only when .Values.ingress.enabled is true.
Produces an extensions/v1beta1 Ingress with optional TLS sections and
user-supplied annotations.
NOTE(review): extensions/v1beta1 Ingress is removed in recent Kubernetes
releases - confirm the target cluster version still serves this API.
*/}}
{{- if .Values.ingress.enabled -}}
{{- $fullName := include "openiban.fullname" . -}}
{{- /*
NOTE(review): $servicePort is captured but never referenced below; the
backend uses the named port "http" instead. Confirm whether servicePort
should render the numeric value or whether the variable can be removed.
*/}}
{{- $servicePort := .Values.service.port -}}
{{- $ingressPath := .Values.ingress.path -}}
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
  name: {{ $fullName }}
  labels:
    app: {{ template "openiban.name" . }}
    chart: {{ template "openiban.chart" . }}
    release: {{ .Release.Name }}
    heritage: {{ .Release.Service }}
{{- with .Values.ingress.annotations }}
  annotations:
{{ toYaml . | indent 4 }}
{{- end }}
spec:
{{- if .Values.ingress.tls }}
  tls:
  {{- range .Values.ingress.tls }}
    - hosts:
      {{- range .hosts }}
        - {{ . }}
      {{- end }}
      secretName: {{ .secretName }}
  {{- end }}
{{- end }}
  rules:
  {{- range .Values.ingress.hosts }}
    - host: {{ . }}
      http:
        paths:
          - path: {{ $ingressPath }}
            backend:
              serviceName: {{ $fullName }}
              servicePort: http
  {{- end }}
{{- end }}
| {
"pile_set_name": "Github"
} |
/*!
* dependencyLibs/inputmask.dependencyLib.jqlite.js
* https://github.com/RobinHerbots/Inputmask
* Copyright (c) 2010 - 2017 Robin Herbots
* Licensed under the MIT license (http://www.opensource.org/licenses/mit-license.php)
* Version: 3.3.11
*/
// Bug fix: the AMD branch requested the module "../global/document]" - the
// stray "]" made it a non-existent module id. The CommonJS branch below
// already used the correct "../global/document"; both now agree.
!function(factory) {
    "function" == typeof define && define.amd ? define([ "jqlite", "../global/window", "../global/document" ], factory) : "object" == typeof exports ? module.exports = factory(require("jqlite"), require("../global/window"), require("../global/document")) : window.dependencyLib = factory(jqlite, window, document);
}(function($, window, document) {
    function indexOf(list, elem) {
        for (var i = 0, len = list.length; i < len; i++) if (list[i] === elem) return i;
        return -1;
    }
    function type(obj) {
        return null == obj ? obj + "" : "object" == typeof obj || "function" == typeof obj ? class2type[class2type.toString.call(obj)] || "object" : typeof obj;
    }
    function isWindow(obj) {
        return null != obj && obj === obj.window;
    }
    function isArraylike(obj) {
        var length = "length" in obj && obj.length, ltype = type(obj);
        return "function" !== ltype && !isWindow(obj) && (!(1 !== obj.nodeType || !length) || ("array" === ltype || 0 === length || "number" == typeof length && length > 0 && length - 1 in obj));
    }
    for (var class2type = {}, classTypes = "Boolean Number String Function Array Date RegExp Object Error".split(" "), nameNdx = 0; nameNdx < classTypes.length; nameNdx++) class2type["[object " + classTypes[nameNdx] + "]"] = classTypes[nameNdx].toLowerCase();
    return $.inArray = function(elem, arr, i) {
        return null == arr ? -1 : indexOf(arr, elem);
    }, $.isFunction = function(obj) {
        return "function" === type(obj);
    }, $.isArray = Array.isArray, $.isPlainObject = function(obj) {
        return "object" === type(obj) && !obj.nodeType && !isWindow(obj) && !(obj.constructor && !class2type.hasOwnProperty.call(obj.constructor.prototype, "isPrototypeOf"));
    }, $.extend = function() {
        var options, name, src, copy, copyIsArray, clone, target = arguments[0] || {}, i = 1, length = arguments.length, deep = !1;
        for ("boolean" == typeof target && (deep = target, target = arguments[i] || {},
        i++), "object" == typeof target || $.isFunction(target) || (target = {}), i === length && (target = this,
        i--); i < length; i++) if (null != (options = arguments[i])) for (name in options) src = target[name],
        target !== (copy = options[name]) && (deep && copy && ($.isPlainObject(copy) || (copyIsArray = $.isArray(copy))) ? (copyIsArray ? (copyIsArray = !1,
        clone = src && $.isArray(src) ? src : []) : clone = src && $.isPlainObject(src) ? src : {},
        target[name] = $.extend(deep, clone, copy)) : void 0 !== copy && (target[name] = copy));
        return target;
    }, $.each = function(obj, callback) {
        var i = 0;
        if (isArraylike(obj)) for (var length = obj.length; i < length && !1 !== callback.call(obj[i], i, obj[i]); i++) ; else for (i in obj) if (!1 === callback.call(obj[i], i, obj[i])) break;
        return obj;
    }, $.map = function(elems, callback) {
        var value, i = 0, length = elems.length, ret = [];
        if (isArraylike(elems)) for (;i < length; i++) null != (value = callback(elems[i], i)) && ret.push(value); else for (i in elems) null != (value = callback(elems[i], i)) && ret.push(value);
        return [].concat(ret);
    }, $.data = function(elem, name, data) {
        return $(elem).data(name, data);
    }, $.Event = $.Event || function(event, params) {
        params = params || {
            bubbles: !1,
            cancelable: !1,
            detail: void 0
        };
        var evt = document.createEvent("CustomEvent");
        return evt.initCustomEvent(event, params.bubbles, params.cancelable, params.detail),
        evt;
    }, $.Event.prototype = window.Event.prototype, $;
});
"pile_set_name": "Github"
} |
<!-- Redirect stub: immediately forwards visitors to the ts_config section of
     the @bazel/typescript package page on npmjs.com via a meta refresh
     (works without JavaScript). -->
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="refresh" content="0; URL='https://www.npmjs.com/package/@bazel/typescript#ts_config'" />
</head>
</html>
| {
"pile_set_name": "Github"
} |
// Test fixture: two connected third-party applications, each carrying the
// OAuth grants the user has consented to (title, grant id, consent timestamp).
// NOTE(review): the shape (id/type/attributes, type "okta_redis_apps")
// appears to mirror an Okta-backed API response - confirm against the real
// endpoint before relying on field semantics.
export const mockConnectedApps = [
  {
    id: '0oa3s6dlvxgsZr62p2p7',
    type: 'okta_redis_apps',
    attributes: {
      title: 'Apple Health',
      logo: 'https://ok6static.oktacdn.com/fs/bco/4/fs06uplrfh5ML4ubr2p7',
      // grants consented to for this app
      grants: [
        {
          title: 'Launch as patient',
          id: 'oag8ffjmglG7PDb3W2p6',
          created: '2019-08-05T18:32:25.000Z',
        },
        {
          title: 'Conditions',
          id: 'oag8ffjmglG7PDb3W2p6',
          created: '2019-08-05T18:32:25.000Z',
        },
      ],
    },
  },
  {
    id: '10oa3s6dlvxgsZr62p2p7',
    type: 'okta_redis_apps',
    attributes: {
      title: 'Test App 2',
      logo: 'https://ok6static.oktacdn.com/fs/bco/4/fs06uplrfh5ML4ubr2p7',
      // grants consented to for this app
      grants: [
        {
          title: 'Launch as patient',
          id: '1oag8ffjmglG7PDb3W2p6',
          created: '2019-08-05T18:32:25.000Z',
        },
        {
          title: 'Conditions',
          id: '1oag8ffjmglG7PDb3W2p6',
          created: '2019-08-05T18:32:25.000Z',
        },
      ],
    },
  },
];
| {
"pile_set_name": "Github"
} |
/**
 * Check whether two DOM node references point to the very same node.
 *
 * @param {HTMLElement} dom1
 * @param {HTMLElement} dom2
 * @return {Boolean} true when both arguments are the identical object.
 */
export default function isDOMEquals(dom1, dom2) {
  const isSameNode = dom1 === dom2;
  return isSameNode;
}
| {
"pile_set_name": "Github"
} |
### YamlMime:ManagedReference
items:
- uid: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult
commentId: T:EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult
id: ProjectionManagementMessage.Command.GetResult
parent: EventStore.Projections.Core.Messages
children:
- EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.#ctor(EventStore.Core.Messaging.IEnvelope,System.String,System.String)
- EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Envelope
- EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.MsgTypeId
- EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Name
- EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Partition
langs:
- csharp
- vb
name: ProjectionManagementMessage.Command.GetResult
nameWithType: ProjectionManagementMessage.Command.GetResult
fullName: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult
type: Class
source:
remote:
path: src/EventStore.Projections.Core/Messages/ProjectionManagementMessage.cs
branch: feature/docs-connection-code
repo: [email protected]:EventStore/EventStore.git
id: GetResult
path: ../EventStore/src/EventStore.Projections.Core/Messages/ProjectionManagementMessage.cs
startLine: 540
assemblies:
- EventStore.Projections.Core
namespace: EventStore.Projections.Core.Messages
syntax:
content: 'public class GetResult : Message'
content.vb: >-
Public Class GetResult
Inherits Message
inheritance:
- System.Object
- EventStore.Core.Messaging.Message
inheritedMembers:
- EventStore.Core.Messaging.Message.NextMsgId
modifiers.csharp:
- public
- class
modifiers.vb:
- Public
- Class
- uid: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.MsgTypeId
commentId: P:EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.MsgTypeId
id: MsgTypeId
parent: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult
langs:
- csharp
- vb
name: MsgTypeId
nameWithType: ProjectionManagementMessage.Command.GetResult.MsgTypeId
fullName: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.MsgTypeId
type: Property
source:
remote:
path: src/EventStore.Projections.Core/Messages/ProjectionManagementMessage.cs
branch: feature/docs-connection-code
repo: [email protected]:EventStore/EventStore.git
id: MsgTypeId
path: ../EventStore/src/EventStore.Projections.Core/Messages/ProjectionManagementMessage.cs
startLine: 543
assemblies:
- EventStore.Projections.Core
namespace: EventStore.Projections.Core.Messages
syntax:
content: public override int MsgTypeId { get; }
parameters: []
return:
type: System.Int32
content.vb: Public Overrides ReadOnly Property MsgTypeId As Integer
overridden: EventStore.Core.Messaging.Message.MsgTypeId
overload: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.MsgTypeId*
modifiers.csharp:
- public
- override
- get
modifiers.vb:
- Public
- Overrides
- ReadOnly
- uid: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.#ctor(EventStore.Core.Messaging.IEnvelope,System.String,System.String)
commentId: M:EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.#ctor(EventStore.Core.Messaging.IEnvelope,System.String,System.String)
id: '#ctor(EventStore.Core.Messaging.IEnvelope,System.String,System.String)'
parent: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult
langs:
- csharp
- vb
name: GetResult(IEnvelope, String, String)
nameWithType: ProjectionManagementMessage.Command.GetResult.GetResult(IEnvelope, String, String)
fullName: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.GetResult(EventStore.Core.Messaging.IEnvelope, System.String, System.String)
type: Constructor
source:
remote:
path: src/EventStore.Projections.Core/Messages/ProjectionManagementMessage.cs
branch: feature/docs-connection-code
repo: [email protected]:EventStore/EventStore.git
id: .ctor
path: ../EventStore/src/EventStore.Projections.Core/Messages/ProjectionManagementMessage.cs
startLine: 551
assemblies:
- EventStore.Projections.Core
namespace: EventStore.Projections.Core.Messages
syntax:
content: public GetResult(IEnvelope envelope, string name, string partition)
parameters:
- id: envelope
type: EventStore.Core.Messaging.IEnvelope
- id: name
type: System.String
- id: partition
type: System.String
content.vb: Public Sub New(envelope As IEnvelope, name As String, partition As String)
overload: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.#ctor*
modifiers.csharp:
- public
modifiers.vb:
- Public
- uid: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Name
commentId: P:EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Name
id: Name
parent: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult
langs:
- csharp
- vb
name: Name
nameWithType: ProjectionManagementMessage.Command.GetResult.Name
fullName: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Name
type: Property
source:
remote:
path: src/EventStore.Projections.Core/Messages/ProjectionManagementMessage.cs
branch: feature/docs-connection-code
repo: [email protected]:EventStore/EventStore.git
id: Name
path: ../EventStore/src/EventStore.Projections.Core/Messages/ProjectionManagementMessage.cs
startLine: 560
assemblies:
- EventStore.Projections.Core
namespace: EventStore.Projections.Core.Messages
syntax:
content: public string Name { get; }
parameters: []
return:
type: System.String
content.vb: Public ReadOnly Property Name As String
overload: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Name*
modifiers.csharp:
- public
- get
modifiers.vb:
- Public
- ReadOnly
- uid: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Envelope
commentId: P:EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Envelope
id: Envelope
parent: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult
langs:
- csharp
- vb
name: Envelope
nameWithType: ProjectionManagementMessage.Command.GetResult.Envelope
fullName: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Envelope
type: Property
source:
remote:
path: src/EventStore.Projections.Core/Messages/ProjectionManagementMessage.cs
branch: feature/docs-connection-code
repo: [email protected]:EventStore/EventStore.git
id: Envelope
path: ../EventStore/src/EventStore.Projections.Core/Messages/ProjectionManagementMessage.cs
startLine: 564
assemblies:
- EventStore.Projections.Core
namespace: EventStore.Projections.Core.Messages
syntax:
content: public IEnvelope Envelope { get; }
parameters: []
return:
type: EventStore.Core.Messaging.IEnvelope
content.vb: Public ReadOnly Property Envelope As IEnvelope
overload: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Envelope*
modifiers.csharp:
- public
- get
modifiers.vb:
- Public
- ReadOnly
- uid: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Partition
commentId: P:EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Partition
id: Partition
parent: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult
langs:
- csharp
- vb
name: Partition
nameWithType: ProjectionManagementMessage.Command.GetResult.Partition
fullName: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Partition
type: Property
source:
remote:
path: src/EventStore.Projections.Core/Messages/ProjectionManagementMessage.cs
branch: feature/docs-connection-code
repo: [email protected]:EventStore/EventStore.git
id: Partition
path: ../EventStore/src/EventStore.Projections.Core/Messages/ProjectionManagementMessage.cs
startLine: 568
assemblies:
- EventStore.Projections.Core
namespace: EventStore.Projections.Core.Messages
syntax:
content: public string Partition { get; }
parameters: []
return:
type: System.String
content.vb: Public ReadOnly Property Partition As String
overload: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Partition*
modifiers.csharp:
- public
- get
modifiers.vb:
- Public
- ReadOnly
references:
- uid: EventStore.Projections.Core.Messages
commentId: N:EventStore.Projections.Core.Messages
name: EventStore.Projections.Core.Messages
nameWithType: EventStore.Projections.Core.Messages
fullName: EventStore.Projections.Core.Messages
- uid: System.Object
commentId: T:System.Object
parent: System
isExternal: true
name: Object
nameWithType: Object
fullName: System.Object
- uid: EventStore.Core.Messaging.Message
commentId: T:EventStore.Core.Messaging.Message
parent: EventStore.Core.Messaging
name: Message
nameWithType: Message
fullName: EventStore.Core.Messaging.Message
- uid: EventStore.Core.Messaging.Message.NextMsgId
commentId: F:EventStore.Core.Messaging.Message.NextMsgId
parent: EventStore.Core.Messaging.Message
name: NextMsgId
nameWithType: Message.NextMsgId
fullName: EventStore.Core.Messaging.Message.NextMsgId
- uid: System
commentId: N:System
isExternal: true
name: System
nameWithType: System
fullName: System
- uid: EventStore.Core.Messaging
commentId: N:EventStore.Core.Messaging
name: EventStore.Core.Messaging
nameWithType: EventStore.Core.Messaging
fullName: EventStore.Core.Messaging
- uid: EventStore.Core.Messaging.Message.MsgTypeId
commentId: P:EventStore.Core.Messaging.Message.MsgTypeId
parent: EventStore.Core.Messaging.Message
name: MsgTypeId
nameWithType: Message.MsgTypeId
fullName: EventStore.Core.Messaging.Message.MsgTypeId
- uid: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.MsgTypeId*
commentId: Overload:EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.MsgTypeId
name: MsgTypeId
nameWithType: ProjectionManagementMessage.Command.GetResult.MsgTypeId
fullName: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.MsgTypeId
- uid: System.Int32
commentId: T:System.Int32
parent: System
isExternal: true
name: Int32
nameWithType: Int32
fullName: System.Int32
- uid: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.#ctor*
commentId: Overload:EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.#ctor
name: GetResult
nameWithType: ProjectionManagementMessage.Command.GetResult.GetResult
fullName: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.GetResult
- uid: EventStore.Core.Messaging.IEnvelope
commentId: T:EventStore.Core.Messaging.IEnvelope
parent: EventStore.Core.Messaging
name: IEnvelope
nameWithType: IEnvelope
fullName: EventStore.Core.Messaging.IEnvelope
- uid: System.String
commentId: T:System.String
parent: System
isExternal: true
name: String
nameWithType: String
fullName: System.String
- uid: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Name*
commentId: Overload:EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Name
name: Name
nameWithType: ProjectionManagementMessage.Command.GetResult.Name
fullName: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Name
- uid: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Envelope*
commentId: Overload:EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Envelope
name: Envelope
nameWithType: ProjectionManagementMessage.Command.GetResult.Envelope
fullName: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Envelope
- uid: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Partition*
commentId: Overload:EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Partition
name: Partition
nameWithType: ProjectionManagementMessage.Command.GetResult.Partition
fullName: EventStore.Projections.Core.Messages.ProjectionManagementMessage.Command.GetResult.Partition
| {
"pile_set_name": "Github"
} |
distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.5.0/apache-maven-3.5.0-bin.zip
| {
"pile_set_name": "Github"
} |
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @format
* @flow
*/
'use strict';
const registerGeneratedViewConfig = require('../../Utilities/registerGeneratedViewConfig');
const requireNativeComponent = require('../../ReactNative/requireNativeComponent');
import type {HostComponent} from '../../Renderer/shims/ReactNativeTypes';
import type {ViewProps} from '../View/ViewPropTypes';
// Static view config for RCTScrollContentView: no events and no extra
// attributes beyond the base view props.
const ScrollContentViewViewConfig = {
  uiViewClassName: 'RCTScrollContentView',
  bubblingEventTypes: {},
  directEventTypes: {},
  validAttributes: {},
};

let ScrollContentViewNativeComponent;
if (global.RN$Bridgeless) {
  // Bridgeless mode: register the static view config and refer to the
  // native component by its string name.
  registerGeneratedViewConfig(
    'RCTScrollContentView',
    ScrollContentViewViewConfig,
  );
  ScrollContentViewNativeComponent = 'RCTScrollContentView';
} else {
  // Bridge mode: resolve the component via requireNativeComponent.
  ScrollContentViewNativeComponent = requireNativeComponent<ViewProps>(
    'RCTScrollContentView',
  );
}

export default ((ScrollContentViewNativeComponent: any): HostComponent<ViewProps>);
| {
"pile_set_name": "Github"
} |
"""
Support for installing and building the "wheel" binary package format.
"""
from __future__ import absolute_import
import compileall
import csv
import errno
import functools
import hashlib
import logging
import os
import os.path
import re
import shutil
import stat
import sys
import tempfile
import warnings
from base64 import urlsafe_b64encode
from email.parser import Parser
from pip._vendor.six import StringIO
import pip
from pip.compat import expanduser
from pip.download import path_to_url, unpack_url
from pip.exceptions import (
InstallationError, InvalidWheelFilename, UnsupportedWheel)
from pip.locations import distutils_scheme, PIP_DELETE_MARKER_FILENAME
from pip import pep425tags
from pip.utils import (
call_subprocess, ensure_dir, captured_stdout, rmtree, read_chunks,
)
from pip.utils.ui import open_spinner
from pip.utils.logging import indent_log
from pip.utils.setuptools_build import SETUPTOOLS_SHIM
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.six.moves import configparser
wheel_ext = '.whl'
VERSION_COMPATIBLE = (1, 0)
logger = logging.getLogger(__name__)
class WheelCache(object):
    """A cache of locally built wheels, consulted on future installs."""

    def __init__(self, cache_dir, format_control):
        """Create a wheel cache.

        :param cache_dir: The root of the cache.
        :param format_control: A pip.index.FormatControl object to limit
            binaries being read from the cache.
        """
        if cache_dir:
            self._cache_dir = expanduser(cache_dir)
        else:
            self._cache_dir = None
        self._format_control = format_control

    def cached_wheel(self, link, package_name):
        """Look up *link* in this cache; see the module-level cached_wheel."""
        return cached_wheel(
            self._cache_dir, link, self._format_control, package_name)
def _cache_for_link(cache_dir, link):
"""
Return a directory to store cached wheels in for link.
Because there are M wheels for any one sdist, we provide a directory
to cache them in, and then consult that directory when looking up
cache hits.
We only insert things into the cache if they have plausible version
numbers, so that we don't contaminate the cache with things that were not
unique. E.g. ./package might have dozens of installs done for it and build
a version of 0.0...and if we built and cached a wheel, we'd end up using
the same wheel even if the source has been edited.
:param cache_dir: The cache_dir being used by pip.
:param link: The link of the sdist for which this will cache wheels.
"""
# We want to generate an url to use as our cache key, we don't want to just
# re-use the URL because it might have other items in the fragment and we
# don't care about those.
key_parts = [link.url_without_fragment]
if link.hash_name is not None and link.hash is not None:
key_parts.append("=".join([link.hash_name, link.hash]))
key_url = "#".join(key_parts)
# Encode our key url with sha224, we'll use this because it has similar
# security properties to sha256, but with a shorter total output (and thus
# less secure). However the differences don't make a lot of difference for
# our use case here.
hashed = hashlib.sha224(key_url.encode()).hexdigest()
# We want to nest the directories some to prevent having a ton of top level
# directories where we might run out of sub directories on some FS.
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
# Inside of the base location for cached wheels, expand our parts and join
# them all together.
return os.path.join(cache_dir, "wheels", *parts)
def cached_wheel(cache_dir, link, format_control, package_name):
    """Return a link to a cached wheel for *link* if one exists, else *link*.

    A lookup only happens for plain sdist artifact links of a named package
    whose format_control permits binaries; otherwise *link* is returned
    unchanged.
    """
    # Nothing to look up when caching is disabled, the link is missing,
    # already a wheel, or not a concrete artifact.
    if not cache_dir or not link or link.is_wheel or not link.is_artifact:
        return link
    if not package_name:
        return link
    canonical_name = canonicalize_name(package_name)
    formats = pip.index.fmt_ctl_formats(format_control, canonical_name)
    if "binary" not in formats:
        return link
    root = _cache_for_link(cache_dir, link)
    try:
        entries = os.listdir(root)
    except OSError as e:
        # A missing cache directory simply means there is no hit.
        if e.errno in (errno.ENOENT, errno.ENOTDIR):
            return link
        raise
    candidates = []
    for entry in entries:
        try:
            wheel = Wheel(entry)
        except InvalidWheelFilename:
            continue
        if not wheel.supported():
            # Built for a different python/arch/etc
            continue
        candidates.append((wheel.support_index_min(), entry))
    if not candidates:
        return link
    # Lowest support index wins: the most specific compatible wheel.
    best = min(candidates)
    return pip.index.Link(path_to_url(os.path.join(root, best[1])))
def rehash(path, algo='sha256', blocksize=1 << 20):
    """Return (digest, length) for the file at *path*.

    :param path: file to hash.
    :param algo: any algorithm name accepted by ``hashlib.new``.
    :param blocksize: chunk size used while reading the file.
    :return: tuple of the RECORD-style ``<algo>=<urlsafe-b64>`` digest string
        (base64 padding stripped) and the file length in bytes.
    """
    h = hashlib.new(algo)
    length = 0
    with open(path, 'rb') as f:
        # Read in fixed-size chunks so large files need not fit in memory.
        # stdlib iter(callable, sentinel) replaces the pip.utils.read_chunks
        # helper with identical behaviour.
        for block in iter(lambda: f.read(blocksize), b''):
            length += len(block)
            h.update(block)
    # Bug fix: the prefix previously hard-coded 'sha256=' even when a
    # different *algo* was requested; use the actual algorithm name.
    digest = algo + '=' + urlsafe_b64encode(
        h.digest()
    ).decode('latin1').rstrip('=')
    return (digest, length)
def open_for_csv(name, mode):
    """Open *name* for use with the csv module in a version-appropriate way.

    Python 2 requires binary mode; Python 3 requires text mode with
    universal-newline translation disabled (``newline=''``).
    """
    if sys.version_info[0] < 3:
        return open(name, mode + 'b')
    return open(name, mode, newline='')
def fix_script(path):
    """Replace a ``#!python`` shebang with ``#!/path/to/python``.

    Return True if the file was changed, False if it had no ``#!python``
    shebang, and None if ``path`` is not a regular file.
    """
    # XXX RECORD hashes will need to be updated
    if not os.path.isfile(path):
        return None
    with open(path, 'rb') as script:
        shebang = script.readline()
        if not shebang.startswith(b'#!python'):
            return False
        interpreter = sys.executable.encode(sys.getfilesystemencoding())
        shebang = b'#!' + interpreter + os.linesep.encode("ascii")
        remainder = script.read()
    with open(path, 'wb') as script:
        script.write(shebang)
        script.write(remainder)
    return True
dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)
\.dist-info$""", re.VERBOSE)
def root_is_purelib(name, wheeldir):
    """
    Return True if the extracted wheel in wheeldir should go into purelib.

    Looks for the distribution's ``.dist-info`` directory and scans its
    WHEEL metadata file for ``Root-Is-Purelib: true``.
    """
    # dist-info directory names use "_" where the project name has "-".
    folded = name.replace("-", "_")
    for entry in os.listdir(wheeldir):
        match = dist_info_re.match(entry)
        if not match or match.group('name') != folded:
            continue
        wheel_metadata = os.path.join(wheeldir, entry, 'WHEEL')
        with open(wheel_metadata) as fh:
            for raw_line in fh:
                if raw_line.lower().rstrip() == "root-is-purelib: true":
                    return True
    return False
def get_entrypoints(filename):
    """Parse an ``entry_points.txt`` file.

    :param filename: path to the entry_points file; if it does not exist,
        two empty dicts are returned.
    :return: a ``(console_scripts, gui_scripts)`` pair of dicts mapping
        entry point names to their target specs.
    """
    if not os.path.exists(filename):
        return {}, {}
    # This is done because you can pass a string to entry_points wrappers which
    # means that they may or may not be valid INI files. The attempt here is to
    # strip leading and trailing whitespace in order to make them valid INI
    # files.
    with open(filename) as fp:
        data = StringIO()
        for line in fp:
            data.write(line.strip())
            data.write("\n")
        data.seek(0)

    cp = configparser.RawConfigParser()
    # Preserve the case of option (entry point) names; the default
    # optionxform lowercases them.
    cp.optionxform = lambda option: option
    # Bug fix: ConfigParser.readfp() was deprecated since Python 3.2 and
    # removed in 3.12; prefer read_file() when available, falling back to
    # readfp() for Python 2.
    reader = getattr(cp, 'read_file', None) or cp.readfp
    reader(data)
    console = {}
    gui = {}
    if cp.has_section('console_scripts'):
        console = dict(cp.items('console_scripts'))
    if cp.has_section('gui_scripts'):
        gui = dict(cp.items('gui_scripts'))
    return console, gui
def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None, isolated=False, prefix=None):
    """Install a wheel that has already been unpacked into ``wheeldir``.

    Copies the wheel contents into the install scheme's directories,
    rewrites script shebangs, (re)generates entry point wrappers, and
    writes updated INSTALLER and RECORD metadata.

    :param name: distribution name, used to locate the .dist-info directory
        and to decide purelib vs platlib placement.
    :param req: the requirement being installed (used in error messages and
        to match the .dist-info directory name).
    :param wheeldir: directory containing the unpacked wheel contents.
    :param user: forwarded to distutils_scheme() when ``scheme`` is None.
    :param home: forwarded to distutils_scheme() when ``scheme`` is None.
    :param root: forwarded to distutils_scheme() when ``scheme`` is None.
    :param pycompile: when True, byte-compile the sources before copying.
    :param scheme: optional pre-computed install scheme dict; computed from
        the other arguments when not given.
    :param isolated: forwarded to distutils_scheme() when ``scheme`` is None.
    :param prefix: forwarded to distutils_scheme() when ``scheme`` is None.
    """
    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    # Pure-Python wheels go to purelib; wheels with compiled extensions
    # go to platlib (decided by the wheel's WHEEL metadata).
    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def normpath(src, p):
        # Path of ``src`` relative to ``p``, always using '/' separators
        # (RECORD entries are '/'-separated regardless of platform).
        return os.path.relpath(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        # Copy the tree at ``source`` into ``dest``.  When ``is_base`` is
        # True this is the wheel root: *.data subtrees are deferred (their
        # names are collected into data_dirs) and the .dist-info directory
        # is remembered in info_dir.  ``fixer`` may rewrite each copied
        # file (e.g. fix_script); ``filter`` may skip files.
        ensure_dir(dest)  # common for the 'include' path
        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        canonicalize_name(s).startswith(
                            canonicalize_name(req.name))):
                    assert not info_dir, ('Multiple .dist-info directories: ' +
                                          destsubdir + ', ' +
                                          ', '.join(info_dir))
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)
                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)
                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))
                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)
                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    # Install each deferred *.data subtree into its scheme directory;
    # the 'scripts' subtree additionally gets shebang fixing and
    # entry-point-wrapper filtering.
    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = set(('', ))

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        if entry.suffix is None:
            raise InstallationError(
                "Invalid script entry point: %s for req: %s - A callable "
                "suffix is required. Cf https://packaging.python.org/en/"
                "latest/distributing.html#console-scripts for more "
                "information." % (entry, req)
            )
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
    # is available.
    #
    # To add the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
    #     not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))

        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))

        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated.extend(
            maker.make_multiple(['%s = %s' % kv for kv in console.items()])
        )

    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    # Record pip as the installer
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed.  The existing RECORD rows are
    # rewritten to their installed locations (rehashing files we changed),
    # then rows are appended for generated files and any remaining
    # installed files, and the new RECORD atomically replaces the old one.
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                writer.writerow((normpath(f, lib_dir), h, l))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)
def _unique(fn):
@functools.wraps(fn)
def unique(*args, **kw):
seen = set()
for item in fn(*args, **kw):
if item not in seen:
seen.add(item)
yield item
return unique
# TODO: this goes somewhere besides the wheel module
@_unique
def uninstallation_paths(dist):
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.pyc

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .pyc.
    """
    from pip.utils import FakeFile  # circular import
    record_rows = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in record_rows:
        path = os.path.join(dist.location, row[0])
        yield path
        if path.endswith('.py'):
            directory, filename = os.path.split(path)
            yield os.path.join(directory, filename[:-3] + '.pyc')
def wheel_version(source_dir):
    """
    Return the Wheel-Version of an extracted wheel, if possible.
    Otherwise, return False if we couldn't parse / extract it.

    :param source_dir: directory containing the extracted wheel's metadata.
    :return: a tuple of ints such as ``(1, 0)``, or False on any failure.
    """
    try:
        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]
        wheel_data = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_data)
        version = wheel_data['Wheel-Version'].strip()
        version = tuple(map(int, version.split('.')))
        return version
    except Exception:
        # Bug fix: this was a bare ``except:``, which also swallowed
        # KeyboardInterrupt/SystemExit.  Any lookup/parse failure simply
        # means the version is unknown.
        return False
def check_compatibility(version, name):
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when
    installing a version only minor version ahead (e.g 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )
    rendered = '.'.join(map(str, version))
    if version[0] > VERSION_COMPATIBLE[0]:
        # Newer major series: refuse outright.
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, rendered)
        )
    if version > VERSION_COMPATIBLE:
        # Newer minor version only: install, but warn.
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            rendered,
        )
class Wheel(object):
    """A wheel file"""

    # TODO: maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))
        ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        parsed = self.wheel_file_re.match(filename)
        if parsed is None:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.name = parsed.group('name').replace('_', '-')
        self.version = parsed.group('ver').replace('_', '-')
        self.pyversions = parsed.group('pyver').split('.')
        self.abis = parsed.group('abi').split('.')
        self.plats = parsed.group('plat').split('.')
        # All the tag combinations from this file
        self.file_tags = {
            (py, abi, plat)
            for py in self.pyversions
            for abi in self.abis
            for plat in self.plats
        }

    def support_index_min(self, tags=None):
        """
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list, e.g. if there are 8 supported
        tags and one of the file tags is first in the list, return 0.
        Returns None if the wheel is not supported.
        """
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        indexes = [tags.index(tag) for tag in self.file_tags if tag in tags]
        if not indexes:
            return None
        return min(indexes)

    def supported(self, tags=None):
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        return not self.file_tags.isdisjoint(tags)
class WheelBuilder(object):
    """Build wheels from a RequirementSet."""

    def __init__(self, requirement_set, finder, build_options=None,
                 global_options=None):
        """
        :param requirement_set: supplies the requirements to build, plus the
            wheel cache and the wheel download directory.
        :param finder: used by build() to prepare the requirement files.
        :param build_options: extra args for ``setup.py bdist_wheel``.
        :param global_options: extra args for every ``setup.py`` invocation.
        """
        self.requirement_set = requirement_set
        self.finder = finder
        self._cache_root = requirement_set._wheel_cache._cache_dir
        self._wheel_dir = requirement_set.wheel_download_dir
        self.build_options = build_options or []
        self.global_options = global_options or []

    def _build_one(self, req, output_dir, python_tag=None):
        """Build one wheel.

        :return: The filename of the built wheel, or None if the build failed.
        """
        tempd = tempfile.mkdtemp('pip-wheel-')
        try:
            if self.__build_one(req, tempd, python_tag=python_tag):
                try:
                    wheel_name = os.listdir(tempd)[0]
                    wheel_path = os.path.join(output_dir, wheel_name)
                    shutil.move(os.path.join(tempd, wheel_name), wheel_path)
                    logger.info('Stored in directory: %s', output_dir)
                    return wheel_path
                except Exception:
                    # Bug fix: was a bare ``except:`` which also swallowed
                    # KeyboardInterrupt/SystemExit.  Any failure to move the
                    # built wheel into place is treated as a failed build.
                    pass
            # Ignore return, we can't do anything else useful.
            self._clean_one(req)
            return None
        finally:
            rmtree(tempd)

    def _base_setup_args(self, req):
        # -u so the subprocess output is unbuffered; SETUPTOOLS_SHIM runs
        # the requirement's setup.py under setuptools.
        return [
            sys.executable, "-u", '-c',
            SETUPTOOLS_SHIM % req.setup_py
        ] + list(self.global_options)

    def __build_one(self, req, tempd, python_tag=None):
        """Run ``setup.py bdist_wheel`` for req into ``tempd``.

        :return: True if the subprocess succeeded, False otherwise.
        """
        base_args = self._base_setup_args(req)

        spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,)
        with open_spinner(spin_message) as spinner:
            logger.debug('Destination directory: %s', tempd)
            wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
                + self.build_options

            if python_tag is not None:
                wheel_args += ["--python-tag", python_tag]

            try:
                call_subprocess(wheel_args, cwd=req.setup_py_dir,
                                show_stdout=False, spinner=spinner)
                return True
            except Exception:
                # Bug fix: was a bare ``except:``; a failed build should be
                # logged, but KeyboardInterrupt/SystemExit must propagate
                # instead of being reported as a build failure.
                spinner.finish("error")
                logger.error('Failed building wheel for %s', req.name)
                return False

    def _clean_one(self, req):
        """Run ``setup.py clean --all`` for req; return True on success."""
        base_args = self._base_setup_args(req)

        logger.info('Running setup.py clean for %s', req.name)
        clean_args = base_args + ['clean', '--all']
        try:
            call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
            return True
        except Exception:
            # Bug fix: was a bare ``except:`` (see __build_one).
            logger.error('Failed cleaning build dir for %s', req.name)
            return False

    def build(self, autobuilding=False):
        """Build wheels.

        :param autobuilding: when True, wheels are built into the wheel
            cache as part of installation (and each built wheel replaces
            the sdist it was built from); when False, wheels are built
            into ``self._wheel_dir`` because the user requested them.
        :return: True if all the wheels built correctly.
        """
        assert self._wheel_dir or (autobuilding and self._cache_root)
        # unpack sdists and constructs req set
        self.requirement_set.prepare_files(self.finder)

        reqset = self.requirement_set.requirements.values()

        # Decide which requirements actually need a wheel built.
        buildset = []
        for req in reqset:
            if req.constraint:
                continue
            if req.is_wheel:
                if not autobuilding:
                    logger.info(
                        'Skipping %s, due to already being wheel.', req.name)
            elif autobuilding and req.editable:
                pass
            elif autobuilding and req.link and not req.link.is_artifact:
                pass
            elif autobuilding and not req.source_dir:
                pass
            else:
                if autobuilding:
                    link = req.link
                    base, ext = link.splitext()
                    if pip.index.egg_info_matches(base, None, link) is None:
                        # Doesn't look like a package - don't autobuild a wheel
                        # because we'll have no way to lookup the result sanely
                        continue
                    if "binary" not in pip.index.fmt_ctl_formats(
                            self.finder.format_control,
                            canonicalize_name(req.name)):
                        logger.info(
                            "Skipping bdist_wheel for %s, due to binaries "
                            "being disabled for it.", req.name)
                        continue
                buildset.append(req)

        if not buildset:
            return True

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for req in buildset]),
        )
        with indent_log():
            build_success, build_failure = [], []
            for req in buildset:
                python_tag = None
                if autobuilding:
                    python_tag = pep425tags.implementation_tag
                    output_dir = _cache_for_link(self._cache_root, req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.requirement_set.build_dir)
                        # Update the link for this.
                        req.link = pip.index.Link(
                            path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=self.requirement_set.session)
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return True if all builds were successful
        return len(build_failure) == 0
| {
"pile_set_name": "Github"
} |
const _ = require('underscore');
const Backbone = require('backbone');
const UserModel = require('dashboard/data/user-model');
const checkAndBuildOpts = require('builder/helpers/required-opts');
// Options that initialize() requires to be present; checkAndBuildOpts
// validates them and exposes each on the instance with a leading underscore
// (initialize() below reads `this._organization`).
const REQUIRED_OPTS = [
  'organization',
  'configModel'
];
// helper to manage organization users
module.exports = Backbone.Collection.extend({
model: UserModel,
_DEFAULT_EXCLUDE_CURRENT_USER: true,
url: function () {
return '/api/v1/organization/' + this.organization.id + '/users';
},
initialize: function (models, options) {
checkAndBuildOpts(options, REQUIRED_OPTS, this);
this.organization = this._organization;
this.currentUserId = options.currentUserId;
this._excludeCurrentUser = this._DEFAULT_EXCLUDE_CURRENT_USER;
},
comparator: function (model) {
return model.get('username');
},
excludeCurrentUser: function (exclude) {
exclude = !!exclude;
this._excludeCurrentUser = exclude;
if (exclude && !this.currentUserId) {
console.error('set excludeCurrentUser to true, but there is no current user id set to exclude!');
}
},
restoreExcludeCurrentUser: function () {
this.excludeCurrentUser(this._DEFAULT_EXCLUDE_CURRENT_USER);
},
parse: function (r) {
this.total_entries = r.total_entries;
this.total_user_entries = r.total_user_entries;
return _.reduce(r.users, function (memo, user) {
if (this._excludeCurrentUser && user.id === this.currentUserId) {
this.total_user_entries--;
this.total_entries--;
} else {
memo.push(user);
}
return memo;
}, [], this);
},
// @return {Number, undefined} may be undefined until a first fetch is done
totalCount: function () {
return this.total_user_entries;
}
});
| {
"pile_set_name": "Github"
} |
// Test-runner configuration (AVA-style options).
export default {
  // Stop the whole run on the first failing test.
  failFast: true,
  // Glob selecting the test files to execute.
  files: ['src/**/__tests__/**.js'],
  // Module loaded before tests run; presumably sets up a browser-like
  // environment — confirm in tests/helpers/browser-env.js.
  require: ['./tests/helpers/browser-env.js']
};
| {
"pile_set_name": "Github"
} |
var traverse = require('../');
var assert = require('assert');
exports.subexpr = function () {
var obj = [ 'a', 4, 'b', 5, 'c', 6 ];
var r = traverse(obj).map(function (x) {
if (typeof x === 'number') {
this.update([ x - 0.1, x, x + 0.1 ], true);
}
});
assert.deepEqual(obj, [ 'a', 4, 'b', 5, 'c', 6 ]);
assert.deepEqual(r, [
'a', [ 3.9, 4, 4.1 ],
'b', [ 4.9, 5, 5.1 ],
'c', [ 5.9, 6, 6.1 ],
]);
};
exports.block = function () {
var obj = [ [ 1 ], [ 2 ], [ 3 ] ];
var r = traverse(obj).map(function (x) {
if (Array.isArray(x) && !this.isRoot) {
if (x[0] === 5) this.block()
else this.update([ [ x[0] + 1 ] ])
}
});
assert.deepEqual(r, [
[ [ [ [ [ 5 ] ] ] ] ],
[ [ [ [ 5 ] ] ] ],
[ [ [ 5 ] ] ],
]);
};
| {
"pile_set_name": "Github"
} |
/**
* Copyright (C) 2001-2020 by RapidMiner and the contributors
*
* Complete list of developers available at our web site:
*
* http://rapidminer.com
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version 3
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program.
* If not, see http://www.gnu.org/licenses/.
*/
package com.rapidminer.operator.postprocessing;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
/**
 * A convenience class that contains the parameters of a PlattScalingModel.
 *
 * @author Martin Scholz
 */
public class PlattParameters implements Serializable {

    private static final long serialVersionUID = 7677598913328136657L;

    /** First scaling parameter (A). Package-visible; mutated by {@link #readParameters}. */
    double a;

    /** Second scaling parameter (B). Package-visible; mutated by {@link #readParameters}. */
    double b;

    /** Creates parameters with both values left at 0.0 (e.g. before deserialization). */
    public PlattParameters() {}

    /**
     * Creates parameters with the given values.
     *
     * @param a the A parameter
     * @param b the B parameter
     */
    public PlattParameters(double a, double b) {
        this.a = a;
        this.b = b;
    }

    /** @return the A parameter */
    public double getA() {
        return a;
    }

    /** @return the B parameter */
    public double getB() {
        return b;
    }

    /**
     * Reads both parameters (A then B) from the stream, mirroring
     * {@link #writeParameters(ObjectOutputStream)}.
     */
    void readParameters(ObjectInputStream in) throws IOException {
        this.a = in.readDouble();
        this.b = in.readDouble();
    }

    /** Writes both parameters (A then B) to the stream. */
    void writeParameters(ObjectOutputStream out) throws IOException {
        out.writeDouble(this.a);
        out.writeDouble(this.b);
    }

    @Override
    public String toString() {
        return "Platt's scaling parameters: A=" + getA() + ", B=" + getB();
    }
}
| {
"pile_set_name": "Github"
} |
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//
#import <MMCommon/MMObject.h>
#import "WCDataProviderDelegate-Protocol.h"
@class MyWCDB, NSMutableArray, NSMutableDictionary, NSString, WCAdvertiseDataHelper, WCAdvertiseLogicMgr, WCTimelineDataProvider;
@protocol WCTimelineDelegate;
// Timeline manager interface recovered by class-dump (declarations only — no
// method bodies are visible here, so the notes below are inferred from naming
// and should be confirmed against the implementation).  It conforms to
// WCDataProviderDelegate and forwards updates to an id<WCTimelineDelegate>.
@interface WCTimelineMgr : MMObject <WCDataProviderDelegate>
{
    id <WCTimelineDelegate> m_delegate;
    // Backing collections for timeline and advertisement items.
    NSMutableArray *m_cache;
    NSMutableArray *m_timelineDataList;
    NSMutableArray *m_advertiseDataList;
    MyWCDB *m_database;
    WCAdvertiseDataHelper *m_adDataHelper;
    WCTimelineDataProvider *m_dataProvider;
    WCAdvertiseLogicMgr *m_adLogicMgr;
    _Bool isNoMoreData;
    unsigned int m_showFlag;
    NSString *nsLatestID;
    // State for persisting per-item update times (see the
    // try{Save,Load,Delete}DataItemUpdateTime methods below).
    _Bool m_needWriteBackDataItemUpdateTime;
    _Bool m_needLoadDataItemUpdateTime;
    NSMutableDictionary *m_dataUpdateTime;
    NSMutableDictionary *m_dicUpdateDataList;
    NSMutableArray *m_receivedAdList;
}

@property(nonatomic) unsigned int m_showFlag; // @synthesize m_showFlag;
@property(nonatomic) _Bool isNoMoreData; // @synthesize isNoMoreData;
@property(retain, nonatomic) WCAdvertiseLogicMgr *m_adLogicMgr; // @synthesize m_adLogicMgr;
@property(retain, nonatomic) WCTimelineDataProvider *m_dataProvider; // @synthesize m_dataProvider;
@property(retain, nonatomic) WCAdvertiseDataHelper *m_adDataHelper; // @synthesize m_adDataHelper;
@property(retain, nonatomic) MyWCDB *m_database; // @synthesize m_database;
@property(retain, nonatomic) NSMutableArray *m_cache; // @synthesize m_cache;
@property(nonatomic) __weak id <WCTimelineDelegate> m_delegate; // @synthesize m_delegate;
- (void).cxx_destruct;
- (void)onPreDownloadCanvasDataItemResrc:(id)arg1;
// Advertisement data management.
- (_Bool)hasAdvertiseInCache;
- (void)deleteAdvertiseDataItem:(id)arg1;
- (void)setChangedTimeForDatas:(id)arg1 withChangedTime:(unsigned int)arg2;
- (unsigned int)getMinChangedTimeBetween:(id)arg1 andMinID:(id)arg2;
// Persistence of per-item update times.
- (void)trySaveDataItemUpdateTime;
- (void)tryDeleteDataItemUpdateTime;
- (void)tryLoadDataItemUpdateTime;
- (id)pathForDataItemUpdateTime;
// Application/service lifecycle hooks.
- (void)onServiceCleanDataProviderMd5Cache;
- (unsigned long long)onServiceCleanCache;
- (void)onServiceTerminate;
- (void)onServiceEnterBackground;
- (void)onServiceMemoryWarning;
// Cache mutation.
- (id)findDataItemInCacheByItemID:(id)arg1;
- (void)modifyCache:(id)arg1;
- (void)deleteDataItem:(id)arg1 notify:(_Bool)arg2;
- (void)modifyDataItem:(id)arg1 notify:(_Bool)arg2;
- (void)addDataItem:(id)arg1 notify:(_Bool)arg2;
// Data-provider callbacks (WCDataProviderDelegate).
- (void)onReturnServerConfig:(id)arg1;
- (void)onReturnShowFlag:(unsigned int)arg1;
- (void)onNoMoreDataWithRet:(int)arg1;
- (void)onReturnIsAllData:(id)arg1 andAdData:(id)arg2;
- (void)onDataUpdated:(id)arg1 withChangedTime:(unsigned int)arg2;
- (void)onDataUpdated:(id)arg1 maxItemID:(unsigned long long)arg2 minItemID:(unsigned long long)arg3 withChangedTime:(unsigned int)arg4;
- (void)onDataUpdated:(id)arg1 andData:(id)arg2 andAdData:(id)arg3 withChangedTime:(unsigned int)arg4;
- (void)tryRemoveCachesOfLikeUserWithNewTimelineList:(id)arg1;
- (void)removeCachesOfContentWithAdDataItemList:(id)arg1;
// Advertisement bookkeeping.
- (id)getCachedAdItemList;
- (void)removeAllCacheAdvertiseMsgXml;
- (id)getAdMsgXmlList;
- (_Bool)isAdReceived:(id)arg1;
- (_Bool)mergeReceivedAdList:(id)arg1;
- (id)receivedAdList;
- (_Bool)hasExistAdInLocal:(id)arg1;
// Paging / list maintenance.
- (_Bool)updateDataTail:(id)arg1;
- (unsigned int)getNextPageCount;
- (unsigned int)getFirstPageCount;
- (void)dumpLikeAndCommentInfoForAdDatas:(id)arg1;
- (_Bool)updateDataHead:(id)arg1;
- (_Bool)updateData:(id)arg1 WithReferID:(id)arg2;
- (id)getDataItem:(id)arg1 OfIndex:(long long)arg2;
- (long long)countOfDataItem:(id)arg1;
- (void)endSession;
- (void)beginSession;
- (id)safeAddAdvertiseData:(id)arg1;
- (void)removeItemNotInCacheFrom:(id)arg1;
- (unsigned int)getMaxTimeFromDataList:(id)arg1;
- (unsigned int)getMinTimeFromDataList:(id)arg1;
- (void)dealloc;
- (id)init;

// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;

@end
| {
"pile_set_name": "Github"
} |
/**
* Copyright 2020 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { promises as fs } from 'fs';
import matter from 'gray-matter';
import marked from 'marked';
import { relative, sep } from 'path';
import { createRenderer } from './markdown-renderer';
const prefix = 'md:';
/**
 * @typedef {Object} PluginOptions
 * @prop {(path: string, metaData: any, content: string, ctx: any) => void | Promise<void>} [processContent] - Called (and awaited) for each markdown file with its project-relative path, front-matter data, markdown body, and the Rollup plugin context, before rendering.
 */
/**
 * @param {PluginOptions} [options]
 */
export default function markdownPlugin({ processContent } = {}) {
  const renderer = createRenderer();

  return {
    name: 'markdown-plugin',

    async resolveId(id, importer) {
      if (!id.startsWith(prefix)) return;

      const realId = id.slice(prefix.length);
      const resolved = await this.resolve(realId, importer);
      if (!resolved) throw Error(`Cannot find ${realId}`);

      // Add an additional .js to the end so it ends up with .js at the end
      // in the _virtual folder.
      return prefix + resolved.id + '.js';
    },

    async load(id) {
      if (!id.startsWith(prefix)) return;

      // Strip the "md:" prefix and the ".js" suffix added by resolveId.
      const realId = id.slice(prefix.length, -'.js'.length);
      this.addWatchFile(realId);

      const source = await fs.readFile(realId);
      const { content, data } = matter(source);

      if (processContent) {
        // Normalised relative path for the callback.
        const relativePath = relative(process.cwd(), realId)
          .split(sep)
          .join('/');
        await processContent(relativePath, data, content, this);
      }

      const html = marked(content, { renderer });
      return `
export const html = ${JSON.stringify(html)};
export const meta = ${JSON.stringify(data)};
`;
    },
  };
}
| {
"pile_set_name": "Github"
} |
/* Copyright (C) 2017-2020 Andreas Shimokawa, Da Pa, Pavel Elagin, Sami Alaoui
This file is part of Gadgetbridge.
Gadgetbridge is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Gadgetbridge is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
package nodomain.freeyourgadget.gadgetbridge.service.devices.jyou;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCharacteristic;
import android.net.Uri;
import android.widget.Toast;
import org.slf4j.Logger;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.UUID;
import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventBatteryInfo;
import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventVersionInfo;
import nodomain.freeyourgadget.gadgetbridge.devices.jyou.JYouConstants;
import nodomain.freeyourgadget.gadgetbridge.impl.GBDevice;
import nodomain.freeyourgadget.gadgetbridge.model.Alarm;
import nodomain.freeyourgadget.gadgetbridge.model.CalendarEventSpec;
import nodomain.freeyourgadget.gadgetbridge.model.CallSpec;
import nodomain.freeyourgadget.gadgetbridge.model.CannedMessagesSpec;
import nodomain.freeyourgadget.gadgetbridge.model.MusicSpec;
import nodomain.freeyourgadget.gadgetbridge.model.MusicStateSpec;
import nodomain.freeyourgadget.gadgetbridge.model.NotificationSpec;
import nodomain.freeyourgadget.gadgetbridge.model.WeatherSpec;
import nodomain.freeyourgadget.gadgetbridge.service.btle.AbstractBTLEDeviceSupport;
import nodomain.freeyourgadget.gadgetbridge.service.btle.TransactionBuilder;
import nodomain.freeyourgadget.gadgetbridge.util.AlarmUtils;
import nodomain.freeyourgadget.gadgetbridge.util.GB;
import nodomain.freeyourgadget.gadgetbridge.util.StringUtils;
/**
 * Bluetooth LE support class for JYou-based wearables.
 *
 * <p>Handles device initialisation, date/time and alarm synchronisation,
 * notifications and a few simple one-shot commands. Concrete device classes
 * subclass this and override {@link #syncSettings(TransactionBuilder)} to push
 * device-specific settings during initialisation.
 */
public class JYouSupport extends AbstractBTLEDeviceSupport {
    /** Logger supplied by the concrete subclass; validated non-null in the constructor. */
    private final Logger logger;

    /** Characteristic used to write command frames to the band. */
    protected BluetoothGattCharacteristic ctrlCharacteristic = null;

    protected final GBDeviceEventVersionInfo versionCmd = new GBDeviceEventVersionInfo();
    protected final GBDeviceEventBatteryInfo batteryCmd = new GBDeviceEventBatteryInfo();

    /**
     * @param logger logger provided by the concrete device support class; must not be null
     * @throws IllegalArgumentException if {@code logger} is null
     */
    public JYouSupport(Logger logger) {
        super(logger);
        // Fail fast BEFORE the reference is stored, so the field can be final.
        // (Previously the null check only ran after the assignment.)
        if (logger == null) {
            throw new IllegalArgumentException("logger must not be null");
        }
        this.logger = logger;
        addSupportedService(JYouConstants.UUID_SERVICE_JYOU);
    }

    @Override
    protected TransactionBuilder initializeDevice(TransactionBuilder builder) {
        logger.info("Initializing");

        gbDevice.setState(GBDevice.State.INITIALIZING);
        gbDevice.sendDeviceUpdateIntent(getContext());

        BluetoothGattCharacteristic measureCharacteristic = getCharacteristic(JYouConstants.UUID_CHARACTERISTIC_MEASURE);
        ctrlCharacteristic = getCharacteristic(JYouConstants.UUID_CHARACTERISTIC_CONTROL);

        builder.setGattCallback(this);
        // Subscribe to measurement notifications from the band.
        builder.notify(measureCharacteristic, true);

        // Let subclasses queue their device-specific settings.
        syncSettings(builder);

        gbDevice.setState(GBDevice.State.INITIALIZED);
        gbDevice.sendDeviceUpdateIntent(getContext());

        logger.info("Initialization Done");

        return builder;
    }

    @Override
    public boolean onCharacteristicChanged(BluetoothGatt gatt,
                                           BluetoothGattCharacteristic characteristic) {
        // Measurement parsing is handled by subclasses; fall through to the base class.
        return super.onCharacteristicChanged(gatt, characteristic);
    }

    /**
     * Queues a command setting the band's date and time to "now".
     * The four-digit year is split into a century byte and a two-digit-year
     * byte; all values are packed into the two 32-bit argument slots.
     */
    protected void syncDateAndTime(TransactionBuilder builder) {
        Calendar cal = Calendar.getInstance();
        String strYear = String.valueOf(cal.get(Calendar.YEAR));
        byte year1 = (byte) Integer.parseInt(strYear.substring(0, 2)); // century part, e.g. 20
        byte year2 = (byte) Integer.parseInt(strYear.substring(2, 4)); // year within century
        // NOTE(review): Calendar.MONTH is 0-based (January == 0); the firmware
        // apparently expects it that way -- confirm against the protocol spec.
        byte month = (byte) cal.get(Calendar.MONTH);
        byte day = (byte) cal.get(Calendar.DAY_OF_MONTH);
        byte hour = (byte) cal.get(Calendar.HOUR_OF_DAY);
        byte minute = (byte) cal.get(Calendar.MINUTE);
        byte second = (byte) cal.get(Calendar.SECOND);
        byte weekDay = (byte) cal.get(Calendar.DAY_OF_WEEK);

        builder.write(ctrlCharacteristic, commandWithChecksum(
                JYouConstants.CMD_SET_DATE_AND_TIME,
                (year1 << 24) | (year2 << 16) | (month << 8) | day,
                (hour << 24) | (minute << 16) | (second << 8) | weekDay
        ));
    }

    /** Hook for subclasses: queue device-specific settings during initialisation. */
    protected void syncSettings(TransactionBuilder builder) {
    }

    /**
     * Sends a notification to the band as seven 20-byte packets:
     * packet 1 carries the icon, packet 2 the title (16 bytes) and
     * packets 3..7 the message body (80 bytes).
     */
    private void showNotification(byte icon, String title, String message) {
        try {
            TransactionBuilder builder = performInitialized("ShowNotification");

            byte[] titleBytes = stringToUTF8Bytes(title, 16);
            byte[] messageBytes = stringToUTF8Bytes(message, 80);

            for (int i = 1; i <= 7; i++) {
                byte[] currentPacket = new byte[20];
                currentPacket[0] = JYouConstants.CMD_ACTION_SHOW_NOTIFICATION;
                currentPacket[1] = 7;        // total number of packets
                currentPacket[2] = (byte) i; // 1-based index of this packet
                switch (i) {
                    case 1:
                        currentPacket[4] = icon;
                        break;
                    case 2:
                        if (titleBytes != null) {
                            // Payload is split within the packet: 6 bytes at
                            // offset 3 and 10 bytes at offset 10.
                            System.arraycopy(titleBytes, 0, currentPacket, 3, 6);
                            System.arraycopy(titleBytes, 6, currentPacket, 10, 10);
                        }
                        break;
                    default:
                        if (messageBytes != null) {
                            // Each body packet carries 16 of the 80 message bytes,
                            // split the same way as the title packet.
                            System.arraycopy(messageBytes, 16 * (i - 3), currentPacket, 3, 6);
                            System.arraycopy(messageBytes, 6 + 16 * (i - 3), currentPacket, 10, 10);
                        }
                        break;
                }
                builder.write(ctrlCharacteristic, currentPacket);
            }
            performConnected(builder.getTransaction());
        } catch (IOException e) {
            logger.warn(e.getMessage(), e);
        }
    }

    @Override
    public boolean useAutoConnect() {
        return true;
    }

    @Override
    public void onNotification(NotificationSpec notificationSpec) {
        String notificationTitle = StringUtils.getFirstOf(notificationSpec.sender, notificationSpec.title);
        byte icon;
        switch (notificationSpec.type) {
            case GENERIC_SMS:
                icon = JYouConstants.ICON_SMS;
                break;
            case FACEBOOK:
            case FACEBOOK_MESSENGER:
                icon = JYouConstants.ICON_FACEBOOK;
                break;
            case TWITTER:
                icon = JYouConstants.ICON_TWITTER;
                break;
            case WHATSAPP:
                icon = JYouConstants.ICON_WHATSAPP;
                break;
            default:
                // Generic fallback icon for all other notification sources.
                icon = JYouConstants.ICON_LINE;
                break;
        }
        showNotification(icon, notificationTitle, notificationSpec.body);
    }

    @Override
    public void onDeleteNotification(int id) {
        // Not supported by the device.
    }

    @Override
    public void onSetAlarms(ArrayList<? extends Alarm> alarms) {
        try {
            TransactionBuilder builder = performInitialized("SetAlarms");
            // The device only has three alarm slots; additional alarms are
            // ignored. (Previously a fourth alarm triggered an early return
            // that silently dropped the whole transaction, including the
            // first three valid alarms.)
            for (int i = 0; i < alarms.size() && i < 3; i++) {
                final byte cmd;
                switch (i) {
                    case 0:
                        cmd = JYouConstants.CMD_SET_ALARM_1;
                        break;
                    case 1:
                        cmd = JYouConstants.CMD_SET_ALARM_2;
                        break;
                    default:
                        cmd = JYouConstants.CMD_SET_ALARM_3;
                        break;
                }
                Calendar cal = AlarmUtils.toCalendar(alarms.get(i));
                // -1 in both slots disables the alarm.
                builder.write(ctrlCharacteristic, commandWithChecksum(
                        cmd,
                        alarms.get(i).getEnabled() ? cal.get(Calendar.HOUR_OF_DAY) : -1,
                        alarms.get(i).getEnabled() ? cal.get(Calendar.MINUTE) : -1
                ));
            }
            performConnected(builder.getTransaction());
            GB.toast(getContext(), "Alarm settings applied - do note that the current device does not support day specification", Toast.LENGTH_LONG, GB.INFO);
        } catch (IOException e) {
            logger.warn(e.getMessage(), e);
        }
    }

    @Override
    public void onSetTime() {
        try {
            TransactionBuilder builder = performInitialized("SetTime");
            syncDateAndTime(builder);
            performConnected(builder.getTransaction());
        } catch (IOException e) {
            logger.warn(e.getMessage(), e);
        }
    }

    @Override
    public void onSetCallState(CallSpec callSpec) {
        // Only incoming calls are shown; the band cannot display call state changes.
        if (callSpec.command == CallSpec.CALL_INCOMING) {
            showNotification(JYouConstants.ICON_CALL, callSpec.name, callSpec.number);
        }
    }

    @Override
    public void onSetCannedMessages(CannedMessagesSpec cannedMessagesSpec) {
        // Not supported by the device.
    }

    @Override
    public void onSetMusicState(MusicStateSpec stateSpec) {
        // Not supported by the device.
    }

    @Override
    public void onSetMusicInfo(MusicSpec musicSpec) {
        // Not supported by the device.
    }

    @Override
    public void onEnableRealtimeSteps(boolean enable) {
        // The band reports steps and heart rate via the same realtime switch.
        onEnableRealtimeHeartRateMeasurement(enable);
    }

    @Override
    public void onInstallApp(Uri uri) {
        // Not supported by the device.
    }

    @Override
    public void onAppInfoReq() {
        // Not supported by the device.
    }

    @Override
    public void onAppStart(UUID uuid, boolean start) {
        // Not supported by the device.
    }

    @Override
    public void onAppDelete(UUID uuid) {
        // Not supported by the device.
    }

    @Override
    public void onAppConfiguration(UUID appUuid, String config, Integer id) {
        // Not supported by the device.
    }

    @Override
    public void onAppReorder(UUID[] uuids) {
        // Not supported by the device.
    }

    @Override
    public void onFetchRecordedData(int dataTypes) {
        // Not supported by the device.
    }

    @Override
    public void onReset(int flags) {
        try {
            TransactionBuilder builder = performInitialized("Reboot");
            builder.write(ctrlCharacteristic, commandWithChecksum(
                    JYouConstants.CMD_ACTION_REBOOT_DEVICE, 0, 0
            ));
            performConnected(builder.getTransaction());
        } catch (Exception e) {
            logger.warn(e.getMessage(), e);
        }
    }

    @Override
    public void onHeartRateTest() {
        try {
            TransactionBuilder builder = performInitialized("HeartRateTest");
            builder.write(ctrlCharacteristic, commandWithChecksum(
                    JYouConstants.CMD_ACTION_HEARTRATE_SWITCH, 0, 1
            ));
            performConnected(builder.getTransaction());
        } catch (Exception e) {
            logger.warn(e.getMessage(), e);
        }
    }

    @Override
    public void onEnableRealtimeHeartRateMeasurement(boolean enable) {
        try {
            TransactionBuilder builder = performInitialized("RealTimeHeartMeasurement");
            builder.write(ctrlCharacteristic, commandWithChecksum(
                    JYouConstants.CMD_SET_HEARTRATE_AUTO, 0, enable ? 1 : 0
            ));
            // NOTE(review): this queues directly instead of performConnected()
            // like the other commands -- confirm whether that is intentional.
            builder.queue(getQueue());
        } catch (Exception e) {
            logger.warn(e.getMessage(), e);
        }
    }

    @Override
    public void onFindDevice(boolean start) {
        if (start) {
            showNotification(JYouConstants.ICON_QQ, "Gadgetbridge", "Bzzt! Bzzt!");
            GB.toast(getContext(), "As your device doesn't have sound, it will only vibrate 3 times consecutively", Toast.LENGTH_LONG, GB.INFO);
        }
    }

    @Override
    public void onSetConstantVibration(int integer) {
        // Not supported by the device.
    }

    @Override
    public void onScreenshotReq() {
        // Not supported by the device.
    }

    @Override
    public void onEnableHeartRateSleepSupport(boolean enable) {
        // Not supported by the device.
    }

    @Override
    public void onSetHeartRateMeasurementInterval(int seconds) {
        // Not supported by the device.
    }

    @Override
    public void onAddCalendarEvent(CalendarEventSpec calendarEventSpec) {
        // Not supported by the device.
    }

    @Override
    public void onDeleteCalendarEvent(byte type, long id) {
        // Not supported by the device.
    }

    @Override
    public void onSendConfiguration(String config) {
        // Not supported by the device.
    }

    @Override
    public void onReadConfiguration(String config) {
        // Not supported by the device.
    }

    @Override
    public void onTestNewFunction() {
        // Not supported by the device.
    }

    @Override
    public void onSendWeather(WeatherSpec weatherSpec) {
        // Not supported by the device.
    }

    /**
     * Builds a 10-byte command frame: 1 command byte, two 32-bit big-endian
     * argument slots, and a trailing additive checksum over the first 9 bytes.
     */
    protected byte[] commandWithChecksum(byte cmd, int argSlot1, int argSlot2) {
        ByteBuffer buf = ByteBuffer.allocate(10);
        buf.put(cmd);
        buf.putInt(argSlot1);
        buf.putInt(argSlot2);

        byte[] bytesToWrite = buf.array();

        byte checksum = 0;
        for (byte b : bytesToWrite) {
            checksum += b; // byte 9 is still 0 here, so it does not affect the sum
        }
        bytesToWrite[9] = checksum;

        return bytesToWrite;
    }

    /**
     * Encodes {@code src} as UTF-8, truncated at a character boundary so the
     * result occupies exactly {@code byteCount} bytes (zero-padded if the
     * encoded prefix is shorter). Returns null for null input or when no
     * prefix fits.
     */
    private byte[] stringToUTF8Bytes(String src, int byteCount) {
        try {
            if (src == null)
                return null;
            // Trim characters from the end until the encoded form fits.
            for (int i = src.length(); i > 0; i--) {
                String sub = src.substring(0, i);
                byte[] subUTF8 = sub.getBytes("UTF-8");
                if (subUTF8.length == byteCount) {
                    return subUTF8;
                }
                if (subUTF8.length < byteCount) {
                    // Pad with trailing zero bytes up to the requested size.
                    byte[] largerSubUTF8 = new byte[byteCount];
                    System.arraycopy(subUTF8, 0, largerSubUTF8, 0, subUTF8.length);
                    return largerSubUTF8;
                }
            }
        } catch (UnsupportedEncodingException e) {
            logger.warn(e.getMessage(), e);
        }
        return null;
    }
}
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: 4cc8079dbe986da4cadb851189df94df
TextureImporter:
internalIDToNameTable: []
externalObjects: {}
serializedVersion: 10
mipmaps:
mipMapMode: 0
enableMipMap: 1
sRGBTexture: 1
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: -1
aniso: 16
mipBias: -100
wrapU: 1
wrapV: 1
wrapW: 1
nPOTScale: 0
lightmap: 0
compressionQuality: 50
spriteMode: 1
spriteExtrude: 1
spriteMeshType: 1
alignment: 7
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 128
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 1
spriteTessellationDetail: -1
textureType: 8
textureShape: 1
singleChannelComponent: 0
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- serializedVersion: 3
buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
forceMaximumCompressionQuality_BC6H_BC7: 1
- serializedVersion: 3
buildTarget: Standalone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
forceMaximumCompressionQuality_BC6H_BC7: 1
- serializedVersion: 3
buildTarget: Android
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
forceMaximumCompressionQuality_BC6H_BC7: 1
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
bones: []
spriteID: 5e97eb03825dee720800000000000000
internalID: 0
vertices: []
indices:
edges: []
weights: []
secondaryTextures: []
spritePackingTag: 1
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
# Load the ActiveHashRelation initializer from the host app's config and
# execute it in this context (ARH_INITIALIZER is a proc defined there).
require_relative '../../../config/initializers/active_hash_relation'
instance_exec(&(ARH_INITIALIZER))
| {
"pile_set_name": "Github"
} |
// ES6 String.fromCodePoint polyfill for core-js (ES3-compatible syntax).
var $export = require('./_export')
  , toIndex = require('./_to-index')
  , fromCharCode = String.fromCharCode
  , $fromCodePoint = String.fromCodePoint;

// length should be 1, old FF problem
// Force the polyfill when the native function exists but reports the wrong
// arity (an old Firefox bug).
$export($export.S + $export.F * (!!$fromCodePoint && $fromCodePoint.length != 1), 'String', {
  // 21.1.2.2 String.fromCodePoint(...codePoints)
  fromCodePoint: function fromCodePoint(x){ // eslint-disable-line no-unused-vars
    var res   = []
      , aLen  = arguments.length
      , i     = 0
      , code;
    while(aLen > i){
      // Coerce each argument to a number and validate it is a code point
      // (an integer in the range 0..0x10ffff).
      code = +arguments[i++];
      if(toIndex(code, 0x10ffff) !== code)throw RangeError(code + ' is not a valid code point');
      // Code points above 0xffff are encoded as a UTF-16 surrogate pair.
      res.push(code < 0x10000
        ? fromCharCode(code)
        : fromCharCode(((code -= 0x10000) >> 10) + 0xd800, code % 0x400 + 0xdc00)
      );
    } return res.join('');
  }
});
"pile_set_name": "Github"
} |
; RUN: %llc_dwarf -O0 -filetype=obj < %s > %t
; RUN: llvm-dwarfdump -v %t | FileCheck %s
; Generated from the following C++ source code:
;
; struct A {
; virtual void f();
; virtual void g();
; };
;
; void A::f() {}
; void A::g() {}
;
; and manually edited to set virtualIndex attribute on the A::g subprogram to
; 4294967295.
; CHECK: DW_TAG_subprogram [
; CHECK: DW_AT_vtable_elem_location [DW_FORM_exprloc] (DW_OP_constu 0x0)
; CHECK: DW_TAG_subprogram [
; CHECK-NOT: DW_AT_vtable_elem_location
%struct.A = type { i32 (...)** }
@_ZTV1A = unnamed_addr constant [4 x i8*] [i8* null, i8* null, i8* bitcast (void (%struct.A*)* @_ZN1A1fEv to i8*), i8* bitcast (void (%struct.A*)* @_ZN1A1gEv to i8*)], align 8
define void @_ZN1A1fEv(%struct.A* %this) unnamed_addr !dbg !18 {
ret void
}
define void @_ZN1A1gEv(%struct.A* %this) unnamed_addr !dbg !19 {
ret void
}
!llvm.dbg.cu = !{!0}
!llvm.module.flags = !{!20, !21}
!llvm.ident = !{!22}
!0 = distinct !DICompileUnit(language: DW_LANG_C_plus_plus, file: !1, producer: "", isOptimized: false, runtimeVersion: 0, emissionKind: FullDebug, enums: !2, retainedTypes: !3)
!1 = !DIFile(filename: "x", directory: "x")
!2 = !{}
!3 = !{!4}
!4 = !DICompositeType(tag: DW_TAG_structure_type, name: "A", file: !1, line: 1, size: 64, align: 64, elements: !5, vtableHolder: !4, identifier: "_ZTS1A")
!5 = !{!6, !12, !16}
!6 = !DIDerivedType(tag: DW_TAG_member, name: "_vptr$A", scope: !1, file: !1, baseType: !7, size: 64, flags: DIFlagArtificial)
!7 = !DIDerivedType(tag: DW_TAG_pointer_type, baseType: !8, size: 64)
!8 = !DIDerivedType(tag: DW_TAG_pointer_type, name: "__vtbl_ptr_type", baseType: !9, size: 64)
!9 = !DISubroutineType(types: !10)
!10 = !{!11}
!11 = !DIBasicType(name: "int", size: 32, align: 32, encoding: DW_ATE_signed)
!12 = !DISubprogram(name: "f", linkageName: "_ZN1A1fEv", scope: !4, file: !1, line: 2, type: !13, isLocal: false, isDefinition: false, scopeLine: 2, containingType: !4, virtuality: DW_VIRTUALITY_virtual, virtualIndex: 0, flags: DIFlagPrototyped, isOptimized: false)
!13 = !DISubroutineType(types: !14)
!14 = !{null, !15}
!15 = !DIDerivedType(tag: DW_TAG_pointer_type, baseType: !4, size: 64, align: 64, flags: DIFlagArtificial | DIFlagObjectPointer)
!16 = !DISubprogram(name: "g", linkageName: "_ZN1A1gEv", scope: !4, file: !1, line: 3, type: !13, isLocal: false, isDefinition: false, scopeLine: 3, containingType: !4, virtuality: DW_VIRTUALITY_virtual, virtualIndex: 4294967295, flags: DIFlagPrototyped, isOptimized: false)
!18 = distinct !DISubprogram(name: "f", linkageName: "_ZN1A1fEv", scope: !4, file: !1, line: 6, type: !13, isLocal: false, isDefinition: true, scopeLine: 6, flags: DIFlagPrototyped, isOptimized: false, unit: !0, declaration: !12, retainedNodes: !2)
!19 = distinct !DISubprogram(name: "g", linkageName: "_ZN1A1gEv", scope: !4, file: !1, line: 7, type: !13, isLocal: false, isDefinition: true, scopeLine: 7, flags: DIFlagPrototyped, isOptimized: false, unit: !0, declaration: !16, retainedNodes: !2)
!20 = !{i32 2, !"Dwarf Version", i32 4}
!21 = !{i32 2, !"Debug Info Version", i32 3}
!22 = !{!"clang version 3.9.0 (trunk 263469) (llvm/trunk 263156)"}
!23 = !DILocalVariable(name: "this", arg: 1, scope: !18, type: !24, flags: DIFlagArtificial | DIFlagObjectPointer)
!24 = !DIDerivedType(tag: DW_TAG_pointer_type, baseType: !4, size: 64, align: 64)
!25 = !DIExpression()
!26 = !DILocation(line: 0, scope: !18)
!27 = !DILocation(line: 6, column: 14, scope: !18)
!28 = !DILocalVariable(name: "this", arg: 1, scope: !19, type: !24, flags: DIFlagArtificial | DIFlagObjectPointer)
!29 = !DILocation(line: 0, scope: !19)
!30 = !DILocation(line: 7, column: 14, scope: !19)
| {
"pile_set_name": "Github"
} |
{{- if .Values.ingress.enabled }}
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
name: {{ template "opencart.fullname" . }}
labels:
app: "{{ template "opencart.fullname" . }}"
chart: "{{ template "opencart.chart" . }}"
release: {{ .Release.Name | quote }}
heritage: {{ .Release.Service | quote }}
annotations:
{{- if .Values.ingress.certManager }}
kubernetes.io/tls-acme: "true"
{{- end }}
{{- range $key, $value := .Values.ingress.annotations }}
{{ $key }}: {{ $value | quote }}
{{- end }}
spec:
rules:
{{- range .Values.ingress.hosts }}
- host: {{ .name }}
http:
paths:
- path: {{ default "/" .path }}
backend:
serviceName: {{ template "opencart.fullname" $ }}
servicePort: http
{{- end }}
tls:
{{- range .Values.ingress.hosts }}
{{- if .tls }}
- hosts:
{{- if .tlsHosts }}
{{- range $host := .tlsHosts }}
- {{ $host }}
{{- end }}
{{- else }}
- {{ .name }}
{{- end }}
secretName: {{ .tlsSecret }}
{{- end }}
{{- end }}
{{- end }}
| {
"pile_set_name": "Github"
} |
/** Stylus Styles */
.main-content {
text-align center
img {
width 200px
}
.main-title {
color #42b983
}
.view1-title {
color #f06
}
.view2-title {
color #333
}
main a {
margin 0 10px
}
} | {
"pile_set_name": "Github"
} |
diff -r -U2 21a.orig/bin/build.sh 21a/bin/build.sh
--- 21a.orig/bin/build.sh 2015-05-19 07:42:40.000000000 +0600
+++ 21a/bin/build.sh 2016-04-03 20:54:08.597016537 +0600
@@ -40,5 +40,5 @@
ENABLE4="yes"
-version=21b
+version=21c
SRCDIR=src
BINDIR=bin
@@ -145,10 +145,9 @@
$TOOLDIR/load-world.sh $TARGET "$VERSION" || { echo "Failed: $TOOLDIR/load-world.sh"; exit 1; }
- $TARGET/lisp/lisp -batch -noinit -nositeinit $FPU_MODE < /dev/null || { echo "Failed: $TARGET/lisp/lisp -batch -noinit $FPU_MODE"; exit 1; }
+ $TARGET/lisp/lisp -batch -noinit -nositeinit < /dev/null || { echo "Failed: $TARGET/lisp/lisp -batch -noinit"; exit 1; }
return 0;
fi
}
-FPU_MODE=
BUILDWORLD="$TOOLDIR/build-world.sh"
BUILD_POT="yes"
| {
"pile_set_name": "Github"
} |
[Unit]
Description=fast remote file copy program daemon
Documentation=man:rsync(1) man:rsyncd.conf(5)
ConditionPathExists=/etc/rsyncd.conf
[Service]
ExecStart=/usr/bin/rsync --daemon --no-detach
StandardInput=socket
# See also http://0pointer.net/blog/dynamic-users-with-systemd.html
DynamicUser=yes
# Do not establish any new connections:
PrivateNetwork=yes
# Remove all capabilities(7); this daemon does not need any of them:
CapabilityBoundingSet=
# Ensure the service can never gain new privileges:
NoNewPrivileges=yes
# Prohibit access to any kind of namespacing:
RestrictNamespaces=yes
# Prohibit all address families:
# TODO(https://github.com/systemd/systemd/issues/15753): restrict to none
RestrictAddressFamilies=AF_UNIX
# Make home directories inaccessible:
ProtectHome=true
# Make device nodes except for /dev/null, /dev/zero, /dev/full,
# /dev/random and /dev/urandom inaccessible:
PrivateDevices=yes
# Make users other than root and the user for this daemon inaccessible:
PrivateUsers=yes
# Make cgroup file system hierarchy inaccessible:
ProtectControlGroups=yes
# Deny kernel module loading:
ProtectKernelModules=yes
# Make kernel variables (e.g. /proc/sys) read-only:
ProtectKernelTunables=yes
# Filter dangerous system calls. The following is listed as safe basic choice
# in systemd.exec(5):
SystemCallArchitectures=native
SystemCallFilter=@system-service
SystemCallErrorNumber=EPERM
# no-op for a socket-activated unit, but better for systemd-analyze security:
IPAddressDeny=any | {
"pile_set_name": "Github"
} |
/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
// This package has the automatically generated typed clients.
package v1
| {
"pile_set_name": "Github"
} |
#include "../thnets.h"
#include <string.h>
/* Populate a View module descriptor from a loaded Torch nn table.
 * Reads only the "numElements" entry; always succeeds and returns 0. */
int nnload_View(struct module *mod, struct nnmodule *n)
{
	mod->type = MT_View;
	mod->updateOutput = nn_View_updateOutput;
	mod->View.numElements = TableGetNumber(n->table, "numElements");
	return 0;
}
void pyload_View(struct pyfunction *f)
{
f->module.updateOutput = nn_View_updateOutput;
f->module.type = MT_View;
struct View *p = &f->module.View;
struct pyelement *el;
if( (el = findelement(f->params, "sizes", 0)) && el->type == ELTYPE_INTVECT)
p->numElements = el->ivect[1];
}
#ifdef ONNX
/* Configure a View (reshape) module from an ONNX node.
 * The target shape comes either from node input 1 (a constant shape
 * tensor, as in ONNX Reshape) or, failing that, from the node's
 * "shape" attribute. */
void onnxload_View(const void *graph, struct module *m, int nodeidx)
{
	m->updateOutput = nn_View_updateOutput;
	m->type = MT_View;
	struct View *p = &m->View;
	int i;
	p->nDimension = 0;
	p->numElements = -1;	/* -1: infer the element count from the input at run time */
	THFloatTensor *t = onnx_getshapetensor(graph, nodeidx, 1);
	if(t)
	{
		/* Shape supplied as a constant tensor on input 1. */
		p->nDimension = t->nDimension;
		memcpy(p->size, t->size, sizeof(p->size));
		THFloatTensor_free(t);
	} else {
		/* Fall back to the "shape" attribute; index -2 presumably returns
		   the attribute's entry count -- TODO confirm against onnx_getint. */
		p->nDimension = onnx_getint(graph, nodeidx, "shape", -2);
		for(i = 0; i < p->nDimension; i++)
			p->size[i] = onnx_getint(graph, nodeidx, "shape", i);
	}
}
#endif
/* nn.View / Reshape forward pass.
 *
 * Two modes:
 *  - p->nDimension set: reshape to the explicit target shape, where a size
 *    of 0 copies the corresponding input dimension and a negative size
 *    absorbs all remaining input dimensions.
 *  - p->nDimension == 0: flatten to (batchSize, numElements); a
 *    numElements of -1 means "everything except the batch dimension".
 * Returns module->output, which aliases the input's storage. */
THFloatTensor *nn_View_updateOutput(struct module *module, THFloatTensor *input)
{
	long nElements = THFloatTensor_nElement(input);
	struct View *p = &module->View;
	int i, j;
	if(p->nDimension)
	{
		long nelements = 1, size[4];
		int ndim;
		if(input->nDimension == 4)
		{
			ndim = p->nDimension;
			memcpy(size, p->size, sizeof(size));
		} else {
			/* p->size refers to a 4D tensor, we are working with 3D tensors,
			   so drop the leading (batch) dimension of the target shape. */
			ndim = p->nDimension-1;
			memcpy(size, p->size+1, sizeof(size[0]) * 3);
		}
		for(i = 0; i < ndim; i++)
			if(size[i] == 0)
			{
				/* 0 means "copy this dimension from the input"; that only
				   works if the input actually has such a dimension.
				   (The original compared i against ndim here, which is
				   always false inside this loop, making the check dead.) */
				if(i >= input->nDimension)
					THError("Reshape has size 0 for non-existing dimension %d\n", i);
				nelements *= input->size[i];
			} else if(size[i] > 0)
				nelements *= size[i];
			else {
				/* Negative size: absorb the product of the remaining input
				   dimensions into this one. */
				size[i] = 1;
				for(j = i; j < ndim; j++)
					size[i] *= input->size[j];
				nelements *= size[i];
			}
		if(nelements != THFloatTensor_nElement(input))
			THError("Wrong number of elements in Reshape: %ld (input) vs %ld (reshaped)\n", THFloatTensor_nElement(input), nelements);
		/* Share the input's storage and just change the view's metadata. */
		THFloatTensor_set(module->output, input);
		THFloatTensor_resize(module->output, size, ndim);
	} else {
		long numElements = p->numElements;
		long batchSize = input->nDimension == 4 ? input->size[0] : 1;
		if(numElements == -1)
			numElements = nElements / batchSize;
		else batchSize = nElements / numElements;
		THFloatTensor *view;
		if (batchSize > 1)
			view = THFloatTensor_newWithStorage2d(input->storage, input->storageOffset, batchSize, numElements, numElements, 1);
		else
			view = THFloatTensor_newWithStorage1d(input->storage, input->storageOffset, numElements, 1);
		THFloatTensor_free(module->output);
		module->output = view;
	}
	return module->output;
}
| {
"pile_set_name": "Github"
} |
---
layout: documentation
title: 14288 - ZWave
---
{% include base.html %}
# 14288 In-Wall Outlet
This describes the Z-Wave device *14288*, manufactured by *Jasco Products* with the thing type UID of ```ge_14288_00_000```.
The device is in the category of *Power Outlet*, defining Small devices to be plugged into a power socket in a wall which stick there.

The 14288 supports routing. This allows the device to communicate using other routing enabled devices as intermediate routers. This device is also able to participate in the routing of data between other devices in the mesh network.
## Overview
Transform any home into a smart home with the GE Z-Wave Plus In-Wall Tamper-Resistant Smart Outlet. The GE Z-Wave Plus In-Wall Tamper-Resistant Smart Outlet enables wireless control of on/off functions from the Z-Wave controlled outlet, and is compatible with incandescent, LED, Halogen, fluorescent and compact fluorescent bulbs. The Smart Outlet replaces your current electrical outlet, uses your existing wiring, and provides a Z-Wave enabled outlet for the lamp or appliance that you would like to control and a standard pass-through AC outlet for other electrical appliances. The receptacle requires in-wall installation with hardwired connections. Take control of your smart home lighting with the GE Z-Wave Plus Wireless Smart Outlet Control.
### Inclusion Information
Once the controller is ready to add your device, press and release the Program Button to add it in the network.
### Exclusion Information
Once the controller is ready to remove your device, press and release the manual/program button to remove it from the network.
## Channels
The following table summarises the channels available for the 14288 -:
| Channel Name | Channel ID | Channel Type | Category | Item Type |
|--------------|------------|--------------|----------|-----------|
| Switch | switch_binary | switch_binary | Switch | Switch |
| Scene Number | scene_number | scene_number | | Number |
### Switch
Switch the power on and off.
The ```switch_binary``` channel is of type ```switch_binary``` and supports the ```Switch``` item and is in the ```Switch``` category.
### Scene Number
Triggers when a scene button is pressed.
The ```scene_number``` channel is of type ```scene_number``` and supports the ```Number``` item.
## Device Configuration
The following table provides a summary of the 1 configuration parameters available in the 14288.
Detailed information on each parameter can be found in the sections below.
| Param | Name | Description |
|-------|-------|-------------|
| 3 | LED Light configuration | LED Light configuration |
| | Switch All Mode | Set the mode for the switch when receiving SWITCH ALL commands |
### Parameter 3: LED Light configuration
LED Light configuration
0 - LED On when load is Off, LED Off when load is On
1 - LED On when load is On, LED Off when load is Off (Default)
2 - LED always off
The following option values may be configured -:
| Value | Description |
|--------|-------------|
| 0 | LED On when load is Off |
| 1 | LED On when load is On |
| 2 | LED always off |
The manufacturer defined default value is ```1``` (LED On when load is On).
This parameter has the configuration ID ```config_3_1``` and is of type ```INTEGER```.
### Switch All Mode
Set the mode for the switch when receiving SWITCH ALL commands.
The following option values may be configured -:
| Value | Description |
|--------|-------------|
| 0 | Exclude from All On and All Off groups |
| 1 | Include in All On group |
| 2 | Include in All Off group |
| 255 | Include in All On and All Off groups |
This parameter has the configuration ID ```switchall_mode``` and is of type ```INTEGER```.
## Association Groups
Association groups allow the device to send unsolicited reports to the controller, or other devices in the network. Using association groups can allow you to eliminate polling, providing instant feedback of a device state change without unnecessary network traffic.
The 14288 supports 3 association groups.
### Group 1: Lifeline
The Lifeline association group reports device status to a hub and is not designed to control other devices directly. When using the Lifeline group with a hub, in most cases, only the lifeline group will need to be configured and normally the hub will perform this automatically during the device initialisation.
Association group 1 supports 5 nodes.
### Group 2: Basic Set via local load
Supports Basic Set and is controlled with the local load
Association group 2 supports 5 nodes.
### Group 3: Basic Set via Button
Supports Basic Set and is controlled by pressing the On or Off button
Association group 3 supports 5 nodes.
## Technical Information
### Endpoints
#### Endpoint 0
| Command Class | Comment |
|---------------|---------|
| COMMAND_CLASS_NO_OPERATION_V1| |
| COMMAND_CLASS_BASIC_V1| |
| COMMAND_CLASS_SWITCH_BINARY_V1| Linked to BASIC|
| COMMAND_CLASS_SWITCH_ALL_V1| |
| COMMAND_CLASS_SCENE_ACTIVATION_V1| |
| COMMAND_CLASS_SCENE_ACTUATOR_CONF_V1| |
| COMMAND_CLASS_CRC_16_ENCAP_V1| |
| COMMAND_CLASS_ASSOCIATION_GRP_INFO_V1| |
| COMMAND_CLASS_DEVICE_RESET_LOCALLY_V1| |
| COMMAND_CLASS_ZWAVEPLUS_INFO_V1| |
| COMMAND_CLASS_CONFIGURATION_V1| |
| COMMAND_CLASS_MANUFACTURER_SPECIFIC_V1| |
| COMMAND_CLASS_POWERLEVEL_V1| |
| COMMAND_CLASS_FIRMWARE_UPDATE_MD_V1| |
| COMMAND_CLASS_ASSOCIATION_V2| |
| COMMAND_CLASS_VERSION_V2| |
### Documentation Links
* [Manual](https://opensmarthouse.org/zwavedatabase/714/14288.pdf)
---
Did you spot an error in the above definition or want to improve the content?
You can [contribute to the database here](https://opensmarthouse.org/zwavedatabase/714).
| {
"pile_set_name": "Github"
} |
---
author: shog
comments: true
date: 2012-08-02 14:50:30+00:00
layout: post
redirect_from: /2012/08/stack-exchange-now-60-valued-associates-strong
hero:
slug: stack-exchange-now-60-valued-associates-strong
title: 'Stack Exchange: now 60 Valued Associates strong!'
wordpress_id: 12053
tags:
- company
- background
---
Holy smokes... It's been over three and a half years since Jeff recruited [Valued Associate #00002](http://blog.stackoverflow.com/2009/01/welcome-stack-overflow-valued-associate-00002/) to work full-time on building Stack Overflow. In that time, a lot has changed. Jeff's moved on to [teasing us](https://twitter.com/codinghorror/status/213365532487983105) with his next project, Jarrod's gone from spending his days knee-deep in code to managing the Core dev team and [being big in Japan](https://twitter.com/alexlmiller/status/225382060313935874). And there are now 60 full-time employees of Stack Exchange, many hired from [within the communities](http://blog.stackoverflow.com/2010/01/eating-our-own-careers-dogfood/) on our sites.
It's been far too many months since [we last introduced](http://blog.stackoverflow.com/2012/01/welcome-valued-associates-anna-rachel-carleejean-and-charles/) any of them, which is a real shame - these folks work hard keeping the lights on, and there's no reason to keep them locked in the basement _all_ the time. So without further ado,
## New Valued Associates
**Bart Silverstrim - Systems Administrator**
[](http://stackexchange.com/users/28003/bart-silverstrim)
Bart is the newest addition to our Systems Administration Team. Bart is married to his wonderful wife Norma, an English teacher in PA, and has a stepdaughter in college and a son obsessed with Pokemon. He has three cats named Ruby, Python and Mongo. He knows more about Star Trek than most, and is an aspiring author (try and find him at the local Barnes and Noble!)
_ See also: [Welcome, ServerFault Valued Associate #0000004!](http://blog.serverfault.com/2012/07/10/welcome-serverfault-valued-associate-0000004/)_
**Jay Hanlon - VP of Community**
[](http://stackexchange.com/users/140824/jaydles)
As the new VP of community, Jay will oversee a combined team made up of our existing (and awesome) Chaos and Community teams. Specifically, he's tasked with driving a 6-sigma confidence level in cloud fluffiness, a 15º improvement in rainbow arc, and a modest 15% lift in unicorn nobility. He comes from a long, if accidental, career in coal mining financial services, where he started as a two-week temp answering phones, and most recently was a Managing Director of Capital Markets (whatever _that_ means). Prior to that, he studied Drama at Dartmouth College and tried to do a lot of crossword puzzles. Today, he's a proud husband and the father of the world's definitively most-awesome one-year-old [citation needed].
**Steve Feldman - Office Admin**
[](http://stackexchange.com/users/1246026)
Proudly Polish from New Jersey, Steve helps the ever-expanding office at Stack HQ maintain its efficiency as we keep growing and growing; making sure the NYC team has enough jerky, Red Bull and peanut M&Ms to get them through the day; keeping the shelves stocked with enough swag to keep our users happy and buried in t-shirts and stickers. He graduated from University of Maryland (History) and then the University of Nottingham in England (MA Environmental History), where he found his love of Manchester United.
**Matt Jibson - Developer**
[](http://stackexchange.com/users/462110/mjibson)
Tall Matt is from Colorado and has joined the Development Team in the NYC office working on Careers 2.0. He plays organ and has a [website](http://www.mattjibson.com/).
**William Cole - Product Manager**
[](http://stackexchange.com/users/1345480/will-cole)
Will joins Stack Exchange as the Product Manager for Careers. He hails from Austin, TX and has been in NYC for 6 years (Don’t worry, he still has his cowboy boots). He founded two failed startups – one around news discovery (Know About It) and the other for fantasy sports (Chalq). His hobbies include rec league sports and their online fantasy equivalents. He enjoys reading science fiction and attending political, social, and economic debates. He is looking forward to building products at scale, working in a developer centric company culture, and not being responsible for raising money!
**Jay Greenbaum - Sales**

NY born and bred Jay joins the Careers Sales Team in NYC. Jay only left the Empire State for 4 great years as a Florida Gator. In his spare time, Jay loves travelling and eating and is obsessed with golf. Jay recently rescued a mutt dachshund puppy named Layla.
**Bethany Marzewski - Marketing Coordinator**

Bethany, a proud graduate of Northwestern University, comes to the Careers Marketing Team with a background in magazine journalism. Bethany’s career in journalism was highlighted by her cat (Freya)’s national magazine debut in Prevention Magazine. In her spare time, she enjoys traveling, singing in her community choir, and growing orchids at her Brooklyn apartment.
**Jon-Vincent Zampetti - Sales**

JV joins the Careers Sales Team in NYC. Beach bum Jon-Vincent (JV) is from New Jersey and grew up in a small beach town, Monmouth Beach. JV is obsessed with all sports and rooting for local teams: Giants, Rangers, Knicks, and Yankees. In his spare time, he enjoys pepperoni pizza and Hemingway.
**Dammand Cherry - Sales**

Dammand joins the Digital Ads Sales Team. He lives in Brooklyn with his 3 kids and lovely wife. He played football in college and loves to sell advertisements. Dammand is passionate about politics, and in his spare time spends time on his site The Politicus.
**Ben Kiziltug - Sales**

Ben joins Dimitar in our London Careers 2.0 Sales Office. Ben is originally from London and went to university in Liverpool. Upon completion he moved to Dubai to work in the headhunting sector where he lived for almost 2 years. Ben then took a hiatus to travel around Central and South America for 11 months. Highlights of Ben’s travels included living in the Amazon with a local tribe for a month, hiking on ice glaciers in Patagonia, swimming with whale sharks off the coast of Mexico and cycling down the world’s most dangerous road in Bolivia all before partying in Brazil for Carnaval.
**Stefan Schwarzgruber - Sales**

Stefan joins us in our ever-expanding Careers Sales Team in London. He grew up on a farm in Austria, moved to Vienna for a few years, and now resides in London. In his free time he plays volleyball, even traveling for International tournaments with his teammates! When he does make it back home to Austria he enjoys riding his brother’s horses (never without his permission though as he is the original horse whisperer).
** **
**Matthew Napolitano - Sales**

Matt joins our Career Sales Team in NY. Born and raised in the ‘burbs, Matt went to college in Madison, WI, and spent a year as a ski bum out in Lake Tahoe before moving back to NYC. He likes spending as much of his free time outside as he can, often playing tennis, basketball, or anything else he can make competitive.
**Sean Bave - Sales**

Sean joins our Career Sales Team in NY. He was born and raised in Westchester County, New York. He is addicted to football and golf. He once won a Chicken Nugget Eating Competition by eating 86 cafeteria nuggets.
**Robyn Wertman - Finance Manager**

Robyn joins Stack Exchange as our Finance Manager. Born and raised in central Ohio, Robyn moved from MI to NY in 2011 (with a broken leg!). She and her husband, Brad, have two boys Bryce and Chandler. She loves to spend time with her family, read paranormal romance books on her tablet and visit new places in NYC. If it’s a weekend, you can find her at the local playgrounds and parks.
** **
**Robert Brand - Sales**

Robert joins our Career Sales Team in NY. He grew up on Long Island and went to school at James Madison University in Virginia. Robert now lives in Brooklyn with his girlfriend and 3 cats. Robert enjoys playing video games, cooking, and listening to/reading science fiction/fantasy books. Also, he does not have a "real" belly-button (it is a hand made "innie")
Please join me in giving a warm, belated welcome to these fine conscripts!
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.vector;
import org.roaringbitmap.PeekableIntIterator;
import javax.annotation.Nullable;
import java.util.Arrays;
public class VectorSelectorUtils
{
  private VectorSelectorUtils()
  {
    // Static-helper utility class; no instances.
  }

  /**
   * Helper used by ColumnarLongs, ColumnarDoubles, etc. to populate null-flag vectors.
   *
   * Fills (and returns) a boolean vector whose i-th entry is true when the i-th row addressed by
   * {@code offset} is flagged as null by {@code nullIterator}. Returns null when no row at or
   * beyond the current offset is null, which callers treat as "no nulls in this vector".
   *
   * @param nullVector   previously-allocated vector to reuse, or null to allocate a fresh one sized
   *                     to {@code offset.getMaxVectorSize()}
   * @param offset       row positions for the current vector (contiguous range or explicit array;
   *                     rows are assumed ascending, matching how vector offsets are produced)
   * @param nullIterator ascending iterator over null row ids; advanced as a side effect
   * @return the populated null vector, or null if no addressed row is null
   */
  @Nullable
  public static boolean[] populateNullVector(
      @Nullable final boolean[] nullVector,
      final ReadableVectorOffset offset,
      final PeekableIntIterator nullIterator
  )
  {
    if (!nullIterator.hasNext()) {
      return null;
    }
    // Reuse the caller's buffer when provided to avoid per-vector allocation.
    final boolean[] retVal = nullVector != null ? nullVector : new boolean[offset.getMaxVectorSize()];
    if (offset.isContiguous()) {
      final int startOffset = offset.getStartOffset();
      nullIterator.advanceIfNeeded(startOffset);
      if (!nullIterator.hasNext()) {
        // No null row at or after this vector's first row.
        return null;
      }
      for (int i = 0; i < offset.getCurrentVectorSize(); i++) {
        final int row = i + startOffset;
        nullIterator.advanceIfNeeded(row);
        if (!nullIterator.hasNext()) {
          // Iterator exhausted: the remaining rows cannot be null.
          Arrays.fill(retVal, i, offset.getCurrentVectorSize(), false);
          break;
        }
        retVal[i] = row == nullIterator.peekNext();
      }
    } else {
      final int[] currentOffsets = offset.getOffsets();
      nullIterator.advanceIfNeeded(currentOffsets[0]);
      if (!nullIterator.hasNext()) {
        return null;
      }
      for (int i = 0; i < offset.getCurrentVectorSize(); i++) {
        final int row = currentOffsets[i];
        nullIterator.advanceIfNeeded(row);
        if (!nullIterator.hasNext()) {
          // Iterator exhausted: the remaining rows cannot be null.
          Arrays.fill(retVal, i, offset.getCurrentVectorSize(), false);
          break;
        }
        retVal[i] = row == nullIterator.peekNext();
      }
    }
    return retVal;
  }
}
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#include "include/dart_api.h"
#include "include/dart_native_api.h"
#include "vm/unit_test.h"
// Custom Isolate Test.
//
// This mid-size test uses the Dart Embedding Api to create a custom
// isolate abstraction. Instead of having a dedicated thread for each
// isolate, as is the case normally, this implementation shares a
// single thread among the isolates using an event queue.
namespace dart {
static void native_echo(Dart_NativeArguments args);
static void CustomIsolateImpl_start(Dart_NativeArguments args);
static Dart_NativeFunction NativeLookup(Dart_Handle name,
int argc,
bool* auto_setup_scope);
// Dart program shared by both isolates in this test. The first isolate runs
// main(), which constructs a CustomIsolate and sends [42, replyPort] to it;
// the spawned isolate runs isolateMain() and replies with data + 1 (43).
// 'echo' and 'CustomIsolateImpl_start' resolve through NativeLookup below.
static const char* kCustomIsolateScriptChars =
    "import 'dart:isolate';\n"
    "\n"
    "final RawReceivePort mainPort = new RawReceivePort();\n"
    "final SendPort mainSendPort = mainPort.sendPort;\n"
    "\n"
    "echo(arg) native \"native_echo\";\n"
    "\n"
    "class CustomIsolateImpl implements CustomIsolate {\n"
    "  CustomIsolateImpl(String entry) : _entry = entry{\n"
    "    echo('Constructing isolate');\n"
    "  }\n"
    "\n"
    "  SendPort spawn() {\n"
    "    return _start(_entry);\n"
    "  }\n"
    "\n"
    "  static SendPort _start(entry)\n"
    "      native \"CustomIsolateImpl_start\";\n"
    "\n"
    "  String _entry;\n"
    "}\n"
    "\n"
    "abstract class CustomIsolate {\n"
    "  factory CustomIsolate(String entry) = CustomIsolateImpl;\n"
    "\n"
    "  SendPort spawn();\n"
    "}\n"
    "\n"
    "isolateMain() {\n"
    "   echo('Running isolateMain');\n"
    "   mainPort.handler = (message) {\n"
    "     var data = message[0];\n"
    "     var replyTo = message[1];\n"
    "     echo('Received: $data');\n"
    "     replyTo.send(data + 1);\n"
    "   };\n"
    "}\n"
    "\n"
    "main() {\n"
    "  var isolate = new CustomIsolate(\"isolateMain\");\n"
    "  var receivePort = new RawReceivePort();\n"
    "  SendPort port = isolate.spawn();\n"
    "  port.send([42, receivePort.sendPort]);\n"
    "  receivePort.handler = (message) {\n"
    "    receivePort.close();\n"
    "    echo('Received: $message');\n"
    "  };\n"
    "  return 'success';\n"
    "}\n";
// An entry in our event queue.
class Event {
 public:
  virtual ~Event() {}

  // Performs this event's unit of work; implemented by concrete events.
  virtual void Process() = 0;

  // The isolate this event is addressed to.
  Dart_Isolate isolate() const { return isolate_; }

 protected:
  explicit Event(Dart_Isolate isolate) : isolate_(isolate), next_(NULL) {}

 private:
  friend class EventQueue;  // EventQueue threads events through next_.
  Dart_Isolate isolate_;
  Event* next_;  // Intrusive singly-linked-list pointer, owned by the queue.
};
// A simple event queue for our test.
class EventQueue {
public:
EventQueue() {
head_ = NULL;
}
void Add(Event* event) {
if (head_ == NULL) {
head_ = event;
tail_ = event;
} else {
tail_->next_ = event;
tail_ = event;
}
}
Event* Get() {
if (head_ == NULL) {
return NULL;
}
Event* tmp = head_;
head_ = head_->next_;
if (head_ == NULL) {
// Not necessary, but why not.
tail_ = NULL;
}
return tmp;
}
void RemoveEventsForIsolate(Dart_Isolate isolate) {
Event* cur = head_;
Event* prev = NULL;
while (cur != NULL) {
Event* next = cur->next_;
if (cur->isolate() == isolate) {
// Remove matching event.
if (prev != NULL) {
prev->next_ = next;
} else {
head_ = next;
}
delete cur;
} else {
// Advance.
prev = cur;
}
cur = next;
}
tail_ = prev;
}
private:
Event* head_;
Event* tail_;
};
EventQueue* event_queue;
// Start an isolate.
class StartEvent : public Event {
 public:
  // 'main' must be heap-allocated (it is strdup'd by the caller);
  // StartEvent::Process frees it after the entry function runs.
  StartEvent(Dart_Isolate isolate, const char* main)
      : Event(isolate), main_(main) {}

  virtual void Process();

 private:
  const char* main_;  // Name of the Dart entry function to invoke.
};
// Runs the isolate's Dart entry function (named by main_) on this thread,
// then leaves the isolate so the next queued event can run.
void StartEvent::Process() {
  OS::Print(">> StartEvent with isolate(%p)--\n", isolate());
  // All isolates share one OS thread: enter the target explicitly.
  Dart_EnterIsolate(isolate());
  Dart_EnterScope();
  Dart_Handle result;
  Dart_Handle lib = Dart_LookupLibrary(NewString(TestCase::url()));
  EXPECT_VALID(lib);
  result = Dart_Invoke(lib, NewString(main_), 0, NULL);
  EXPECT_VALID(result);
  // main_ was strdup'd in CustomIsolateImpl_start; release it exactly once.
  free(const_cast<char*>(main_));
  main_ = NULL;
  Dart_ExitScope();
  Dart_ExitIsolate();
}
// Notify an isolate of a pending message.
class MessageEvent : public Event {
 public:
  explicit MessageEvent(Dart_Isolate isolate) : Event(isolate) {}

  // No resources of its own to release.
  ~MessageEvent() {
  }

  // Defined below: delivers exactly one pending message to isolate().
  virtual void Process();
};
// Delivers one pending message to the isolate. If the isolate has no live
// ports afterwards it can never receive another message, so it is shut down
// and its remaining queued events are discarded.
void MessageEvent::Process() {
  OS::Print("$$ MessageEvent with isolate(%p)\n", isolate());
  Dart_EnterIsolate(isolate());
  Dart_EnterScope();
  Dart_Handle result = Dart_HandleMessage();
  EXPECT_VALID(result);
  if (!Dart_HasLivePorts()) {
    OS::Print("<< Shutting down isolate(%p)\n", isolate());
    // Drop any events still queued for this isolate before destroying it.
    event_queue->RemoveEventsForIsolate(isolate());
    // NOTE(review): no Dart_ExitScope on this path — presumably shutdown
    // reclaims the scope; confirm against the embedding API contract.
    Dart_ShutdownIsolate();
  } else {
    Dart_ExitScope();
    Dart_ExitIsolate();
  }
  // Either path must leave no isolate current on this thread.
  ASSERT(Dart_CurrentIsolate() == NULL);
}
// Message-notify callback installed on every isolate: instead of handling
// the message inline, queue a MessageEvent so the single-threaded event
// loop in CustomIsolates processes it later.
static void NotifyMessage(Dart_Isolate dest_isolate) {
  OS::Print("-- Notify isolate(%p) of pending message --\n", dest_isolate);
  OS::Print("-- Adding MessageEvent to queue --\n");
  event_queue->Add(new MessageEvent(dest_isolate));
}
// Resolves the native function names referenced by the test script
// ("native_echo", "CustomIsolateImpl_start"); returns NULL for anything else.
static Dart_NativeFunction NativeLookup(Dart_Handle name,
                                        int argc,
                                        bool* auto_setup_scope) {
  ASSERT(auto_setup_scope != NULL);
  *auto_setup_scope = false;
  EXPECT(Dart_IsString(name));
  const char* name_str = NULL;
  EXPECT_VALID(Dart_StringToCString(name, &name_str));
  Dart_NativeFunction result = NULL;
  if (strcmp(name_str, "native_echo") == 0) {
    result = &native_echo;
  } else if (strcmp(name_str, "CustomIsolateImpl_start") == 0) {
    result = &CustomIsolateImpl_start;
  }
  return result;
}
const char* saved_echo = NULL;
// Native implementation of the Dart 'echo' function: prints its argument and
// records a copy in saved_echo so the test can assert on the last value.
static void native_echo(Dart_NativeArguments args) {
  Dart_EnterScope();
  Dart_Handle arg = Dart_GetNativeArgument(args, 0);
  Dart_Handle toString = Dart_ToString(arg);
  EXPECT_VALID(toString);
  const char* c_str = NULL;
  EXPECT_VALID(Dart_StringToCString(toString, &c_str));
  if (saved_echo) {
    // Release the previous copy before replacing it.
    free(const_cast<char*>(saved_echo));
  }
  // c_str is scope-allocated; keep a heap copy that outlives the scope.
  saved_echo = strdup(c_str);
  OS::Print("-- (isolate=%p) %s\n", Dart_CurrentIsolate(), c_str);
  Dart_ExitScope();
}
// Native backing for CustomIsolateImpl._start: creates a second isolate,
// loads the same test script into it, queues a StartEvent that will later
// run its entry function, and returns a SendPort to the new isolate's
// main port to the caller.
static void CustomIsolateImpl_start(Dart_NativeArguments args) {
  OS::Print("-- Enter: CustomIsolateImpl_start --\n");
  // We would probably want to pass in the this pointer too, so we
  // could associate the CustomIsolateImpl instance with the
  // Dart_Isolate by storing it in a native field.
  EXPECT_EQ(1, Dart_GetNativeArgumentCount(args));
  Dart_Handle param = Dart_GetNativeArgument(args, 0);
  EXPECT_VALID(param);
  EXPECT(Dart_IsString(param));
  const char* isolate_main = NULL;
  EXPECT_VALID(Dart_StringToCString(param, &isolate_main));
  // Copy the name: the scope-allocated C string becomes invalid once we
  // leave this isolate. StartEvent::Process frees the copy.
  isolate_main = strdup(isolate_main);
  // Save current isolate.
  Dart_Isolate saved_isolate = Dart_CurrentIsolate();
  Dart_ExitIsolate();
  // Create a new Dart_Isolate.
  Dart_Isolate new_isolate = TestCase::CreateTestIsolate();
  EXPECT(new_isolate != NULL);
  // Applies to the currently-entered isolate — the new one (the
  // Dart_EnterScope below relies on CreateTestIsolate leaving it entered).
  Dart_SetMessageNotifyCallback(&NotifyMessage);
  Dart_EnterScope();
  // Reload all the test classes here.
  //
  // TODO(turnidge): Use the create isolate callback instead?
  Dart_Handle lib = TestCase::LoadTestScript(kCustomIsolateScriptChars,
                                             NativeLookup);
  EXPECT_VALID(lib);
  Dart_Handle main_send_port = Dart_GetField(lib, NewString("mainSendPort"));
  EXPECT_VALID(main_send_port);
  Dart_Port main_port_id;
  Dart_Handle err = Dart_SendPortGetId(main_send_port, &main_port_id);
  EXPECT_VALID(err);
  OS::Print("-- Adding StartEvent to queue --\n");
  event_queue->Add(new StartEvent(new_isolate, isolate_main));
  // Restore the original isolate.
  Dart_ExitScope();
  Dart_ExitIsolate();
  Dart_EnterIsolate(saved_isolate);
  Dart_EnterScope();
  // Hand the caller a SendPort (usable from its own isolate) addressed to
  // the new isolate's main port.
  Dart_Handle send_port = Dart_NewSendPort(main_port_id);
  EXPECT_VALID(send_port);
  Dart_SetReturnValue(args, send_port);
  OS::Print("-- Exit: CustomIsolateImpl_start --\n");
  Dart_ExitScope();
}
// End-to-end driver: runs Dart main() in a first isolate (which spawns the
// custom isolate and sends it 42), then pumps the shared event queue on
// this single thread until it drains.
UNIT_TEST_CASE(CustomIsolates) {
  event_queue = new EventQueue();
  Dart_Isolate dart_isolate = TestCase::CreateTestIsolate();
  EXPECT(dart_isolate != NULL);
  Dart_SetMessageNotifyCallback(&NotifyMessage);
  Dart_EnterScope();
  Dart_Handle result;
  // Create a test library.
  Dart_Handle lib = TestCase::LoadTestScript(kCustomIsolateScriptChars,
                                             NativeLookup);
  EXPECT_VALID(lib);
  // Run main.
  result = Dart_Invoke(lib, NewString("main"), 0, NULL);
  EXPECT_VALID(result);
  EXPECT(Dart_IsString(result));
  const char* result_str = NULL;
  EXPECT_VALID(Dart_StringToCString(result, &result_str));
  EXPECT_STREQ("success", result_str);
  Dart_ExitScope();
  Dart_ExitIsolate();
  OS::Print("-- Starting event loop --\n");
  // Single-threaded event loop; each Process() enters/exits its isolate.
  Event* event = event_queue->Get();
  while (event) {
    event->Process();
    delete event;
    event = event_queue->Get();
  }
  OS::Print("-- Finished event loop --\n");
  // native_echo recorded the last echoed string; the reply is 42 + 1.
  EXPECT_STREQ("Received: 43", saved_echo);
  free(const_cast<char*>(saved_echo));
  delete event_queue;
}
} // namespace dart
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2015-2018 Isode Limited.
* All rights reserved.
* See the COPYING file for more information.
*/
#include <SwifTools/Dock/MacOSXDock.h>
#include <boost/lexical_cast.hpp>
#include <AppKit/AppKit.h>
#include <Cocoa/Cocoa.h>
#include <Swiften/Base/String.h>
namespace Swift {

// The CocoaApplication argument is unused here.
MacOSXDock::MacOSXDock(CocoaApplication*) {
}

// Shows 'i' as the dock-icon badge (an empty label clears the badge when
// i == 0) and asks AppKit for user attention.
void MacOSXDock::setNumberOfPendingMessages(size_t i) {
    // Empty string removes the badge entirely.
    std::string label(i > 0 ? boost::lexical_cast<std::string>(i) : "");
    NSString *labelString = [[NSString alloc] initWithUTF8String: label.c_str()];
    [[NSApp dockTile] setBadgeLabel: labelString];
    // Manual retain/release (non-ARC): balance the alloc above.
    [labelString release];
    // NOTE(review): attention is requested even when the count drops to
    // zero — presumably intentional; confirm desired UX.
    [NSApp requestUserAttention: NSInformationalRequest];
}

}
| {
"pile_set_name": "Github"
} |
# Copyright (c) 2008-2010, Intel Corporation.
# Copyright (c) 2006-2007, The Trustees of Stanford University.
# All rights reserved.
# Licensed under the terms of the New BSD License.
# Author: Mayur Naik ([email protected])
# name=LE-dlog
# Joins LP and PE over their shared P attribute to derive LE.
# The L, P, E domain definitions live in the included .dom files; their
# semantics are not visible here (per the Chord analysis conventions,
# presumably locks / program points / heap-accessing statements — verify
# against the domain generators).
.include "E.dom"
.include "L.dom"
.include "P.dom"

# BDD variable order used by the bddbddb solver for this program.
.bddvarorder P0_L0xL1_E0

###
# Relations
###

# Input relation over (L, P).
LP(l:L0,p:P0) input
# Input relation over (P, E).
PE(p:P0,e:E0) input
# Output relation over (L, E), derived by the rule below.
LE(l:L0,e:E0) output

###
# Constraints
###

# LE(l,e) holds when some point p relates l (via LP) and e (via PE).
LE(l,e) :- LP(l,p), PE(p,e).
| {
"pile_set_name": "Github"
} |
-- MediaWiki schema patch: widen the *_minor_mime columns to varbinary(100)
-- so longer MIME subtypes are not truncated.
ALTER TABLE /*_*/filearchive
MODIFY COLUMN fa_minor_mime varbinary(100) default "unknown";

ALTER TABLE /*_*/image
MODIFY COLUMN img_minor_mime varbinary(100) NOT NULL default "unknown";

ALTER TABLE /*_*/oldimage
MODIFY COLUMN oi_minor_mime varbinary(100) NOT NULL default "unknown";

-- Record the patch in updatelog so the updater does not re-run it.
INSERT INTO /*_*/updatelog(ul_key) VALUES ('mime_minor_length');
| {
"pile_set_name": "Github"
} |
org.apache.commons.math3.analysis.function.LogitTest
org.apache.commons.math3.analysis.interpolation.MicrosphereInterpolatorTest
org.apache.commons.math3.distribution.BetaDistributionTest
org.apache.commons.math3.distribution.BinomialDistributionTest
org.apache.commons.math3.distribution.CauchyDistributionTest
org.apache.commons.math3.distribution.ChiSquaredDistributionTest
org.apache.commons.math3.distribution.ExponentialDistributionTest
org.apache.commons.math3.distribution.FDistributionTest
org.apache.commons.math3.distribution.GammaDistributionTest
org.apache.commons.math3.distribution.HypergeometricDistributionTest
org.apache.commons.math3.distribution.LogNormalDistributionTest
org.apache.commons.math3.distribution.MultivariateNormalDistributionTest
org.apache.commons.math3.distribution.MultivariateNormalMixtureModelDistributionTest
org.apache.commons.math3.distribution.NormalDistributionTest
org.apache.commons.math3.distribution.PascalDistributionTest
org.apache.commons.math3.distribution.PoissonDistributionTest
org.apache.commons.math3.distribution.TDistributionTest
org.apache.commons.math3.distribution.TriangularDistributionTest
org.apache.commons.math3.distribution.UniformIntegerDistributionTest
org.apache.commons.math3.distribution.UniformRealDistributionTest
org.apache.commons.math3.distribution.WeibullDistributionTest
org.apache.commons.math3.distribution.ZipfDistributionTest
org.apache.commons.math3.fitting.PolynomialFitterTest
org.apache.commons.math3.geometry.euclidean.threed.Vector3DTest
org.apache.commons.math3.linear.HessenbergTransformerTest
org.apache.commons.math3.linear.SchurTransformerTest
org.apache.commons.math3.optim.nonlinear.scalar.noderiv.CMAESOptimizerTest
org.apache.commons.math3.optim.nonlinear.vector.jacobian.LevenbergMarquardtOptimizerTest
org.apache.commons.math3.optimization.direct.CMAESOptimizerTest
org.apache.commons.math3.optimization.fitting.PolynomialFitterTest
org.apache.commons.math3.optimization.general.LevenbergMarquardtOptimizerTest
org.apache.commons.math3.stat.correlation.PearsonsCorrelationTest
org.apache.commons.math3.stat.correlation.SpearmansRankCorrelationTest
org.apache.commons.math3.stat.descriptive.AggregateSummaryStatisticsTest
org.apache.commons.math3.stat.descriptive.moment.MeanTest
org.apache.commons.math3.stat.descriptive.moment.VarianceTest
org.apache.commons.math3.stat.descriptive.summary.ProductTest
org.apache.commons.math3.stat.inference.ChiSquareTestTest
org.apache.commons.math3.stat.inference.GTestTest
org.apache.commons.math3.stat.inference.MannWhitneyUTestTest
org.apache.commons.math3.stat.inference.OneWayAnovaTest
org.apache.commons.math3.stat.inference.TTestTest
org.apache.commons.math3.stat.inference.TestUtilsTest
org.apache.commons.math3.stat.inference.WilcoxonSignedRankTestTest
org.apache.commons.math3.stat.regression.SimpleRegressionTest
org.apache.commons.math3.util.ArithmeticUtilsTest
org.apache.commons.math3.util.FastMathTest
org.apache.commons.math3.util.MathArraysTest
org.apache.commons.math3.util.MathUtilsTest
org.apache.commons.math3.util.ResizableDoubleArrayTest
| {
"pile_set_name": "Github"
} |
/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: k8s.io/kubernetes/vendor/k8s.io/api/coordination/v1/generated.proto
package v1
import (
fmt "fmt"
io "io"
proto "github.com/gogo/protobuf/proto"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
math "math"
math_bits "math/bits"
reflect "reflect"
strings "strings"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
// Generated proto.Message plumbing for Lease. The XXX_* methods route the
// gogo runtime's marshal/unmarshal/merge hooks to the hand-written codec
// defined later in this file.
func (m *Lease) Reset() { *m = Lease{} }
func (*Lease) ProtoMessage() {}
func (*Lease) Descriptor() ([]byte, []int) {
	// Index 0: first message declared in generated.proto.
	return fileDescriptor_929e1148ad9baca3, []int{0}
}
func (m *Lease) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *Lease) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	b = b[:cap(b)]
	n, err := m.MarshalToSizedBuffer(b)
	if err != nil {
		return nil, err
	}
	return b[:n], nil
}
func (m *Lease) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Lease.Merge(m, src)
}
func (m *Lease) XXX_Size() int {
	return m.Size()
}
func (m *Lease) XXX_DiscardUnknown() {
	xxx_messageInfo_Lease.DiscardUnknown(m)
}

// Shared metadata used by the gogo runtime for merge/discard.
var xxx_messageInfo_Lease proto.InternalMessageInfo
// Generated proto.Message plumbing for LeaseList (same pattern as Lease).
func (m *LeaseList) Reset() { *m = LeaseList{} }
func (*LeaseList) ProtoMessage() {}
func (*LeaseList) Descriptor() ([]byte, []int) {
	// Index 1: second message declared in generated.proto.
	return fileDescriptor_929e1148ad9baca3, []int{1}
}
func (m *LeaseList) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *LeaseList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	b = b[:cap(b)]
	n, err := m.MarshalToSizedBuffer(b)
	if err != nil {
		return nil, err
	}
	return b[:n], nil
}
func (m *LeaseList) XXX_Merge(src proto.Message) {
	xxx_messageInfo_LeaseList.Merge(m, src)
}
func (m *LeaseList) XXX_Size() int {
	return m.Size()
}
func (m *LeaseList) XXX_DiscardUnknown() {
	xxx_messageInfo_LeaseList.DiscardUnknown(m)
}

// Shared metadata used by the gogo runtime for merge/discard.
var xxx_messageInfo_LeaseList proto.InternalMessageInfo
// Generated proto.Message plumbing for LeaseSpec (same pattern as Lease).
func (m *LeaseSpec) Reset() { *m = LeaseSpec{} }
func (*LeaseSpec) ProtoMessage() {}
func (*LeaseSpec) Descriptor() ([]byte, []int) {
	// Index 2: third message declared in generated.proto.
	return fileDescriptor_929e1148ad9baca3, []int{2}
}
func (m *LeaseSpec) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *LeaseSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	b = b[:cap(b)]
	n, err := m.MarshalToSizedBuffer(b)
	if err != nil {
		return nil, err
	}
	return b[:n], nil
}
func (m *LeaseSpec) XXX_Merge(src proto.Message) {
	xxx_messageInfo_LeaseSpec.Merge(m, src)
}
func (m *LeaseSpec) XXX_Size() int {
	return m.Size()
}
func (m *LeaseSpec) XXX_DiscardUnknown() {
	xxx_messageInfo_LeaseSpec.DiscardUnknown(m)
}

// Shared metadata used by the gogo runtime for merge/discard.
var xxx_messageInfo_LeaseSpec proto.InternalMessageInfo
// Register the generated types and the raw file descriptor with the gogo
// proto runtime at package load time.
func init() {
	proto.RegisterType((*Lease)(nil), "k8s.io.api.coordination.v1.Lease")
	proto.RegisterType((*LeaseList)(nil), "k8s.io.api.coordination.v1.LeaseList")
	proto.RegisterType((*LeaseSpec)(nil), "k8s.io.api.coordination.v1.LeaseSpec")
}

func init() {
	proto.RegisterFile("k8s.io/kubernetes/vendor/k8s.io/api/coordination/v1/generated.proto", fileDescriptor_929e1148ad9baca3)
}
var fileDescriptor_929e1148ad9baca3 = []byte{
// 535 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x90, 0xc1, 0x6e, 0xd3, 0x40,
0x10, 0x86, 0xe3, 0x36, 0x91, 0x9a, 0x0d, 0x2d, 0x91, 0x95, 0x83, 0x95, 0x83, 0x5d, 0x22, 0x21,
0xe5, 0xc2, 0x2e, 0xa9, 0x10, 0x42, 0x9c, 0xc0, 0x20, 0xa0, 0x52, 0x2a, 0x24, 0xb7, 0x27, 0xd4,
0x03, 0x1b, 0x7b, 0x70, 0x96, 0xd4, 0x5e, 0xb3, 0xbb, 0x0e, 0xea, 0x8d, 0x47, 0xe0, 0xca, 0x63,
0xc0, 0x53, 0xe4, 0xd8, 0x63, 0x4f, 0x16, 0x31, 0x2f, 0x82, 0x76, 0x93, 0x36, 0x21, 0x49, 0xd5,
0x8a, 0xdb, 0xee, 0xcc, 0xfc, 0xdf, 0xfc, 0xf3, 0xa3, 0x57, 0xa3, 0x67, 0x12, 0x33, 0x4e, 0x46,
0xf9, 0x00, 0x44, 0x0a, 0x0a, 0x24, 0x19, 0x43, 0x1a, 0x71, 0x41, 0xe6, 0x0d, 0x9a, 0x31, 0x12,
0x72, 0x2e, 0x22, 0x96, 0x52, 0xc5, 0x78, 0x4a, 0xc6, 0x3d, 0x12, 0x43, 0x0a, 0x82, 0x2a, 0x88,
0x70, 0x26, 0xb8, 0xe2, 0x76, 0x7b, 0x36, 0x8b, 0x69, 0xc6, 0xf0, 0xf2, 0x2c, 0x1e, 0xf7, 0xda,
0x8f, 0x62, 0xa6, 0x86, 0xf9, 0x00, 0x87, 0x3c, 0x21, 0x31, 0x8f, 0x39, 0x31, 0x92, 0x41, 0xfe,
0xc9, 0xfc, 0xcc, 0xc7, 0xbc, 0x66, 0xa8, 0xf6, 0x93, 0xc5, 0xda, 0x84, 0x86, 0x43, 0x96, 0x82,
0x38, 0x27, 0xd9, 0x28, 0xd6, 0x05, 0x49, 0x12, 0x50, 0x74, 0x83, 0x81, 0x36, 0xb9, 0x49, 0x25,
0xf2, 0x54, 0xb1, 0x04, 0xd6, 0x04, 0x4f, 0x6f, 0x13, 0xc8, 0x70, 0x08, 0x09, 0x5d, 0xd5, 0x75,
0x7e, 0x59, 0xa8, 0xd6, 0x07, 0x2a, 0xc1, 0xfe, 0x88, 0x76, 0xb4, 0x9b, 0x88, 0x2a, 0xea, 0x58,
0xfb, 0x56, 0xb7, 0x71, 0xf0, 0x18, 0x2f, 0x62, 0xb8, 0x86, 0xe2, 0x6c, 0x14, 0xeb, 0x82, 0xc4,
0x7a, 0x1a, 0x8f, 0x7b, 0xf8, 0xfd, 0xe0, 0x33, 0x84, 0xea, 0x08, 0x14, 0xf5, 0xed, 0x49, 0xe1,
0x55, 0xca, 0xc2, 0x43, 0x8b, 0x5a, 0x70, 0x4d, 0xb5, 0xdf, 0xa2, 0xaa, 0xcc, 0x20, 0x74, 0xb6,
0x0c, 0xfd, 0x21, 0xbe, 0x39, 0x64, 0x6c, 0x2c, 0x1d, 0x67, 0x10, 0xfa, 0xf7, 0xe6, 0xc8, 0xaa,
0xfe, 0x05, 0x06, 0xd0, 0xf9, 0x69, 0xa1, 0xba, 0x99, 0xe8, 0x33, 0xa9, 0xec, 0xd3, 0x35, 0xe3,
0xf8, 0x6e, 0xc6, 0xb5, 0xda, 0xd8, 0x6e, 0xce, 0x77, 0xec, 0x5c, 0x55, 0x96, 0x4c, 0xbf, 0x41,
0x35, 0xa6, 0x20, 0x91, 0xce, 0xd6, 0xfe, 0x76, 0xb7, 0x71, 0xf0, 0xe0, 0x56, 0xd7, 0xfe, 0xee,
0x9c, 0x56, 0x3b, 0xd4, 0xba, 0x60, 0x26, 0xef, 0xfc, 0xd8, 0x9e, 0x7b, 0xd6, 0x77, 0xd8, 0xcf,
0xd1, 0xde, 0x90, 0x9f, 0x45, 0x20, 0x0e, 0x23, 0x48, 0x15, 0x53, 0xe7, 0xc6, 0x79, 0xdd, 0xb7,
0xcb, 0xc2, 0xdb, 0x7b, 0xf7, 0x4f, 0x27, 0x58, 0x99, 0xb4, 0xfb, 0xa8, 0x75, 0xa6, 0x41, 0xaf,
0x73, 0x61, 0x36, 0x1f, 0x43, 0xc8, 0xd3, 0x48, 0x9a, 0x58, 0x6b, 0xbe, 0x53, 0x16, 0x5e, 0xab,
0xbf, 0xa1, 0x1f, 0x6c, 0x54, 0xd9, 0x03, 0xd4, 0xa0, 0xe1, 0x97, 0x9c, 0x09, 0x38, 0x61, 0x09,
0x38, 0xdb, 0x26, 0x40, 0x72, 0xb7, 0x00, 0x8f, 0x58, 0x28, 0xb8, 0x96, 0xf9, 0xf7, 0xcb, 0xc2,
0x6b, 0xbc, 0x5c, 0x70, 0x82, 0x65, 0xa8, 0x7d, 0x8a, 0xea, 0x02, 0x52, 0xf8, 0x6a, 0x36, 0x54,
0xff, 0x6f, 0xc3, 0x6e, 0x59, 0x78, 0xf5, 0xe0, 0x8a, 0x12, 0x2c, 0x80, 0xf6, 0x0b, 0xd4, 0x34,
0x97, 0x9d, 0x08, 0x9a, 0x4a, 0xa6, 0x6f, 0x93, 0x4e, 0xcd, 0x64, 0xd1, 0x2a, 0x0b, 0xaf, 0xd9,
0x5f, 0xe9, 0x05, 0x6b, 0xd3, 0x7e, 0x77, 0x32, 0x75, 0x2b, 0x17, 0x53, 0xb7, 0x72, 0x39, 0x75,
0x2b, 0xdf, 0x4a, 0xd7, 0x9a, 0x94, 0xae, 0x75, 0x51, 0xba, 0xd6, 0x65, 0xe9, 0x5a, 0xbf, 0x4b,
0xd7, 0xfa, 0xfe, 0xc7, 0xad, 0x7c, 0xd8, 0x1a, 0xf7, 0xfe, 0x06, 0x00, 0x00, 0xff, 0xff, 0x41,
0x5e, 0x94, 0x96, 0x5e, 0x04, 0x00, 0x00,
}
// Marshal encodes m into a freshly allocated, exactly-sized buffer.
func (m *Lease) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

func (m *Lease) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer writes fields back-to-front, ending at len(dAtA):
// field 2 (Spec) first, then field 1 (ObjectMeta). Returns bytes written.
func (m *Lease) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	{
		size, err := m.Spec.MarshalToSizedBuffer(dAtA[:i])
		if err != nil {
			return 0, err
		}
		i -= size
		i = encodeVarintGenerated(dAtA, i, uint64(size))
	}
	i--
	dAtA[i] = 0x12 // tag: field 2, wire type 2 (length-delimited)
	{
		size, err := m.ObjectMeta.MarshalToSizedBuffer(dAtA[:i])
		if err != nil {
			return 0, err
		}
		i -= size
		i = encodeVarintGenerated(dAtA, i, uint64(size))
	}
	i--
	dAtA[i] = 0xa // tag: field 1, wire type 2 (length-delimited)
	return len(dAtA) - i, nil
}
// Marshal encodes m into a freshly allocated, exactly-sized buffer.
func (m *LeaseList) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

func (m *LeaseList) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer writes fields back-to-front: Items in reverse order
// (field 2) so they decode in original order, then ListMeta (field 1).
func (m *LeaseList) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if len(m.Items) > 0 {
		for iNdEx := len(m.Items) - 1; iNdEx >= 0; iNdEx-- {
			{
				size, err := m.Items[iNdEx].MarshalToSizedBuffer(dAtA[:i])
				if err != nil {
					return 0, err
				}
				i -= size
				i = encodeVarintGenerated(dAtA, i, uint64(size))
			}
			i--
			dAtA[i] = 0x12 // tag: field 2 (Items element), wire type 2
		}
	}
	{
		size, err := m.ListMeta.MarshalToSizedBuffer(dAtA[:i])
		if err != nil {
			return 0, err
		}
		i -= size
		i = encodeVarintGenerated(dAtA, i, uint64(size))
	}
	i--
	dAtA[i] = 0xa // tag: field 1 (ListMeta), wire type 2
	return len(dAtA) - i, nil
}
// Marshal encodes m into a freshly allocated, exactly-sized buffer.
func (m *LeaseSpec) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

func (m *LeaseSpec) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer writes optional fields back-to-front (fields 5..1);
// nil pointers mean "unset" and are skipped entirely.
func (m *LeaseSpec) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.LeaseTransitions != nil {
		i = encodeVarintGenerated(dAtA, i, uint64(*m.LeaseTransitions))
		i--
		dAtA[i] = 0x28 // tag: field 5 (LeaseTransitions), wire type 0 (varint)
	}
	if m.RenewTime != nil {
		{
			size, err := m.RenewTime.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintGenerated(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x22 // tag: field 4 (RenewTime), wire type 2
	}
	if m.AcquireTime != nil {
		{
			size, err := m.AcquireTime.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintGenerated(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x1a // tag: field 3 (AcquireTime), wire type 2
	}
	if m.LeaseDurationSeconds != nil {
		i = encodeVarintGenerated(dAtA, i, uint64(*m.LeaseDurationSeconds))
		i--
		dAtA[i] = 0x10 // tag: field 2 (LeaseDurationSeconds), wire type 0
	}
	if m.HolderIdentity != nil {
		i -= len(*m.HolderIdentity)
		copy(dAtA[i:], *m.HolderIdentity)
		i = encodeVarintGenerated(dAtA, i, uint64(len(*m.HolderIdentity)))
		i--
		dAtA[i] = 0xa // tag: field 1 (HolderIdentity), wire type 2
	}
	return len(dAtA) - i, nil
}
// encodeVarintGenerated writes v as a protobuf varint whose final byte sits
// just before index 'offset' and returns the index of the varint's first
// byte (little-endian base-128, continuation bit 0x80 on all but the last).
func encodeVarintGenerated(dAtA []byte, offset int, v uint64) int {
	start := offset - sovGenerated(v)
	pos := start
	for {
		b := uint8(v & 0x7f)
		v >>= 7
		if v == 0 {
			dAtA[pos] = b
			break
		}
		dAtA[pos] = b | 0x80
		pos++
	}
	return start
}
// Size returns the encoded byte size of the message; each embedded message
// costs one tag byte, a varint length prefix, and its own size.
func (m *Lease) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = m.ObjectMeta.Size()
	n += 1 + l + sovGenerated(uint64(l))
	l = m.Spec.Size()
	n += 1 + l + sovGenerated(uint64(l))
	return n
}

// Size for LeaseList: the list metadata plus every item.
func (m *LeaseList) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = m.ListMeta.Size()
	n += 1 + l + sovGenerated(uint64(l))
	if len(m.Items) > 0 {
		for _, e := range m.Items {
			l = e.Size()
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	return n
}

// Size for LeaseSpec: only set (non-nil) optional fields contribute.
func (m *LeaseSpec) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if m.HolderIdentity != nil {
		l = len(*m.HolderIdentity)
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.LeaseDurationSeconds != nil {
		n += 1 + sovGenerated(uint64(*m.LeaseDurationSeconds))
	}
	if m.AcquireTime != nil {
		l = m.AcquireTime.Size()
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.RenewTime != nil {
		l = m.RenewTime.Size()
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.LeaseTransitions != nil {
		n += 1 + sovGenerated(uint64(*m.LeaseTransitions))
	}
	return n
}
// sovGenerated reports how many bytes x needs as a protobuf base-128
// varint. The x|1 makes Len64 return at least 1 so zero encodes in 1 byte.
func sovGenerated(x uint64) (n int) {
	return (math_bits.Len64(x|1) + 6) / 7
}
// sozGenerated reports the varint size of x after zig-zag encoding
// (used for sint32/sint64 fields).
func sozGenerated(x uint64) (n int) {
	return sovGenerated(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
// String returns a compact, human-readable gogoproto representation of
// this Lease ("nil" for a nil receiver).
func (this *Lease) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Lease{`,
		`ObjectMeta:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.ObjectMeta), "ObjectMeta", "v1.ObjectMeta", 1), `&`, ``, 1) + `,`,
		`Spec:` + strings.Replace(strings.Replace(this.Spec.String(), "LeaseSpec", "LeaseSpec", 1), `&`, ``, 1) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a compact, human-readable gogoproto representation of
// this LeaseList ("nil" for a nil receiver).
func (this *LeaseList) String() string {
	if this == nil {
		return "nil"
	}
	repeatedStringForItems := "[]Lease{"
	for _, f := range this.Items {
		repeatedStringForItems += strings.Replace(strings.Replace(f.String(), "Lease", "Lease", 1), `&`, ``, 1) + ","
	}
	repeatedStringForItems += "}"
	s := strings.Join([]string{`&LeaseList{`,
		`ListMeta:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.ListMeta), "ListMeta", "v1.ListMeta", 1), `&`, ``, 1) + `,`,
		`Items:` + repeatedStringForItems + `,`,
		`}`,
	}, "")
	return s
}
// String returns a compact, human-readable gogoproto representation of
// this LeaseSpec; pointer fields print as "nil" or "*<value>".
func (this *LeaseSpec) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&LeaseSpec{`,
		`HolderIdentity:` + valueToStringGenerated(this.HolderIdentity) + `,`,
		`LeaseDurationSeconds:` + valueToStringGenerated(this.LeaseDurationSeconds) + `,`,
		`AcquireTime:` + strings.Replace(fmt.Sprintf("%v", this.AcquireTime), "MicroTime", "v1.MicroTime", 1) + `,`,
		`RenewTime:` + strings.Replace(fmt.Sprintf("%v", this.RenewTime), "MicroTime", "v1.MicroTime", 1) + `,`,
		`LeaseTransitions:` + valueToStringGenerated(this.LeaseTransitions) + `,`,
		`}`,
	}, "")
	return s
}
// valueToStringGenerated formats a possibly-nil pointer held in v:
// "nil" for a nil pointer, otherwise "*<value>" using default formatting.
func valueToStringGenerated(v interface{}) string {
	rv := reflect.ValueOf(v)
	if rv.IsNil() {
		return "nil"
	}
	pv := reflect.Indirect(rv).Interface()
	return fmt.Sprintf("*%v", pv)
}
// Unmarshal decodes protobuf wire data in dAtA into m.
// Unknown fields are skipped via skipGenerated; group wire types are rejected.
func (m *Lease) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the field tag (field number << 3 | wire type) as a varint.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: Lease: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: Lease: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// metadata (ObjectMeta message, field 1).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ObjectMeta", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthGenerated
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ObjectMeta.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// spec (LeaseSpec message, field 2).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Spec", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthGenerated
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.Spec.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire data in dAtA into m.
// Items are appended in wire order; unknown fields are skipped.
func (m *LeaseList) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the field tag (field number << 3 | wire type) as a varint.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: LeaseList: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: LeaseList: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// metadata (ListMeta message, field 1).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ListMeta", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthGenerated
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ListMeta.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// items (repeated Lease message, field 2).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Items", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthGenerated
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			// Append an empty element, then decode in place.
			m.Items = append(m.Items, Lease{})
			if err := m.Items[len(m.Items)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire data in dAtA into m.
// All fields are optional: scalars are stored through freshly allocated
// pointers, message fields are allocated on first use. Unknown fields
// are skipped.
func (m *LeaseSpec) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the field tag (field number << 3 | wire type) as a varint.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: LeaseSpec: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: LeaseSpec: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// holderIdentity (optional string, field 1).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field HolderIdentity", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthGenerated
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			s := string(dAtA[iNdEx:postIndex])
			m.HolderIdentity = &s
			iNdEx = postIndex
		case 2:
			// leaseDurationSeconds (optional int32 varint, field 2).
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field LeaseDurationSeconds", wireType)
			}
			var v int32
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				v |= int32(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			m.LeaseDurationSeconds = &v
		case 3:
			// acquireTime (optional MicroTime message, field 3).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field AcquireTime", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthGenerated
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.AcquireTime == nil {
				m.AcquireTime = &v1.MicroTime{}
			}
			if err := m.AcquireTime.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 4:
			// renewTime (optional MicroTime message, field 4).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field RenewTime", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthGenerated
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.RenewTime == nil {
				m.RenewTime = &v1.MicroTime{}
			}
			if err := m.RenewTime.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 5:
			// leaseTransitions (optional int32 varint, field 5).
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field LeaseTransitions", wireType)
			}
			var v int32
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				v |= int32(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			m.LeaseTransitions = &v
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// skipGenerated returns the number of bytes occupied by the field (or
// group) starting at dAtA[0], without decoding its contents. depth tracks
// nested start/end group markers; the skip completes only when depth
// returns to zero.
func skipGenerated(dAtA []byte) (n int, err error) {
	l := len(dAtA)
	iNdEx := 0
	depth := 0
	for iNdEx < l {
		var wire uint64
		// Decode the field tag as a varint.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return 0, ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return 0, io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		wireType := int(wire & 0x7)
		switch wireType {
		case 0:
			// Varint payload: consume bytes until the continuation bit clears.
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				iNdEx++
				if dAtA[iNdEx-1] < 0x80 {
					break
				}
			}
		case 1:
			// Fixed 64-bit payload.
			iNdEx += 8
		case 2:
			// Length-delimited payload: varint length, then that many bytes.
			var length int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				length |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if length < 0 {
				return 0, ErrInvalidLengthGenerated
			}
			iNdEx += length
		case 3:
			// Start group.
			depth++
		case 4:
			// End group; must balance a prior start group.
			if depth == 0 {
				return 0, ErrUnexpectedEndOfGroupGenerated
			}
			depth--
		case 5:
			// Fixed 32-bit payload.
			iNdEx += 4
		default:
			return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
		}
		// Guard against index overflow from hostile lengths.
		if iNdEx < 0 {
			return 0, ErrInvalidLengthGenerated
		}
		if depth == 0 {
			return iNdEx, nil
		}
	}
	return 0, io.ErrUnexpectedEOF
}
// Sentinel errors shared by the generated (un)marshaling code above.
var (
	ErrInvalidLengthGenerated        = fmt.Errorf("proto: negative length found during unmarshaling")
	ErrIntOverflowGenerated          = fmt.Errorf("proto: integer overflow")
	ErrUnexpectedEndOfGroupGenerated = fmt.Errorf("proto: unexpected end of group")
)
| {
"pile_set_name": "Github"
} |
var createWrap = require('./_createWrap'),
    flatRest = require('./_flatRest');

/** Used to compose bitmasks for function metadata. */
var WRAP_REARG_FLAG = 256;

/**
 * Creates a function that invokes `func` with arguments arranged according
 * to the specified `indexes` where the argument value at the first index is
 * provided as the first argument, the argument value at the second index is
 * provided as the second argument, and so on.
 *
 * @static
 * @memberOf _
 * @since 3.0.0
 * @category Function
 * @param {Function} func The function to rearrange arguments for.
 * @param {...(number|number[])} indexes The arranged argument indexes.
 * @returns {Function} Returns the new function.
 * @example
 *
 * var rearged = _.rearg(function(a, b, c) {
 *   return [a, b, c];
 * }, [2, 0, 1]);
 *
 * rearged('b', 'c', 'a')
 * // => ['a', 'b', 'c']
 */
var rearg = flatRest(function(func, indexes) {
  // `flatRest` flattens the rest parameter, so `indexes` may be given as
  // separate number arguments or as a single array of numbers.
  return createWrap(func, WRAP_REARG_FLAG, undefined, undefined, undefined, indexes);
});

module.exports = rearg;
| {
"pile_set_name": "Github"
} |
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
package ipv4
import (
"unsafe"
"golang.org/x/net/internal/iana"
"golang.org/x/net/internal/socket"
)
// setControlMessage enables or disables, per the flags in cf, delivery of
// ancillary control-message data (TTL, source, destination, interface) on
// the connection c, and records the resulting state in opt. Platforms with
// IP_PKTINFO use a single packet-info option for src/dst/interface;
// otherwise the separate receive-dst and receive-interface options are used.
func setControlMessage(c *socket.Conn, opt *rawOpt, cf ControlFlags, on bool) error {
	opt.Lock()
	defer opt.Unlock()
	if so, ok := sockOpts[ssoReceiveTTL]; ok && cf&FlagTTL != 0 {
		if err := so.SetInt(c, boolint(on)); err != nil {
			return err
		}
		if on {
			opt.set(FlagTTL)
		} else {
			opt.clear(FlagTTL)
		}
	}
	if so, ok := sockOpts[ssoPacketInfo]; ok {
		// Packet-info path: one socket option covers src, dst and interface.
		if cf&(FlagSrc|FlagDst|FlagInterface) != 0 {
			if err := so.SetInt(c, boolint(on)); err != nil {
				return err
			}
			if on {
				opt.set(cf & (FlagSrc | FlagDst | FlagInterface))
			} else {
				opt.clear(cf & (FlagSrc | FlagDst | FlagInterface))
			}
		}
	} else {
		// Fallback path: dst and interface are separate options; FlagSrc has
		// no equivalent here and is left untouched.
		if so, ok := sockOpts[ssoReceiveDst]; ok && cf&FlagDst != 0 {
			if err := so.SetInt(c, boolint(on)); err != nil {
				return err
			}
			if on {
				opt.set(FlagDst)
			} else {
				opt.clear(FlagDst)
			}
		}
		if so, ok := sockOpts[ssoReceiveInterface]; ok && cf&FlagInterface != 0 {
			if err := so.SetInt(c, boolint(on)); err != nil {
				return err
			}
			if on {
				opt.set(FlagInterface)
			} else {
				opt.clear(FlagInterface)
			}
		}
	}
	return nil
}
// marshalTTL writes an IP_RECVTTL control-message header with a one-byte
// payload into b and returns the buffer positioned after that payload.
// cm is unused: the TTL control message is receive-only, so there is no
// value to marshal from it.
func marshalTTL(b []byte, cm *ControlMessage) []byte {
	m := socket.ControlMessage(b)
	m.MarshalHeader(iana.ProtocolIP, sysIP_RECVTTL, 1)
	return m.Next(1)
}
// parseTTL stores the received TTL from the control-message payload b into
// cm. The unsafe expression reads the payload's first byte; the b[:1]
// reslice preserves a bounds check before the pointer dereference.
func parseTTL(cm *ControlMessage, b []byte) {
	cm.TTL = int(*(*byte)(unsafe.Pointer(&b[:1][0])))
}
| {
"pile_set_name": "Github"
} |
module Katello
  # Test-fixture support: maps fixture (database table) names to their
  # Katello model classes so test cases load each fixture set into the
  # correct ActiveRecord model.
  module FixturesSupport
    # Frozen lookup table from fixture table name to model class.
    FIXTURE_CLASSES = {
      :katello_activation_keys => Katello::ActivationKey,
      :katello_activation_key_purpose_addons => Katello::ActivationKeyPurposeAddon,
      :katello_contents => Katello::Content,
      :katello_content_views => Katello::ContentView,
      :katello_content_view_environments => Katello::ContentViewEnvironment,
      :katello_content_view_filters => Katello::ContentViewFilter,
      :katello_content_view_erratum_filter_rules => Katello::ContentViewErratumFilterRule,
      :katello_content_view_package_filter_rules => Katello::ContentViewPackageFilterRule,
      :katello_content_view_package_group_filter_rules => Katello::ContentViewPackageGroupFilterRule,
      :katello_content_view_module_stream_filter_rules => Katello::ContentViewModuleStreamFilterRule,
      :katello_content_view_puppet_modules => Katello::ContentViewPuppetModule,
      :katello_content_view_puppet_environments => Katello::ContentViewPuppetEnvironment,
      :katello_content_view_repositories => Katello::ContentViewRepository,
      :katello_content_view_histories => Katello::ContentViewHistory,
      :katello_content_view_versions => Katello::ContentViewVersion,
      :katello_environments => Katello::KTEnvironment,
      :katello_files => Katello::FileUnit,
      :katello_gpg_keys => Katello::GpgKey,
      :katello_package_groups => Katello::PackageGroup,
      :katello_repository_package_groups => Katello::RepositoryPackageGroup,
      :katello_pools => Katello::Pool,
      :katello_products => Katello::Product,
      :katello_pool_products => Katello::PoolProduct,
      :katello_product_contents => Katello::ProductContent,
      :katello_providers => Katello::Provider,
      :katello_puppet_modules => Katello::PuppetModule,
      :katello_purpose_addons => Katello::PurposeAddon,
      :katello_repository_puppet_modules => Katello::RepositoryPuppetModule,
      :katello_root_repositories => Katello::RootRepository,
      :katello_repositories => Katello::Repository,
      :katello_sync_plans => Katello::SyncPlan,
      :katello_host_collections => Katello::HostCollection,
      :katello_subscriptions => Katello::Subscription,
      :katello_host_collection_hosts => Katello::HostCollectionHosts,
      :katello_task_statuses => Katello::TaskStatus,
      :katello_errata => Katello::Erratum,
      :katello_erratum_packages => Katello::ErratumPackage,
      :katello_erratum_cves => Katello::ErratumCve,
      :katello_repository_errata => Katello::RepositoryErratum,
      :katello_rpms => Katello::Rpm,
      :katello_srpms => Katello::Srpm,
      :katello_repository_rpms => Katello::RepositoryRpm,
      :katello_repository_srpms => Katello::RepositorySrpm,
      :katello_yum_metadata_files => Katello::YumMetadataFile,
      :katello_content_facets => Katello::Host::ContentFacet,
      :katello_subscription_facets => Katello::Host::SubscriptionFacet,
      :katello_docker_manifests => Katello::DockerManifest,
      :katello_docker_tags => Katello::DockerTag,
      :katello_subscription_facet_pools => Katello::SubscriptionFacetPool,
      :katello_module_streams => Katello::ModuleStream,
      :katello_module_profiles => Katello::ModuleProfile,
      :katello_module_stream_artifacts => Katello::ModuleStreamArtifact,
      :katello_module_profile_rpms => Katello::ModuleProfileRpm,
      :katello_repository_module_streams => Katello::RepositoryModuleStream,
      :katello_content_facet_applicable_module_streams => Katello::ContentFacetApplicableModuleStream,
      :katello_available_module_streams => Katello::AvailableModuleStream,
      :katello_host_available_module_streams => Katello::HostAvailableModuleStream,
      :katello_ansible_collections => Katello::AnsibleCollection,
      :katello_repository_ansible_collections => Katello::RepositoryAnsibleCollection
    }.freeze

    # Registers every fixture/model pairing on the given test class
    # (expected to respond to +set_fixture_class+, as Rails test cases do).
    # rubocop:disable Naming/AccessorMethodName
    def self.set_fixture_classes(test_class)
      FIXTURE_CLASSES.each { |k, v| test_class.set_fixture_class(k => v) }
    end
  end
end
| {
"pile_set_name": "Github"
} |
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
namespace Microsoft.ServiceFabric.Powershell
{
using System.Fabric;
using System.Management.Automation;
[Cmdlet(VerbsCommon.New, "ServiceFabricPackageSharingPolicy")]
public sealed class NewPackageSharingPolicy : CommonCmdletBase
{
        /// <summary>Name of the package the sharing policy applies to.</summary>
        [Parameter(Mandatory = false, Position = 0)]
        public string PackageName
        {
            get;
            set;
        }

        /// <summary>Selects the All sharing scope. Parameter sets make the four scope switches mutually exclusive.</summary>
        [Parameter(Mandatory = true, ParameterSetName = "All")]
        public SwitchParameter SharingScopeAll
        {
            get;
            set;
        }

        /// <summary>Selects the Code sharing scope.</summary>
        [Parameter(Mandatory = true, ParameterSetName = "Code")]
        public SwitchParameter SharingScopeCode
        {
            get;
            set;
        }

        /// <summary>Selects the Config sharing scope.</summary>
        [Parameter(Mandatory = true, ParameterSetName = "Config")]
        public SwitchParameter SharingScopeConfig
        {
            get;
            set;
        }

        /// <summary>Selects the Data sharing scope.</summary>
        [Parameter(Mandatory = true, ParameterSetName = "Data")]
        public SwitchParameter SharingScopeData
        {
            get;
            set;
        }
protected override void ProcessRecord()
{
PackageSharingPolicy sharingPolicy;
if (this.SharingScopeAll.IsPresent)
{
sharingPolicy = new PackageSharingPolicy(this.PackageName, PackageSharingPolicyScope.All);
}
else if (this.SharingScopeCode.IsPresent)
{
sharingPolicy = new PackageSharingPolicy(this.PackageName, PackageSharingPolicyScope.Code);
}
else if (this.SharingScopeConfig.IsPresent)
{
sharingPolicy = new PackageSharingPolicy(this.PackageName, PackageSharingPolicyScope.Config);
}
else if (this.SharingScopeData.IsPresent)
{
sharingPolicy = new PackageSharingPolicy(this.PackageName, PackageSharingPolicyScope.Data);
}
else
{
sharingPolicy = new PackageSharingPolicy(this.PackageName, PackageSharingPolicyScope.None);
}
this.WriteObject(new PSObject(sharingPolicy));
}
}
} | {
"pile_set_name": "Github"
} |
class Solution:
    def isMatch(self, s: str, p: str) -> bool:
        """Return True if pattern ``p`` matches ALL of string ``s``.

        Supports '.' (any single character) and '*' (zero or more of the
        preceding element). Bottom-up DP: ``table[i][j]`` is True when the
        first ``i`` characters of ``s`` match the first ``j`` characters
        of ``p``.
        """
        if not p:
            # Empty pattern matches only the empty string.
            return not s
        rows, cols = len(s) + 1, len(p) + 1
        table = [[False] * cols for _ in range(rows)]
        table[0][0] = True
        # Empty string vs. pattern prefixes: "x*" pairs may vanish.
        for col in range(2, cols):
            if p[col - 1] == '*':
                table[0][col] = table[0][col - 2]
        for row in range(1, rows):
            ch = s[row - 1]
            for col in range(1, cols):
                pc = p[col - 1]
                if pc == ch or pc == '.':
                    # Literal or wildcard match consumes one char of each.
                    table[row][col] = table[row - 1][col - 1]
                elif pc == '*':
                    # Zero occurrences of the starred element...
                    matched = table[row][col - 2]
                    # ...or one more occurrence, if the element matches ch.
                    if p[col - 2] == '.' or p[col - 2] == ch:
                        matched = matched or table[row - 1][col]
                    table[row][col] = matched
                else:
                    table[row][col] = False
        return table[rows - 1][cols - 1]
| {
"pile_set_name": "Github"
} |
package iam
import (
"fmt"
"github.com/aws/aws-sdk-go/aws"
awsiam "github.com/aws/aws-sdk-go/service/iam"
)
//go:generate faux --interface accessKeysClient --output fakes/access_keys_client.go

// accessKeysClient is the subset of the AWS IAM API used to list and
// delete a user's access keys; a fake is generated for tests via faux.
type accessKeysClient interface {
	ListAccessKeys(*awsiam.ListAccessKeysInput) (*awsiam.ListAccessKeysOutput, error)
	DeleteAccessKey(*awsiam.DeleteAccessKeyInput) (*awsiam.DeleteAccessKeyOutput, error)
}

//go:generate faux --interface accessKeys --output fakes/access_keys.go

// accessKeys deletes all access keys belonging to a named IAM user.
type accessKeys interface {
	Delete(userName string) error
}

// AccessKeys deletes IAM access keys through an accessKeysClient and logs
// the outcome of each deletion.
type AccessKeys struct {
	client accessKeysClient
	logger logger
}
// NewAccessKeys returns an AccessKeys backed by the given IAM client and logger.
func NewAccessKeys(client accessKeysClient, logger logger) AccessKeys {
	return AccessKeys{
		client: client,
		logger: logger,
	}
}
// Delete removes every access key belonging to the named IAM user.
// A listing failure aborts with an error; individual key deletions are
// best-effort — each success or failure is logged and the loop continues,
// and the method always returns nil once listing succeeded.
func (k AccessKeys) Delete(userName string) error {
	accessKeys, err := k.client.ListAccessKeys(&awsiam.ListAccessKeysInput{UserName: aws.String(userName)})
	if err != nil {
		return fmt.Errorf("List IAM Access Keys: %s", err)
	}
	for _, a := range accessKeys.AccessKeyMetadata {
		n := *a.AccessKeyId
		_, err = k.client.DeleteAccessKey(&awsiam.DeleteAccessKeyInput{
			UserName:    aws.String(userName),
			AccessKeyId: a.AccessKeyId,
		})
		if err == nil {
			k.logger.Printf("[IAM User: %s] Deleted access key %s \n", userName, n)
		} else {
			k.logger.Printf("[IAM User: %s] Delete access key %s: %s \n", userName, n, err)
		}
	}
	return nil
}
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: 77fad823e2f8876428710271c7fe3beb
timeCreated: 1483355216
licenseType: Pro
NativeFormatImporter:
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
/*
 * Copyright (c) 2010-2011 Espressif System
 *
 */
#ifndef _WLAN_LWIP_IF_H_
#define _WLAN_LWIP_IF_H_

/* Task priorities for the two lwIP network interfaces. */
#define LWIP_IF0_PRIO 28
#define LWIP_IF1_PRIO 29

/* Signals posted to the lwIP task. */
enum {
	SIG_LWIP_RX = 0, /* a received frame is ready for lwIP */
};

/* Allocates and registers an lwIP netif for the given 802.11 connection,
 * using the supplied MAC address and IP configuration. */
struct netif * eagle_lwip_if_alloc(struct ieee80211_conn *conn, const uint8 *macaddr, struct ip_info *info);
/* Returns the netif for the given interface index.
 * NOTE(review): index semantics (e.g. 0 = station, 1 = softAP) are not
 * visible here — confirm against the implementation. */
struct netif * eagle_lwip_getif(uint8 index);

/* Transmit entry point; signature differs under IOT_SIP_MODE builds. */
#ifndef IOT_SIP_MODE
sint8 ieee80211_output_pbuf(struct netif *ifp, struct pbuf* pb);
#else
sint8 ieee80211_output_pbuf(struct ieee80211_conn *conn, esf_buf *eb);
#endif

#endif /* _WLAN_LWIP_IF_H_ */
| {
"pile_set_name": "Github"
} |
/* ScummVM - Graphic Adventure Engine
*
* ScummVM is the legal property of its developers, whose names
* are too numerous to list here. Please refer to the COPYRIGHT
* file distributed with this source distribution.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
*/
/*
* This code is based on Broken Sword 2.5 engine
*
* Copyright (c) Malte Thiesen, Daniel Queteschiner and Michael Elsdoerfer
*
* Licensed under GNU GPL v2
*
*/
#include "sword25/gfx/timedrenderobject.h"
#include "sword25/gfx/renderobjectmanager.h"
namespace Sword25 {
// Constructs the object and registers it with the render object manager's
// timed-object list so it receives per-frame time updates.
// (The misspelled attatch/detatch names are the manager's actual API.)
TimedRenderObject::TimedRenderObject(RenderObjectPtr<RenderObject> pParent, TYPES type, uint handle) :
	RenderObject(pParent, type, handle) {
	assert(getManager());
	getManager()->attatchTimedRenderObject(this->getHandle());
}
// Unregisters the object from the manager's timed-object list so no
// further time updates are delivered to a destroyed object.
TimedRenderObject::~TimedRenderObject() {
	assert(getManager());
	getManager()->detatchTimedRenderObject(this->getHandle());
}
} // End of namespace Sword25
| {
"pile_set_name": "Github"
} |
// Cross-platform font-stack variables, each ending in a generic family
// fallback (serif / sans-serif / monospace).
$georgia: Georgia, Cambria, "Times New Roman", Times, serif;
$helvetica: "Helvetica Neue", Helvetica, Arial, sans-serif;
$lucida-grande: "Lucida Grande", Tahoma, Verdana, Arial, sans-serif;
$monospace: "Bitstream Vera Sans Mono", Consolas, Courier, monospace;
$verdana: Verdana, Geneva, sans-serif;
| {
"pile_set_name": "Github"
} |
# coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python2, python3
"""Tool that prints the most likely architecture from a mobile model search."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
from typing import Text
from absl import app
from absl import flags
import numpy as np
import six
from six.moves import map
import tensorflow.compat.v1 as tf
# The mobile_classifier_factory import might look like it's unused, but
# importing it will register some namedtuples that are needed for model_spec
# deserialization.
from tunas import analyze_mobile_search_lib
from tunas import mobile_classifier_factory # pylint:disable=unused-import
from tunas import mobile_cost_model
from tunas import schema
from tunas import schema_io
# Supported values for --output_format.
_OUTPUT_FORMAT_LINES = 'lines'  # human-readable "key = value" lines
_OUTPUT_FORMAT_CSV = 'csv'      # one CSV row per scanned directory
_OUTPUT_FORMATS = (_OUTPUT_FORMAT_LINES, _OUTPUT_FORMAT_CSV)

flags.DEFINE_string(
    'dirname', None,
    'Directory containing the logs to read. Can also be a glob, in which '
    'case we will separately process each directory that matches the pattern.')

flags.DEFINE_enum(
    'output_format', 'lines', _OUTPUT_FORMATS,
    'Format to use for the printed output.')

FLAGS = flags.FLAGS
def _scan_directory(directory,
                    output_format,
                    ssd):
  """Scan a directory for log files and write the final model to stdout.

  Args:
    directory: Path to one search run's output (model_spec.json + events).
    output_format: One of _OUTPUT_FORMATS; controls stdout layout.
    ssd: Search space definition name, passed to the cost model.
  """
  if output_format == _OUTPUT_FORMAT_LINES:
    print('directory =', directory)
  model_spec_filename = os.path.join(directory, 'model_spec.json')
  # Missing spec or event data is reported and skipped, not fatal.
  if not tf.io.gfile.exists(model_spec_filename):
    print('file {} not found; skipping'.format(model_spec_filename))
    if output_format == _OUTPUT_FORMAT_LINES:
      print()
    return
  with tf.io.gfile.GFile(model_spec_filename, 'r') as handle:
    model_spec = schema_io.deserialize(handle.read())
  # Collect every OneOf decision point in the model spec, keyed by path.
  paths = []
  oneofs = dict()
  def populate_oneofs(path, oneof):
    paths.append(path)
    oneofs[path] = oneof
  schema.map_oneofs_with_paths(populate_oneofs, model_spec)
  all_path_logits = analyze_mobile_search_lib.read_path_logits(directory)
  if not all_path_logits:
    print('event data missing from directory {}; skipping'.format(directory))
    if output_format == _OUTPUT_FORMAT_LINES:
      print()
    return
  # Use the logits from the latest recorded training step.
  global_step = max(all_path_logits)
  if output_format == _OUTPUT_FORMAT_LINES:
    print('global_step = {:d}'.format(global_step))
  # The event files and the model spec must describe the same decisions.
  all_path_logit_keys = six.viewkeys(all_path_logits[global_step])
  oneof_keys = six.viewkeys(oneofs)
  if all_path_logit_keys != oneof_keys:
    raise ValueError(
        'OneOf key mismatch. Present in event files but not in model_spec: {}. '
        'Present in model_spec but not in event files: {}'.format(
            all_path_logit_keys - oneof_keys,
            oneof_keys - all_path_logit_keys))
  # The most likely architecture takes the argmax choice at every OneOf.
  indices = []
  for path in paths:
    index = np.argmax(all_path_logits[global_step][path])
    indices.append(index)
  indices_str = ':'.join(map(str, indices))
  if output_format == _OUTPUT_FORMAT_LINES:
    print('indices = {:s}'.format(indices_str))
  cost_model_time = mobile_cost_model.estimate_cost(indices, ssd)
  if output_format == _OUTPUT_FORMAT_LINES:
    print('cost_model = {:f}'.format(cost_model_time))
  if output_format == _OUTPUT_FORMAT_LINES:
    print()
  elif output_format == _OUTPUT_FORMAT_CSV:
    fields = [indices_str, global_step, directory, cost_model_time]
    print(','.join(map(str, fields)))
def _get_ssd(dirname):
  """Returns the search space definition name stored in dirname/params.json."""
  params_path = os.path.join(dirname, 'params.json')
  with tf.io.gfile.GFile(params_path) as handle:
    return json.load(handle)['ssd']
def main(argv):
  """Scans every directory matching --dirname and prints the chosen model."""
  if len(argv) > 1:
    raise app.UsageError('Too many command-line arguments.')

  directories = tf.io.gfile.glob(FLAGS.dirname)
  if not directories:
    print('No matches found:', FLAGS.dirname)
    return
  # CSV mode prints a single header row before the per-directory rows.
  if FLAGS.output_format == _OUTPUT_FORMAT_CSV:
    columns = ['indices', 'global_step', 'directory', 'cost_model']
    print(','.join(columns))
  for dirname in directories:
    _scan_directory(
        directory=dirname,
        output_format=FLAGS.output_format,
        ssd=_get_ssd(dirname))
if __name__ == '__main__':
  # --dirname is mandatory; quiet TF logging and run under TF1 semantics.
  flags.mark_flag_as_required('dirname')
  tf.logging.set_verbosity(tf.logging.ERROR)
  tf.disable_v2_behavior()
  app.run(main)
| {
"pile_set_name": "Github"
} |
<?php
/**
* Manage syncing of content between WP and Elasticsearch for Terms
*
* @since 3.1
* @package elasticpress
*/
namespace ElasticPress\Indexable\Term;
use ElasticPress\Indexables as Indexables;
use ElasticPress\Elasticsearch as Elasticsearch;
use ElasticPress\SyncManager as SyncManagerAbstract;
// Block direct web access; ABSPATH is defined only when WordPress loads us.
if ( ! defined( 'ABSPATH' ) ) {
	exit; // Exit if accessed directly.
}
/**
* Sync manager class
*/
class SyncManager extends SyncManagerAbstract {
/**
* Setup actions and filters
*
* @since 3.1
*/
public function setup() {
if ( ! Elasticsearch::factory()->get_elasticsearch_version() ) {
return;
}
add_action( 'created_term', [ $this, 'action_sync_on_update' ] );
add_action( 'edited_terms', [ $this, 'action_sync_on_update' ] );
add_action( 'added_term_meta', [ $this, 'action_queue_meta_sync' ], 10, 2 );
add_action( 'deleted_term_meta', [ $this, 'action_queue_meta_sync' ], 10, 2 );
add_action( 'updated_term_meta', [ $this, 'action_queue_meta_sync' ], 10, 2 );
add_action( 'delete_term', [ $this, 'action_sync_on_delete' ] );
add_action( 'set_object_terms', [ $this, 'action_sync_on_object_update' ], 10, 2 );
}
/**
* Sync ES index with changes to the term being saved
*
* @param int $term_id Term ID.
* @since 3.1
*/
public function action_sync_on_update( $term_id ) {
if ( $this->kill_sync() ) {
return;
}
if ( ! current_user_can( 'edit_term', $term_id ) ) {
return;
}
if ( apply_filters( 'ep_term_sync_kill', false, $term_id ) ) {
return;
}
do_action( 'ep_sync_term_on_transition', $term_id );
$this->sync_queue[ $term_id ] = true;
// Find all terms in the hierarchy so we resync those as well
$term = get_term( $term_id );
$children = get_term_children( $term_id, $term->taxonomy );
$ancestors = get_ancestors( $term_id, $term->taxonomy, 'taxonomy' );
$hierarchy = array_merge( $ancestors, $children );
foreach ( $hierarchy as $hierarchy_term_id ) {
if ( ! current_user_can( 'edit_term', $hierarchy_term_id ) ) {
return;
}
if ( apply_filters( 'ep_term_sync_kill', false, $hierarchy_term_id ) ) {
return;
}
do_action( 'ep_sync_term_on_transition', $hierarchy_term_id );
$this->sync_queue[ $hierarchy_term_id ] = true;
}
}
/**
 * When term relationships are updated, queue the terms for reindex
 *
 * Each assigned term plus its full hierarchy (ancestors and children)
 * is added to the sync queue.
 *
 * @param int   $object_id Object ID.
 * @param array $terms     An array of term IDs or term objects.
 * @since 3.1
 */
public function action_sync_on_object_update( $object_id, $terms ) {
	if ( $this->kill_sync() ) {
		return;
	}

	if ( empty( $terms ) ) {
		return;
	}

	foreach ( $terms as $term ) {
		$term = get_term( $term );

		// get_term() returns null or WP_Error for an invalid term.
		if ( ! $term instanceof \WP_Term ) {
			continue;
		}

		// Skip (not abort): one uneditable or filtered-out term must not
		// prevent the remaining terms from being queued.
		if ( ! current_user_can( 'edit_term', $term->term_id ) ) {
			continue;
		}

		if ( apply_filters( 'ep_term_sync_kill', false, $term->term_id ) ) {
			continue;
		}

		do_action( 'ep_sync_term_on_transition', $term->term_id );

		$this->sync_queue[ $term->term_id ] = true;

		// Find all terms in the hierarchy so we resync those as well.
		$children  = get_term_children( $term->term_id, $term->taxonomy );
		$ancestors = get_ancestors( $term->term_id, $term->taxonomy, 'taxonomy' );
		$hierarchy = array_merge( $ancestors, $children );

		foreach ( $hierarchy as $hierarchy_term_id ) {
			if ( ! current_user_can( 'edit_term', $hierarchy_term_id ) ) {
				continue;
			}

			if ( apply_filters( 'ep_term_sync_kill', false, $hierarchy_term_id ) ) {
				continue;
			}

			do_action( 'ep_sync_term_on_transition', $hierarchy_term_id );

			$this->sync_queue[ $hierarchy_term_id ] = true;
		}
	}
}
/**
 * When term meta is updated/added/deleted, queue the term for reindex
 *
 * @param int $meta_id Meta ID.
 * @param int $term_id Term ID.
 * @since 3.1
 */
public function action_queue_meta_sync( $meta_id, $term_id ) {
	// Queue the owning term unless syncing is disabled for this request.
	if ( ! $this->kill_sync() ) {
		$this->sync_queue[ $term_id ] = true;
	}
}
/**
 * Delete term from ES when deleted in WP
 *
 * The deleted term's hierarchy (ancestors and children) is queued for
 * resync so their cached hierarchy data stays accurate.
 *
 * @param int $term_id Term ID.
 * @since 3.1
 */
public function action_sync_on_delete( $term_id ) {
	if ( $this->kill_sync() ) {
		return;
	}

	if ( ! current_user_can( 'delete_term', $term_id ) ) {
		return;
	}

	Indexables::factory()->get( 'term' )->delete( $term_id, false );

	// Find all terms in the hierarchy so we resync those as well.
	$term = get_term( $term_id );

	// By the time `delete_term` fires the term may already be gone, in
	// which case get_term() returns null/WP_Error and there is no
	// hierarchy left to look up.
	if ( ! $term instanceof \WP_Term ) {
		return;
	}

	$children  = get_term_children( $term->term_id, $term->taxonomy );
	$ancestors = get_ancestors( $term->term_id, $term->taxonomy, 'taxonomy' );
	$hierarchy = array_merge( $ancestors, $children );

	foreach ( $hierarchy as $hierarchy_term_id ) {
		// Skip (not abort): one uneditable or filtered-out term must not
		// prevent the rest of the hierarchy from being queued.
		if ( ! current_user_can( 'edit_term', $hierarchy_term_id ) ) {
			continue;
		}

		if ( apply_filters( 'ep_term_sync_kill', false, $hierarchy_term_id ) ) {
			continue;
		}

		do_action( 'ep_sync_term_on_transition', $hierarchy_term_id );

		$this->sync_queue[ $hierarchy_term_id ] = true;
	}
}
}
| {
"pile_set_name": "Github"
} |
;;; SRFI 142 (bitwise operations)
;;;
;;; $Id$
;;;
;;; Copyright (C) William D Clinger (2017).
;;;
;;; Permission is hereby granted, free of charge, to any person
;;; obtaining a copy of this software and associated documentation
;;; files (the "Software"), to deal in the Software without
;;; restriction, including without limitation the rights to use,
;;; copy, modify, merge, publish, distribute, sublicense, and/or
;;; sell copies of the Software, and to permit persons to whom the
;;; Software is furnished to do so, subject to the following
;;; conditions:
;;;
;;; The above copyright notice and this permission notice shall be
;;; included in all copies or substantial portions of the Software.
;;;
;;; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
;;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
;;; OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
;;; NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
;;; HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
;;; WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
;;; FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
;;; OTHER DEALINGS IN THE SOFTWARE.
;;; According to SRFI 151, SRFI 142 is exactly the same except
;;; for the interpretation of mask bits in bitwise-if.
;; SRFI 142 is provided as a thin veneer over SRFI 151: every binding is
;; re-exported unchanged except bitwise-if, whose mask bits are
;; interpreted with the opposite polarity (see the note above).
(define-library (srfi 142)
(export
bitwise-not
bitwise-and
bitwise-ior
bitwise-xor
bitwise-eqv ; not R6RS
bitwise-nand ; not R6RS
bitwise-nor ; not R6RS
bitwise-andc1 ; not R6RS
bitwise-andc2 ; not R6RS
bitwise-orc1 ; not R6RS
bitwise-orc2 ; not R6RS
arithmetic-shift ; renaming of R6RS procedure
bit-count ; renaming of R6RS procedure
integer-length ; renaming of R6RS procedure
bitwise-if
bit-set? ; not R6RS
copy-bit ; not R6RS
bit-swap ; not R6RS
any-bit-set? ; not R6RS
every-bit-set? ; not R6RS
first-set-bit ; renaming of R6RS procedure
bit-field ; renaming of R6RS procedure
bit-field-any? ; not R6RS
bit-field-every? ; not R6RS
bit-field-clear ; not R6RS
bit-field-set ; not R6RS
bit-field-replace ; not R6RS
bit-field-replace-same ; not R6RS
bit-field-rotate ; not R6RS
bit-field-reverse ; renaming of R6RS procedure
bits->list ; not R6RS
list->bits ; not R6RS
bits->vector ; not R6RS
vector->bits ; not R6RS
bits ; not R6RS
bitwise-fold ; not R6RS
bitwise-for-each ; not R6RS
bitwise-unfold ; not R6RS
make-bitwise-generator ; not R6RS
)
;; SRFI 151's bitwise-if is imported under a private alias so the name
;; can be rebound with SRFI 142's argument convention below.
(import (scheme base)
(rename (srfi 151)
(bitwise-if srfi151:bitwise-if))
(larceny deprecated))
(begin
(define (bitwise-if mask i j)
;; Warn once that SRFI 142 is deprecated (superseded by SRFI 151).
(issue-warning-deprecated 'srfi-142:bitwise-if)
;; SRFI 142 reads the mask bits with the opposite meaning from
;; SRFI 151, so the two tail arguments are swapped when delegating.
(srfi151:bitwise-if mask j i))))
| {
"pile_set_name": "Github"
} |
/*******************************
Image
*******************************/
/*-------------------
Element
--------------------*/
@placeholderColor: transparent;
@roundedBorderRadius: 0.3125em;
@imageHorizontalMargin: 0.25rem;
@imageVerticalMargin: 0.5rem;
@imageBorder: 1px solid rgba(0, 0, 0, 0.1);
/*-------------------
Types
--------------------*/
/* Avatar */
@avatarSize: 2em;
@avatarMargin: 0.25em;
/*-------------------
Variations
--------------------*/
/* Spaced */
@spacedDistance: 0.5em;
/* Floated */
@floatedHorizontalMargin: 1em;
@floatedVerticalMargin: 1em;
/* Size */
@miniWidth: 35px;
@tinyWidth: 80px;
@smallWidth: 150px;
@mediumWidth: 300px;
@largeWidth: 450px;
@bigWidth: 600px;
@hugeWidth: 800px;
@massiveWidth: 960px;
| {
"pile_set_name": "Github"
} |
#ifndef consts_namespace_H
#define consts_namespace_H

/*
 * Namespacing for the sandy2x constant tables: each short symbol used by
 * the assembly sources is mapped onto a long, collision-free global name
 * so multiple scalarmult implementations can be linked together.
 */
#define v0_0 crypto_scalarmult_curve25519_sandy2x_v0_0
#define v1_0 crypto_scalarmult_curve25519_sandy2x_v1_0
#define v2_1 crypto_scalarmult_curve25519_sandy2x_v2_1
#define v9_0 crypto_scalarmult_curve25519_sandy2x_v9_0
#define v9_9 crypto_scalarmult_curve25519_sandy2x_v9_9
#define v19_19 crypto_scalarmult_curve25519_sandy2x_v19_19
#define v38_1 crypto_scalarmult_curve25519_sandy2x_v38_1
#define v38_38 crypto_scalarmult_curve25519_sandy2x_v38_38
#define v121666_121666 crypto_scalarmult_curve25519_sandy2x_v121666_121666
#define m25 crypto_scalarmult_curve25519_sandy2x_m25
#define m26 crypto_scalarmult_curve25519_sandy2x_m26
#define subc0 crypto_scalarmult_curve25519_sandy2x_subc0
#define subc2 crypto_scalarmult_curve25519_sandy2x_subc2
#define REDMASK51 crypto_scalarmult_curve25519_sandy2x_REDMASK51
#endif /* ifndef consts_namespace_H */
| {
"pile_set_name": "Github"
} |
//
// Copyright 2019 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// system_utils_unittest_helper.h: Constants used by the SystemUtils.RunApp unittest
#ifndef COMMON_SYSTEM_UTILS_UNITTEST_HELPER_H_
#define COMMON_SYSTEM_UTILS_UNITTEST_HELPER_H_

// Values shared between the SystemUtils.RunApp unittest and its helper
// executable so both sides agree on the environment variable, the exact
// stdout/stderr payloads, and the expected command-line arguments.
namespace
{
// Environment variable name/value the helper is expected to read back.
constexpr char kRunAppTestEnvVarName[] = "RUN_APP_TEST_ENV";
constexpr char kRunAppTestEnvVarValue[] = "RunAppTest environment variable value\n";
// Exact byte sequences the helper writes to stdout and stderr.
constexpr char kRunAppTestStdout[] = "RunAppTest stdout test\n";
constexpr char kRunAppTestStderr[] = "RunAppTest stderr test\n .. that expands multiple lines\n";
// Arguments the test passes and the helper verifies, in order.
constexpr char kRunAppTestArg1[] = "--expected-arg1";
constexpr char kRunAppTestArg2[] = "expected_arg2";
constexpr char kRunTestSuite[] = "--run-test-suite";
} // anonymous namespace
#endif // COMMON_SYSTEM_UTILS_UNITTEST_HELPER_H_
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleDisplayName</key>
<string>句读</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>judou</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>$(FLUTTER_BUILD_NAME)</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>$(FLUTTER_BUILD_NUMBER)</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UIViewControllerBasedStatusBarAppearance</key>
<false/>
</dict>
</plist>
| {
"pile_set_name": "Github"
} |
.v c00 c01 c10 c11 e10 e11 c20 c21 c30 c31 a0 a1 e30 e31 c40 c41 c50 c51 e50 e51 c60 c61 c70 c71 b0 b1 b2 b3 e70 e71
.i c00 c01 c10 c11 e10 e11 c20 c21 c30 c31 e30 e31 c40 c41 c50 c51 e50 e51 c60 c61 c70 c71 e70 e71
BEGIN
H e10
H e11
H a0
H a1
H e30
H e31
H e50
H e51
H b0
H b1
H b2
H b3
H e70
H e71
Z c00 c11 e10
X c01
Z c01 c10 e11
X c01
X c00
Z c00 c10 e10
X c00
Z c01 c11 e11
Z c20 c31 a0
X c21
Z c21 c30 a1
X c21
X c20
Z c20 c30 a0
X c20
Z c21 c31 a1
Z c40 c51 e50
X c41
Z c41 c50 e51
X c41
X c40
Z c40 c50 e50
X c40
Z c41 c51 e51
Z c60 c71 b0
X c61
Z c61 c70 b1
X c61
X c60
Z c60 c70 b0
X c60
Z c61 c71 b1
H e10
H e11
H a0
H a1
H e50
H e51
H b0
H b1
X e10
Z e10 a0 e30
X e10
Z e11 a1 e31
X e11
Z e11 a0 e31
X e11
Z e10 a1 e30
X e50
Z e50 b0 b2
X e50
Z e51 b1 b3
X e51
Z e51 b0 b3
X e51
Z e50 b1 b2
H e30
H e31
H b2
H b3
X e30
Z e30 b2 e70
X e30
Z e31 b3 e71
X e31
Z e31 b2 e71
X e31
Z e30 b3 e70
H e70
H e71
END
| {
"pile_set_name": "Github"
} |
# URL routes for browsing jellyroll items by tag.
# NOTE(review): `django.conf.urls.defaults` and `patterns()` are the
# legacy (pre-1.4) Django URL API; kept as-is for compatibility with the
# Django version this app targets.
from django.conf.urls.defaults import *
from jellyroll.views import tags
urlpatterns = patterns('',
# /            -> list of all tags
url(r'^$', tags.tag_list, {}, name='jellyroll_tag_list'),
# /<tag>/      -> items carrying that tag (tag may contain - . ' :)
url(r'^(?P<tag>[-\.\'\:\w]+)/$',tags.tag_item_list, {}, name="jellyroll_tag_item_list"),
)
| {
"pile_set_name": "Github"
} |
/*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache.jsr107;
import java.lang.management.ManagementFactory;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.jsr107.config.ConfigurationElementState;
import org.ehcache.jsr107.config.Jsr107CacheConfiguration;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.cache.configuration.MutableConfiguration;
import javax.cache.spi.CachingProvider;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder;
import static org.ehcache.config.builders.ResourcePoolsBuilder.heap;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
/**
* ConfigStatsManagementActivationTest
*/
public class ConfigStatsManagementActivationTest {
// JMX object-name `type` values registered by the JSR-107 layer.
private static final String MBEAN_MANAGEMENT_TYPE = "CacheConfiguration";
private static final String MBEAN_STATISTICS_TYPE = "CacheStatistics";
// Platform MBean server queried to verify (de)registration of the beans.
private MBeanServer server = ManagementFactory.getPlatformMBeanServer();
private CachingProvider provider;
/** Obtains a fresh caching provider before each test. */
@Before
public void setUp() {
provider = Caching.getCachingProvider();
}
/** Closes the provider (and its cache managers) so MBeans are unregistered. */
@After
public void tearDown() {
provider.close();
}
/** Management/statistics enabled per-cache in XML register both MBeans. */
@Test
public void testEnabledAtCacheLevel() throws Exception {
CacheManager cacheManager = provider.getCacheManager(getClass().getResource("/ehcache-107-mbeans-cache-config.xml")
.toURI(), provider.getDefaultClassLoader());
Cache<String, String> cache = cacheManager.getCache("stringCache", String.class, String.class);
@SuppressWarnings("unchecked")
Eh107Configuration<String, String> configuration = cache.getConfiguration(Eh107Configuration.class);
assertThat(configuration.isManagementEnabled(), is(true));
assertThat(configuration.isStatisticsEnabled(), is(true));
assertThat(isMbeanRegistered("stringCache", MBEAN_MANAGEMENT_TYPE), is(true));
assertThat(isMbeanRegistered("stringCache", MBEAN_STATISTICS_TYPE), is(true));
}
/** Cache-manager-level defaults propagate to caches that do not override them. */
@Test
public void testEnabledAtCacheManagerLevel() throws Exception {
CacheManager cacheManager = provider.getCacheManager(getClass().getResource("/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml")
.toURI(), provider.getDefaultClassLoader());
Cache<String, String> cache = cacheManager.getCache("stringCache", String.class, String.class);
@SuppressWarnings("unchecked")
Eh107Configuration<String, String> configuration = cache.getConfiguration(Eh107Configuration.class);
assertThat(configuration.isManagementEnabled(), is(true));
assertThat(configuration.isStatisticsEnabled(), is(true));
assertThat(isMbeanRegistered("stringCache", MBEAN_MANAGEMENT_TYPE), is(true));
assertThat(isMbeanRegistered("stringCache", MBEAN_STATISTICS_TYPE), is(true));
}
/** A cache that disables both settings wins over manager-level "enabled". */
@Test
public void testCacheLevelOverridesCacheManagerLevel() throws Exception {
CacheManager cacheManager = provider.getCacheManager(getClass().getResource("/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml")
.toURI(), provider.getDefaultClassLoader());
Cache<String, String> cache = cacheManager.getCache("overrideCache", String.class, String.class);
@SuppressWarnings("unchecked")
Eh107Configuration<String, String> configuration = cache.getConfiguration(Eh107Configuration.class);
assertThat(configuration.isManagementEnabled(), is(false));
assertThat(configuration.isStatisticsEnabled(), is(false));
assertThat(isMbeanRegistered("overrideCache", MBEAN_MANAGEMENT_TYPE), is(false));
assertThat(isMbeanRegistered("overrideCache", MBEAN_STATISTICS_TYPE), is(false));
}
/** Overriding only one of the two settings leaves the other at the manager default. */
@Test
public void testCacheLevelOnlyOneOverridesCacheManagerLevel() throws Exception {
CacheManager cacheManager = provider.getCacheManager(getClass().getResource("/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml")
.toURI(), provider.getDefaultClassLoader());
Cache<String, String> cache = cacheManager.getCache("overrideOneCache", String.class, String.class);
@SuppressWarnings("unchecked")
Eh107Configuration<String, String> configuration = cache.getConfiguration(Eh107Configuration.class);
assertThat(configuration.isManagementEnabled(), is(true));
assertThat(configuration.isStatisticsEnabled(), is(false));
assertThat(isMbeanRegistered("overrideOneCache", MBEAN_MANAGEMENT_TYPE), is(true));
assertThat(isMbeanRegistered("overrideOneCache", MBEAN_STATISTICS_TYPE), is(false));
}
/** Programmatic Jsr107CacheConfiguration service enables both MBeans. */
@Test
public void testEnableCacheLevelProgrammatic() throws Exception {
CacheManager cacheManager = provider.getCacheManager();
CacheConfigurationBuilder<Long, String> configurationBuilder = newCacheConfigurationBuilder(Long.class, String.class, heap(10))
.withService(new Jsr107CacheConfiguration(ConfigurationElementState.ENABLED, ConfigurationElementState.ENABLED));
Cache<Long, String> cache = cacheManager.createCache("test", Eh107Configuration.fromEhcacheCacheConfiguration(configurationBuilder));
@SuppressWarnings("unchecked")
Eh107Configuration<Long, String> configuration = cache.getConfiguration(Eh107Configuration.class);
assertThat(configuration.isManagementEnabled(), is(true));
assertThat(configuration.isStatisticsEnabled(), is(true));
assertThat(isMbeanRegistered("test", MBEAN_MANAGEMENT_TYPE), is(true));
assertThat(isMbeanRegistered("test", MBEAN_STATISTICS_TYPE), is(true));
}
/**
 * Returns true when exactly one MBean of the given type is registered for
 * the named cache under the standard {@code javax.cache} JMX domain.
 */
private boolean isMbeanRegistered(String cacheName, String type) throws MalformedObjectNameException {
String query = "javax.cache:type=" + type + ",CacheManager=*,Cache=" + cacheName;
return server.queryMBeans(ObjectName.getInstance(query), null).size() == 1;
}
/** An XML template that enables MBeans overrides the runtime "disabled" request. */
@Test
public void testManagementDisabledOverriddenFromTemplate() throws Exception {
CacheManager cacheManager = provider.getCacheManager(getClass().getResource("/ehcache-107-mbeans-template-config.xml")
.toURI(),
provider.getDefaultClassLoader());
MutableConfiguration<Long, String> configuration = new MutableConfiguration<>();
configuration.setTypes(Long.class, String.class);
configuration.setManagementEnabled(false);
configuration.setStatisticsEnabled(false);
Cache<Long, String> cache = cacheManager.createCache("enables-mbeans", configuration);
@SuppressWarnings("unchecked")
Eh107Configuration<Long, String> eh107Configuration = cache.getConfiguration(Eh107Configuration.class);
assertThat(eh107Configuration.isManagementEnabled(), is(true));
assertThat(eh107Configuration.isStatisticsEnabled(), is(true));
assertThat(isMbeanRegistered("enables-mbeans", MBEAN_MANAGEMENT_TYPE), is(true));
assertThat(isMbeanRegistered("enables-mbeans", MBEAN_STATISTICS_TYPE), is(true));
}
/** An XML template that disables MBeans overrides the runtime "enabled" request. */
@Test
public void testManagementEnabledOverriddenFromTemplate() throws Exception {
CacheManager cacheManager = provider.getCacheManager(getClass().getResource("/ehcache-107-mbeans-template-config.xml")
.toURI(),
provider.getDefaultClassLoader());
MutableConfiguration<Long, String> configuration = new MutableConfiguration<>();
configuration.setTypes(Long.class, String.class);
configuration.setManagementEnabled(true);
configuration.setStatisticsEnabled(true);
Cache<Long, String> cache = cacheManager.createCache("disables-mbeans", configuration);
@SuppressWarnings("unchecked")
Eh107Configuration<Long, String> eh107Configuration = cache.getConfiguration(Eh107Configuration.class);
assertThat(eh107Configuration.isManagementEnabled(), is(false));
assertThat(eh107Configuration.isStatisticsEnabled(), is(false));
assertThat(isMbeanRegistered("disables-mbeans", MBEAN_MANAGEMENT_TYPE), is(false));
assertThat(isMbeanRegistered("disables-mbeans", MBEAN_STATISTICS_TYPE), is(false));
}
/** Without any XML, a plain MutableConfiguration request is honored as-is. */
@Test
public void basicJsr107StillWorks() throws Exception {
CacheManager cacheManager = provider.getCacheManager();
MutableConfiguration<Long, String> configuration = new MutableConfiguration<>();
configuration.setTypes(Long.class, String.class);
configuration.setManagementEnabled(true);
configuration.setStatisticsEnabled(true);
Cache<Long, String> cache = cacheManager.createCache("cache", configuration);
@SuppressWarnings("unchecked")
Eh107Configuration<Long, String> eh107Configuration = cache.getConfiguration(Eh107Configuration.class);
assertThat(eh107Configuration.isManagementEnabled(), is(true));
assertThat(eh107Configuration.isStatisticsEnabled(), is(true));
assertThat(isMbeanRegistered("cache", MBEAN_MANAGEMENT_TYPE), is(true));
assertThat(isMbeanRegistered("cache", MBEAN_STATISTICS_TYPE), is(true));
}
}
| {
"pile_set_name": "Github"
} |
; RUN: llc < %s -march=x86-64
; Loads eight i32 lanes from %p, truncates each lane to i16, and stores
; the narrowed <8 x i16> vector (store target is undef in this reduced
; regression test; old typed-pointer `load <ty>*` syntax).
define void @foo(<8 x i32>* %p) nounwind {
%t = load <8 x i32>* %p
%cti69 = trunc <8 x i32> %t to <8 x i16> ; <<8 x i16>> [#uses=1]
store <8 x i16> %cti69, <8 x i16>* undef
ret void
}
; Same pattern as @foo for the 4-lane case: <4 x i32> load, truncate to
; <4 x i16>, store to undef.
define void @bar(<4 x i32>* %p) nounwind {
%t = load <4 x i32>* %p
%cti44 = trunc <4 x i32> %t to <4 x i16> ; <<4 x i16>> [#uses=1]
store <4 x i16> %cti44, <4 x i16>* undef
ret void
}
| {
"pile_set_name": "Github"
} |
--TEST--
MySQL PDOStatement->closeCursor()
--SKIPIF--
<?php
require_once(dirname(__FILE__) . DIRECTORY_SEPARATOR . 'skipif.inc');
require_once(dirname(__FILE__) . DIRECTORY_SEPARATOR . 'mysql_pdo_test.inc');
MySQLPDOTest::skip();
$db = MySQLPDOTest::factory();
?>
--FILE--
<?php
/* TODO the results look wrong, why do we get 2014 with buffered AND unbuffered queries */
require_once(dirname(__FILE__) . DIRECTORY_SEPARATOR . 'mysql_pdo_test.inc');
$db = MySQLPDOTest::factory();
/**
 * Exercises PDOStatement::closeCursor() under the given connection's
 * current emulation/buffering mode: first provokes the 2014 "commands
 * out of sync" warning by omitting closeCursor(), then shows that
 * calling it allows subsequent queries, updates, and bound-column
 * fetches to succeed.
 *
 * @param PDO $db connection whose attributes are toggled by the caller
 */
function pdo_mysql_stmt_closecursor($db) {
// This one should fail. I let it fail to prove that closeCursor() makes a difference.
// If no error messages gets printed do not know if proper usage of closeCursor() makes any
// difference or not. That's why we need to cause an error here.
$db->setAttribute(PDO::MYSQL_ATTR_USE_BUFFERED_QUERY, false);
$stmt1 = $db->query('SELECT id, label FROM test ORDER BY id ASC');
// query() shall fail!
$stmt2 = $db->query('SELECT id, label FROM test ORDER BY id ASC');
$stmt1->closeCursor();
// This is proper usage of closeCursor(). It shall prevent any further error messages.
if (MySQLPDOTest::isPDOMySQLnd()) {
$stmt1 = $db->query('SELECT id, label FROM test ORDER BY id ASC');
} else {
// see pdo_mysql_stmt_unbuffered_2050.phpt for an explanation
unset($stmt1);
$stmt1 = $db->query('SELECT id, label FROM test ORDER BY id ASC');
}
// fetch only the first rows and let closeCursor() clean up
$row1 = $stmt1->fetch(PDO::FETCH_ASSOC);
$stmt1->closeCursor();
// Update the first row's label to 'z' so the re-execute below can
// verify it reads fresh data.
$stmt2 = $db->prepare('UPDATE test SET label = ? WHERE id = ?');
$stmt2->bindValue(1, "z");
$stmt2->bindValue(2, $row1['id']);
$stmt2->execute();
$stmt2->closeCursor();
$db->setAttribute(PDO::MYSQL_ATTR_USE_BUFFERED_QUERY, true);
// check if changing the fetch mode from unbuffered to buffered will
// cause any harm to a statement created prior to the change
$stmt1->execute();
$row2 = $stmt1->fetch(PDO::FETCH_ASSOC);
$stmt1->closeCursor();
if (!isset($row2['label']) || ('z' !== $row2['label']))
printf("Expecting array(id => 1, label => z) got %s\n", var_export($row2, true));
unset($stmt1);
$stmt1 = $db->query('SELECT id, label FROM test ORDER BY id ASC');
// should work
$stmt2 = $db->query('SELECT id, label FROM test ORDER BY id ASC');
$stmt1->closeCursor();
$stmt1 = $db->query('SELECT id, label FROM test ORDER BY id ASC');
// fetch only the first rows and let closeCursor() clean up
$row3 = $stmt1->fetch(PDO::FETCH_ASSOC);
$stmt1->closeCursor();
assert($row3 == $row2);
// Restore the original label 'a' and confirm the first row round-trips.
$stmt2 = $db->prepare('UPDATE test SET label = ? WHERE id = ?');
$stmt2->bindValue(1, "a");
$stmt2->bindValue(2, $row1['id']);
$stmt2->execute();
$stmt2->closeCursor();
$stmt1->execute();
$row4 = $stmt1->fetch(PDO::FETCH_ASSOC);
$stmt1->closeCursor();
assert($row4 == $row1);
// Bound-parameter/bound-column walk over the first two rows; output is
// matched against the EXPECTF section.
$offset = 0;
$stmt = $db->prepare('SELECT id, label FROM test WHERE id > ? ORDER BY id ASC LIMIT 2');
$in = 0;
if (!$stmt->bindParam(1, $in))
printf("[%03d + 1] Cannot bind parameter, %s %s\n", $offset,
$stmt->errorCode(), var_export($stmt->errorInfo(), true));
$stmt->execute();
$id = $label = null;
if (!$stmt->bindColumn(1, $id, PDO::PARAM_INT))
printf("[%03d + 2] Cannot bind integer column, %s %s\n", $offset,
$stmt->errorCode(), var_export($stmt->errorInfo(), true));
if (!$stmt->bindColumn(2, $label, PDO::PARAM_STR))
printf("[%03d + 3] Cannot bind string column, %s %s\n", $offset,
$stmt->errorCode(), var_export($stmt->errorInfo(), true));
while ($stmt->fetch(PDO::FETCH_BOUND))
printf("in = %d -> id = %s (%s) / label = %s (%s)\n",
$in,
var_export($id, true), gettype($id),
var_export($label, true), gettype($label));
$stmt->closeCursor();
$stmt->execute();
}
try {
printf("Testing emulated PS...\n");
$db->setAttribute(PDO::MYSQL_ATTR_DIRECT_QUERY, 1);
if (1 != $db->getAttribute(PDO::MYSQL_ATTR_DIRECT_QUERY))
printf("[002] Unable to turn on emulated prepared statements\n");
printf("Buffered...\n");
$db->setAttribute(PDO::MYSQL_ATTR_USE_BUFFERED_QUERY, true);
MySQLPDOTest::createTestTable($db);
pdo_mysql_stmt_closecursor($db);
printf("Unbuffered...\n");
$db->setAttribute(PDO::MYSQL_ATTR_USE_BUFFERED_QUERY, false);
MySQLPDOTest::createTestTable($db);
pdo_mysql_stmt_closecursor($db);
printf("Testing native PS...\n");
$db->setAttribute(PDO::MYSQL_ATTR_DIRECT_QUERY, 0);
if (0 != $db->getAttribute(PDO::MYSQL_ATTR_DIRECT_QUERY))
printf("[002] Unable to turn off emulated prepared statements\n");
printf("Buffered...\n");
MySQLPDOTest::createTestTable($db);
$db->setAttribute(PDO::MYSQL_ATTR_USE_BUFFERED_QUERY, true);
pdo_mysql_stmt_closecursor($db);
printf("Unbuffered...\n");
MySQLPDOTest::createTestTable($db);
$db->setAttribute(PDO::MYSQL_ATTR_USE_BUFFERED_QUERY, false);
pdo_mysql_stmt_closecursor($db);
} catch (PDOException $e) {
printf("[001] %s [%s] %s\n",
$e->getMessage(), $db->errorCode(), implode(' ', $db->errorInfo()));
}
print "done!";
?>
--CLEAN--
<?php
require dirname(__FILE__) . '/mysql_pdo_test.inc';
$db = MySQLPDOTest::factory();
$db->exec('DROP TABLE IF EXISTS test');
?>
--EXPECTF--
Testing emulated PS...
Buffered...
Warning: PDO::query(): SQLSTATE[HY000]: General error: 2014 Cannot execute queries while other unbuffered queries are active. Consider using PDOStatement::fetchAll(). Alternatively, if your code is only ever going to run against mysql, you may enable query buffering by setting the PDO::MYSQL_ATTR_USE_BUFFERED_QUERY attribute. in %s on line %d
in = 0 -> id = 1 (integer) / label = 'a' (string)
in = 0 -> id = 2 (integer) / label = 'b' (string)
Unbuffered...
Warning: PDO::query(): SQLSTATE[HY000]: General error: 2014 Cannot execute queries while other unbuffered queries are active. Consider using PDOStatement::fetchAll(). Alternatively, if your code is only ever going to run against mysql, you may enable query buffering by setting the PDO::MYSQL_ATTR_USE_BUFFERED_QUERY attribute. in %s on line %d
in = 0 -> id = 1 (integer) / label = 'a' (string)
in = 0 -> id = 2 (integer) / label = 'b' (string)
Testing native PS...
Buffered...
Warning: PDO::query(): SQLSTATE[HY000]: General error: 2014 Cannot execute queries while other unbuffered queries are active. Consider using PDOStatement::fetchAll(). Alternatively, if your code is only ever going to run against mysql, you may enable query buffering by setting the PDO::MYSQL_ATTR_USE_BUFFERED_QUERY attribute. in %s on line %d
in = 0 -> id = 1 (integer) / label = 'a' (string)
in = 0 -> id = 2 (integer) / label = 'b' (string)
Unbuffered...
Warning: PDO::query(): SQLSTATE[HY000]: General error: 2014 Cannot execute queries while other unbuffered queries are active. Consider using PDOStatement::fetchAll(). Alternatively, if your code is only ever going to run against mysql, you may enable query buffering by setting the PDO::MYSQL_ATTR_USE_BUFFERED_QUERY attribute. in %s on line %d
in = 0 -> id = 1 (integer) / label = 'a' (string)
in = 0 -> id = 2 (integer) / label = 'b' (string)
done!
| {
"pile_set_name": "Github"
} |
<?php
/**
 * Yar autocompletion stub class (based on the latest 2.0.4 version)
 * @author shixinke(http://www.shixinke.com)
 * @modified 2018/05/20
 */
/**
 * Yar packager exception class (thrown when packing/unpacking an RPC
 * payload fails). This is an IDE autocompletion stub: method bodies are
 * intentionally empty.
 */
class Yar_Client_Packager_Exception extends Yar_Client_Exception implements Throwable
{
/**
 * @var string $message
 * Exception message
 * @access protected
 */
protected $message = '';
/**
 * @var int $code
 * Exception code
 * @access protected
 */
protected $code = 0;
/**
 * @var string $file
 * Name of the file in which the exception was created
 * @access protected
 */
protected $file;
/**
 * @var int $line
 * Line number at which the exception was created
 * @access protected
 */
protected $line;
/**
 *
 * Get the exception type
 * @example
 * @return string
 */
public function getType(): string
{
}
/**
 *
 * Clone method (private and final: exception objects cannot be cloned)
 * @example
 * @return
 */
private final function __clone()
{
}
/**
 *
 * Initialize the exception
 * @example
 * @param string $message exception message
 * @param int $code exception code
 * @param Throwable $previous previous exception object
 * @return
 */
public function __construct(string $message = '', int $code = 0, Throwable $previous)
{
}
/**
 *
 * Unserialization hook
 * @example
 * @return
 */
public function __wakeup()
{
}
/**
 *
 * Get the exception message
 * @example
 * @return string
 */
public final function getMessage(): string
{
}
/**
 *
 * Get the exception code
 * @example
 * @return int
 */
public final function getCode(): int
{
}
/**
 *
 * Get the name of the file in which the exception was created
 * @example
 * @return string
 */
public final function getFile(): string
{
}
/**
 *
 * Get the line number at which the exception was created
 * @example
 * @return int
 */
public final function getLine(): int
{
}
/**
 *
 * Get the stack trace as an array
 * @example
 * @return array
 */
public final function getTrace(): array
{
}
/**
 *
 * Get the previous exception object
 * @example
 * @return Throwable
 */
public final function getPrevious()
{
return new Exception();
}
/**
 *
 * Get the stack trace as a string
 * @example
 * @return string
 */
public final function getTraceAsString(): string
{
}
/**
 *
 * Convert the exception to a string
 * @example
 * @return string
 */
public function __toString(): string
{
}
}
| {
"pile_set_name": "Github"
} |
--- src/efivarfs.c.orig 2015-01-19 15:38:34.000000000 +0100
+++ src/efivarfs.c 2015-01-19 15:40:15.000000000 +0100
@@ -18,15 +18,15 @@
#include <errno.h>
#include <fcntl.h>
-#include <linux/magic.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/mman.h>
+#include <sys/mount.h>
#include <sys/types.h>
+#include <sys/param.h>
#include <sys/stat.h>
#include <sys/uio.h>
-#include <sys/vfs.h>
#include <unistd.h>
#include "lib.h"
@@ -66,7 +66,7 @@ efivarfs_probe(void)
#define make_efivarfs_path(str, guid, name) ({ \
asprintf(str, EFIVARS_PATH "%s-" GUID_FORMAT, \
name, (guid).a, (guid).b, (guid).c, \
- bswap_16((guid).d), \
+ bswap16((guid).d), \
(guid).e[0], (guid).e[1], (guid).e[2], \
(guid).e[3], (guid).e[4], (guid).e[5]); \
})
| {
"pile_set_name": "Github"
} |
.ax-button span
{
display:none;
width:0;
height:0;
}
/*************************buttons Styles*******************/
.ax-browse-c
{
width:64px;
height:64px;
overflow: hidden;
display: inline-block;
position:relative;
cursor:pointer;
background: url(images.png) no-repeat 0 -21px;
}
.ax-upload-all
{
width:64px;
height:64px;
margin-left:5px;
display: inline-block;
background: url(images.png) no-repeat 0 -193px;
cursor:pointer;
}
.ax-clear
{
width:64px;
height:64px;
margin-left:5px;
display: inline-block;
background: url(images.png) no-repeat 0 -107px;
cursor:pointer;
}
.ax-browse-c:hover,.ax-upload-all:hover,.ax-clear:hover
{
opacity:0.5;
filter:alpha(opacity=50);
}
.ax-remove, .ax-upload, .ax-abort, .ax-confirm
{
margin-top:4px;
width:20px;
height:20px;
margin-left:5px;
cursor:pointer;
display: inline-block;
}
.ax-remove{
background: url(images.png) no-repeat 0 -86px;
}
.ax-upload{
background: url(images.png) no-repeat 0 -172px;
}
.ax-abort{
background: url(images.png) no-repeat 0 0;
}
.ax-disabled
{
opacity: 0.5;
filter:alpha(opacity=50);
cursor:auto;
}
.ax-confirm
{
display:none;
}
.ax-remove:hover, .ax-upload:hover, .ax-abort:hover
{
-moz-box-shadow:inset 0 1px 0 rgba(255,255,255,0.3),0 0 3px rgba(0,0,0,0.9),0 0 6px rgba(255,255,255,0.7);
-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.3),0 0 3px rgba(0,0,0,0.9),0 0 6px rgba(255,255,255,0.7);
box-shadow:inset 0 1px 0 rgba(255,255,255,0.3),0 0 3px rgba(0,0,0,0.9),0 0 6px rgba(255,255,255,0.7);
}
.ax-preview
{
max-height:40px;
max-width:40px;
}
.ax-prev-container
{
height:40px;
width: 40px;
margin: 0 auto;
display: inline-block;
background: url(icons.png) no-repeat -41px -41px;
}
/************************List files styles********/
.ax-file-list
{
overflow:hidden;
list-style: none outside none;
padding:0;
margin:0;
}
/* File tile: fixed 105x105 gradient card. (Duplicate text-align
   declaration removed.) */
.ax-file-list li
{
background: #cee6ff; /* Old browsers */
background: -moz-linear-gradient(top, #cee6ff 0%, #60abf8 100%, #4096ee 100%); /* FF3.6+ */
background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,#cee6ff), color-stop(100%,#60abf8), color-stop(100%,#4096ee)); /* Chrome,Safari4+ */
background: -webkit-linear-gradient(top, #cee6ff 0%,#60abf8 100%,#4096ee 100%); /* Chrome10+,Safari5.1+ */
background: -o-linear-gradient(top, #cee6ff 0%,#60abf8 100%,#4096ee 100%); /* Opera 11.10+ */
background: -ms-linear-gradient(top, #cee6ff 0%,#60abf8 100%,#4096ee 100%); /* IE10+ */
background: linear-gradient(top, #cee6ff 0%,#60abf8 100%,#4096ee 100%); /* W3C */
filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#cee6ff', endColorstr='#4096ee',GradientType=0 ); /* IE6-9 */
border: 1px solid #000000;
float: left;
text-align:center;
border-radius: 5px;
margin: 0 2px 2px 0;
padding: 2px;
position: relative;
width: 105px;
height: 105px;
}
.ax-file-name
{
padding:0;
color: #000;
font: 10px 'PT Sans','Helvetica Neue',Helvetica,Arial,sans-serif;
overflow: hidden;
width: 100%;
margin:0;
}
.ax-file-name input
{
width:80%;
}
.ax-file-size
{
color: #000;
font: 10px 'PT Sans','Helvetica Neue',Helvetica,Arial,sans-serif;
height: 13px;
overflow: hidden;
width: 100%;
margin:0;
padding: 0;
}
.ax-progress
{
background-color: white;
margin-top: 2px;
padding: 0;
float:left;
height:10px;
width: 100%;
position:relative;
text-align:center;
}
.ax-progress-bar
{
height: 10px;
position:absolute;
top:0;
left:0;
width: 0%;
background-image: url(loader.gif);
}
.ax-progress-info
{
font: 10px 'PT Sans','Helvetica Neue',Helvetica,Arial,sans-serif;
height: 10px;
position:absolute;
width: 100%;
top:0;
left:0;
overflow:hidden;
}
.ax-toolbar
{
width:100%;
overflow:hidden;
}
.ax-browse
{
opacity: 0;
filter:alpha(opacity=0);
font-size:64px;
cursor:pointer;
height:64px;
position: absolute;
top: 0;
right: 0;
}
.ax-details
{
float:left;
width:100%;
margin:0 auto;
}
/*************************************Light Box CSS***************************************/
/* Modal box: positioned by script, hidden until opened (z-index above shadow). */
#ax-box {
position: absolute;
top: 0;
left: 0;
background: #fff;
z-index: 1001;
display: none;
border:10px solid #fff;
-webkit-border-radius: 5px;
-moz-border-radius: 5px;
border-radius: 5px;
-webkit-box-shadow: 0px 0px 21px 0px #ffffff; /* Saf3-4, iOS 4.0.2 - 4.2, Android 2.3+ */
-moz-box-shadow: 0px 0px 21px 0px #ffffff; /* FF3.5 - 3.6 */
box-shadow: 0px 0px 21px 0px #ffffff; /* Opera 10.5, IE9, FF4+, Chrome 6+, iOS 5 */
}
#ax-box span
{
margin-left:20px;
}
/* Full-screen 75%-black backdrop behind the modal (legacy opacity fallbacks
   for IE, old Gecko and KHTML kept alongside the standard property). */
#ax-box-shadow
{
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
background: #000;
filter: alpha(opacity=75);
-moz-opacity: 0.75;
-khtml-opacity: 0.75;
opacity: 0.75;
z-index: 1000;
display: none;
}
/*****************************************************************************************************/
/*************************************File Type Icons*************************************************/
/* File-type icons: each rule selects one tile from the icons.png sprite
   (offsets on a 40px grid). Extensions that share a sprite tile are grouped
   under one selector; the previously duplicated .ax-filetype-pdf rule
   (declared twice with identical declarations) is collapsed to one. */
.ax-filetype-pdf{
background: url(icons.png) no-repeat -81px 0px;
}
.ax-filetype-zip,
.ax-filetype-rar{
background: url(icons.png) no-repeat -121px -121px;
}
.ax-filetype-doc,
.ax-filetype-docx{
background: url(icons.png) no-repeat 0 -81px;
}
.ax-filetype-exe{
background: url(icons.png) no-repeat -41px 0;
}
.ax-filetype-xls{
background: url(icons.png) no-repeat 0 -121px;
}
.ax-filetype-xml{
background: url(icons.png) no-repeat -121px -81px;
}
.ax-filetype-mp4,
.ax-filetype-avi{
background: url(icons.png) no-repeat -121px -41px;
}
.ax-filetype-gif{
background: url(icons.png) no-repeat -41px -81px;
}
.ax-filetype-bmp{
background: url(icons.png) no-repeat 0 -41px;
}
.ax-filetype-mp3{
background: url(icons.png) no-repeat 0 0;
}
.ax-filetype-ppt{
background: url(icons.png) no-repeat -81px -81px;
}
.ax-filetype-tiff{
background: url(icons.png) no-repeat -121px 0;
}
.ax-filetype-txt{
background: url(icons.png) no-repeat -81px -121px;
}
"pile_set_name": "Github"
} |
// Register the terminal plugin's AngularJS module. It depends on the app's
// 'core' module and the 'ajenti.ace' editor wrapper; components are attached
// to this module elsewhere.
angular.module('ajenti.terminal', [
'core',
'ajenti.ace'
]);
| {
"pile_set_name": "Github"
} |
# Makefile.in generated by automake 1.15.1 from Makefile.am.
# @configure_input@
# Copyright (C) 1994-2017 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
@SET_MAKE@
VPATH = @srcdir@
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/@PACKAGE@
pkgincludedir = $(includedir)/@PACKAGE@
pkglibdir = $(libdir)/@PACKAGE@
pkglibexecdir = $(libexecdir)/@PACKAGE@
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = @build@
host_triplet = @host@
target_triplet = @target@
check_PROGRAMS = test$(EXEEXT)
subdir = test
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/add_cflags.m4 \
$(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \
$(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \
$(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/ogg.m4 \
$(top_srcdir)/m4/pkg.m4 $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_HEADER = $(top_builddir)/config.h
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
am_test_OBJECTS = util.$(OBJEXT) write_read.$(OBJEXT) test.$(OBJEXT)
test_OBJECTS = $(am_test_OBJECTS)
test_DEPENDENCIES = ../lib/libvorbisenc.la ../lib/libvorbis.la
AM_V_lt = $(am__v_lt_@AM_V@)
am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
am__v_lt_0 = --silent
am__v_lt_1 =
AM_V_P = $(am__v_P_@AM_V@)
am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_@AM_V@)
am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_@AM_V@)
am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
am__v_at_0 = @
am__v_at_1 =
DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)
depcomp = $(SHELL) $(top_srcdir)/depcomp
am__depfiles_maybe = depfiles
am__mv = mv -f
COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
$(AM_CFLAGS) $(CFLAGS)
AM_V_CC = $(am__v_CC_@AM_V@)
am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@)
am__v_CC_0 = @echo " CC " $@;
am__v_CC_1 =
CCLD = $(CC)
LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
$(AM_LDFLAGS) $(LDFLAGS) -o $@
AM_V_CCLD = $(am__v_CCLD_@AM_V@)
am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@)
am__v_CCLD_0 = @echo " CCLD " $@;
am__v_CCLD_1 =
SOURCES = $(test_SOURCES)
DIST_SOURCES = $(test_SOURCES)
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
# Read a list of newline-separated strings from the standard input,
# and print each of them once, without duplicates. Input order is
# *not* preserved.
am__uniquify_input = $(AWK) '\
BEGIN { nonempty = 0; } \
{ items[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in items) print i; }; } \
'
# Make sure the list of sources is unique. This is necessary because,
# e.g., the same source file might be shared among _SOURCES variables
# for different programs/libraries.
am__define_uniq_tagged_files = \
list='$(am__tagged_files)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | $(am__uniquify_input)`
ETAGS = etags
CTAGS = ctags
am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/depcomp
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = @ACLOCAL@
ACLOCAL_AMFLAGS = @ACLOCAL_AMFLAGS@
ALLOCA = @ALLOCA@
AMTAR = @AMTAR@
AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
AR = @AR@
AS = @AS@
AUTOCONF = @AUTOCONF@
AUTOHEADER = @AUTOHEADER@
AUTOMAKE = @AUTOMAKE@
AWK = @AWK@
CC = @CC@
CCDEPMODE = @CCDEPMODE@
CFLAGS = @CFLAGS@
CPP = @CPP@
CPPFLAGS = @CPPFLAGS@
CYGPATH_W = @CYGPATH_W@
DEBUG = @DEBUG@
DEFS = @DEFS@
DEPDIR = @DEPDIR@
DLLTOOL = @DLLTOOL@
DSYMUTIL = @DSYMUTIL@
DUMPBIN = @DUMPBIN@
ECHO_C = @ECHO_C@
ECHO_N = @ECHO_N@
ECHO_T = @ECHO_T@
EGREP = @EGREP@
EXEEXT = @EXEEXT@
FGREP = @FGREP@
GREP = @GREP@
HAVE_DOXYGEN = @HAVE_DOXYGEN@
HTLATEX = @HTLATEX@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
LD = @LD@
LDFLAGS = @LDFLAGS@
LIBOBJS = @LIBOBJS@
LIBS = @LIBS@
LIBTOOL = @LIBTOOL@
LIBTOOL_DEPS = @LIBTOOL_DEPS@
LIPO = @LIPO@
LN_S = @LN_S@
LTLIBOBJS = @LTLIBOBJS@
LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@
MAINT = @MAINT@
MAKEINFO = @MAKEINFO@
MANIFEST_TOOL = @MANIFEST_TOOL@
MKDIR_P = @MKDIR_P@
NM = @NM@
NMEDIT = @NMEDIT@
OBJDUMP = @OBJDUMP@
OBJEXT = @OBJEXT@
OGG_CFLAGS = @OGG_CFLAGS@
OGG_LIBS = @OGG_LIBS@
OTOOL = @OTOOL@
OTOOL64 = @OTOOL64@
PACKAGE = @PACKAGE@
PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_STRING = @PACKAGE_STRING@
PACKAGE_TARNAME = @PACKAGE_TARNAME@
PACKAGE_URL = @PACKAGE_URL@
PACKAGE_VERSION = @PACKAGE_VERSION@
PATH_SEPARATOR = @PATH_SEPARATOR@
PDFLATEX = @PDFLATEX@
PKG_CONFIG = @PKG_CONFIG@
PROFILE = @PROFILE@
RANLIB = @RANLIB@
SED = @SED@
SET_MAKE = @SET_MAKE@
SHELL = @SHELL@
STRIP = @STRIP@
VERSION = @VERSION@
VE_LIB_AGE = @VE_LIB_AGE@
VE_LIB_CURRENT = @VE_LIB_CURRENT@
VE_LIB_REVISION = @VE_LIB_REVISION@
VF_LIB_AGE = @VF_LIB_AGE@
VF_LIB_CURRENT = @VF_LIB_CURRENT@
VF_LIB_REVISION = @VF_LIB_REVISION@
VORBIS_LIBS = @VORBIS_LIBS@
V_LIB_AGE = @V_LIB_AGE@
V_LIB_CURRENT = @V_LIB_CURRENT@
V_LIB_REVISION = @V_LIB_REVISION@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
abs_top_srcdir = @abs_top_srcdir@
ac_ct_AR = @ac_ct_AR@
ac_ct_CC = @ac_ct_CC@
ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
am__include = @am__include@
am__leading_dot = @am__leading_dot@
am__quote = @am__quote@
am__tar = @am__tar@
am__untar = @am__untar@
bindir = @bindir@
build = @build@
build_alias = @build_alias@
build_cpu = @build_cpu@
build_os = @build_os@
build_vendor = @build_vendor@
builddir = @builddir@
datadir = @datadir@
datarootdir = @datarootdir@
docdir = @docdir@
dvidir = @dvidir@
exec_prefix = @exec_prefix@
host = @host@
host_alias = @host_alias@
host_cpu = @host_cpu@
host_os = @host_os@
host_vendor = @host_vendor@
htmldir = @htmldir@
includedir = @includedir@
infodir = @infodir@
install_sh = @install_sh@
libdir = @libdir@
libexecdir = @libexecdir@
localedir = @localedir@
localstatedir = @localstatedir@
mandir = @mandir@
mkdir_p = @mkdir_p@
oldincludedir = @oldincludedir@
pdfdir = @pdfdir@
prefix = @prefix@
program_transform_name = @program_transform_name@
psdir = @psdir@
pthread_lib = @pthread_lib@
sbindir = @sbindir@
sharedstatedir = @sharedstatedir@
srcdir = @srcdir@
sysconfdir = @sysconfdir@
target = @target@
target_alias = @target_alias@
target_cpu = @target_cpu@
target_os = @target_os@
target_vendor = @target_vendor@
top_build_prefix = @top_build_prefix@
top_builddir = @top_builddir@
top_srcdir = @top_srcdir@
AUTOMAKE_OPTIONS = foreign
AM_CPPFLAGS = -I$(top_srcdir)/include @OGG_CFLAGS@
test_SOURCES = util.c util.h write_read.c write_read.h test.c
test_LDADD = ../lib/libvorbisenc.la ../lib/libvorbis.la @OGG_LIBS@ @VORBIS_LIBS@
all: all-am
.SUFFIXES:
.SUFFIXES: .c .lo .o .obj
$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign test/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign test/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
clean-checkPROGRAMS:
@list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \
echo " rm -f" $$list; \
rm -f $$list || exit $$?; \
test -n "$(EXEEXT)" || exit 0; \
list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
echo " rm -f" $$list; \
rm -f $$list
test$(EXEEXT): $(test_OBJECTS) $(test_DEPENDENCIES) $(EXTRA_test_DEPENDENCIES)
@rm -f test$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(test_OBJECTS) $(test_LDADD) $(LIBS)
mostlyclean-compile:
-rm -f *.$(OBJEXT)
distclean-compile:
-rm -f *.tab.c
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/util.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/write_read.Po@am__quote@
.c.o:
@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ $<
.c.obj:
@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
.c.lo:
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LTCOMPILE) -c -o $@ $<
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
ID: $(am__tagged_files)
$(am__define_uniq_tagged_files); mkid -fID $$unique
tags: tags-am
TAGS: tags
tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
set x; \
here=`pwd`; \
$(am__define_uniq_tagged_files); \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: ctags-am
CTAGS: ctags
ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
$(am__define_uniq_tagged_files); \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
cscopelist: cscopelist-am
cscopelist-am: $(am__tagged_files)
list='$(am__tagged_files)'; \
case "$(srcdir)" in \
[\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
*) sdir=$(subdir)/$(srcdir) ;; \
esac; \
for i in $$list; do \
if test -f "$$i"; then \
echo "$(subdir)/$$i"; \
else \
echo "$$sdir/$$i"; \
fi; \
done >> $(top_builddir)/cscope.files
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
distdir: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
check-am: all-am
$(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS)
check: check-am
all-am: Makefile
installdirs:
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-checkPROGRAMS clean-generic clean-libtool \
mostlyclean-am
distclean: distclean-am
-rm -rf ./$(DEPDIR)
-rm -f Makefile
distclean-am: clean-am distclean-compile distclean-generic \
distclean-tags
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am:
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am:
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -rf ./$(DEPDIR)
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am:
.MAKE: check-am install-am install-strip
.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean \
clean-checkPROGRAMS clean-generic clean-libtool cscopelist-am \
ctags ctags-am distclean distclean-compile distclean-generic \
distclean-libtool distclean-tags distdir dvi dvi-am html \
html-am info info-am install install-am install-data \
install-data-am install-dvi install-dvi-am install-exec \
install-exec-am install-html install-html-am install-info \
install-info-am install-man install-pdf install-pdf-am \
install-ps install-ps-am install-strip installcheck \
installcheck-am installdirs maintainer-clean \
maintainer-clean-generic mostlyclean mostlyclean-compile \
mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
tags tags-am uninstall uninstall-am
.PRECIOUS: Makefile
# Hand-authored targets (from Makefile.am): run the freshly built test binary.
check: $(check_PROGRAMS)
	./test$(EXEEXT)
# Rebuild and run the tests with the configure-time debug CFLAGS.
debug:
	$(MAKE) check CFLAGS="@DEBUG@"
# Rebuild and run the tests with the configure-time profiling CFLAGS.
profile:
	$(MAKE) check CFLAGS="@PROFILE@"
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
| {
"pile_set_name": "Github"
} |
package manta
import (
"testing"
"fmt"
"time"
"github.com/hashicorp/terraform/backend"
"github.com/hashicorp/terraform/states/remote"
)
func TestRemoteClient_impl(t *testing.T) {
	// Compile-time checks: RemoteClient must satisfy both remote-state
	// interfaces (plain client and locking client).
	var (
		_ remote.Client       = new(RemoteClient)
		_ remote.ClientLocker = new(RemoteClient)
	)
}
func TestRemoteClient(t *testing.T) {
	testACC(t)

	// Unique folder per run so concurrent/acceptance invocations don't collide.
	dir := fmt.Sprintf("terraform-remote-manta-test-%x", time.Now().Unix())
	const objectName = "testState"

	b := backend.TestBackendConfig(t, New(), backend.TestWrapConfig(map[string]interface{}{
		"path":        dir,
		"object_name": objectName,
	})).(*Backend)

	createMantaFolder(t, b.storageClient, dir)
	defer deleteMantaFolder(t, b.storageClient, dir)

	mgr, err := b.StateMgr(backend.DefaultStateName)
	if err != nil {
		t.Fatal(err)
	}
	remote.TestClient(t, mgr.(*remote.State).Client)
}
func TestRemoteClientLocks(t *testing.T) {
	testACC(t)

	dir := fmt.Sprintf("terraform-remote-manta-test-%x", time.Now().Unix())
	const objectName = "testState"

	// Two independent backends pointed at the same state object, so a lock
	// taken through one must be observed by the other.
	newTestBackend := func() *Backend {
		return backend.TestBackendConfig(t, New(), backend.TestWrapConfig(map[string]interface{}{
			"path":        dir,
			"object_name": objectName,
		})).(*Backend)
	}
	b1 := newTestBackend()
	b2 := newTestBackend()

	createMantaFolder(t, b1.storageClient, dir)
	defer deleteMantaFolder(t, b1.storageClient, dir)

	s1, err := b1.StateMgr(backend.DefaultStateName)
	if err != nil {
		t.Fatal(err)
	}
	s2, err := b2.StateMgr(backend.DefaultStateName)
	if err != nil {
		t.Fatal(err)
	}
	remote.TestRemoteLocks(t, s1.(*remote.State).Client, s2.(*remote.State).Client)
}
| {
"pile_set_name": "Github"
} |
.. Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
.. _rubypackage:
-----------
RubyPackage
-----------
Like Perl, Python, and R, Ruby has its own build system for
installing Ruby gems.
^^^^^^
Phases
^^^^^^
The ``RubyPackage`` base class provides the following phases that
can be overridden:
#. ``build`` - build everything needed to install
#. ``install`` - install everything from build directory
For packages that come with a ``*.gemspec`` file, these phases run:
.. code-block:: console
$ gem build *.gemspec
$ gem install *.gem
For packages that come with a ``Rakefile`` file, these phases run:
.. code-block:: console
$ rake package
$ gem install *.gem
For packages that come pre-packaged as a ``*.gem`` file, the build
phase is skipped and the install phase runs:
.. code-block:: console
$ gem install *.gem
These are all standard ``gem`` commands and can be found by running:
.. code-block:: console
$ gem help commands
For packages that only distribute ``*.gem`` files, these files can be
downloaded with the ``expand=False`` option in the ``version`` directive.
The build phase will be automatically skipped.
^^^^^^^^^^^^^^^
Important files
^^^^^^^^^^^^^^^
When building from source, Ruby packages can be identified by the
presence of any of the following files:
* ``*.gemspec``
* ``Rakefile``
* ``setup.rb`` (not yet supported)
However, not all Ruby packages are released as source code. Some are only
released as ``*.gem`` files. These files can be extracted using:
.. code-block:: console
$ gem unpack *.gem
^^^^^^^^^^^
Description
^^^^^^^^^^^
The ``*.gemspec`` file may contain something like:
.. code-block:: ruby
summary = 'An implementation of the AsciiDoc text processor and publishing toolchain'
description = 'A fast, open source text processor and publishing toolchain for converting AsciiDoc content to HTML 5, DocBook 5, and other formats.'
Either of these can be used for the description of the Spack package.
^^^^^^^^
Homepage
^^^^^^^^
The ``*.gemspec`` file may contain something like:
.. code-block:: ruby
homepage = 'https://asciidoctor.org'
This should be used as the official homepage of the Spack package.
^^^^^^^^^^^^^^^^^^^^^^^^^
Build system dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^
All Ruby packages require Ruby at build and run-time. For this reason,
the base class contains:
.. code-block:: python
extends('ruby')
depends_on('ruby', type=('build', 'run'))
The ``*.gemspec`` file may contain something like:
.. code-block:: ruby
required_ruby_version = '>= 2.3.0'
This can be added to the Spack package using:
.. code-block:: python
   depends_on('ruby@2.3.0:', type=('build', 'run'))
^^^^^^^^^^^^^^^^^
Ruby dependencies
^^^^^^^^^^^^^^^^^
When you install a package with ``gem``, it reads the ``*.gemspec``
file in order to determine the dependencies of the package.
If the dependencies are not yet installed, ``gem`` downloads them
and installs them for you. This may sound convenient, but Spack
cannot rely on this behavior for two reasons:
#. Spack needs to be able to install packages on air-gapped networks.
If there is no internet connection, ``gem`` can't download the
package dependencies. By explicitly listing every dependency in
the ``package.py``, Spack knows what to download ahead of time.
#. Duplicate installations of the same dependency may occur.
Spack supports *activation* of Ruby extensions, which involves
symlinking the package installation prefix to the Ruby installation
prefix. If your package is missing a dependency, that dependency
will be installed to the installation directory of the same package.
If you try to activate the package + dependency, it may cause a
problem if that package has already been activated.
For these reasons, you must always explicitly list all dependencies.
Although the documentation may list the package's dependencies,
often the developers assume people will use ``gem`` and won't have to
worry about it. Always check the ``*.gemspec`` file to find the true
dependencies.
Check for the following clues in the ``*.gemspec`` file:
* ``add_runtime_dependency``
These packages are required for installation.
* ``add_dependency``
This is an alias for ``add_runtime_dependency``
* ``add_development_dependency``
These packages are optional dependencies used for development.
They should not be added as dependencies of the package.
^^^^^^^^^^^^^^^^^^^^^^
External documentation
^^^^^^^^^^^^^^^^^^^^^^
For more information on Ruby packaging, see:
https://guides.rubygems.org/
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Created by [email protected]
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Scheduler with redis backend
---------------------------------
"""
from __future__ import print_function
import os
import time
import socket
import datetime
import logging
from json import loads, dumps
from gluon.utils import web2py_uuid
from gluon.storage import Storage
from gluon.scheduler import *
from gluon.scheduler import _decode_dict
from gluon.contrib.redis_utils import RWatchError
USAGE = """
## Example
For any existing app
Create File: app/models/scheduler.py ======
from gluon.contrib.redis_utils import RConn
from gluon.contrib.redis_scheduler import RScheduler
def demo1(*args,**vars):
print('you passed args=%s and vars=%s' % (args, vars))
return 'done!'
def demo2():
1/0
rconn = RConn()
mysched = RScheduler(db, dict(demo1=demo1,demo2=demo2), ...., redis_conn=rconn)
## run worker nodes with:
cd web2py
python web2py.py -K app
"""
# Default the web2py root to the current working directory so utilities that
# read WEB2PY_PATH from the environment keep working when it is unset.
path = os.getcwd()
if 'WEB2PY_PATH' not in os.environ:
    os.environ['WEB2PY_PATH'] = path

# Unique worker identity (hostname#pid) used to tag this worker's logger.
IDENTIFIER = "%s#%s" % (socket.gethostname(), os.getpid())
logger = logging.getLogger('web2py.scheduler.%s' % IDENTIFIER)

# Additional worker status value, on top of the constants imported from
# gluon.scheduler (ACTIVE, DISABLED, ...).
POLLING = 'POLLING'
class RScheduler(Scheduler):
def __init__(self, db, tasks=None, migrate=True,
worker_name=None, group_names=None, heartbeat=HEARTBEAT,
max_empty_runs=0, discard_results=False, utc_time=False,
redis_conn=None, mode=1):
"""
Highly-experimental coordination with redis
Takes all args from Scheduler except redis_conn which
must be something closer to a StrictRedis instance.
My only regret - and the reason why I kept this under the hood for a
while - is that it's hard to hook up in web2py to something happening
right after the commit to a table, which will enable this version of the
scheduler to process "immediate" tasks right away instead of waiting a
few seconds (see FIXME in queue_task())
mode is reserved for future usage patterns.
Right now it moves the coordination (which is the most intensive
routine in the scheduler in matters of IPC) of workers to redis.
I'd like to have incrementally redis-backed modes of operations,
such as e.g.:
- 1: IPC through redis (which is the current implementation)
- 2: Store task results in redis (which will relieve further pressure
from the db leaving the scheduler_run table empty and possibly
keep things smooth as tasks results can be set to expire
after a bit of time)
- 3: Move all the logic for storing and queueing tasks to redis
itself - which means no scheduler_task usage too - and use
the database only as an historical record-bookkeeping
(e.g. for reporting)
As usual, I'm eager to see your comments.
"""
Scheduler.__init__(self, db, tasks=tasks, migrate=migrate,
worker_name=worker_name, group_names=group_names,
heartbeat=heartbeat, max_empty_runs=max_empty_runs,
discard_results=discard_results, utc_time=utc_time)
self.r_server = redis_conn
from gluon import current
self._application = current.request.application or 'appname'
def _nkey(self, key):
"""Helper to restrict all keys to a namespace and track them."""
prefix = 'w2p:rsched:%s' % self._application
allkeys = '%s:allkeys' % prefix
newkey = "%s:%s" % (prefix, key)
self.r_server.sadd(allkeys, newkey)
return newkey
    def prune_all(self):
        """Global housekeeping: delete every redis key this scheduler created.

        All keys pass through _nkey(), which records them in the per-app
        'allkeys' set; popping that set until empty removes them all.
        """
        all_keys = self._nkey('allkeys')
        with self.r_server.pipeline() as pipe:
            while True:
                try:
                    # Optimistic locking on a shared sentinel key: if another
                    # client touches it mid-transaction, redis raises a watch
                    # error and the whole transaction is retried from scratch.
                    pipe.watch('PRUNE_ALL')
                    while True:
                        k = pipe.spop(all_keys)
                        if k is None:
                            break
                        pipe.delete(k)
                    pipe.execute()
                    break
                except RWatchError:
                    # Lost the race; back off briefly and retry.
                    time.sleep(0.1)
                    continue
def dt2str(self, value):
return value.strftime('%Y-%m-%d %H:%M:%S')
def str2date(self, value):
return datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S')
    def send_heartbeat(self, counter):
        """
        Workers coordination in redis.

        It has evolved into something that is not that easy.
        Here we try to do what we need in a single transaction,
        and retry that transaction if something goes wrong
        """
        with self.r_server.pipeline() as pipe:
            while True:
                try:
                    # WATCH a shared sentinel key so concurrent workers'
                    # heartbeats conflict and retry instead of interleaving.
                    pipe.watch('SEND_HEARTBEAT')
                    self.inner_send_heartbeat(counter, pipe)
                    pipe.execute()
                    # Heartbeat committed: adapt the hibernation interval and
                    # sleep until the next beat.
                    self.adj_hibernation()
                    self.sleep()
                    break
                except RWatchError:
                    # Lost the optimistic-lock race; back off and retry.
                    time.sleep(0.1)
                    continue
    def inner_send_heartbeat(self, counter, pipe):
        """
        Do a few things in the "maintenance" thread.

        Specifically:
        - registers the workers
        - accepts commands sent to workers (KILL, TERMINATE, PICK, DISABLED, etc)
        - adjusts sleep
        - saves stats
        - elects master
        - does "housecleaning" for dead workers
        - triggers tasks assignment

        Runs inside the WATCH-guarded pipeline opened by send_heartbeat();
        `pipe` is used as the redis connection so all writes belong to that
        transaction.
        """
        r_server = pipe
        status_keyset = self._nkey('worker_statuses')
        status_key = self._nkey('worker_status:%s' % (self.worker_name))
        now = self.now()
        # our own status hash, as last persisted in redis (empty on first run)
        mybackedstatus = r_server.hgetall(status_key)
        if not mybackedstatus:
            # first heartbeat ever (or our key expired): register this worker
            r_server.hmset(
                status_key,
                dict(
                    status=ACTIVE, worker_name=self.worker_name,
                    first_heartbeat=self.dt2str(now),
                    last_heartbeat=self.dt2str(now),
                    group_names=dumps(self.group_names), is_ticker=False,
                    worker_stats=dumps(self.w_stats))
            )
            r_server.sadd(status_keyset, status_key)
            if not self.w_stats.status == POLLING:
                self.w_stats.status = ACTIVE
            self.w_stats.sleep = self.heartbeat
            mybackedstatus = ACTIVE
        else:
            # react to the command someone stored in our status field
            mybackedstatus = mybackedstatus['status']
            if mybackedstatus == DISABLED:
                # keep sleeping
                self.w_stats.status = DISABLED
                r_server.hmset(
                    status_key,
                    dict(last_heartbeat=self.dt2str(now),
                         worker_stats=dumps(self.w_stats))
                )
            elif mybackedstatus == TERMINATE:
                # graceful shutdown: finish the current task first
                self.w_stats.status = TERMINATE
                logger.debug("Waiting to terminate the current task")
                self.give_up()
            elif mybackedstatus == KILL:
                # hard shutdown
                self.w_stats.status = KILL
                self.die()
            else:
                if mybackedstatus == STOP_TASK:
                    # kill only the currently running task, keep the worker
                    logger.info('Asked to kill the current task')
                    self.terminate_process()
                logger.info('........recording heartbeat (%s)',
                            self.w_stats.status)
                r_server.hmset(
                    status_key,
                    dict(
                        last_heartbeat=self.dt2str(now), status=ACTIVE,
                        worker_stats=dumps(self.w_stats)
                    )
                )
                # newroutine: let the key expire if this worker dies silently
                r_server.expire(status_key, self.heartbeat * 3 * 15)
                self.w_stats.sleep = self.heartbeat  # re-activating the process
                if self.w_stats.status not in (RUNNING, POLLING):
                    self.w_stats.status = ACTIVE
        self.do_assign_tasks = False
        # every 5th beat (or when explicitly PICKed) do the heavy housekeeping
        if counter % 5 == 0 or mybackedstatus == PICK:
            try:
                logger.info(
                    ' freeing workers that have not sent heartbeat')
                registered_workers = r_server.smembers(status_keyset)
                allkeys = self._nkey('allkeys')
                for worker in registered_workers:
                    w = r_server.hgetall(worker)
                    w = Storage(w)
                    if not w:
                        # status hash expired => worker is dead: deregister it
                        r_server.srem(status_keyset, worker)
                        logger.info('removing %s from %s', worker, allkeys)
                        r_server.srem(allkeys, worker)
                        continue
                # best-effort ticker election; failures are non-fatal here
                try:
                    self.is_a_ticker = self.being_a_ticker(pipe)
                except:
                    pass
                if self.w_stats.status in (ACTIVE, POLLING):
                    self.do_assign_tasks = True
                if self.is_a_ticker and self.do_assign_tasks:
                    # I'm a ticker, and 5 loops passed without reassigning tasks,
                    # let's do that and loop again
                    if not self.db_thread:
                        logger.debug('thread building own DAL object')
                        self.db_thread = DAL(
                            self.db._uri, folder=self.db._adapter.folder)
                        self.define_tables(self.db_thread, migrate=False)
                    db = self.db_thread
                    self.wrapped_assign_tasks(db)
                    return None
            except:
                logger.error('Error assigning tasks')
    def being_a_ticker(self, pipe):
        """
        Elects a ticker.

        This is slightly more convoluted than the original
        but is far more efficient.

        Returns True if this worker is (or just became) the ticker,
        False if some other worker already holds the role.
        Runs on the pipeline passed in by inner_send_heartbeat().
        """
        r_server = pipe
        status_keyset = self._nkey('worker_statuses')
        registered_workers = r_server.smembers(status_keyset)
        ticker = None
        all_active = []
        all_workers = []
        # single pass: collect every worker, remember the other ACTIVE ones,
        # and note the first worker already flagged as ticker (if any)
        for worker in registered_workers:
            w = r_server.hgetall(worker)
            if w['worker_name'] != self.worker_name and w['status'] == ACTIVE:
                all_active.append(w)
            if w['is_ticker'] == 'True' and ticker is None:
                ticker = w
            all_workers.append(w)
        not_busy = self.w_stats.status in (ACTIVE, POLLING)
        if not ticker:
            # no ticker elected yet
            if not_busy:
                # only if this worker isn't busy, otherwise wait for a free one
                for worker in all_workers:
                    key = self._nkey('worker_status:%s' % worker['worker_name'])
                    if worker['worker_name'] == self.worker_name:
                        r_server.hset(key, 'is_ticker', True)
                    else:
                        r_server.hset(key, 'is_ticker', False)
                logger.info("TICKER: I'm a ticker")
            else:
                # giving up, only if I'm not alone
                if len(all_active) > 1:
                    key = self._nkey('worker_status:%s' % (self.worker_name))
                    r_server.hset(key, 'is_ticker', False)
                else:
                    # alone in the pool: take the role even while busy
                    not_busy = True
            return not_busy
        else:
            logger.info(
                "%s is a ticker, I'm a poor worker" % ticker['worker_name'])
            return False
    def assign_tasks(self, db):
        """
        The real beauty.

        We don't need to ASSIGN tasks, we just put
        them into the relevant queue.

        Steps:
        1. snapshot the ACTIVE workers (inside a WATCH-guarded transaction)
        2. expire tasks whose stop_time passed while the scheduler was off
        3. compute tasks with no unmet dependencies
        4. requeue anything left on the 'running' lists, then push ready
           tasks onto each group's redis queue, marking them QUEUED in db
        """
        st, sd = db.scheduler_task, db.scheduler_task_deps
        r_server = self.r_server
        now = self.now()
        status_keyset = self._nkey('worker_statuses')
        with r_server.pipeline() as pipe:
            while True:
                try:
                    # making sure we're the only one doing the job
                    pipe.watch('ASSIGN_TASKS')
                    registered_workers = pipe.smembers(status_keyset)
                    all_workers = []
                    for worker in registered_workers:
                        w = pipe.hgetall(worker)
                        if w['status'] == ACTIVE:
                            all_workers.append(Storage(w))
                    pipe.execute()
                    break
                except RWatchError:
                    time.sleep(0.1)
                    continue
        # build workers as dict of groups
        wkgroups = {}
        for w in all_workers:
            group_names = loads(w.group_names)
            for gname in group_names:
                if gname not in wkgroups:
                    wkgroups[gname] = dict(
                        workers=[{'name': w.worker_name, 'c': 0}])
                else:
                    wkgroups[gname]['workers'].append(
                        {'name': w.worker_name, 'c': 0})
        # set queued tasks that expired between "runs" (i.e., you turned off
        # the scheduler): then it wasn't expired, but now it is
        db(
            (st.status.belongs((QUEUED, ASSIGNED))) &
            (st.stop_time < now)
        ).update(status=EXPIRED)
        # calculate dependencies
        # children whose every parent dependency is already satisfied
        deps_with_no_deps = db(
            (sd.can_visit == False) &
            (~sd.task_child.belongs(
                db(sd.can_visit == False)._select(sd.task_parent)
            )
            )
        )._select(sd.task_child)
        # tasks with either no dependency rows at all, or only met ones
        no_deps = db(
            (st.status.belongs((QUEUED, ASSIGNED))) &
            (
                (sd.id == None) | (st.id.belongs(deps_with_no_deps))
            )
        )._select(st.id, distinct=True, left=sd.on(
            (st.id == sd.task_parent) &
            (sd.can_visit == False)
        )
        )
        # runnable right now: due, enabled, and dependency-free
        all_available = db(
            (st.status.belongs((QUEUED, ASSIGNED))) &
            (st.next_run_time <= now) &
            (st.enabled == True) &
            (st.id.belongs(no_deps))
        )
        # cap the batch: ~50 tasks per group, scaled by worker count
        limit = len(all_workers) * (50 / (len(wkgroups) or 1))
        # let's freeze it up
        db.commit()
        x = 0
        r_server = self.r_server
        for group in wkgroups.keys():
            queued_list = self._nkey('queued:%s' % group)
            queued_set = self._nkey('queued_set:%s' % group)
            # if are running, let's don't assign them again
            running_list = self._nkey('running:%s' % group)
            while True:
                # the joys for rpoplpush!
                t = r_server.rpoplpush(running_list, queued_list)
                if not t:
                    # no more
                    break
                r_server.sadd(queued_set, t)
            tasks = all_available(st.group_name == group).select(
                limitby=(0, limit), orderby = st.next_run_time)
            # put tasks in the processing list
            for task in tasks:
                x += 1
                gname = task.group_name
                if r_server.sismember(queued_set, task.id):
                    # already queued, we don't put on the list
                    continue
                r_server.sadd(queued_set, task.id)
                r_server.lpush(queued_list, task.id)
                d = dict(status=QUEUED)
                if not task.task_name:
                    # default the display name to the function's name
                    d['task_name'] = task.function_name
                db(
                    (st.id == task.id) &
                    (st.status.belongs((QUEUED, ASSIGNED)))
                ).update(**d)
            db.commit()
        # I didn't report tasks but I'm working nonetheless!!!!
        if x > 0:
            self.w_stats.empty_runs = 0
        self.w_stats.queue = x
        self.w_stats.distribution = wkgroups
        self.w_stats.workers = len(all_workers)
        # I'll be greedy only if tasks queued are equal to the limit
        # (meaning there could be others ready to be queued)
        self.greedy = x >= limit
        logger.info('TICKER: workers are %s', len(all_workers))
        logger.info('TICKER: tasks are %s', x)
    def pop_task(self, db):
        """Lift a task off a queue.

        Blocks (brpoplpush) on each group's queue in turn, moves the popped
        id onto the 'running' list, loads the task row, computes next_run_time
        and repetition, records a scheduler_run row (unless discard_results),
        and returns a Task object — or None when there is nothing to do.
        """
        r_server = self.r_server
        st = self.db.scheduler_task
        task = None
        # ready to process something
        for group in self.group_names:
            queued_set = self._nkey('queued_set:%s' % group)
            queued_list = self._nkey('queued:%s' % group)
            running_list = self._nkey('running:%s' % group)
            running_dict = self._nkey('running_dict:%s' % group)
            self.w_stats.status = POLLING
            # polling for 1 minute in total. If more groups are in,
            # polling is 1 minute in total
            logger.debug(' polling on %s', group)
            task_id = r_server.brpoplpush(queued_list, running_list,
                                          timeout=60 / len(self.group_names))
            logger.debug(' finished polling')
            self.w_stats.status = ACTIVE
            if task_id:
                # claim it: remember which worker holds it
                r_server.hset(running_dict, task_id, self.worker_name)
                r_server.srem(queued_set, task_id)
                task = db(
                    (st.id == task_id) &
                    (st.status == QUEUED)
                ).select().first()
                if not task:
                    # stale id: scrub it from every redis structure
                    r_server.lrem(running_list, 0, task_id)
                    r_server.hdel(running_dict, task_id)
                    r_server.lrem(queued_list, 0, task_id)
                    logger.error("we received a task that isn't there (%s)",
                                 task_id)
                    return None
                break
        now = self.now()
        if task:
            task.update_record(status=RUNNING, last_run_time=now)
            # noone will touch my task!
            db.commit()
            logger.debug(' work to do %s', task.id)
        else:
            logger.info('nothing to do')
            return None
        times_run = task.times_run + 1
        if task.cronline:
            # cron-style schedule: next run from the cron expression
            cron_recur = CronParser(task.cronline, now.replace(second=0))
            next_run_time = cron_recur.get_next()
        elif not task.prevent_drift:
            # simple periodic: drift accumulates with actual start times
            next_run_time = task.last_run_time + datetime.timedelta(
                seconds=task.period
            )
        else:
            # calc next_run_time based on available slots
            # see #1191
            next_run_time = task.start_time
            secondspassed = (now - next_run_time).total_seconds()
            steps = secondspassed // task.period + 1
            next_run_time += datetime.timedelta(seconds=task.period * steps)
        if times_run < task.repeats or task.repeats == 0:
            # need to run (repeating task)
            run_again = True
        else:
            # no need to run again
            run_again = False
        run_id = 0
        # retry the insert on db contention; skipped when results are discarded
        while True and not self.discard_results:
            logger.debug(' new scheduler_run record')
            try:
                run_id = db.scheduler_run.insert(
                    task_id=task.id,
                    status=RUNNING,
                    start_time=now,
                    worker_name=self.worker_name)
                db.commit()
                break
            except:
                time.sleep(0.5)
                db.rollback()
        logger.info('new task %(id)s "%(task_name)s"'
                    ' %(application_name)s.%(function_name)s' % task)
        return Task(
            app=task.application_name,
            function=task.function_name,
            timeout=task.timeout,
            args=task.args,  # in json
            vars=task.vars,  # in json
            task_id=task.id,
            run_id=run_id,
            run_again=run_again,
            next_run_time=next_run_time,
            times_run=times_run,
            stop_time=task.stop_time,
            retry_failed=task.retry_failed,
            times_failed=task.times_failed,
            sync_output=task.sync_output,
            uuid=task.uuid,
            group_name=task.group_name)
    def report_task(self, task, task_report):
        """
        Override.

        Needs it only because we need to pop from the
        running tasks: after persisting the run outcome and updating the
        scheduler_task row, the task id is removed from the group's
        'running' list and 'running_dict' hash in redis.
        """
        r_server = self.r_server
        db = self.db
        now = self.now()
        st = db.scheduler_task
        sr = db.scheduler_run
        if not self.discard_results:
            if task_report.result != 'null' or task_report.tb:
                # result is 'null' as a string if task completed
                # if it's stopped it's None as NoneType, so we record
                # the STOPPED "run" anyway
                logger.debug(' recording task report in db (%s)',
                             task_report.status)
                db(sr.id == task.run_id).update(
                    status=task_report.status,
                    stop_time=now,
                    run_result=task_report.result,
                    run_output=task_report.output,
                    traceback=task_report.tb)
            else:
                logger.debug(' deleting task report in db because of no result')
                db(sr.id == task.run_id).delete()
        # if there is a stop_time and the following run would exceed it
        is_expired = (task.stop_time and
                      task.next_run_time > task.stop_time and
                      True or False)
        # EXPIRED if it should repeat but can't; QUEUED if it repeats;
        # COMPLETED otherwise
        status = (task.run_again and is_expired and EXPIRED or
                  task.run_again and not is_expired and
                  QUEUED or COMPLETED)
        if task_report.status == COMPLETED:
            # assigned calculations
            d = dict(status=status,
                     next_run_time=task.next_run_time,
                     times_run=task.times_run,
                     times_failed=0,
                     assigned_worker_name=self.worker_name
                     )
            db(st.id == task.task_id).update(**d)
            if status == COMPLETED:
                self.update_dependencies(db, task.task_id)
        else:
            # map failure modes to a final task status
            st_mapping = {'FAILED': 'FAILED',
                          'TIMEOUT': 'TIMEOUT',
                          'STOPPED': 'FAILED'}[task_report.status]
            # requeue while retries remain (retry_failed == -1 => unlimited)
            status = (task.retry_failed and
                      task.times_failed < task.retry_failed and
                      QUEUED or task.retry_failed == -1 and
                      QUEUED or st_mapping)
            db(st.id == task.task_id).update(
                times_failed=st.times_failed + 1,
                next_run_time=task.next_run_time,
                status=status,
                assigned_worker_name=self.worker_name
            )
        logger.info('task completed (%s)', task_report.status)
        # release the task from this group's running structures in redis
        running_list = self._nkey('running:%s' % task.group_name)
        running_dict = self._nkey('running_dict:%s' % task.group_name)
        r_server.lrem(running_list, 0, task.task_id)
        r_server.hdel(running_dict, task.task_id)
def wrapped_pop_task(self):
"""Commodity function to call `pop_task` and trap exceptions.
If an exception is raised, assume it happened because of database
contention and retries `pop_task` after 0.5 seconds
"""
db = self.db
db.commit() # another nifty db.commit() only for Mysql
x = 0
while x < 10:
try:
rtn = self.pop_task(db)
return rtn
break
# this is here to "interrupt" any blrpoplpush op easily
except KeyboardInterrupt:
self.give_up()
break
except:
self.w_stats.errors += 1
db.rollback()
logger.error(' error popping tasks')
x += 1
time.sleep(0.5)
    def get_workers(self, only_ticker=False):
        """Return a dict holding worker_name : {**columns}
        representing all "registered" workers.

        only_ticker returns only the worker running as a TICKER,
        if there is any
        """
        r_server = self.r_server
        status_keyset = self._nkey('worker_statuses')
        registered_workers = r_server.smembers(status_keyset)
        all_workers = {}
        for worker in registered_workers:
            w = r_server.hgetall(worker)
            w = Storage(w)
            if not w:
                # status hash expired (dead worker): skip it
                continue
            # redis stores everything as strings: deserialize dates, JSON
            # payloads and the 'True'/'False' ticker flag
            all_workers[w.worker_name] = Storage(
                status=w.status,
                first_heartbeat=self.str2date(w.first_heartbeat),
                last_heartbeat=self.str2date(w.last_heartbeat),
                group_names=loads(w.group_names, object_hook=_decode_dict),
                is_ticker=w.is_ticker == 'True' and True or False,
                worker_stats=loads(w.worker_stats, object_hook=_decode_dict)
            )
        if only_ticker:
            # return the single ticker entry, or {} when none is elected
            for k, v in all_workers.iteritems():
                if v['is_ticker']:
                    return {k: v}
            return {}
        return all_workers
def set_worker_status(self, group_names=None, action=ACTIVE,
exclude=None, limit=None, worker_name=None):
"""Internal function to set worker's status"""
r_server = self.r_server
all_workers = self.get_workers()
if not group_names:
group_names = self.group_names
elif isinstance(group_names, str):
group_names = [group_names]
exclusion = exclude and exclude.append(action) or [action]
workers = []
if worker_name is not None:
if worker_name in all_workers.keys():
workers = [worker_name]
else:
for k, v in all_workers.iteritems():
if v.status not in exclusion and set(group_names) & set(v.group_names):
workers.append(k)
if limit and worker_name is None:
workers = workers[:limit]
if workers:
with r_server.pipeline() as pipe:
while True:
try:
pipe.watch('SET_WORKER_STATUS')
for w in workers:
worker_key = self._nkey('worker_status:%s' % w)
pipe.hset(worker_key, 'status', action)
pipe.execute()
break
except RWatchError:
time.sleep(0.1)
continue
def queue_task(self, function, pargs=[], pvars={}, **kwargs):
"""
FIXME: immediate should put item in queue. The hard part is
that currently there are no hooks happening at post-commit time
Queue tasks. This takes care of handling the validation of all
parameters
Args:
function: the function (anything callable with a __name__)
pargs: "raw" args to be passed to the function. Automatically
jsonified.
pvars: "raw" kwargs to be passed to the function. Automatically
jsonified
kwargs: all the parameters available (basically, every
`scheduler_task` column). If args and vars are here, they should
be jsonified already, and they will override pargs and pvars
Returns:
a dict just as a normal validate_and_insert(), plus a uuid key
holding the uuid of the queued task. If validation is not passed
( i.e. some parameters are invalid) both id and uuid will be None,
and you'll get an "error" dict holding the errors found.
"""
if hasattr(function, '__name__'):
function = function.__name__
targs = 'args' in kwargs and kwargs.pop('args') or dumps(pargs)
tvars = 'vars' in kwargs and kwargs.pop('vars') or dumps(pvars)
tuuid = 'uuid' in kwargs and kwargs.pop('uuid') or web2py_uuid()
tname = 'task_name' in kwargs and kwargs.pop('task_name') or function
immediate = 'immediate' in kwargs and kwargs.pop('immediate') or None
cronline = kwargs.get('cronline')
kwargs.update(function_name=function,
task_name=tname,
args=targs,
vars=tvars,
uuid=tuuid)
if cronline:
try:
start_time = kwargs.get('start_time', self.now)
next_run_time = CronParser(cronline, start_time).get_next()
kwargs.update(start_time=start_time, next_run_time=next_run_time)
except:
pass
rtn = self.db.scheduler_task.validate_and_insert(**kwargs)
if not rtn.errors:
rtn.uuid = tuuid
if immediate:
r_server = self.r_server
ticker = self.get_workers(only_ticker=True)
if ticker.keys():
ticker = ticker.keys()[0]
with r_server.pipeline() as pipe:
while True:
try:
pipe.watch('SET_WORKER_STATUS')
worker_key = self._nkey('worker_status:%s' % ticker)
pipe.hset(worker_key, 'status', 'PICK')
pipe.execute()
break
except RWatchError:
time.sleep(0.1)
continue
else:
rtn.uuid = None
return rtn
    def stop_task(self, ref):
        """Shortcut for task termination.

        If the task is RUNNING it will terminate it, meaning that status
        will be set as FAILED.

        If the task is QUEUED, its stop_time will be set as to "now",
        the enabled flag will be set to False, and the status to STOPPED

        Args:
            ref: can be

                - an integer : lookup will be done by scheduler_task.id
                - a string : lookup will be done by scheduler_task.uuid

        Returns:
            - 1 if task was stopped (meaning an update has been done)
            - None if task was not found, or if task was not RUNNING or QUEUED

        Note:
            Experimental
        """
        r_server = self.r_server
        st = self.db.scheduler_task
        if isinstance(ref, int):
            q = st.id == ref
        elif isinstance(ref, str):
            q = st.uuid == ref
        else:
            raise SyntaxError(
                "You can retrieve results only by id or uuid")
        task = self.db(q).select(st.id, st.status, st.group_name)
        task = task.first()
        rtn = None
        if not task:
            return rtn
        running_dict = self._nkey('running_dict:%s' % task.group_name)
        if task.status == 'RUNNING':
            # find which worker holds the task and tell it to STOP_TASK
            worker_key = r_server.hget(running_dict, task.id)
            worker_key = self._nkey('worker_status:%s' % (worker_key))
            r_server.hset(worker_key, 'status', STOP_TASK)
        elif task.status == 'QUEUED':
            # not yet picked up: disable it so it never runs
            rtn = self.db(q).update(
                stop_time=self.now(),
                enabled=False,
                status=STOPPED)
        return rtn
| {
"pile_set_name": "Github"
} |
.file "helloworld.s"
.section ".text"
.align 4
.global main
.type main, #function
main:
push {r0,r10,pc}
ldr r3, [pc, #228]
cmp r3, #0
| {
"pile_set_name": "Github"
} |
// Boost.Geometry - gis-projections (based on PROJ4)
// Copyright (c) 2008-2015 Barend Gehrels, Amsterdam, the Netherlands.
// This file was modified by Oracle on 2017, 2018.
// Modifications copyright (c) 2017-2018, Oracle and/or its affiliates.
// Contributed and/or modified by Adam Wulkiewicz, on behalf of Oracle.
// Use, modification and distribution is subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// This file is converted from PROJ4, http://trac.osgeo.org/proj
// PROJ4 is originally written by Gerald Evenden (then of the USGS)
// PROJ4 is maintained by Frank Warmerdam
// PROJ4 is converted to Boost.Geometry by Barend Gehrels
// Last updated version of proj: 5.0.0
// Original copyright notice:
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
#ifndef BOOST_GEOMETRY_PROJECTIONS_DENOY_HPP
#define BOOST_GEOMETRY_PROJECTIONS_DENOY_HPP
#include <boost/geometry/srs/projections/impl/base_static.hpp>
#include <boost/geometry/srs/projections/impl/base_dynamic.hpp>
#include <boost/geometry/srs/projections/impl/projects.hpp>
#include <boost/geometry/srs/projections/impl/factory_entry.hpp>
namespace boost { namespace geometry
{
namespace projections
{
#ifndef DOXYGEN_NO_DETAIL
namespace detail { namespace denoy
{
static const double C0 = 0.95;
//static const double C1 = -0.08333333333333333333;
//static const double C3 = 0.00166666666666666666;
static const double D1 = 0.9;
static const double D5 = 0.03;
template <typename T>
inline T C1() { return -0.0833333333333333333333333333333; }
template <typename T>
inline T C3() { return 0.0016666666666666666666666666666; }
// template class, using CRTP to implement forward/inverse
template <typename T, typename Parameters>
struct base_denoy_spheroid
: public base_t_f<base_denoy_spheroid<T, Parameters>, T, Parameters>
{
inline base_denoy_spheroid(const Parameters& par)
: base_t_f<base_denoy_spheroid<T, Parameters>, T, Parameters>(*this, par)
{}
// FORWARD(s_forward) spheroid
// Project coordinates from geographic (lon, lat) to cartesian (x, y)
inline void fwd(T lp_lon, T const& lp_lat, T& xy_x, T& xy_y) const
{
static const T C1 = denoy::C1<T>();
static const T C3 = denoy::C3<T>();
xy_y = lp_lat;
xy_x = lp_lon;
lp_lon = fabs(lp_lon);
xy_x *= cos((C0 + lp_lon * (C1 + lp_lon * lp_lon * C3)) *
(lp_lat * (D1 + D5 * lp_lat * lp_lat * lp_lat * lp_lat)));
}
static inline std::string get_name()
{
return "denoy_spheroid";
}
};
// Denoyer Semi-Elliptical
template <typename Parameters>
inline void setup_denoy(Parameters& par)
{
par.es = 0.0;
}
}} // namespace detail::denoy
#endif // doxygen
/*!
\brief Denoyer Semi-Elliptical projection
\ingroup projections
\tparam Geographic latlong point type
\tparam Cartesian xy point type
\tparam Parameters parameter type
\par Projection characteristics
- Pseudocylindrical
- no inverse
- Spheroid
\par Example
\image html ex_denoy.gif
*/
    // Public projection type: forwards construction to the detail
    // implementation and applies the denoy-specific parameter setup.
    template <typename T, typename Parameters>
    struct denoy_spheroid : public detail::denoy::base_denoy_spheroid<T, Parameters>
    {
        // Params (projection string parameters) are unused by this projection.
        template <typename Params>
        inline denoy_spheroid(Params const& , Parameters const& par)
            : detail::denoy::base_denoy_spheroid<T, Parameters>(par)
        {
            detail::denoy::setup_denoy(this->m_par);
        }
    };
#ifndef DOXYGEN_NO_DETAIL
namespace detail
{
// Static projection
BOOST_GEOMETRY_PROJECTIONS_DETAIL_STATIC_PROJECTION(srs::spar::proj_denoy, denoy_spheroid, denoy_spheroid)
// Factory entry(s)
BOOST_GEOMETRY_PROJECTIONS_DETAIL_FACTORY_ENTRY_F(denoy_entry, denoy_spheroid)
BOOST_GEOMETRY_PROJECTIONS_DETAIL_FACTORY_INIT_BEGIN(denoy_init)
{
BOOST_GEOMETRY_PROJECTIONS_DETAIL_FACTORY_INIT_ENTRY(denoy, denoy_entry);
}
} // namespace detail
#endif // doxygen
} // namespace projections
}} // namespace boost::geometry
#endif // BOOST_GEOMETRY_PROJECTIONS_DENOY_HPP
| {
"pile_set_name": "Github"
} |
# release.sh
#
# Takes a tag to release, and syncs it to WordPress.org

TAG=$1

PLUGIN="rest-api"
TMPDIR=/tmp/release-svn
PLUGINDIR="$PWD"
PLUGINSVN="https://plugins.svn.wordpress.org/$PLUGIN"

# Fail on any error
set -e

# Is the tag valid?
if [ -z "$TAG" ] || ! git rev-parse "$TAG" > /dev/null; then
	echo "Invalid tag. Make sure you tag before trying to release."
	exit 1
fi

# BUGFIX: the original tested `$VERSION` (unset at this point) against the
# quoted literal "v*", so the v-strip branch could never run. Test the tag
# itself against an unquoted glob pattern.
if [[ $TAG == v* ]]; then
	# Starts with an extra "v", strip for the version
	VERSION=${TAG:1}
else
	VERSION="$TAG"
fi

if [ -d "$TMPDIR" ]; then
	# Wipe it clean
	rm -r "$TMPDIR"
fi

# Ensure the directory exists first
mkdir "$TMPDIR"

# Grab an unadulterated copy of SVN
svn co "$PLUGINSVN/trunk" "$TMPDIR" > /dev/null

# Extract files from the Git tag to there
git archive --format="zip" -0 "$TAG" | tar -C "$TMPDIR" -xf -

# Switch to build dir
cd "$TMPDIR"

# Run build tasks: substitute the release version into the readme template
sed -e "s/{{TAG}}/$VERSION/g" < "$PLUGINDIR/bin/readme.txt" > readme.txt

# Remove special files that must not ship to WordPress.org
rm ".gitignore"
rm ".scrutinizer.yml"
rm ".travis.yml"
rm "composer.json"
rm "Gruntfile.js"
rm "package.json"
rm "phpcs.ruleset.xml"
rm "phpunit.xml.dist"
rm "multisite.xml"
rm "codecoverage.xml"
rm -r "assets"
rm -r "bin"
rm -r "tests"

# Add any new files (svn status "?" entries) to version control
svn status | grep -v "^.[ \t]*\..*" | grep "^?" | awk '{print $2}' | xargs svn add

# Pause to allow checking
echo "About to commit $VERSION. Double-check $TMPDIR to make sure everything looks fine."
read -p "Hit Enter to continue."

# Commit the changes
svn commit -m "Tag $VERSION"

# tag_ur_it
svn copy "$PLUGINSVN/trunk" "$PLUGINSVN/tags/$VERSION" -m "Tag $VERSION"
| {
"pile_set_name": "Github"
} |
+++
City = "Zurich"
Year = "2019"
talk_date = ""
talk_start_time = ""
talk_end_time = ""
title = "Retros are the New Black: How to Cultivate Continuous Improvement Across Teams"
type = "talk"
speakers = ["jacqueline-sloves"]
youtube = ""
vimeo = "344514221"
speakerdeck = "https://speakerdeck.com/jacquelion/retros-are-the-new-black-how-to-cultivate-continuous-improvement-across-teams"
slideshare = ""
slides = ""
+++
Raw engineering talent is important to achieve business and product needs; however, it is
also important to make space to improve team dynamics in order to maximize the
effectiveness of individuals and maintain emotionally healthy working environments. One
tactic to achieve this is to organize team retros on a regular cadence.
Retros, when executed properly, empower individuals to give and receive blameless honest
feedback. Done wrong or not at all, resentment can build and lead to a toxic engineering
culture. In this talk, learn techniques to build inclusive environments, enable reflection
and open communication, and motivate teams to take action. Walk away with a plan for
implementing retros with your team. Take your first step towards continuous improvement.
This talk will be particularly beneficial to engineers who shape team dynamics or anyone in a
position to introduce a new process to a team. Depending on how one's team is organized,
this includes Product Managers, SCRUM masters, or an aspiring team lead. Attendees will
learn what a retro is, and best practices for how to introduce, moderate, and follow up on
retros within their company.
| {
"pile_set_name": "Github"
} |
(module Mounting_Wuerth_WA-SMSI-4.5mm_H6mm_9774060482 (layer F.Cu) (tedit 5E1DC567)
(descr "Mounting Hardware, inside through hole 4.5mm, height 6, Wuerth electronics 9774060482 (https://katalog.we-online.de/em/datasheet/9774060482.pdf), generated with kicad-footprint-generator")
(tags "Mounting 4.5mm 9774060482")
(attr smd)
(fp_text reference REF** (at 0 -5.65) (layer F.SilkS)
(effects (font (size 1 1) (thickness 0.15)))
)
(fp_text value Mounting_Wuerth_WA-SMSI-4.5mm_H6mm_9774060482 (at 0 5.65) (layer F.Fab)
(effects (font (size 1 1) (thickness 0.15)))
)
(fp_circle (center 0 0) (end 4.1 0) (layer F.Fab) (width 0.1))
(fp_circle (center 0 0) (end 4.95 0) (layer F.CrtYd) (width 0.05))
(pad "" smd custom (at 2.607456 2.607456) (size 1.495 1.495) (layers F.Paste)
(options (clearance outline) (anchor circle))
(primitives
(gr_arc (start -2.607456 -2.607456) (end 0.188183 -2.107456) (angle 69.719727) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 0.289416 -2.107456) (angle 70.414473) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 0.390547 -2.107456) (angle 71.06298) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 0.491585 -2.107456) (angle 71.669736) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 0.59254 -2.107456) (angle 72.238663) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 0.693419 -2.107456) (angle 72.773205) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 0.794229 -2.107456) (angle 73.276399) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 0.894975 -2.107456) (angle 73.750934) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 0.995664 -2.107456) (angle 74.199197) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 1.0963 -2.107456) (angle 74.623318) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 1.196886 -2.107456) (angle 75.025202) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 1.297427 -2.107456) (angle 75.406558) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 1.397927 -2.107456) (angle 75.768923) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 1.498388 -2.107456) (angle 76.113685) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 1.598813 -2.107456) (angle 76.442099) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 1.699204 -2.107456) (angle 76.755304) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 1.799565 -2.107456) (angle 77.054334) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 1.899896 -2.107456) (angle 77.340133) (width 0.2))
(gr_arc (start -2.607456 -2.607456) (end 1.899896 -2.107456) (angle 77.340133) (width 0.2))
(gr_line (start -2.107456 0.188183) (end -2.107456 1.899896) (width 0.2))
(gr_line (start 0.188183 -2.107456) (end 1.899896 -2.107456) (width 0.2))
))
(pad "" smd custom (at -2.607456 2.607456) (size 1.495 1.495) (layers F.Paste)
(options (clearance outline) (anchor circle))
(primitives
(gr_arc (start 2.607456 -2.607456) (end 2.107456 0.188183) (angle 69.719727) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 0.289416) (angle 70.414473) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 0.390547) (angle 71.06298) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 0.491585) (angle 71.669736) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 0.59254) (angle 72.238663) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 0.693419) (angle 72.773205) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 0.794229) (angle 73.276399) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 0.894975) (angle 73.750934) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 0.995664) (angle 74.199197) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 1.0963) (angle 74.623318) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 1.196886) (angle 75.025202) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 1.297427) (angle 75.406558) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 1.397927) (angle 75.768923) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 1.498388) (angle 76.113685) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 1.598813) (angle 76.442099) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 1.699204) (angle 76.755304) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 1.799565) (angle 77.054334) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 1.899896) (angle 77.340133) (width 0.2))
(gr_arc (start 2.607456 -2.607456) (end 2.107456 1.899896) (angle 77.340133) (width 0.2))
(gr_line (start -0.188183 -2.107456) (end -1.899896 -2.107456) (width 0.2))
(gr_line (start 2.107456 0.188183) (end 2.107456 1.899896) (width 0.2))
))
(pad "" smd custom (at -2.607456 -2.607456) (size 1.495 1.495) (layers F.Paste)
(options (clearance outline) (anchor circle))
(primitives
(gr_arc (start 2.607456 2.607456) (end -0.188183 2.107456) (angle 69.719727) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -0.289416 2.107456) (angle 70.414473) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -0.390547 2.107456) (angle 71.06298) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -0.491585 2.107456) (angle 71.669736) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -0.59254 2.107456) (angle 72.238663) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -0.693419 2.107456) (angle 72.773205) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -0.794229 2.107456) (angle 73.276399) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -0.894975 2.107456) (angle 73.750934) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -0.995664 2.107456) (angle 74.199197) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -1.0963 2.107456) (angle 74.623318) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -1.196886 2.107456) (angle 75.025202) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -1.297427 2.107456) (angle 75.406558) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -1.397927 2.107456) (angle 75.768923) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -1.498388 2.107456) (angle 76.113685) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -1.598813 2.107456) (angle 76.442099) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -1.699204 2.107456) (angle 76.755304) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -1.799565 2.107456) (angle 77.054334) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -1.899896 2.107456) (angle 77.340133) (width 0.2))
(gr_arc (start 2.607456 2.607456) (end -1.899896 2.107456) (angle 77.340133) (width 0.2))
(gr_line (start 2.107456 -0.188183) (end 2.107456 -1.899896) (width 0.2))
(gr_line (start -0.188183 2.107456) (end -1.899896 2.107456) (width 0.2))
))
(pad "" smd custom (at 2.607456 -2.607456) (size 1.495 1.495) (layers F.Paste)
(options (clearance outline) (anchor circle))
(primitives
(gr_arc (start -2.607456 2.607456) (end -2.107456 -0.188183) (angle 69.719727) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -0.289416) (angle 70.414473) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -0.390547) (angle 71.06298) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -0.491585) (angle 71.669736) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -0.59254) (angle 72.238663) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -0.693419) (angle 72.773205) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -0.794229) (angle 73.276399) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -0.894975) (angle 73.750934) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -0.995664) (angle 74.199197) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -1.0963) (angle 74.623318) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -1.196886) (angle 75.025202) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -1.297427) (angle 75.406558) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -1.397927) (angle 75.768923) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -1.498388) (angle 76.113685) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -1.598813) (angle 76.442099) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -1.699204) (angle 76.755304) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -1.799565) (angle 77.054334) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -1.899896) (angle 77.340133) (width 0.2))
(gr_arc (start -2.607456 2.607456) (end -2.107456 -1.899896) (angle 77.340133) (width 0.2))
(gr_line (start 0.188183 2.107456) (end 1.899896 2.107456) (width 0.2))
(gr_line (start -2.107456 -0.188183) (end -2.107456 -1.899896) (width 0.2))
))
(pad 1 smd custom (at 3.7 0) (size 1.6 1.6) (layers F.Cu F.Mask)
(options (clearance outline) (anchor circle))
(primitives
(gr_circle (center -3.7 0) (end 0 0) (width 2))
))
(pad 1 smd circle (at 0 3.7) (size 1.6 1.6) (layers F.Cu))
(pad 1 smd circle (at -3.7 0) (size 1.6 1.6) (layers F.Cu))
(pad 1 smd circle (at 0 -3.7) (size 1.6 1.6) (layers F.Cu))
(pad "" np_thru_hole circle (at 0 0) (size 5.4 5.4) (drill 5.4) (layers *.Cu *.Mask))
(fp_text user %R (at 0 0) (layer F.Fab)
(effects (font (size 1 1) (thickness 0.15)))
)
(model ${KISYS3DMOD}/Mounting_Wuerth.3dshapes/Mounting_Wuerth_WA-SMSI-4.5mm_H6mm_9774060482.wrl
(at (xyz 0 0 0))
(scale (xyz 1 1 1))
(rotate (xyz 0 0 0))
)
) | {
"pile_set_name": "Github"
} |
1
00:00:34,051 --> 00:00:37,388
(平清盛)
いかに 宋との取り引きの場が→
2
00:00:37,388 --> 00:00:40,374
豊かで 生き生きとしておるか。
3
00:00:40,374 --> 00:00:44,378
<若き日の清盛が描いた夢>
4
00:00:44,378 --> 00:00:47,214
(信西)かの優れた国の
優れた品々を→
5
00:00:47,214 --> 00:00:49,717
民が じかに見て
じかに手に触れる。
6
00:00:49,717 --> 00:00:53,704
(兎丸)いつか お前が つくれ。
宋と商いして→
7
00:00:53,704 --> 00:00:55,706
「豊かな世」いうやつを。
8
00:00:55,706 --> 00:01:00,711
<宋との自由な交易による
新しい国づくり。→
9
00:01:00,711 --> 00:01:05,032
しかし その実現には
多くの困難が立ちはだかります>
10
00:01:05,032 --> 00:01:07,551
(藤原基房)武士とて→
11
00:01:07,551 --> 00:01:10,704
夢くらいは
見たいであろう。
12
00:01:10,704 --> 00:01:13,874
<公卿たちの
強い反発に
遭いつつも→
13
00:01:13,874 --> 00:01:18,045
清盛は 交易の拠点 福原に
移り住み→
14
00:01:18,045 --> 00:01:21,215
大輪田泊の建設に
取りかかります>
15
00:01:21,215 --> 00:01:26,203
宋や高麗 果ては天竺
南国の産物も取り引きし→
16
00:01:26,203 --> 00:01:30,541
富を得る。 この富によって
国を富ませる。
17
00:01:30,541 --> 00:01:34,712
<若き日の夢が
今 実現に向けて→
18
00:01:34,712 --> 00:01:37,381
動き出そうとしていました>
19
00:01:37,381 --> 00:01:54,381
♪♪~(テーマ音楽)
20
00:01:54,381 --> 00:04:13,081
♪♪~
21
00:04:24,048 --> 00:04:29,369
政子。 奥へ行って
膳の支度を手伝うてまいれ。
22
00:04:29,369 --> 00:04:34,069
また じい様どもの宴に
ござりますか?
政子。
23
00:04:38,695 --> 00:04:42,032
≪(藤九郎)時政殿!
24
00:04:42,032 --> 00:04:45,232
おお。 これは これは 藤九郎殿。
25
00:04:47,054 --> 00:04:49,039
(時政)佐殿…。
26
00:04:49,039 --> 00:04:51,558
(藤九郎)野駆けや魚取りに
お誘いしても→
27
00:04:51,558 --> 00:04:56,380
一向に動かれませぬ。 されど
時政殿の館ならと ようやく。
28
00:04:56,380 --> 00:04:58,715
さあ 殿。
(時政)それは それは…。
29
00:04:58,715 --> 00:05:06,039
≪(佐々木秀義)時政殿~!
(上総常澄)参りましたぞ!
30
00:05:06,039 --> 00:05:12,713
(三浦義明)ん? そちらの御仁は?
いや その…→
31
00:05:12,713 --> 00:05:15,716
私の縁者の者にござりましてな。
32
00:05:15,716 --> 00:05:20,537
ほう~ 時政殿の縁者とは思えぬ
端正な顔つきじゃなあ。→
33
00:05:20,537 --> 00:05:26,237
まあまあ さあ 上がれ 上がれ。
共に 酒でも飲もうぞ。
34
00:05:29,713 --> 00:05:35,719
(義明)清盛入道様が
福原に移り住まれたそうじゃ。
35
00:05:35,719 --> 00:05:38,372
(常澄)すると 平家の棟梁は?
36
00:05:38,372 --> 00:05:42,709
ご嫡男の大納言 重盛様じゃ。
37
00:05:42,709 --> 00:05:45,379
(源頼朝)<こうして 私は 時折→
38
00:05:45,379 --> 00:05:50,701
聞きたくもない
清盛の噂を聞くはめになった。→
39
00:05:50,701 --> 00:05:54,538
一方 福原の清盛は→
40
00:05:54,538 --> 00:06:00,043
着々と
新たな国づくりを進めていた>
41
00:06:00,043 --> 00:06:04,364
(平盛国)
平相国入道清盛様にござる。
42
00:06:04,364 --> 00:06:07,364
周新にござります。
43
00:06:10,053 --> 00:06:14,753
周新殿 ひさかたぶりじゃな。
44
00:06:18,045 --> 00:06:21,031
多謝。
いつもありがとうございます。
45
00:06:21,031 --> 00:06:24,051
平氏は 高く たくさん
買うてくれるゆえ→
46
00:06:24,051 --> 00:06:27,054
皆 喜んでおります。
47
00:06:27,054 --> 00:06:32,042
あやや! 清盛様とは
清盛様でござりましたか。
48
00:06:32,042 --> 00:06:35,879
博多よりの長旅
老いの身には こたえたであろう。
49
00:06:35,879 --> 00:06:38,079
まあ 茶でも飲むがよい。
50
00:06:40,951 --> 00:06:44,221
はるばる来てもろうたは
ほかでもない。
51
00:06:44,221 --> 00:06:47,374
これまで 大宰府で行っておった
取り引き→
52
00:06:47,374 --> 00:06:52,196
今後は この福原で行いたい。
あやや!
53
00:06:52,196 --> 00:06:56,867
ついては 宋の しかるべき人に
取り次いでもらいたい。
54
00:06:56,867 --> 00:07:00,704
あややや~!
誰ぞ いてはるやろ。
55
00:07:00,704 --> 00:07:09,363
宋の朝廷に 他国との
商いの元締めが。 会わせろ。
56
00:07:09,363 --> 00:07:13,033
おそれながら そのようなお人には
入道様といえど→
57
00:07:13,033 --> 00:07:16,536
そう やすやすとは…。
そこを なんとか!
58
00:07:16,536 --> 00:07:19,536
(盛国)よさぬか 兎丸。
59
00:07:23,043 --> 00:07:25,545
(盛国)いきなり会えぬは道理。→
60
00:07:25,545 --> 00:07:30,367
まずは 我らより
贈り物を致しとうござりまする。
61
00:07:30,367 --> 00:07:34,705
ついては 仲立ちを願えませぬか?
62
00:07:34,705 --> 00:07:39,209
いや ですから 私如き
一介の商人に さような力は…。
63
00:07:39,209 --> 00:07:43,380
(盛国)間に 一人 二人 挟めば…。
もうよい 盛国。
64
00:07:43,380 --> 00:07:46,383
ほかにも 船商人はおる。
65
00:07:46,383 --> 00:07:50,387
長年にわたる周新殿との
取り引きを取りやめ→
66
00:07:50,387 --> 00:07:57,060
よそへ頼めばよい事じゃ。
そ… そればかりは 何とぞ!
67
00:07:57,060 --> 00:08:05,202
♪♪~
68
00:08:05,202 --> 00:08:11,041
(盛国)さて 何を贈れば
喜ばれましょうなあ。
69
00:08:11,041 --> 00:08:15,041
兎丸 何とする?
70
00:08:17,714 --> 00:08:21,551
そりゃ 金やで。
ほう。
71
00:08:21,551 --> 00:08:27,708
平家の領地の中で
黄金を産する土地はありませぬ。
72
00:08:27,708 --> 00:08:33,046
金といえば 奥州であろう。
(兎丸)おう そやそや。
73
00:08:33,046 --> 00:08:37,718
宋よりの品も
取り引きしておるらしいのう。
74
00:08:37,718 --> 00:08:40,871
(盛国)
奥州 藤原氏
三代目棟梁→
75
00:08:40,871 --> 00:08:44,875
藤原秀衡の治める
奥州 平泉は→
76
00:08:44,875 --> 00:08:49,375
京にも劣らぬ 栄えた地との事。
77
00:08:52,199 --> 00:08:56,703
(盛国)その財力 兵力は
計り知れませぬ。
78
00:08:56,703 --> 00:09:02,726
都と離れておるをよい事に
好き勝手やっておるのじゃ。
79
00:09:02,726 --> 00:09:08,026
おそれながら
殿も 人の事は言えませぬ。
80
00:09:11,385 --> 00:09:14,204
今 秀衡の官職は?
81
00:09:14,204 --> 00:09:18,904
確か 出羽と陸奥の押領使かと。
82
00:09:21,878 --> 00:09:25,378
重盛を呼べ。
(盛国)はっ。
83
00:09:26,933 --> 00:09:29,870
(平重盛)藤原秀衡を→
84
00:09:29,870 --> 00:09:34,708
鎮守府将軍に任じては
いかがでござりましょうか?
85
00:09:34,708 --> 00:09:36,710
(藤原兼実)なんと…。
86
00:09:36,710 --> 00:09:40,864
秀衡は 蝦夷地との
盛んな交易にて→
87
00:09:40,864 --> 00:09:43,717
その産物を得ております。→
88
00:09:43,717 --> 00:09:47,371
それらを すみやかに
都に届けるためには→
89
00:09:47,371 --> 00:09:54,378
秀衡に この職を与え 朝廷の威に
服させるべきと存じます。
90
00:09:54,378 --> 00:09:57,714
藤原秀衡は奥州の夷狄。
91
00:09:57,714 --> 00:10:01,701
都の目の届かぬ所で
勝手をしておる者を→
92
00:10:01,701 --> 00:10:05,205
要職に就けるなど 乱世のもとぞ。
93
00:10:05,205 --> 00:10:11,194
方々の従者の
馬に欠かせぬ あざらしの皮や→
94
00:10:11,194 --> 00:10:16,049
鳥の羽根が手に入らなくなっても
構わぬと仰せですか?
95
00:10:16,049 --> 00:10:19,202
(藤原経宗)それは困る。
96
00:10:19,202 --> 00:10:23,707
方々! いま少し
慎重に お考えあれ!
97
00:10:23,707 --> 00:10:29,713
平家の棟梁よりの にわかな献策。
98
00:10:29,713 --> 00:10:33,216
まことのねらいは 何ぞ?
99
00:10:33,216 --> 00:10:38,054
おそれながら 平家か否かは→
100
00:10:38,054 --> 00:10:40,373
関わりなき事。
さよう。
101
00:10:40,373 --> 00:10:46,379
朝廷とは 国を豊かにする策を
講じる場にござりましょう。
102
00:10:46,379 --> 00:10:51,379
そうであろう そうであろう。
進めるがよい。
103
00:10:59,042 --> 00:11:03,046
<嘉応2年5月25日。→
104
00:11:03,046 --> 00:11:11,371
朝廷は 藤原秀衡を
従五位下 鎮守府将軍に任じた>
105
00:11:11,371 --> 00:11:19,212
父上。 此度は要職へのご就任
おめでとうござります。
106
00:11:19,212 --> 00:11:24,712
さて どなたの ご推挙か…。
107
00:11:30,040 --> 00:11:33,376
これらは都に贈るゆえ
運び上げよ。
108
00:11:33,376 --> 00:11:36,046
(一同)承知。
109
00:11:36,046 --> 00:11:38,715
<朝廷を動かす事によって→
110
00:11:38,715 --> 00:11:44,204
清盛は 奥州より 金や
さまざまな財宝を手に入れた>
111
00:11:44,204 --> 00:11:49,709
よ~し。 これらを貢ぎ物として
宋の朝廷に贈る。
112
00:11:49,709 --> 00:11:55,215
再び周新を呼び 事を進めよ。
よっしゃ。 任せとけ。
113
00:11:55,215 --> 00:12:00,415
おい お前ら 忙しくなるで!
(一同)おう!
114
00:12:04,724 --> 00:12:08,044
張り切っておりまするな。
うむ。
115
00:12:08,044 --> 00:12:12,382
あやつと海賊船で出会うてから
40年近い。
116
00:12:12,382 --> 00:12:17,454
ようやく
面白き事ができておるのじゃ。
117
00:12:17,454 --> 00:12:23,710
さて 次は いかにして
宋の使者を招くかじゃな。
118
00:12:23,710 --> 00:12:30,050
国と国との取り引きでなければ
おいで下さりますまい。
119
00:12:30,050 --> 00:12:33,703
法皇様に
お出まし頂くという事か。
120
00:12:33,703 --> 00:12:38,725
さすれば 体裁は整いましょう。
…が 王家のお方が→
121
00:12:38,725 --> 00:12:42,862
宋人と じきじきに
お会いになるなど異例の事。
122
00:12:42,862 --> 00:12:47,717
何より そう たやすく
わしの誘いに乗るまい。
123
00:12:47,717 --> 00:12:51,054
(平時忠)たやすい事にござります。
124
00:12:51,054 --> 00:12:55,375
これは 時忠様。
にわかに いかがなされました?
125
00:12:55,375 --> 00:13:03,199
今は 官職を解かれておる身。
都におっても やる事などないわ。
126
00:13:03,199 --> 00:13:07,704
何じゃ? これは。
(盛国)鳥の羽根にござりまする。→
127
00:13:07,704 --> 00:13:11,041
宋より 奥州に届いたものに
ござりましょう。
128
00:13:11,041 --> 00:13:14,044
ほう 珍しいものじゃな。
129
00:13:14,044 --> 00:13:19,382
時忠。 たやすいとは
いかなる事じゃ?
130
00:13:19,382 --> 00:13:25,682
その話にござりましたな。
まあ 私に お任せあれ。
131
00:13:33,713 --> 00:13:37,033
(西光)何用あって参った?→
132
00:13:37,033 --> 00:13:41,037
おおかた
清盛入道の使いであろう。
133
00:13:41,037 --> 00:13:43,390
入道に伝えよ。
134
00:13:43,390 --> 00:13:46,209
せんだっての
強訴での振る舞い→
135
00:13:46,209 --> 00:13:49,879
法皇様は いまだ
腹に落ちた訳ではない。
136
00:13:49,879 --> 00:13:55,179
その節は まこと
ご心労を おかけ致しました。
137
00:14:02,042 --> 00:14:04,210
それは 何ぞ?
138
00:14:04,210 --> 00:14:06,713
おお これですか。
139
00:14:06,713 --> 00:14:12,369
これは 宋より取り寄せた
鳥の羽根にござります。
140
00:14:12,369 --> 00:14:17,374
鳥じゃと?
さように赤い鳥が おるものか。
141
00:14:17,374 --> 00:14:20,543
いや~ おりまする。
142
00:14:20,543 --> 00:14:23,380
それ このように。
143
00:14:23,380 --> 00:14:54,210
♪♪~
144
00:14:54,210 --> 00:15:01,051
これは 一体 何という鳥じゃ?
さて 失念致しましたが→
145
00:15:01,051 --> 00:15:06,556
近く 福原に
宋人を招くそうにござります。
146
00:15:06,556 --> 00:15:09,042
いかがにござりましょう?
147
00:15:09,042 --> 00:15:14,881
法皇様も ご引見の上
じきじきに 話を聞かれてみては?
148
00:15:14,881 --> 00:15:19,052
面白や。 会うてみたい。
149
00:15:19,052 --> 00:15:21,037
なりませぬ!
150
00:15:21,037 --> 00:15:24,708
治天の君が 宋人に
じかに会われるなど 異例の事。
151
00:15:24,708 --> 00:15:28,044
なればこそ
朕が その先駆けとなる。
152
00:15:28,044 --> 00:15:30,213
(成親)きっと 朝廷が
黙ってはいますまい。
153
00:15:30,213 --> 00:15:32,215
なればこそ参る!
154
00:15:32,215 --> 00:15:36,515
その前に 何ぞ
清盛入道の たくらみ事のはず。
155
00:15:42,275 --> 00:15:49,215
なればこそ… 面白いのじゃ。
156
00:15:49,215 --> 00:15:53,219
(時忠)さすがは 法皇様。
157
00:15:53,219 --> 00:15:55,719
では 早速 手はずを。
158
00:16:02,045 --> 00:16:07,550
全く
口から生まれてきたような奴じゃ。
159
00:16:07,550 --> 00:16:12,372
敵に回したくはありませぬな。
160
00:16:12,372 --> 00:16:15,375
早速 周新に伝えよ。
161
00:16:15,375 --> 00:16:21,047
我らは 国と国の取り引きとして
宋の使者に会う用意があると。
162
00:16:21,047 --> 00:16:23,047
はっ。
163
00:16:27,370 --> 00:16:32,392
(基房)入道めが
福原に潜んだふりをして→
164
00:16:32,392 --> 00:16:38,715
朝廷を思うままに操るなど
こざかしいまねを。
165
00:16:38,715 --> 00:16:42,385
こそこそと
何をしておられるのでしょう。
166
00:16:42,385 --> 00:16:47,040
そもそも
交易で国を豊かにしようなど→
167
00:16:47,040 --> 00:16:52,045
成り上がり者の卑しい考え。
168
00:16:52,045 --> 00:16:57,345
何としても
たたきつぶしてくれるわ。
169
00:17:11,214 --> 00:17:15,718
(伊藤忠清)維盛様も資盛様も
鍛えようが まるで足りませぬ。
170
00:17:15,718 --> 00:17:19,706
それでは 武士の名門
平家の男子など名乗れませぬぞ。
171
00:17:19,706 --> 00:17:22,375
弓などできずとも
生きてゆけよう。
172
00:17:22,375 --> 00:17:26,375
私も 舞や糸竹の芸の方が
好みじゃ。
173
00:17:30,049 --> 00:17:32,702
維盛 資盛。
174
00:17:32,702 --> 00:17:35,205
父上。
175
00:17:35,205 --> 00:17:40,210
我らは 内裏大番役という
大きな役目を仰せつかっておる。
176
00:17:40,210 --> 00:17:43,713
いざという時に働けるよう
しかと鍛えよ。
177
00:17:43,713 --> 00:17:47,717
(2人)はい。
弓と馬の修練がすんだら→
178
00:17:47,717 --> 00:17:52,717
貞能のもとへ参れ。
読み書きを教えてくれよう。
179
00:18:23,202 --> 00:18:25,202
(経子)殿。
180
00:18:29,709 --> 00:18:35,715
経子。 すまぬ 起こしたか?
181
00:18:35,715 --> 00:18:37,715
いえ。
182
00:18:43,373 --> 00:18:48,373
かような夜更けまで
何を お読みです?
183
00:18:52,382 --> 00:18:57,682
西光殿に お借りした
「本朝世紀」の写しじゃ。
184
00:19:02,375 --> 00:19:08,381
この国の歴史が よう分かる。
185
00:19:08,381 --> 00:19:11,534
父上の留守を預かる上は→
186
00:19:11,534 --> 00:19:15,705
身につけねばならぬ事が
山ほどある。
187
00:19:15,705 --> 00:19:33,389
♪♪~
188
00:19:33,389 --> 00:19:39,212
おい。 そなたまで
つきあう事はない。
189
00:19:39,212 --> 00:19:42,715
知りとうございます。
190
00:19:42,715 --> 00:19:51,874
殿が何を読み 何を考え
何をなさっておられるのか。
191
00:19:51,874 --> 00:19:57,714
ほんの僅かでも
知りとうございます。
192
00:19:57,714 --> 00:20:32,014
♪♪~
193
00:20:49,382 --> 00:20:52,719
あれは何ぞ? はやっておるのか?
194
00:20:52,719 --> 00:20:55,038
(平貞能)
時忠様が ご自分の郎党に→
195
00:20:55,038 --> 00:20:59,375
そろいで つけさせている由に
ござります。
196
00:20:59,375 --> 00:21:02,545
さようか。
197
00:21:02,545 --> 00:21:06,049
<嘉応2年7月3日。→
198
00:21:06,049 --> 00:21:12,749
事件は 清盛の孫 資盛の
鷹狩りの帰りに起きた>
199
00:21:14,373 --> 00:21:18,711
急げ。 父上と母上に
早う見せてさしあげたい。
200
00:21:18,711 --> 00:21:20,711
はい。
201
00:21:30,223 --> 00:21:34,877
降りよ!
これは 摂政様のお輿ぞ!
202
00:21:34,877 --> 00:21:38,030
何事じゃ?
203
00:21:38,030 --> 00:21:41,217
無礼にも
輿を降りようとしませぬ。
204
00:21:41,217 --> 00:21:46,205
摂政様のお輿にござります。
「降りよ」との仰せ。
205
00:21:46,205 --> 00:21:49,542
(資盛)構わぬ。 進め。
されど…。
206
00:21:49,542 --> 00:21:54,046
私は 平家の棟梁の子
相国入道の孫ぞ。→
207
00:21:54,046 --> 00:21:57,546
さあ 早う 父上 母上のもとへ。
208
00:22:00,703 --> 00:22:02,705
許すまじ。
209
00:22:02,705 --> 00:22:06,709
ははっ。
早うせぬか! 早う!
210
00:22:06,709 --> 00:22:18,054
♪♪~
211
00:22:18,054 --> 00:22:22,058
な… 何をする!? 私は平家の…。
212
00:22:22,058 --> 00:22:32,051
♪♪~
213
00:22:32,051 --> 00:22:35,555
兄上! 兄上!
214
00:22:35,555 --> 00:22:40,042
なんという事をなされました!?
知らなんだとはいえ→
215
00:22:40,042 --> 00:22:46,716
よりによって
清盛入道の御孫君の輿を…。
216
00:22:46,716 --> 00:22:52,221
知っての事じゃ。
はっ?
217
00:22:52,221 --> 00:22:59,521
平家の輿と… 知っての事じゃ。
218
00:23:01,714 --> 00:23:08,371
分際をわきまえず 国づくりに
口など出せば どうなるか→
219
00:23:08,371 --> 00:23:13,042
思い知らせてやったのだ。
220
00:23:13,042 --> 00:23:16,379
仕返しをしてきたら
どうなさるおつもりですか!?
221
00:23:16,379 --> 00:23:20,550
平家の棟梁 重盛は
入道様と違い→
222
00:23:20,550 --> 00:23:25,605
何事にも筋を通そうとする。
223
00:23:25,605 --> 00:23:29,709
きっと 何も言えまい。
224
00:23:29,709 --> 00:23:39,009
♪♪~
225
00:23:41,053 --> 00:23:44,724
父上 これは
あまりな仕打ちにござります。
226
00:23:44,724 --> 00:23:49,224
資盛のため どうか
訴え出て下さりませ!
227
00:24:00,039 --> 00:24:02,039
資盛。
228
00:24:04,710 --> 00:24:08,410
輿を降りなんだというは
まことか?
229
00:24:10,383 --> 00:24:13,703
摂政様が通られる時には→
230
00:24:13,703 --> 00:24:16,872
こちらが輿を降りて
礼をせねばならぬ。
231
00:24:16,872 --> 00:24:18,874
その事は教えたであろう?
232
00:24:18,874 --> 00:24:22,712
殿。 資盛は 私たちに
獲物を早う見せようと…。
233
00:24:22,712 --> 00:24:25,712
(重盛)さような事は聞いておらぬ。
234
00:24:33,873 --> 00:24:41,673
此度の事は 礼を欠いた資盛
そなたの過ちじゃ。
235
00:24:44,367 --> 00:24:50,873
(時子)重盛。 これは
資盛と摂政様だけの事ではない。
236
00:24:50,873 --> 00:24:56,712
我ら平家と
藤原摂関家の一大事ぞ。→
237
00:24:56,712 --> 00:24:59,215
何を臆する事はない。
238
00:24:59,215 --> 00:25:06,515
そなたは 平家の棟梁として
正々堂々と訴え出ればよいのじゃ。
239
00:25:12,044 --> 00:25:14,213
ご無礼をつかまつります。
240
00:25:14,213 --> 00:25:19,719
殿。 摂政様の使いが
参っております。
241
00:25:19,719 --> 00:25:24,719
資盛様のお輿を襲った
供奉たちの職を解くとの事。
242
00:25:30,880 --> 00:25:34,680
さような筋の事ではないと伝えよ。
243
00:25:36,702 --> 00:25:38,702
はっ。
244
00:25:40,539 --> 00:25:46,045
重盛。 殿不在の京での この辱め
黙っておるつもりか!?
245
00:25:46,045 --> 00:25:49,699
母上。
246
00:25:49,699 --> 00:25:56,038
我ら平家は 既に
5人もの公卿を出した一門。
247
00:25:56,038 --> 00:26:03,212
いかなる時も 有職故実にのっとり
動かねばなりませぬ。
248
00:26:03,212 --> 00:26:06,699
それこそが→
249
00:26:06,699 --> 00:26:11,387
父上不在の一門を
保つ事にござりましょう。
250
00:26:11,387 --> 00:26:31,040
♪♪~
251
00:26:31,040 --> 00:26:36,879
なるほど。 重盛らしい裁断じゃ。
252
00:26:36,879 --> 00:26:42,385
(盛国)まこと 公明正大。
実に正しいお考えと存じまする。
253
00:26:42,385 --> 00:26:45,204
されど いささか
正しすぎましょう。
254
00:26:45,204 --> 00:26:47,206
正しすぎるという事は→
255
00:26:47,206 --> 00:26:50,506
もはや 間違うておるに
同じにござります。
256
00:26:55,047 --> 00:27:00,119
して 時忠。
法皇様は 9月20日は?
257
00:27:00,119 --> 00:27:03,055
その日は 京にて
競べ馬がございますが→
258
00:27:03,055 --> 00:27:07,255
それが終わり次第
福原に駆けつけられると。
259
00:27:09,879 --> 00:27:12,548
宋の使者を迎える支度を急げ。
260
00:27:12,548 --> 00:27:16,048
(盛国)承知つかまつりました。
261
00:27:20,372 --> 00:27:25,044
<嘉応2年9月20日。→
262
00:27:25,044 --> 00:27:30,116
清盛は 慣例を破って
宋人を福原にまで招き→
263
00:27:30,116 --> 00:27:34,416
ついに
後白河院への謁見まで果たした>
264
00:27:48,868 --> 00:27:53,873
(盛国)宋国よりの使者
趙殿にござりまする。→
265
00:27:53,873 --> 00:27:58,173
我が国の
治天の君にござります。
266
00:28:28,707 --> 00:28:31,043
何じゃ? これは。
267
00:28:31,043 --> 00:28:34,880
孔雀の羽根との事にござります。
268
00:28:34,880 --> 00:28:40,719
孔雀? 絵では見た事があるが…。
269
00:28:40,719 --> 00:28:45,719
(西光)まこと かように
美しいものにござりますな。
270
00:28:57,052 --> 00:28:59,538
<清盛の悲願であった→
271
00:28:59,538 --> 00:29:07,038
宋との交易を要とする国づくりが
いよいよ実現しようとしていた>
272
00:29:08,864 --> 00:29:14,720
(経宗)法皇様が
会われたとは まことか?
宋人に
273
00:29:14,720 --> 00:29:17,056
恐ろしや…。
274
00:29:17,056 --> 00:29:22,545
我が朝 延喜以来
未曽有の事なり。
275
00:29:22,545 --> 00:29:26,382
まさしく 天魔の仕業。
276
00:29:26,382 --> 00:29:28,717
天魔の? あっ…。
277
00:29:28,717 --> 00:29:32,037
一体 平家は
この先 何をするつもりじゃ?
278
00:29:32,037 --> 00:29:35,708
この国は
どうなってしまうのじゃ?
279
00:29:35,708 --> 00:29:38,408
騒ぐでない。
280
00:29:42,031 --> 00:29:47,031
また 輿でも襲うてやればよい。
281
00:29:51,707 --> 00:29:55,878
どうせ 重盛は 何も言えぬ。
282
00:29:55,878 --> 00:29:59,715
相国入道が都を離れ→
283
00:29:59,715 --> 00:30:05,054
勝手な国づくりに
うつつを抜かしているうちに→
284
00:30:05,054 --> 00:30:09,875
平家の土台を
たたき壊してやるのだ!
285
00:30:09,875 --> 00:30:16,375
(笑い声)
286
00:30:18,050 --> 00:30:22,221
(盛国)やはり
法皇様と宋人との面会は→
287
00:30:22,221 --> 00:30:25,541
都を騒がしたように
ござりまするな。
288
00:30:25,541 --> 00:30:27,710
これ以上の勝手はさせぬと→
289
00:30:27,710 --> 00:30:30,362
摂政様が
いきまいておられる様子。
290
00:30:30,362 --> 00:30:36,552
もとより 交易による国づくりなど
公卿方には理解の外。
291
00:30:36,552 --> 00:30:38,537
また いつ 誰が→
292
00:30:38,537 --> 00:30:41,707
いかようにして
邪魔立てしてくるか しれませぬ。
293
00:30:41,707 --> 00:30:46,712
(時忠)この先 ますます
そういった事が増えよう。
294
00:30:46,712 --> 00:30:50,382
時忠。
はい。
295
00:30:50,382 --> 00:30:53,052
わしは これより先→
296
00:30:53,052 --> 00:30:59,041
いよいよ 国づくりに
本腰を入れねばならぬ。
はい。
297
00:30:59,041 --> 00:31:02,741
都に憂いを残しとうないのだ。
298
00:31:06,715 --> 00:31:09,201
わしが この福原で→
299
00:31:09,201 --> 00:31:12,871
新しき国づくりに
いそしめるよう→
300
00:31:12,871 --> 00:31:18,394
そなたは 都で
そなたのつとめを果たしてくれ。
301
00:31:18,394 --> 00:31:32,374
♪♪~
302
00:31:32,374 --> 00:31:34,376
(平知盛)まこと このまま→
303
00:31:34,376 --> 00:31:38,714
仕返しも何もせぬおつもりに
ござりますか?
304
00:31:38,714 --> 00:31:45,871
摂政様は その後 更に
随身ら7名の任を解かれたと聞く。
305
00:31:45,871 --> 00:31:49,875
先方が 既に
筋を通しておられるのだ。
306
00:31:49,875 --> 00:31:52,875
この上 何をする事がある?
307
00:31:56,365 --> 00:32:06,225
おそれながら 兄上は
お怒りにならぬのですか?
308
00:32:06,225 --> 00:32:13,425
ほかでもない 兄上のお子が
辱められたのですぞ!
309
00:32:18,704 --> 00:32:26,044
棟梁たるもの 私心によって
物事を決める訳にはまいらぬ。
310
00:32:26,044 --> 00:33:28,557
♪♪~
311
00:33:28,557 --> 00:33:30,757
(徳子)母上。
312
00:33:33,045 --> 00:33:37,045
ああ 徳子。 何ぞ用か?
313
00:33:41,053 --> 00:33:48,053
琵琶の稽古の刻限にござります。
そうであったな。
314
00:33:50,546 --> 00:33:55,617
<それは 10月21日に起きた>
315
00:33:55,617 --> 00:34:11,383
♪♪~(基房の歌声)
316
00:34:11,383 --> 00:34:16,872
♪♪~(琵琶)
317
00:34:16,872 --> 00:34:18,874
さあ 弾いてみよ。
318
00:34:18,874 --> 00:34:21,074
はい。
319
00:34:24,213 --> 00:34:59,214
♪♪~
320
00:34:59,214 --> 00:35:01,414
≪何者だ!?
321
00:35:03,385 --> 00:35:05,370
何じゃ!?
322
00:35:05,370 --> 00:35:26,375
♪♪~
323
00:35:26,375 --> 00:35:32,047
お… お許しを! お許しを!
324
00:35:32,047 --> 00:35:45,210
♪♪~
325
00:35:45,210 --> 00:35:48,046
何事にござりますか?
326
00:35:48,046 --> 00:35:57,539
♪♪~
327
00:35:57,539 --> 00:35:59,558
(重盛)いかがされました?
328
00:35:59,558 --> 00:36:03,028
申し訳ござらぬ!
申し訳ござらぬ!
329
00:36:03,028 --> 00:36:05,028
摂政様?
330
00:36:11,703 --> 00:36:15,374
そなたではないと申すか?
331
00:36:15,374 --> 00:36:17,376
はっ?
332
00:36:17,376 --> 00:36:29,376
♪♪~
333
00:36:59,701 --> 00:37:39,725
♪♪~
334
00:37:39,725 --> 00:37:41,725
(維盛)父上。
335
00:37:43,712 --> 00:37:49,712
(平重衡)兄上 お帰りなさいませ。
(一同)お帰りなさいませ。
336
00:37:53,371 --> 00:37:56,725
お見それ致しました 兄上。
337
00:37:56,725 --> 00:37:59,711
三月 沈黙したあとでの
果断なご処置→
338
00:37:59,711 --> 00:38:03,711
摂政様も かえって
恐れをなした事でしょう。
339
00:38:12,874 --> 00:38:17,529
父上 ありがとうござりました。
340
00:38:17,529 --> 00:38:23,051
けがも治って よかったのう。
まこと よかった。
341
00:38:23,051 --> 00:38:54,051
♪♪~
342
00:39:42,047 --> 00:39:44,216
ああ~っ!
343
00:39:44,216 --> 00:39:48,537
ああ~っ! ああ~っ!
344
00:39:48,537 --> 00:39:51,039
殿! 殿!
345
00:39:51,039 --> 00:39:53,875
ああ~っ! ああ~っ!
346
00:39:53,875 --> 00:39:55,875
殿!
347
00:40:00,198 --> 00:40:03,398
間違うておったと申すか…。
348
00:40:07,873 --> 00:40:14,373
私が… 間違うておったと。
349
00:40:16,715 --> 00:40:22,537
いいえ。 …いいえ!
350
00:40:22,537 --> 00:40:32,214
♪♪~
351
00:40:32,214 --> 00:40:34,214
なれぬ。
352
00:40:38,536 --> 00:40:43,208
私は…→
353
00:40:43,208 --> 00:40:46,408
父上には…。
354
00:40:49,547 --> 00:40:51,549
なれぬ!
355
00:40:51,549 --> 00:41:27,869
♪♪~
356
00:41:27,869 --> 00:41:30,872
(義明)
そうそう 存じておいでか?→
357
00:41:30,872 --> 00:41:35,710
近頃は 赤い羽根の装束の
子どもたちを 町に放ち→
358
00:41:35,710 --> 00:41:41,049
平家を 悪しざまに言う者は
容赦なく捕らえておるそうじゃ。→
359
00:41:41,049 --> 00:41:43,868
禿というそうな。
360
00:41:43,868 --> 00:41:48,039
(常澄)
今や 都は平家の天下じゃのう。
361
00:41:48,039 --> 00:41:50,542
武士の天下となったのなら→
362
00:41:50,542 --> 00:41:53,712
いま少し 我らの暮らしが
楽になるよう→
363
00:41:53,712 --> 00:41:57,365
院を お諫めして下さらぬものか。
(常澄)まこと。
364
00:41:57,365 --> 00:42:01,202
(義明)何故 かような事に
なったのかのう。
365
00:42:01,202 --> 00:42:07,726
わしが思うに やはり
あの平治の戦が過ちであった。
366
00:42:07,726 --> 00:42:11,880
亡き義朝様が
あまりに浅慮であったのだ。
367
00:42:11,880 --> 00:42:14,699
さあ 大根を もっと。
いや 結構。→
368
00:42:14,699 --> 00:42:17,385
摂津源氏の頼政殿などは→
369
00:42:17,385 --> 00:42:22,207
あの戦のさなかに それを見抜き
今や 平家の郎党。
370
00:42:22,207 --> 00:42:24,392
(時政)さあ もっと飲んで。
(秀義)あ~っ。
失礼。
371
00:42:24,392 --> 00:42:28,046
(常澄)何をしておるのじゃ。
(藤九郎)殿 帰りましょう。
372
00:42:28,046 --> 00:42:33,385
ああ~! あの時 義朝様になど
従うのではなかったわ!→
373
00:42:33,385 --> 00:42:38,385
滅びゆく源氏の棟梁になど!
(時政)秀義殿!
374
00:42:41,059 --> 00:42:43,359
源氏は滅びぬ。
375
00:42:51,386 --> 00:42:54,686
我が身は滅びても…。
376
00:42:57,375 --> 00:43:01,379
源氏の魂は 断じて滅びぬ!
377
00:43:01,379 --> 00:43:22,550
♪♪~
378
00:43:22,550 --> 00:43:27,038
平家にあらずんば 人にあらず。
379
00:43:27,038 --> 00:43:30,542
より大きいものを食うた方が勝ち。
380
00:43:30,542 --> 00:43:33,695
徳子を 帝の后に!?
381
00:43:33,695 --> 00:43:37,866
清盛様とは さように恐ろしい
お方なのでござりますか?
382
00:43:37,866 --> 00:43:41,369
命ばかりは 何とぞ!
383
00:43:41,369 --> 00:43:43,721
ちょっと やり過ぎとちゃうか?
384
00:43:43,721 --> 00:43:46,421
誰にも邪魔はさせぬ。
385
00:43:48,209 --> 00:43:52,046
<六波羅を本拠地とした平家。→
386
00:43:52,046 --> 00:43:57,368
清盛は 都の南
今の梅小路公園の辺りに→
387
00:43:57,368 --> 00:44:02,373
もう一つの拠点
西八条第を造営しました。→
388
00:44:02,373 --> 00:44:05,710
ここの主は 清盛の妻 時子で→
389
00:44:05,710 --> 00:44:08,379
福原に
移った清盛は→
390
00:44:08,379 --> 00:44:10,548
都で
問題が起こると→
391
00:44:10,548 --> 00:44:12,717
この西八条第に
入り→
392
00:44:12,717 --> 00:44:15,036
その解決に
当たりました。→
393
00:44:15,036 --> 00:44:20,542
西八条第の鎮守社とされる
若一神社は→
394
00:44:20,542 --> 00:44:23,545
清盛が太政大臣になった事から→
395
00:44:23,545 --> 00:44:28,867
開運出世の神として
あがめられています。→
396
00:44:28,867 --> 00:44:34,722
清盛が病に倒れた時に
体を冷やしたという湧き水。→
397
00:44:34,722 --> 00:44:39,377
今でも 子どもの出世を願い
産湯としても使われるなど→
398
00:44:39,377 --> 00:44:43,047
訪れる人が絶えません。→
399
00:44:43,047 --> 00:44:47,552
都での政治拠点となった
西八条第。→
400
00:44:47,552 --> 00:44:54,552
清盛の影響力は 引退後も
衰える事はなかったのです>
| {
"pile_set_name": "Github"
} |
package com.github.sommeri.less4j.compiler;
import java.io.File;
import java.util.Collection;
import org.junit.runners.Parameterized.Parameters;
import com.github.sommeri.less4j.AbstractFileBasedTest;
/**
 * Parameterized, file-based compiler test suite for LESS feature combinations.
 *
 * <p>Each test case is a bundle of files discovered under {@code standardCases}
 * (input .less, expected output, expected error list, source-map data, and a
 * config file); the actual compile-and-compare logic lives in the
 * {@link AbstractFileBasedTest} base class.
 */
public class CombinationsTest extends AbstractFileBasedTest {
  // Root directory scanned for the combination test fixtures.
  private static final String standardCases = "src/test/resources/compile-basic-features/combinations/";
  // One constructor invocation per discovered fixture bundle; parameters are
  // supplied by allTestsParameters() via the JUnit Parameterized runner.
  public CombinationsTest(File inputFile, File outputFile, File errorList, File mapdataFile, File configFile, String testName) {
    super(inputFile, outputFile, errorList, mapdataFile, configFile, testName);
  }
  // Enumerates all fixture bundles under standardCases; {5} (testName) is used
  // as the display name for each generated test.
  @Parameters(name="Less: {5}")
  public static Collection<Object[]> allTestsParameters() {
    return createTestFileUtils().loadTestFiles(standardCases);
  }
}
| {
"pile_set_name": "Github"
} |
/**
* Created by mauricio on 2/17/15.
*/
/**
 * Catalog of third-party libraries available for inspection.
 * Each entry describes one library:
 *   - entryPoint: the global variable name to inspect once loaded.
 *   - src: script URL(s); multiple dependencies are '|'-separated and
 *     presumably loaded in order (jQuery before Ember, etc.) —
 *     TODO confirm against the loader that consumes this list.
 *   - label / displayName: human-readable name.
 *     NOTE(review): two different keys are used for the same purpose
 *     ('displayName' for Lo-Dash, 'label' elsewhere) — verify which one the
 *     consumer actually reads.
 *   - forbiddenTokens / additionalForbiddenTokens: '|'-separated tokens to
 *     exclude from the analysis.
 */
module.exports = [{
  src: '//cdnjs.cloudflare.com/ajax/libs/jquery/2.1.1/jquery.min.js',
  entryPoint: 'jQuery'
}, {
  // NOTE(review): no `src` here — Polymer is presumably expected to be
  // present on the page already; confirm with the loader.
  entryPoint: 'Polymer',
  additionalForbiddenTokens: 'global:Polymer.elements'
}, {
  // No `src`: d3 is presumably preloaded as well.
  entryPoint: 'd3'
}, {
  displayName: 'Lo-Dash',
  entryPoint: '_',
  src: '//cdnjs.cloudflare.com/ajax/libs/lodash.js/2.4.1/lodash.js'
}, {
  src: '//fb.me/react-0.12.2.js',
  entryPoint: 'React'
}, {
  src: '//cdnjs.cloudflare.com/ajax/libs/angular.js/1.2.20/angular.js',
  entryPoint: 'angular',
  label: 'Angular JS'
}, {
  src: '//cdnjs.cloudflare.com/ajax/libs/modernizr/2.8.2/modernizr.js',
  entryPoint: 'Modernizr'
}, {
  src: '//cdnjs.cloudflare.com/ajax/libs/handlebars.js/1.1.2/handlebars.js',
  entryPoint: 'Handlebars'
}, {
  label: 'EmberJS',
  // Ember needs jQuery and Handlebars loaded first.
  src: '//cdnjs.cloudflare.com/ajax/libs/jquery/2.1.1/jquery.min.js|//cdnjs.cloudflare.com/ajax/libs/handlebars.js/1.1.2/handlebars.js|//cdnjs.cloudflare.com/ajax/libs/ember.js/1.6.1/ember.js',
  entryPoint: 'Ember',
  forbiddenTokens: 'global:$|global:Handlebars|pojoviz:builtIn|global:window|global:document'
}, {
  // Backbone needs an underscore-compatible library (Lo-Dash here).
  src: '//cdnjs.cloudflare.com/ajax/libs/lodash.js/2.4.1/lodash.js|//cdnjs.cloudflare.com/ajax/libs/backbone.js/1.1.2/backbone.js',
  entryPoint: 'Backbone'
}, {
  label: 'Marionette.js',
  // Marionette sits on top of jQuery + Lo-Dash + Backbone.
  src: '//cdnjs.cloudflare.com/ajax/libs/jquery/2.1.1/jquery.min.js|//cdnjs.cloudflare.com/ajax/libs/lodash.js/2.4.1/lodash.js|//cdnjs.cloudflare.com/ajax/libs/backbone.js/1.1.2/backbone.js|http://marionettejs.com/downloads/backbone.marionette.js',
  entryPoint: 'Marionette'
}];
"pile_set_name": "Github"
} |
namespace PaymentService.Api.Queries.Dtos
{
    /// <summary>
    /// Read model returned by balance queries: the outstanding balance of a
    /// single policy account.
    /// </summary>
    public class PolicyAccountBalanceDto
    {
        /// <summary>Identifier of the policy account the balance belongs to.</summary>
        public string PolicyAccountNumber { get; set; }

        /// <summary>Number of the policy associated with the account.</summary>
        public string PolicyNumber { get; set; }

        /// <summary>Current account balance. NOTE(review): currency/sign
        /// convention is not visible here — confirm with the query handler.</summary>
        public decimal Balance { get; set; }
    }
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=0">
<meta name="apple-mobile-web-app-capable" content="yes">
<title>OpenLayers WMTS Example</title>
<link rel="stylesheet" href="../theme/default/style.css" type="text/css"/>
<link rel="stylesheet" href="style.css" type="text/css">
<script src="../lib/Firebug/firebug.js"></script>
<script src="../lib/OpenLayers.js"></script>
<script src="wmts.js"></script>
<style>
.olControlAttribution {
bottom: 5px;
}
</style>
</head>
<body onload="init();">
<h1 id="title">Web Map Tile Service (WMTS) Layer</h1>
<div id="tags">
wmts
</div>
<p id="shortdesc">
The WMTS layer allows viewing of tiles from a server implementing
the OGC Web Map Tile Service (WMTS) standard version 1.0.0.
</p>
<div id="map" class="smallmap"></div>
<div id="docs">
<p>
This example uses an OpenLayers.Layer.WMTS layer to display
cached tiles over an OSM layer in spherical mercator coordinates.
</p><p>
See the <a href="wmts.js" target="_blank">
wmts.js source</a> to see how this is done.
</p>
</div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
package workload
import (
"errors"
"sync"
)
// clientState enumerates the lifecycle phases of a workload client.
type clientState int

const (
	clientStateInit    clientState = iota // zero value: created, never started
	clientStateStarted                    // running
	clientStateStopped                    // terminal: a stopped client cannot restart
)
// clientStateManager serializes lifecycle transitions for a workload client.
// The embedded mutex guards state, making the manager safe for concurrent use.
type clientStateManager struct {
	sync.Mutex
	state clientState // current phase; zero value is clientStateInit
}
// newClientStateManager returns a manager in the initial (not started) state.
func newClientStateManager() *clientStateManager {
	return &clientStateManager{}
}
// StartIfStartable moves the client into the started state. It fails when the
// client is already running, or when it has been stopped — stopping is a
// terminal state and the client may never be restarted afterwards.
func (s *clientStateManager) StartIfStartable() error {
	s.Lock()
	defer s.Unlock()

	switch s.state {
	case clientStateStarted:
		return errors.New("client already started")
	case clientStateStopped:
		return errors.New("client cannot start once stopped")
	default:
		s.state = clientStateStarted
		return nil
	}
}
// StopIfStoppable moves the client into the stopped state. It fails when the
// client was never started, or when it has already been stopped.
func (s *clientStateManager) StopIfStoppable() error {
	s.Lock()
	defer s.Unlock()

	switch s.state {
	case clientStateInit:
		return errors.New("client hasn't started")
	case clientStateStopped:
		return errors.New("client is already stopped")
	default:
		s.state = clientStateStopped
		return nil
	}
}
| {
"pile_set_name": "Github"
} |
using System;
using VRage;
using VRage.Game.Entity;
using VRage.Library.Collections;
using VRage.Library.Utils;
using VRage.Trace;
using VRageMath;
namespace Sandbox.Game.Replication.History
{
    /// <summary>
    /// Per-entity tuning for predicted snapshot reconciliation. The factors
    /// scale the global correction thresholds in MyPredictedSnapshotSync
    /// (e.g. MaxPositionDelta * MaxPositionFactor is the clamp threshold used
    /// for position corrections of this entity).
    /// </summary>
    public class MyPredictedSnapshotSyncSetup : MySnapshotSyncSetup
    {
        public float MaxPositionFactor;   // scales MaxPositionDelta
        public float MaxLinearFactor;     // scales MaxLinearVelocityDelta
        public float MaxRotationFactor;   // scales MaxRotationAngle
        public float MaxAngularFactor;    // scales the angular-velocity clamp
        public float IterationsFactor;    // scales SmoothTimesteps (smoothing length)
    }
public class MyPredictedSnapshotSync : IMySnapshotSync
{
        // debug data
        // Global tuning knobs (static so they can be tweaked live for debugging).
        public static bool SetTransformCorrections = true;   // master switch: apply position/rotation corrections
        public static bool SetPhysicsCorrections = true;     // master switch: apply velocity corrections
        public static float DeltaFactor = 0.7f;              // fraction of a correction applied immediately
        public static int SmoothTimesteps = 30;              // frames over which a smooth correction is spread
        public static bool SmoothPositionCorrection = true;
        public static float MinPositionDelta = 0.05f;        // ignore position errors below this (scaled by velocity)
        public static float MaxPositionDelta = 0.5f;         // above this, clamp and correct immediately
        public static float ReferenceLinearVelocity = 10.0f; // velocity at which min thresholds reach full scale
        public static bool SmoothRotationCorrection = true;
        public static float MinRotationAngle = 2.0f / 180.0f * MathHelper.Pi;   // radians
        public static float MaxRotationAngle = 10.0f / 180.0f * MathHelper.Pi;  // radians
        public static float ReferenceAngularVelocity = 0.5f;
        public static bool SmoothLinearVelocityCorrection = true;
        public static float MinLinearVelocityDelta = 0.01f;
        public static float MaxLinearVelocityDelta = 4.0f;
        public static bool SmoothAngularVelocityCorrection = true;
        public static float MinAngularVelocityDelta = 0.01f;
        public static float MinVelocityChangeToReset = 10;   // server velocity jump that triggers a suspected stop

        // History of locally predicted states and of snapshots received from the server.
        private readonly MySnapshotHistory m_clientHistory = new MySnapshotHistory();
        private readonly MySnapshotHistory m_receivedQueue = new MySnapshotHistory();

        // Pending smooth corrections, applied over the remaining iteration counts.
        private int m_animDeltaLinearVelocityIterations;
        private MyTimeSpan m_animDeltaLinearVelocityTimestamp;
        private Vector3 m_animDeltaLinearVelocity;
        private int m_animDeltaPositionIterations;
        private MyTimeSpan m_animDeltaPositionTimestamp;
        private Vector3D m_animDeltaPosition;
        private int m_animDeltaRotationIterations;
        private MyTimeSpan m_animDeltaRotationTimestamp;
        private Quaternion m_animDeltaRotation;
        private int m_animDeltaAngularVelocityIterations;
        private MyTimeSpan m_animDeltaAngularVelocityTimestamp;
        private Vector3 m_animDeltaAngularVelocity;

        private readonly MyEntity m_entity;

        // Last seen server/client states; several of these are used only by the
        // debug tracing in UpdatePrediction.
        private MySnapshot m_lastServerSnapshot;
        private MyTimeSpan m_lastServerTimestamp;
        private MySnapshot m_lastClientSnapshot;
        private MyTimeSpan m_lastClientTimestamp;
        private MySnapshot m_lastSnapshot;
        private MyTimeSpan m_lastTimestamp;
        private readonly string m_trackName = "cenda";  // entity-name filter for debug traces
        private bool m_wasReset = true;                 // next server snapshot is applied as a hard reset
        private Vector3 m_lastServerVelocity;
        private int m_stopSuspected;                    // countdown after a sudden server velocity change

        /// <summary>Creates a predicted sync helper for the given entity.</summary>
        public MyPredictedSnapshotSync(MyEntity entity)
        {
            m_entity = entity;
        }
        /// <summary>
        /// Per-frame entry point: runs prediction reconciliation unless the
        /// entity is attached to a parent, has no physics (e.g. trees), or its
        /// rigid body is asleep.
        /// </summary>
        public void Update(MyTimeSpan clientTimestamp, MySnapshotSyncSetup setup)
        {
            // skip entities with parent
            if (m_entity.Parent != null)
                return;
            if (m_entity.Physics == null) //trees
                return;
            if (m_entity.Physics.RigidBody != null && !m_entity.Physics.RigidBody.IsActive)
                return;
            // NOTE(review): profiler label concatenates without a separator
            // ("Sync Predicted<name>") — presumably intentional; confirm.
            VRage.Profiler.ProfilerShort.Begin("Sync Predicted" + m_entity.DisplayName);
            UpdatePrediction(clientTimestamp, setup);
            VRage.Profiler.ProfilerShort.End();
        }
        /// <summary>
        /// Records the current predicted state into the client history, asks
        /// UpdateFromServerQueue for a correction delta, applies any immediate
        /// delta plus one step of each pending smoothed (animated) delta, and
        /// writes the result back to the entity.
        /// </summary>
        public void UpdatePrediction(MyTimeSpan clientTimestamp, MySnapshotSyncSetup setup)
        {
            var currentSnapshot = new MySnapshot(m_entity);
            var tmpSnapshot = currentSnapshot;  // pre-correction copy, used by debug tracing below
            m_clientHistory.Add(currentSnapshot, clientTimestamp);
            // serverTmpSnapshot and timeDelta feed only the debug tracing at the bottom.
            var serverTmpSnapshot = m_receivedQueue.GetItem(clientTimestamp);
            var timeDelta = (float)(m_lastServerTimestamp - serverTmpSnapshot.Timestamp).Seconds;
            var predictedSetup = setup as MyPredictedSnapshotSyncSetup;
            var serverDeltaItem = UpdateFromServerQueue(clientTimestamp, predictedSetup);
            bool animated = m_animDeltaPositionIterations > 0 || m_animDeltaLinearVelocityIterations > 0 || m_animDeltaRotationIterations > 0 ||
                m_animDeltaAngularVelocityIterations > 0;
            bool applySnapshot = false;
            if (serverDeltaItem.Valid)
            {
                // Immediate correction: add the delta to the current state and
                // rebase the stored client history by the same amount.
                currentSnapshot.Add(serverDeltaItem.Snapshot);
                m_clientHistory.ApplyDelta(serverDeltaItem.Timestamp, serverDeltaItem.Snapshot);
                applySnapshot = true;
            }
            if (animated)
            {
                // Apply one step of each pending smoothed correction, counting
                // its remaining iterations down.
                if (m_animDeltaPositionIterations > 0)
                {
                    m_clientHistory.ApplyDeltaPosition(m_animDeltaPositionTimestamp, m_animDeltaPosition);
                    currentSnapshot.Position += m_animDeltaPosition;
                    m_animDeltaPositionIterations--;
                }
                if (m_animDeltaLinearVelocityIterations > 0)
                {
                    m_clientHistory.ApplyDeltaLinearVelocity(m_animDeltaLinearVelocityTimestamp, m_animDeltaLinearVelocity);
                    currentSnapshot.LinearVelocity += m_animDeltaLinearVelocity;
                    m_animDeltaLinearVelocityIterations--;
                }
                if (m_animDeltaAngularVelocityIterations > 0)
                {
                    m_clientHistory.ApplyDeltaAngularVelocity(m_animDeltaAngularVelocityTimestamp, m_animDeltaAngularVelocity);
                    currentSnapshot.AngularVelocity += m_animDeltaAngularVelocity;
                    m_animDeltaAngularVelocityIterations--;
                }
                if (m_animDeltaRotationIterations > 0)
                {
                    m_clientHistory.ApplyDeltaRotation(m_animDeltaRotationTimestamp, m_animDeltaRotation);
                    currentSnapshot.Rotation = currentSnapshot.Rotation * Quaternion.Inverse(m_animDeltaRotation);
                    currentSnapshot.Rotation.Normalize();
                    m_animDeltaRotationIterations--;
                }
                applySnapshot = true;
            }
            if (applySnapshot)
            {
                currentSnapshot.Apply(m_entity, setup.ApplyRotation, setup.ApplyPhysics, serverDeltaItem.Type == MySnapshotHistory.SnapshotType.Reset);
            }
            // NOTE(review): the guard below is commented out, so this debug
            // block is always compiled in; it only traces for entities whose
            // name contains the hard-coded string "dicykal".
            //if (MyCompilationSymbols.EnableNetworkClientUpdateTracking)
            {
                if (m_entity.DisplayName.Contains("dicykal") && (serverDeltaItem.Valid || animated))
                {
                    var velocity = (serverTmpSnapshot.Snapshot.Position - m_lastServerSnapshot.Position) / timeDelta;
                    m_lastServerSnapshot = serverTmpSnapshot.Snapshot;
                    var clientTimeDelta = (float)(m_lastClientTimestamp - clientTimestamp).Seconds;
                    var clientVelocity = (currentSnapshot.Position - m_lastClientSnapshot.Position) / clientTimeDelta;
                    m_lastClientSnapshot = currentSnapshot;
                    m_lastClientTimestamp = clientTimestamp;
                    VRage.Trace.MyTrace.Send(VRage.Trace.TraceWindow.MPositions3, m_entity.DisplayName + ": " +
                        tmpSnapshot + " / " + currentSnapshot + " / " + serverTmpSnapshot.Snapshot + "; cvel " + velocity + " / " + clientVelocity + "; " +
                        serverDeltaItem.Valid + ", " + animated + "; " + m_animDeltaPosition * 60);
                    /*var delta = tmpSnapshot.Diff(currentSnapshot);
                    var posLen = delta.Position.Length();
                    var lenLV = delta.LinearVelocity.Length();
                    var sdPosLen = serverDeltaItem.Snapshot.Position.Length();
                    var aPosLen = m_animDeltaPosition.Length();
                    VRage.Trace.MyTrace.Send(VRage.Trace.TraceWindow.MPositions3, m_entity.DisplayName +
                    ": pos " + delta.Position + " / " + posLen + " lv " + tmpSnapshot.LinearVelocity + " / " + lenLV +
                    "; sdelta " + serverDeltaItem.Valid + " pos " + serverDeltaItem.Snapshot.Position + " / " + sdPosLen +
                    " anim " + m_animDeltaPositionIterations + " pos " + m_animDeltaPosition + " / " + aPosLen);*/
                }
            }
        }
        /// <summary>
        /// Serializes the entity's current state (as captured by MySnapshot)
        /// into the stream.
        /// </summary>
        public void Write(BitStream stream)
        {
            var snapshot = new MySnapshot(m_entity);
            snapshot.Write(stream);
        }
        /// <summary>
        /// Deserializes a server snapshot and queues it for reconciliation.
        /// Parented or physics-less entities are ignored. An incoming active
        /// snapshot wakes a sleeping rigid body; snapshots are queued only
        /// while the body is (or has just become) active.
        /// </summary>
        public void Read(BitStream stream, MyTimeSpan timeStamp)
        {
            var snapshot = new MySnapshot(stream);
            if (m_entity.Parent == null && m_entity.Physics != null)
            {
                if (m_entity.Physics.IsInWorld && m_entity.Physics.RigidBody != null && !m_entity.Physics.RigidBody.IsActive && snapshot.Active)
                    m_entity.Physics.RigidBody.Activate();
                if (m_entity.Physics.RigidBody == null || m_entity.Physics.RigidBody.IsActive)
                    m_receivedQueue.Add(snapshot, timeStamp);
            }
        }
        /// <summary>
        /// Drops the client prediction history and cancels all pending smooth
        /// corrections. Marks the sync so the next valid server snapshot is
        /// applied as a hard reset instead of a smoothed correction.
        /// </summary>
        public void Reset()
        {
            m_clientHistory.Reset();
            m_animDeltaRotationIterations = m_animDeltaLinearVelocityIterations =
                m_animDeltaPositionIterations = m_animDeltaAngularVelocityIterations = 0;
            // Sentinel: marks the last server velocity as unknown.
            m_lastServerVelocity = Vector3D.PositiveInfinity;
            m_wasReset = true;
        }
private MySnapshotHistory.MyItem UpdateFromServerQueue(MyTimeSpan clientTimestamp, MyPredictedSnapshotSyncSetup setup)
{
bool recalc = false;
var serverItem = m_receivedQueue.GetItem(clientTimestamp);
if (serverItem.Valid)
{
if (serverItem.Timestamp != m_lastServerTimestamp)
{
var item = m_clientHistory.Get(serverItem.Timestamp, MyTimeSpan.Zero);
if (item.Valid && (item.Type == MySnapshotHistory.SnapshotType.Exact || item.Type == MySnapshotHistory.SnapshotType.Interpolation))
{
m_lastServerTimestamp = serverItem.Timestamp;
m_receivedQueue.Prune(clientTimestamp, MyTimeSpan.Zero, 3);
m_clientHistory.Prune(serverItem.Timestamp, MyTimeSpan.Zero, 10);
MySnapshot delta;
if (!serverItem.Snapshot.Active && !setup.IsControlled)
{
var currentSnapshot = new MySnapshot(m_entity);
delta = serverItem.Snapshot.Diff(currentSnapshot);
Reset();
}
else delta = serverItem.Snapshot.Diff(item.Snapshot);
if (m_lastServerVelocity.IsValid())
{
var deltaVelocity = serverItem.Snapshot.LinearVelocity - m_lastServerVelocity;
m_lastServerVelocity = serverItem.Snapshot.LinearVelocity;
var deltaVelocityLengthSqr = deltaVelocity.LengthSquared();
if (m_stopSuspected > 0)
{
var currentSnapshot = new MySnapshot(m_entity);
var maxVelocityDeltaSqr = (MinVelocityChangeToReset / 2) * (MinVelocityChangeToReset / 2);
if ((serverItem.Snapshot.LinearVelocity - currentSnapshot.LinearVelocity).LengthSquared() > maxVelocityDeltaSqr)
{
Reset();
delta = serverItem.Snapshot.Diff(currentSnapshot);
m_stopSuspected = 0;
}
}
if (deltaVelocityLengthSqr > (MinVelocityChangeToReset * MinVelocityChangeToReset))
{
m_stopSuspected = 10;
if (MyCompilationSymbols.EnableNetworkPositionTracking)
VRage.Trace.MyTrace.Send(VRage.Trace.TraceWindow.MPositions3, "!!!!!!!!!!!!!!!!!!! sudden server velocity change (" + m_entity.DisplayName + "): " + Math.Sqrt(deltaVelocityLengthSqr));
}
else if (m_stopSuspected > 0) m_stopSuspected--;
}
else m_lastServerVelocity = serverItem.Snapshot.LinearVelocity;
if (m_wasReset)
{
delta.Position += serverItem.Snapshot.LinearVelocity * (float)(clientTimestamp - serverItem.Timestamp).Seconds;
m_wasReset = false;
serverItem.Snapshot = delta;
serverItem.Type = MySnapshotHistory.SnapshotType.Reset;
return serverItem;
}
/*{
var lvsqr = delta.LinearVelocity.LengthSquared();
var maxLinearFactorSqr = setup.MaxLinearFactor * setup.MaxLinearFactor;
var maxLinVel = MaxLinearVelocityDelta * MaxLinearVelocityDelta * maxLinearFactorSqr;
if (lvsqr > maxLinVel)
{
var similarItem = m_clientHistory.GetSimilar(serverItem.Timestamp, serverItem.Snapshot.LinearVelocity);
if (similarItem.Valid)
{
var newDelta = serverItem.Snapshot.LinearVelocity - similarItem.Snapshot.LinearVelocity;
if (newDelta.LengthSquared() < maxLinVel)
{
if (MyCompilationSymbols.EnableNetworkClientControlledTracking)
if (m_entity.DisplayName.Contains(m_trackName))
VRage.Trace.MyTrace.Send(VRage.Trace.TraceWindow.MPositions3,
m_entity.DisplayName + ": old " + newDelta + " new " + delta.LinearVelocity +
" ------------------------------------------" +
serverItem.Snapshot + " >> " + item.Snapshot + " >> " + similarItem.Snapshot);
delta.LinearVelocity = Vector3.Zero;
}
}
}
}*/
var serverAngVelSqr = serverItem.Snapshot.AngularVelocity.LengthSquared();
bool anyAngVel = serverAngVelSqr > 0.001f;
var minLinearVelocityFactor = Math.Max(Math.Min(serverItem.Snapshot.LinearVelocity.LengthSquared() /
(ReferenceLinearVelocity * ReferenceLinearVelocity), 1.0f), 0.01f);
var minAngularVelocityFactor = Math.Max(Math.Min(serverAngVelSqr /
(ReferenceAngularVelocity * ReferenceAngularVelocity), 1.0f), 0.01f);
int iterations = (int)(SmoothTimesteps * setup.IterationsFactor);
var maxFactorSqr = setup.MaxPositionFactor * setup.MaxPositionFactor;
var anyDelta = setup.ApplyPhysics && setup.ApplyRotation && delta.AngularVelocity.LengthSquared() > 0.00001f;
// position
{
var psqr = delta.Position.LengthSquared();
if (psqr > MaxPositionDelta * MaxPositionDelta * maxFactorSqr)
{
var dir = delta.Position;
var length = dir.Normalize();
var newLength = length - MaxPositionDelta * (1.0f - DeltaFactor);
delta.Position = dir * newLength;
anyDelta = true;
m_animDeltaPositionIterations = 0;
}
else if (!SmoothPositionCorrection)
{
delta.Position *= DeltaFactor;
anyDelta = true;
m_animDeltaPositionIterations = 0;
}
else
{
if (psqr > MinPositionDelta * MinPositionDelta * minLinearVelocityFactor)
m_animDeltaPositionIterations = iterations;
if (m_animDeltaPositionIterations > 0)
{
m_animDeltaPosition = delta.Position / m_animDeltaPositionIterations;
m_animDeltaPositionTimestamp = serverItem.Timestamp;
}
delta.Position = Vector3D.Zero;
}
}
// rotation
if (setup.ApplyRotation)
{
Vector3 axis;
float angle;
delta.Rotation.GetAxisAngle(out axis, out angle);
if (angle > MathHelper.Pi)
{
axis = -axis;
angle = 2 * MathHelper.Pi - angle;
}
if (angle > MaxRotationAngle * setup.MaxRotationFactor)
{
delta.Rotation = Quaternion.CreateFromAxisAngle(axis, angle - MaxRotationAngle * (1.0f - DeltaFactor));
anyDelta = true;
m_animDeltaRotationIterations = 0;
}
else if (!SmoothRotationCorrection)
{
delta.Rotation = Quaternion.CreateFromAxisAngle(axis, angle * DeltaFactor);
anyDelta = true;
m_animDeltaRotationIterations = 0;
}
else
{
if (angle > MinRotationAngle * minAngularVelocityFactor)
m_animDeltaRotationIterations = iterations;
if (m_animDeltaRotationIterations > 0)
{
m_animDeltaRotation = Quaternion.CreateFromAxisAngle(axis, angle / m_animDeltaRotationIterations);
m_animDeltaRotationTimestamp = serverItem.Timestamp;
}
delta.Rotation = Quaternion.Identity;
}
}
if (setup.ApplyPhysics)
{
// linear velocity
{
var lvsqr = delta.LinearVelocity.LengthSquared();
if (!SmoothLinearVelocityCorrection)
{
delta.LinearVelocity *= DeltaFactor;
anyDelta = true;
m_animDeltaLinearVelocityIterations = 0;
}
else
{
if (lvsqr > MinLinearVelocityDelta * MinLinearVelocityDelta)
m_animDeltaLinearVelocityIterations = iterations;
if (m_animDeltaLinearVelocityIterations > 0)
{
m_animDeltaLinearVelocity = delta.LinearVelocity * DeltaFactor / m_animDeltaLinearVelocityIterations;
m_animDeltaLinearVelocityTimestamp = serverItem.Timestamp;
}
delta.LinearVelocity = Vector3.Zero;
}
}
// angular velocity
{
var lvsqr = delta.AngularVelocity.LengthSquared();
if (!SmoothAngularVelocityCorrection)
{
delta.AngularVelocity *= DeltaFactor;
anyDelta = true;
m_animDeltaAngularVelocityIterations = 0;
}
else
{
if (lvsqr > MinAngularVelocityDelta * MinAngularVelocityDelta)
m_animDeltaAngularVelocityIterations = iterations;
if (m_animDeltaAngularVelocityIterations > 0)
{
m_animDeltaAngularVelocity = delta.AngularVelocity * DeltaFactor / m_animDeltaAngularVelocityIterations;
m_animDeltaAngularVelocityTimestamp = serverItem.Timestamp;
}
delta.AngularVelocity = Vector3.Zero;
}
}
}
if (!SetTransformCorrections)
{
delta.Position = Vector3D.Zero;
delta.Rotation = Quaternion.Identity;
m_animDeltaPositionIterations = m_animDeltaRotationIterations = 0;
}
if (!SetPhysicsCorrections)
{
delta.LinearVelocity = Vector3.Zero;
delta.AngularVelocity = Vector3.Zero;
m_animDeltaLinearVelocityIterations = m_animDeltaAngularVelocityIterations = 0;
}
if (anyDelta)
{
if (MyCompilationSymbols.EnableNetworkPositionTracking)
if (m_entity.DisplayName.Contains(m_trackName))
VRage.Trace.MyTrace.Send(VRage.Trace.TraceWindow.MPositions3,
m_entity.DisplayName + ": " + serverItem.Snapshot + " >> " + item.Snapshot + " >> " + delta);
}
serverItem.Snapshot = delta;
serverItem.Valid = anyDelta;
}
else
{
serverItem.Valid = false;
recalc = m_wasReset;
if (!m_wasReset && MyCompilationSymbols.EnableNetworkPositionTracking)
MyTrace.Send(TraceWindow.MPositions3,
"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! " + m_entity.DisplayName + ": " + item.Type);
}
}
else
{
serverItem.Valid = false;
recalc = m_wasReset;
}
}
else
{
if (!m_receivedQueue.Empty())
recalc = true;
m_clientHistory.Prune(clientTimestamp, MyTimeSpan.FromMilliseconds(1500));
}
if (recalc)
{
serverItem = m_receivedQueue.Get(clientTimestamp, MyTimeSpan.Zero);
if (serverItem.Valid && serverItem.Type == MySnapshotHistory.SnapshotType.Exact ||
serverItem.Type == MySnapshotHistory.SnapshotType.Interpolation ||
serverItem.Type == MySnapshotHistory.SnapshotType.Extrapolation)
{
var currentSnapshot = new MySnapshot(m_entity);
var delta = serverItem.Snapshot.Diff(currentSnapshot);
serverItem.Valid = true;
serverItem.Snapshot = delta;
serverItem.Type = MySnapshotHistory.SnapshotType.Reset;
return serverItem;
}
else
{
serverItem.Valid = false;
if (MyCompilationSymbols.EnableNetworkPositionTracking)
MyTrace.Send(TraceWindow.MPositions3,
"------------------------------------------- " + m_entity.DisplayName + ": " +
m_receivedQueue.ToStringTimestamps() + " / " + serverItem.Type);
}
}
return serverItem;
}
}
} | {
"pile_set_name": "Github"
} |
include "llvm/TableGen/SearchableTable.td"
include "llvm/IR/Intrinsics.td"
// Emits a searchable table of all records deriving from
// AMDGPUImageDMaskIntrinsic, keyed by the intrinsic itself, together with a
// lookup function named getAMDGPUImageDMaskIntrinsic.
def AMDGPUImageDMaskIntrinsicTable : GenericTable {
  let FilterClass = "AMDGPUImageDMaskIntrinsic";
  let Fields = ["Intr"];
  let PrimaryKey = ["Intr"];
  let PrimaryKeyName = "getAMDGPUImageDMaskIntrinsic";
  // Binary search can bail out early once the key region is passed.
  let PrimaryKeyEarlyOut = 1;
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.7.0_181) on Sat Nov 17 00:48:03 UTC 2018 -->
<title>org.opencv.features2d</title>
<meta name="date" content="2018-11-17">
<link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="org.opencv.features2d";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li class="navBarCell1Rev">Package</li>
<li>Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../index-all.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage"><em>OpenCV 3.4.4</em></div>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../org/opencv/face/package-summary.html">Prev Package</a></li>
<li><a href="../../../org/opencv/img_hash/package-summary.html">Next Package</a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?org/opencv/features2d/package-summary.html" target="_top">Frames</a></li>
<li><a href="package-summary.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h1 title="Package" class="title">Package org.opencv.features2d</h1>
</div>
<div class="contentContainer">
<ul class="blockList">
<li class="blockList">
<table class="packageSummary" border="0" cellpadding="3" cellspacing="0" summary="Class Summary table, listing classes, and an explanation">
<caption><span>Class Summary</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Class</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/AgastFeatureDetector.html" title="class in org.opencv.features2d">AgastFeatureDetector</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/AKAZE.html" title="class in org.opencv.features2d">AKAZE</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/BFMatcher.html" title="class in org.opencv.features2d">BFMatcher</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/BOWImgDescriptorExtractor.html" title="class in org.opencv.features2d">BOWImgDescriptorExtractor</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/BOWKMeansTrainer.html" title="class in org.opencv.features2d">BOWKMeansTrainer</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/BOWTrainer.html" title="class in org.opencv.features2d">BOWTrainer</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/BRISK.html" title="class in org.opencv.features2d">BRISK</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/DescriptorMatcher.html" title="class in org.opencv.features2d">DescriptorMatcher</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/FastFeatureDetector.html" title="class in org.opencv.features2d">FastFeatureDetector</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/Feature2D.html" title="class in org.opencv.features2d">Feature2D</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/Features2d.html" title="class in org.opencv.features2d">Features2d</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/FlannBasedMatcher.html" title="class in org.opencv.features2d">FlannBasedMatcher</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/GFTTDetector.html" title="class in org.opencv.features2d">GFTTDetector</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/KAZE.html" title="class in org.opencv.features2d">KAZE</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/MSER.html" title="class in org.opencv.features2d">MSER</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/ORB.html" title="class in org.opencv.features2d">ORB</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../org/opencv/features2d/Params.html" title="class in org.opencv.features2d">Params</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li class="navBarCell1Rev">Package</li>
<li>Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../index-all.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage"><em><a href="http://docs.opencv.org">OpenCV 3.4.4 Documentation</a></em></div>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../org/opencv/face/package-summary.html">Prev Package</a></li>
<li><a href="../../../org/opencv/img_hash/package-summary.html">Next Package</a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?org/opencv/features2d/package-summary.html" target="_top">Frames</a></li>
<li><a href="package-summary.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| {
"pile_set_name": "Github"
} |
import json
import re
from textwrap import dedent
import jmespath
class TestAddHostFirewall:
    def test_no_hosts(self, host):
        """Omitting the required host argument must fail with the usage message."""
        result = host.run('stack add host firewall')
        assert result.rc == 255
        assert result.stderr == dedent('''\
            error - "host" argument is required
            {host ...} {action=string} {chain=string} {protocol=string} {service=string} [comment=string] [flags=string] [network=string] [output-network=string] [rulename=string] [table=string]
            ''')
def test_invalid_host(self, host, invalid_host):
result = host.run(f'stack add host firewall {invalid_host}')
assert result.rc == 255
assert result.stderr == f'error - cannot resolve host "{invalid_host}"\n'
    def test_no_service(self, host):
        """Omitting service= must fail and name the missing parameter."""
        result = host.run(
            'stack add host firewall frontend-0-0 chain=INPUT action=ACCEPT protocol=TCP'
        )
        assert result.rc == 255
        assert result.stderr == dedent('''\
            error - "service" parameter is required
            {host ...} {action=string} {chain=string} {protocol=string} {service=string} [comment=string] [flags=string] [network=string] [output-network=string] [rulename=string] [table=string]
            ''')

    def test_no_chain(self, host):
        """Omitting chain= must fail and name the missing parameter."""
        result = host.run('stack add host firewall frontend-0-0 service=1234 action=ACCEPT protocol=TCP')
        assert result.rc == 255
        assert result.stderr == dedent('''\
            error - "chain" parameter is required
            {host ...} {action=string} {chain=string} {protocol=string} {service=string} [comment=string] [flags=string] [network=string] [output-network=string] [rulename=string] [table=string]
            ''')

    def test_no_action(self, host):
        """Omitting action= must fail and name the missing parameter."""
        result = host.run('stack add host firewall frontend-0-0 service=1234 chain=INPUT protocol=TCP')
        assert result.rc == 255
        assert result.stderr == dedent('''\
            error - "action" parameter is required
            {host ...} {action=string} {chain=string} {protocol=string} {service=string} [comment=string] [flags=string] [network=string] [output-network=string] [rulename=string] [table=string]
            ''')

    def test_no_protocol(self, host):
        """Omitting protocol= must fail and name the missing parameter."""
        result = host.run('stack add host firewall frontend-0-0 service=1234 chain=INPUT action=ACCEPT')
        assert result.rc == 255
        assert result.stderr == dedent('''\
            error - "protocol" parameter is required
            {host ...} {action=string} {chain=string} {protocol=string} {service=string} [comment=string] [flags=string] [network=string] [output-network=string] [rulename=string] [table=string]
            ''')
    def test_one_host(self, host):
        """A rule added to a single host must appear in that host's firewall list."""
        # Add the rule
        result = host.run(
            'stack add host firewall frontend-0-0 service=1234 chain=INPUT '
            'action=ACCEPT protocol=TCP rulename=test'
        )
        assert result.rc == 0

        # Make sure it is in the DB now
        result = host.run('stack list host firewall frontend-0-0 output-format=json')
        assert result.rc == 0

        # Filter down to just our named rule; built-in rules are ignored.
        rule = jmespath.search("[?name=='test']", json.loads(result.stdout))
        assert rule == [{
            'host': 'frontend-0-0',
            'name': 'test',
            'table': 'filter',
            'service': '1234',
            'protocol': 'TCP',
            'chain': 'INPUT',
            'action': 'ACCEPT',
            'network': None,
            'output-network': None,
            'flags': None,
            'comment': None,
            'source': 'H',
            'type': 'var'
        }]
    def test_multiple_hosts(self, host, add_host):
        """One command targeting two hosts must create the rule on both."""
        # Add the rules
        result = host.run(
            'stack add host firewall frontend-0-0 backend-0-0 service=1234 '
            'chain=INPUT action=ACCEPT protocol=TCP rulename=test'
        )
        assert result.rc == 0

        # Make sure they are in the DB now
        for hostname in ('frontend-0-0', 'backend-0-0'):
            result = host.run(f'stack list host firewall {hostname} output-format=json')
            assert result.rc == 0

            rule = jmespath.search("[?name=='test']", json.loads(result.stdout))
            assert rule == [{
                'host': hostname,
                'name': 'test',
                'table': 'filter',
                'service': '1234',
                'protocol': 'TCP',
                'chain': 'INPUT',
                'action': 'ACCEPT',
                'network': None,
                'output-network': None,
                'flags': None,
                'comment': None,
                'source': 'H',
                'type': 'var'
            }]
def test_network_existing(self, host):
	"""A rule scoped to an existing network is accepted and stored.

	Note: renamed from the misspelled ``test_network_existiing``; no other
	code references a test method by name, so pytest discovery is unaffected.
	"""
	result = host.run(
		'stack add host firewall frontend-0-0 service=1234 chain=INPUT '
		'action=ACCEPT protocol=TCP network=private rulename=test'
	)
	assert result.rc == 0

	# Make sure it is in the DB now
	result = host.run('stack list host firewall frontend-0-0 output-format=json')
	assert result.rc == 0

	rule = jmespath.search("[?name=='test']", json.loads(result.stdout))
	assert rule == [{
		'host': 'frontend-0-0',
		'name': 'test',
		'table': 'filter',
		'service': '1234',
		'protocol': 'TCP',
		'chain': 'INPUT',
		'action': 'ACCEPT',
		'network': 'private',
		'output-network': None,
		'flags': None,
		'comment': None,
		'source': 'H',
		'type': 'var'
	}]
def test_invalid_network(self, host):
	"""Referencing a network that does not exist must be rejected."""
	result = host.run(
		'stack add host firewall frontend-0-0 service=1234 chain=INPUT '
		'action=ACCEPT protocol=TCP network=test rulename=test'
	)

	assert result.rc == 255
	assert result.stderr == 'error - "test" is not a valid network\n'
def test_output_network_existing(self, host):
	"""A rule scoped to an existing output network is accepted and stored."""
	add_result = host.run(
		'stack add host firewall frontend-0-0 service=1234 chain=INPUT '
		'action=ACCEPT protocol=TCP output-network=private rulename=test'
	)
	assert add_result.rc == 0

	# Confirm the rule was persisted to the DB
	list_result = host.run('stack list host firewall frontend-0-0 output-format=json')
	assert list_result.rc == 0

	expected = {
		'host': 'frontend-0-0',
		'name': 'test',
		'table': 'filter',
		'service': '1234',
		'protocol': 'TCP',
		'chain': 'INPUT',
		'action': 'ACCEPT',
		'network': None,
		'output-network': 'private',
		'flags': None,
		'comment': None,
		'source': 'H',
		'type': 'var'
	}
	assert jmespath.search("[?name=='test']", json.loads(list_result.stdout)) == [expected]
def test_invalid_output_network(self, host):
	"""Referencing an output network that does not exist must be rejected."""
	result = host.run(
		'stack add host firewall frontend-0-0 service=1234 chain=INPUT '
		'action=ACCEPT protocol=TCP output-network=test rulename=test'
	)

	assert result.rc == 255
	assert result.stderr == 'error - "test" is not a valid network\n'
def test_all_parameters(self, host):
	"""Every optional parameter supplied together round-trips into the DB."""
	add_result = host.run(
		'stack add host firewall frontend-0-0 table=nat service=1234 '
		'chain=INPUT action=ACCEPT protocol=TCP network=private '
		'output-network=private flags=test_flag comment=test_comment '
		'rulename=test'
	)
	assert add_result.rc == 0

	# Confirm the rule was persisted to the DB
	list_result = host.run('stack list host firewall frontend-0-0 output-format=json')
	assert list_result.rc == 0

	expected = {
		'host': 'frontend-0-0',
		'name': 'test',
		'table': 'nat',
		'service': '1234',
		'protocol': 'TCP',
		'chain': 'INPUT',
		'action': 'ACCEPT',
		'network': 'private',
		'output-network': 'private',
		'flags': 'test_flag',
		'comment': 'test_comment',
		'source': 'H',
		'type': 'var'
	}
	assert jmespath.search("[?name=='test']", json.loads(list_result.stdout)) == [expected]
def test_no_rulename(self, host):
	"""When no rulename is given, the command generates a UUID name."""
	add_result = host.run(
		'stack add host firewall frontend-0-0 service=1234 chain=INPUT '
		'action=ACCEPT protocol=TCP network=private'
	)
	assert add_result.rc == 0

	# Confirm the rule was persisted to the DB
	list_result = host.run('stack list host firewall frontend-0-0 output-format=json')
	assert list_result.rc == 0

	rule = jmespath.search("[?service=='1234']", json.loads(list_result.stdout))

	# The auto-generated name should look like a UUID; drop it before the
	# full-dict comparison below.
	assert re.match(
		r'[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}',
		rule[0]['name']
	)
	del rule[0]['name']

	expected = {
		'host': 'frontend-0-0',
		'table': 'filter',
		'service': '1234',
		'protocol': 'TCP',
		'chain': 'INPUT',
		'action': 'ACCEPT',
		'network': 'private',
		'output-network': None,
		'flags': None,
		'comment': None,
		'source': 'H',
		'type': 'var'
	}
	assert rule == [expected]
def test_duplicate(self, host):
	"""Adding the exact same rule twice must fail the second time."""
	command = (
		'stack add host firewall frontend-0-0 service=1234 chain=INPUT '
		'action=ACCEPT protocol=TCP'
	)

	# First add succeeds
	assert host.run(command).rc == 0

	# Second, identical add is rejected as a duplicate
	result = host.run(command)
	assert result.rc == 255
	assert result.stderr == 'error - firewall rule already exists\n'
| {
"pile_set_name": "Github"
} |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
/* eslint-disable @typescript-eslint/no-var-requires */
const { createConfig } = require('@bfc/test-utils');

// Jest configuration for the ui-plugin/luis package, built from the shared
// 'react' preset provided by @bfc/test-utils.
module.exports = createConfig('ui-plugin/luis', 'react');
| {
"pile_set_name": "Github"
} |
package org.bouncycastle.pqc.jcajce.spec;
import java.security.spec.AlgorithmParameterSpec;
/**
 * Immutable {@link AlgorithmParameterSpec} describing an XMSS parameter set:
 * the height of the Merkle tree and the digest used for tree generation.
 */
public class XMSSParameterSpec
    implements AlgorithmParameterSpec
{
    /**
     * Use SHA-256 for the tree generation function.
     */
    public static final String SHA256 = "SHA256";

    /**
     * Use SHA512 for the tree generation function.
     */
    public static final String SHA512 = "SHA512";

    /**
     * Use SHAKE128 for the tree generation function.
     */
    public static final String SHAKE128 = "SHAKE128";

    /**
     * Use SHAKE256 for the tree generation function.
     */
    public static final String SHAKE256 = "SHAKE256";

    /**
     * Standard XMSS parameter sets: tree heights 10/16/20 crossed with the
     * 256-bit (SHA-256/SHAKE128) and 512-bit (SHA-512/SHAKE256) digests.
     */
    public static final XMSSParameterSpec SHA2_10_256 = new XMSSParameterSpec(10, SHA256);
    public static final XMSSParameterSpec SHA2_16_256 = new XMSSParameterSpec(16, SHA256);
    public static final XMSSParameterSpec SHA2_20_256 = new XMSSParameterSpec(20, SHA256);

    public static final XMSSParameterSpec SHAKE_10_256 = new XMSSParameterSpec(10, SHAKE128);
    public static final XMSSParameterSpec SHAKE_16_256 = new XMSSParameterSpec(16, SHAKE128);
    public static final XMSSParameterSpec SHAKE_20_256 = new XMSSParameterSpec(20, SHAKE128);

    public static final XMSSParameterSpec SHA2_10_512 = new XMSSParameterSpec(10, SHA512);
    public static final XMSSParameterSpec SHA2_16_512 = new XMSSParameterSpec(16, SHA512);
    public static final XMSSParameterSpec SHA2_20_512 = new XMSSParameterSpec(20, SHA512);

    public static final XMSSParameterSpec SHAKE_10_512 = new XMSSParameterSpec(10, SHAKE256);
    public static final XMSSParameterSpec SHAKE_16_512 = new XMSSParameterSpec(16, SHAKE256);
    public static final XMSSParameterSpec SHAKE_20_512 = new XMSSParameterSpec(20, SHAKE256);

    private final int height;
    private final String treeDigest;

    /**
     * @param height     height of the XMSS tree.
     * @param treeDigest name of the digest used for tree generation (one of
     *                   the constants above).
     */
    public XMSSParameterSpec(int height, String treeDigest)
    {
        this.height = height;
        this.treeDigest = treeDigest;
    }

    /**
     * @return the height of the XMSS tree.
     */
    public int getHeight()
    {
        return height;
    }

    /**
     * @return the name of the tree generation digest.
     */
    public String getTreeDigest()
    {
        return treeDigest;
    }
}
| {
"pile_set_name": "Github"
} |
/**
 * @name Includes per file
 * @description The number of files directly included by this file using
 *              `#include`.
 * @kind treemap
 * @id cpp/direct-includes-per-file
 * @treemap.warnOn highValues
 * @metricType file
 * @metricAggregate avg max
 * @tags maintainability
 *       modularity
 */

import cpp

// For every file that comes from source, count its direct #include
// directives (transitive includes are not counted).
from File f, int n
where
  f.fromSource() and
  n = count(Include i | i.getFile() = f)
select f, n
| {
"pile_set_name": "Github"
} |
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard.
//
#import "MMObject.h"
#import "PBCoding.h"
@class NSArray, NSString;
// NOTE(review): class-dump generated declaration — field semantics below are
// inferred from names and should be confirmed against actual usage.
@interface GameCenterGiftInfo : MMObject <PBCoding>
{
    NSString *title;     // presumably the section title shown for the gifts
    NSArray *giftList;   // presumably the list of gift entries
    NSString *moreTitle; // presumably label for the "more" link
    NSString *moreURL;   // presumably destination URL for the "more" link
}

+ (void)initialize;
@property(retain, nonatomic) NSString *moreURL; // @synthesize moreURL;
@property(retain, nonatomic) NSString *moreTitle; // @synthesize moreTitle;
@property(retain, nonatomic) NSArray *giftList; // @synthesize giftList;
@property(retain, nonatomic) NSString *title; // @synthesize title;
- (void).cxx_destruct;
- (void)parseFromResp:(id)arg1;
- (const map_0e718273 *)getValueTagIndexMap;
- (id)getValueTypeTable;

// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;
@end
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
# TODO(mark): sys.path manipulation is some temporary testing stuff.
# Bootstrap gyp: prefer an installed copy; if that fails, fall back to the
# pylib/ directory shipped next to this script.
# Note: was "except ImportError, e" (Python 2-only syntax); the bound
# exception was never used, so the plain form works on Python 2 and 3.
try:
    import gyp
except ImportError:
    import os.path
    sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
    import gyp

if __name__ == '__main__':
    sys.exit(gyp.script_main())
| {
"pile_set_name": "Github"
} |
/*
* ioctl.c
*
* Copyright (C) 1995, 1996 by Volker Lendecke
* Modified 1997 Peter Waltenberg, Bill Hawes, David Woodhouse for 2.1 dcache
* Modified 1998, 1999 Wolfram Pienkoss for NLS
*
*/
#include <linux/capability.h>
#include <linux/compat.h>
#include <linux/errno.h>
#include <linux/fs.h>
#include <linux/ioctl.h>
#include <linux/time.h>
#include <linux/mm.h>
#include <linux/mount.h>
#include <linux/slab.h>
#include <linux/highuid.h>
#include <linux/vmalloc.h>
#include <linux/sched.h>
#include <asm/uaccess.h>
#include "ncp_fs.h"
/* maximum limit for ncp_objectname_ioctl */
#define NCP_OBJECT_NAME_MAX_LEN 4096
/* maximum limit for ncp_privatedata_ioctl */
#define NCP_PRIVATE_DATA_MAX_LEN 8192
/* maximum negotiable packet size */
#define NCP_PACKET_SIZE_INTERNAL 65536
/*
 * NCP_IOC_GET_FS_INFO (v1) handler: validate the version the caller passed
 * in, then fill the struct with mount/connection details and copy it back.
 * Returns 0, -EFAULT on bad user pointer, or -EINVAL on version mismatch.
 */
static int
ncp_get_fs_info(struct ncp_server * server, struct inode *inode,
		struct ncp_fs_info __user *arg)
{
	struct ncp_fs_info info;

	if (copy_from_user(&info, arg, sizeof(info)))
		return -EFAULT;

	if (info.version != NCP_GET_FS_INFO_VERSION) {
		DPRINTK("info.version invalid: %d\n", info.version);
		return -EINVAL;
	}
	/* TODO: info.addr = server->m.serv_addr; */
	SET_UID(info.mounted_uid, from_kuid_munged(current_user_ns(), server->m.mounted_uid));
	info.connection = server->connection;
	info.buffer_size = server->buffer_size;
	/* volume/directory identifiers come from the inode's NCP file info */
	info.volume_number = NCP_FINFO(inode)->volNumber;
	info.directory_id = NCP_FINFO(inode)->DosDirNum;

	if (copy_to_user(arg, &info, sizeof(info)))
		return -EFAULT;
	return 0;
}
/*
 * NCP_IOC_GET_FS_INFO_V2 handler: same as v1 but uses the v2 struct layout
 * (plain uid field plus zeroed reserved "dummy" words).
 */
static int
ncp_get_fs_info_v2(struct ncp_server * server, struct inode *inode,
		   struct ncp_fs_info_v2 __user * arg)
{
	struct ncp_fs_info_v2 info2;

	if (copy_from_user(&info2, arg, sizeof(info2)))
		return -EFAULT;

	if (info2.version != NCP_GET_FS_INFO_VERSION_V2) {
		DPRINTK("info.version invalid: %d\n", info2.version);
		return -EINVAL;
	}
	info2.mounted_uid = from_kuid_munged(current_user_ns(), server->m.mounted_uid);
	info2.connection = server->connection;
	info2.buffer_size = server->buffer_size;
	info2.volume_number = NCP_FINFO(inode)->volNumber;
	info2.directory_id = NCP_FINFO(inode)->DosDirNum;
	/* reserved fields are always returned as zero */
	info2.dummy1 = info2.dummy2 = info2.dummy3 = 0;

	if (copy_to_user(arg, &info2, sizeof(info2)))
		return -EFAULT;
	return 0;
}
#ifdef CONFIG_COMPAT
/*
 * 32-bit-compat mirrors of the ioctl argument structs: pointers are carried
 * as compat_caddr_t and uids as plain u32 so the layout matches what a
 * 32-bit userspace passes to a 64-bit kernel.
 */
struct compat_ncp_objectname_ioctl
{
	s32		auth_type;
	u32		object_name_len;
	compat_caddr_t	object_name;	/* a userspace data, in most cases user name */
};

struct compat_ncp_fs_info_v2 {
	s32 version;
	u32 mounted_uid;
	u32 connection;
	u32 buffer_size;
	u32 volume_number;
	u32 directory_id;
	u32 dummy1;
	u32 dummy2;
	u32 dummy3;
};

struct compat_ncp_ioctl_request {
	u32 function;
	u32 size;
	compat_caddr_t data;
};

struct compat_ncp_privatedata_ioctl
{
	u32		len;
	compat_caddr_t	data;		/* ~1000 for NDS */
};

/* compat ioctl numbers: same 'n' codes as the native ones, compat sizes */
#define NCP_IOC_GET_FS_INFO_V2_32	_IOWR('n', 4, struct compat_ncp_fs_info_v2)
#define NCP_IOC_NCPREQUEST_32		_IOR('n', 1, struct compat_ncp_ioctl_request)
#define NCP_IOC_GETOBJECTNAME_32	_IOWR('n', 9, struct compat_ncp_objectname_ioctl)
#define NCP_IOC_SETOBJECTNAME_32	_IOR('n', 9, struct compat_ncp_objectname_ioctl)
#define NCP_IOC_GETPRIVATEDATA_32	_IOWR('n', 10, struct compat_ncp_privatedata_ioctl)
#define NCP_IOC_SETPRIVATEDATA_32	_IOR('n', 10, struct compat_ncp_privatedata_ioctl)
/*
 * 32-bit-compat variant of ncp_get_fs_info_v2(): identical logic, but reads
 * and writes the compat struct layout.
 */
static int
ncp_get_compat_fs_info_v2(struct ncp_server * server, struct inode *inode,
		   struct compat_ncp_fs_info_v2 __user * arg)
{
	struct compat_ncp_fs_info_v2 info2;

	if (copy_from_user(&info2, arg, sizeof(info2)))
		return -EFAULT;

	if (info2.version != NCP_GET_FS_INFO_VERSION_V2) {
		DPRINTK("info.version invalid: %d\n", info2.version);
		return -EINVAL;
	}
	info2.mounted_uid = from_kuid_munged(current_user_ns(), server->m.mounted_uid);
	info2.connection = server->connection;
	info2.buffer_size = server->buffer_size;
	info2.volume_number = NCP_FINFO(inode)->volNumber;
	info2.directory_id = NCP_FINFO(inode)->DosDirNum;
	/* reserved fields are always returned as zero */
	info2.dummy1 = info2.dummy2 = info2.dummy3 = 0;

	if (copy_to_user(arg, &info2, sizeof(info2)))
		return -EFAULT;
	return 0;
}
#endif
#define NCP_IOC_GETMOUNTUID16 _IOW('n', 2, u16)
#define NCP_IOC_GETMOUNTUID32 _IOW('n', 2, u32)
#define NCP_IOC_GETMOUNTUID64 _IOW('n', 2, u64)
#ifdef CONFIG_NCPFS_NLS
/* Here we are select the iocharset and the codepage for NLS.
* Thanks Petr Vandrovec for idea and many hints.
*/
/*
 * NCP_IOC_SETCHARSETS handler: load the requested codepage and iocharset NLS
 * tables and install them on the server, unless the root is already set up
 * (-EBUSY).  The tables to drop (either the old ones, or the freshly loaded
 * ones on the -EBUSY path) are unloaded after releasing the lock.
 * Returns 0, -EFAULT, -EBADRQC (unknown charset), or -EBUSY.
 */
static int
ncp_set_charsets(struct ncp_server* server, struct ncp_nls_ioctl __user *arg)
{
	struct ncp_nls_ioctl user;
	struct nls_table *codepage;
	struct nls_table *iocharset;
	struct nls_table *oldset_io;
	struct nls_table *oldset_cp;
	int utf8;
	int err;

	if (copy_from_user(&user, arg, sizeof(user)))
		return -EFAULT;

	codepage = NULL;
	/* force NUL termination of the user-supplied name */
	user.codepage[NCP_IOCSNAME_LEN] = 0;
	if (!user.codepage[0] || !strcmp(user.codepage, "default"))
		codepage = load_nls_default();
	else {
		codepage = load_nls(user.codepage);
		if (!codepage) {
			return -EBADRQC;
		}
	}

	iocharset = NULL;
	user.iocharset[NCP_IOCSNAME_LEN] = 0;
	if (!user.iocharset[0] || !strcmp(user.iocharset, "default")) {
		iocharset = load_nls_default();
		utf8 = 0;
	} else if (!strcmp(user.iocharset, "utf8")) {
		/* utf8 is flagged on the server rather than via an NLS table */
		iocharset = load_nls_default();
		utf8 = 1;
	} else {
		iocharset = load_nls(user.iocharset);
		if (!iocharset) {
			unload_nls(codepage);
			return -EBADRQC;
		}
		utf8 = 0;
	}

	mutex_lock(&server->root_setup_lock);
	if (server->root_setuped) {
		/* too late to change charsets; arrange for the new tables
		   to be unloaded below instead of the (kept) old ones */
		oldset_cp = codepage;
		oldset_io = iocharset;
		err = -EBUSY;
	} else {
		if (utf8)
			NCP_SET_FLAG(server, NCP_FLAG_UTF8);
		else
			NCP_CLR_FLAG(server, NCP_FLAG_UTF8);
		/* swap in the new tables, remember the old ones to unload */
		oldset_cp = server->nls_vol;
		server->nls_vol = codepage;
		oldset_io = server->nls_io;
		server->nls_io = iocharset;
		err = 0;
	}
	mutex_unlock(&server->root_setup_lock);
	unload_nls(oldset_cp);
	unload_nls(oldset_io);

	return err;
}
/*
 * NCP_IOC_GETCHARSETS handler: report the currently installed codepage and
 * iocharset names (or "utf8" when the UTF-8 flag is set) back to userspace.
 */
static int
ncp_get_charsets(struct ncp_server* server, struct ncp_nls_ioctl __user *arg)
{
	struct ncp_nls_ioctl user;
	int len;

	memset(&user, 0, sizeof(user));
	mutex_lock(&server->root_setup_lock);
	if (server->nls_vol && server->nls_vol->charset) {
		len = strlen(server->nls_vol->charset);
		if (len > NCP_IOCSNAME_LEN)
			len = NCP_IOCSNAME_LEN;	/* truncate to fit the ioctl field */
		strncpy(user.codepage, server->nls_vol->charset, len);
		user.codepage[len] = 0;
	}

	if (NCP_IS_FLAG(server, NCP_FLAG_UTF8))
		strcpy(user.iocharset, "utf8");
	else if (server->nls_io && server->nls_io->charset) {
		len = strlen(server->nls_io->charset);
		if (len > NCP_IOCSNAME_LEN)
			len = NCP_IOCSNAME_LEN;
		strncpy(user.iocharset, server->nls_io->charset, len);
		user.iocharset[len] = 0;
	}
	mutex_unlock(&server->root_setup_lock);

	if (copy_to_user(arg, &user, sizeof(user)))
		return -EFAULT;
	return 0;
}
#endif /* CONFIG_NCPFS_NLS */
/*
 * Core ioctl dispatcher (permission checks are done by the caller,
 * ncp_ioctl()).  Handles both native and CONFIG_COMPAT command numbers;
 * compat variants convert their argument struct up front and share the
 * native code path.  Returns 0/positive on success or a negative errno.
 */
static long __ncp_ioctl(struct inode *inode, unsigned int cmd, unsigned long arg)
{
	struct ncp_server *server = NCP_SERVER(inode);
	int result;
	struct ncp_ioctl_request request;
	char* bouncebuffer;
	void __user *argp = (void __user *)arg;

	switch (cmd) {
#ifdef CONFIG_COMPAT
	case NCP_IOC_NCPREQUEST_32:
#endif
	/* raw NCP request: forward a userspace-built packet to the server */
	case NCP_IOC_NCPREQUEST:
#ifdef CONFIG_COMPAT
		if (cmd == NCP_IOC_NCPREQUEST_32) {
			struct compat_ncp_ioctl_request request32;
			if (copy_from_user(&request32, argp, sizeof(request32)))
				return -EFAULT;
			request.function = request32.function;
			request.size = request32.size;
			request.data = compat_ptr(request32.data);
		} else
#endif
		if (copy_from_user(&request, argp, sizeof(request)))
			return -EFAULT;

		if ((request.function > 255)
		    || (request.size >
			NCP_PACKET_SIZE - sizeof(struct ncp_request_header))) {
			return -EINVAL;
		}
		/* stage the user payload in a kernel bounce buffer */
		bouncebuffer = vmalloc(NCP_PACKET_SIZE_INTERNAL);
		if (!bouncebuffer)
			return -ENOMEM;
		if (copy_from_user(bouncebuffer, request.data, request.size)) {
			vfree(bouncebuffer);
			return -EFAULT;
		}
		ncp_lock_server(server);

		/* FIXME: We hack around in the server's structures
		   here to be able to use ncp_request */

		server->has_subfunction = 0;
		server->current_size = request.size;
		memcpy(server->packet, bouncebuffer, request.size);

		result = ncp_request2(server, request.function,
			bouncebuffer, NCP_PACKET_SIZE_INTERNAL);
		if (result < 0)
			result = -EIO;
		else
			result = server->reply_size;
		ncp_unlock_server(server);
		DPRINTK("ncp_ioctl: copy %d bytes\n",
			result);
		/* on success, result holds the reply length to copy back */
		if (result >= 0)
			if (copy_to_user(request.data, bouncebuffer, result))
				result = -EFAULT;
		vfree(bouncebuffer);
		return result;

	/* mark the connection as logged in and set up the root once */
	case NCP_IOC_CONN_LOGGED_IN:

		if (!(server->m.int_flags & NCP_IMOUNT_LOGGEDIN_POSSIBLE))
			return -EINVAL;
		mutex_lock(&server->root_setup_lock);
		if (server->root_setuped)
			result = -EBUSY;
		else {
			result = ncp_conn_logged_in(inode->i_sb);
			if (result == 0)
				server->root_setuped = 1;
		}
		mutex_unlock(&server->root_setup_lock);
		return result;

	case NCP_IOC_GET_FS_INFO:
		return ncp_get_fs_info(server, inode, argp);

	case NCP_IOC_GET_FS_INFO_V2:
		return ncp_get_fs_info_v2(server, inode, argp);

#ifdef CONFIG_COMPAT
	case NCP_IOC_GET_FS_INFO_V2_32:
		return ncp_get_compat_fs_info_v2(server, inode, argp);
#endif
	/* we have too many combinations of CONFIG_COMPAT,
	 * CONFIG_64BIT and CONFIG_UID16, so just handle
	 * any of the possible ioctls */
	case NCP_IOC_GETMOUNTUID16:
		{
			u16 uid;

			SET_UID(uid, from_kuid_munged(current_user_ns(), server->m.mounted_uid));
			if (put_user(uid, (u16 __user *)argp))
				return -EFAULT;
			return 0;
		}
	case NCP_IOC_GETMOUNTUID32:
		{
			uid_t uid = from_kuid_munged(current_user_ns(), server->m.mounted_uid);
			if (put_user(uid, (u32 __user *)argp))
				return -EFAULT;
			return 0;
		}
	case NCP_IOC_GETMOUNTUID64:
		{
			uid_t uid = from_kuid_munged(current_user_ns(), server->m.mounted_uid);
			if (put_user(uid, (u64 __user *)argp))
				return -EFAULT;
			return 0;
		}
	/* report volume/namespace/dir-entry of the mount root */
	case NCP_IOC_GETROOT:
		{
			struct ncp_setroot_ioctl sr;

			result = -EACCES;
			mutex_lock(&server->root_setup_lock);
			if (server->m.mounted_vol[0]) {
				struct dentry* dentry = inode->i_sb->s_root;

				if (dentry) {
					struct inode* s_inode = dentry->d_inode;

					if (s_inode) {
						sr.volNumber = NCP_FINFO(s_inode)->volNumber;
						sr.dirEntNum = NCP_FINFO(s_inode)->dirEntNum;
						sr.namespace = server->name_space[sr.volNumber];
						result = 0;
					} else
						DPRINTK("ncpfs: s_root->d_inode==NULL\n");
				} else
					DPRINTK("ncpfs: s_root==NULL\n");
			} else {
				/* no volume mounted: report "no root" sentinel */
				sr.volNumber = -1;
				sr.namespace = 0;
				sr.dirEntNum = 0;
				result = 0;
			}
			mutex_unlock(&server->root_setup_lock);
			if (!result && copy_to_user(argp, &sr, sizeof(sr)))
				result = -EFAULT;
			return result;
		}
	/* re-root the mount at a subdirectory (only before setup completes) */
	case NCP_IOC_SETROOT:
		{
			struct ncp_setroot_ioctl sr;
			__u32 vnum;
			__le32 de;
			__le32 dosde;
			struct dentry* dentry;

			if (copy_from_user(&sr, argp, sizeof(sr)))
				return -EFAULT;
			mutex_lock(&server->root_setup_lock);
			if (server->root_setuped)
				result = -EBUSY;
			else {
				if (sr.volNumber < 0) {
					/* negative volume: clear mount point */
					server->m.mounted_vol[0] = 0;
					vnum = NCP_NUMBER_OF_VOLUMES;
					de = 0;
					dosde = 0;
					result = 0;
				} else if (sr.volNumber >= NCP_NUMBER_OF_VOLUMES) {
					result = -EINVAL;
				} else if (ncp_mount_subdir(server, sr.volNumber,
							sr.namespace, sr.dirEntNum,
							&vnum, &de, &dosde)) {
					result = -ENOENT;
				} else
					result = 0;

				if (result == 0) {
					dentry = inode->i_sb->s_root;
					if (dentry) {
						struct inode* s_inode = dentry->d_inode;

						if (s_inode) {
							NCP_FINFO(s_inode)->volNumber = vnum;
							NCP_FINFO(s_inode)->dirEntNum = de;
							NCP_FINFO(s_inode)->DosDirNum = dosde;
							server->root_setuped = 1;
						} else {
							DPRINTK("ncpfs: s_root->d_inode==NULL\n");
							result = -EIO;
						}
					} else {
						DPRINTK("ncpfs: s_root==NULL\n");
						result = -EIO;
					}
				}
			}
			mutex_unlock(&server->root_setup_lock);

			return result;
		}

#ifdef CONFIG_NCPFS_PACKET_SIGNING
	/* install (argp != NULL) or disable (argp == NULL) packet signing keys */
	case NCP_IOC_SIGN_INIT:
		{
			struct ncp_sign_init sign;

			if (argp)
				if (copy_from_user(&sign, argp, sizeof(sign)))
					return -EFAULT;
			ncp_lock_server(server);
			mutex_lock(&server->rcv.creq_mutex);
			if (argp) {
				if (server->sign_wanted) {
					memcpy(server->sign_root,sign.sign_root,8);
					memcpy(server->sign_last,sign.sign_last,16);
					server->sign_active = 1;
				}
				/* ignore when signatures not wanted */
			} else {
				server->sign_active = 0;
			}
			mutex_unlock(&server->rcv.creq_mutex);
			ncp_unlock_server(server);
			return 0;
		}

	case NCP_IOC_SIGN_WANTED:
		{
			int state;

			ncp_lock_server(server);
			state = server->sign_wanted;
			ncp_unlock_server(server);
			if (put_user(state, (int __user *)argp))
				return -EFAULT;
			return 0;
		}

	case NCP_IOC_SET_SIGN_WANTED:
		{
			int newstate;

			/* get only low 8 bits... */
			if (get_user(newstate, (unsigned char __user *)argp))
				return -EFAULT;
			result = 0;
			ncp_lock_server(server);
			if (server->sign_active) {
				/* cannot turn signatures OFF when active */
				if (!newstate)
					result = -EINVAL;
			} else {
				server->sign_wanted = newstate != 0;
			}
			ncp_unlock_server(server);
			return result;
		}

#endif /* CONFIG_NCPFS_PACKET_SIGNING */

#ifdef CONFIG_NCPFS_IOCTL_LOCKING
	/* server-side physical record locking on the file behind this inode */
	case NCP_IOC_LOCKUNLOCK:
		{
			struct ncp_lock_ioctl	 rqdata;

			if (copy_from_user(&rqdata, argp, sizeof(rqdata)))
				return -EFAULT;
			if (rqdata.origin != 0)
				return -EINVAL;
			/* check for cmd */
			switch (rqdata.cmd) {
				case NCP_LOCK_EX:
				case NCP_LOCK_SH:
					if (rqdata.timeout == 0)
						rqdata.timeout = NCP_LOCK_DEFAULT_TIMEOUT;
					else if (rqdata.timeout > NCP_LOCK_MAX_TIMEOUT)
						rqdata.timeout = NCP_LOCK_MAX_TIMEOUT;
					break;
				case NCP_LOCK_LOG:
					rqdata.timeout = NCP_LOCK_DEFAULT_TIMEOUT;	/* has no effect */
					/* falls through to NCP_LOCK_CLEAR's break */
				case NCP_LOCK_CLEAR:
					break;
				default:
					return -EINVAL;
			}
			/* locking needs both read and write access */
			if ((result = ncp_make_open(inode, O_RDWR)) != 0)
			{
				return result;
			}
			result = -EISDIR;
			if (!S_ISREG(inode->i_mode))
				goto outrel;
			if (rqdata.cmd == NCP_LOCK_CLEAR)
			{
				result = ncp_ClearPhysicalRecord(NCP_SERVER(inode),
							NCP_FINFO(inode)->file_handle,
							rqdata.offset,
							rqdata.length);
				if (result > 0) result = 0;	/* no such lock */
			}
			else
			{
				int lockcmd;

				switch (rqdata.cmd)
				{
					case NCP_LOCK_EX:  lockcmd=1; break;
					case NCP_LOCK_SH:  lockcmd=3; break;
					default:	   lockcmd=0; break;
				}
				result = ncp_LogPhysicalRecord(NCP_SERVER(inode),
							NCP_FINFO(inode)->file_handle,
							lockcmd,
							rqdata.offset,
							rqdata.length,
							rqdata.timeout);
				if (result > 0) result = -EAGAIN;
			}
outrel:
			/* drop the open reference taken by ncp_make_open() */
			ncp_inode_close(inode);
			return result;
		}
#endif	/* CONFIG_NCPFS_IOCTL_LOCKING */

#ifdef CONFIG_COMPAT
	case NCP_IOC_GETOBJECTNAME_32:
		{
			struct compat_ncp_objectname_ioctl user;
			size_t outl;

			if (copy_from_user(&user, argp, sizeof(user)))
				return -EFAULT;
			down_read(&server->auth_rwsem);
			user.auth_type = server->auth.auth_type;
			/* copy at most min(user buffer, stored name) bytes */
			outl = user.object_name_len;
			user.object_name_len = server->auth.object_name_len;
			if (outl > user.object_name_len)
				outl = user.object_name_len;
			result = 0;
			if (outl) {
				if (copy_to_user(compat_ptr(user.object_name),
						 server->auth.object_name,
						 outl))
					result = -EFAULT;
			}
			up_read(&server->auth_rwsem);
			if (!result && copy_to_user(argp, &user, sizeof(user)))
				result = -EFAULT;
			return result;
		}
#endif

	case NCP_IOC_GETOBJECTNAME:
		{
			struct ncp_objectname_ioctl user;
			size_t outl;

			if (copy_from_user(&user, argp, sizeof(user)))
				return -EFAULT;
			down_read(&server->auth_rwsem);
			user.auth_type = server->auth.auth_type;
			outl = user.object_name_len;
			user.object_name_len = server->auth.object_name_len;
			if (outl > user.object_name_len)
				outl = user.object_name_len;
			result = 0;
			if (outl) {
				if (copy_to_user(user.object_name,
						 server->auth.object_name,
						 outl))
					result = -EFAULT;
			}
			up_read(&server->auth_rwsem);
			if (!result && copy_to_user(argp, &user, sizeof(user)))
				result = -EFAULT;
			return result;
		}

#ifdef CONFIG_COMPAT
	case NCP_IOC_SETOBJECTNAME_32:
#endif
	/* replace the auth object name; also invalidates stored private data */
	case NCP_IOC_SETOBJECTNAME:
		{
			struct ncp_objectname_ioctl user;
			void* newname;
			void* oldname;
			size_t oldnamelen;
			void* oldprivate;
			size_t oldprivatelen;

#ifdef CONFIG_COMPAT
			if (cmd == NCP_IOC_SETOBJECTNAME_32) {
				struct compat_ncp_objectname_ioctl user32;
				if (copy_from_user(&user32, argp, sizeof(user32)))
					return -EFAULT;
				user.auth_type = user32.auth_type;
				user.object_name_len = user32.object_name_len;
				user.object_name = compat_ptr(user32.object_name);
			} else
#endif
			if (copy_from_user(&user, argp, sizeof(user)))
				return -EFAULT;

			if (user.object_name_len > NCP_OBJECT_NAME_MAX_LEN)
				return -ENOMEM;
			if (user.object_name_len) {
				newname = memdup_user(user.object_name,
						      user.object_name_len);
				if (IS_ERR(newname))
					return PTR_ERR(newname);
			} else {
				newname = NULL;
			}
			down_write(&server->auth_rwsem);
			oldname = server->auth.object_name;
			oldnamelen = server->auth.object_name_len;
			oldprivate = server->priv.data;
			oldprivatelen = server->priv.len;
			server->auth.auth_type = user.auth_type;
			server->auth.object_name_len = user.object_name_len;
			server->auth.object_name = newname;
			/* a new identity invalidates any stored private data */
			server->priv.len = 0;
			server->priv.data = NULL;
			up_write(&server->auth_rwsem);
			kfree(oldprivate);
			kfree(oldname);
			return 0;
		}

#ifdef CONFIG_COMPAT
	case NCP_IOC_GETPRIVATEDATA_32:
#endif
	case NCP_IOC_GETPRIVATEDATA:
		{
			struct ncp_privatedata_ioctl user;
			size_t outl;

#ifdef CONFIG_COMPAT
			if (cmd == NCP_IOC_GETPRIVATEDATA_32) {
				struct compat_ncp_privatedata_ioctl user32;
				if (copy_from_user(&user32, argp, sizeof(user32)))
					return -EFAULT;
				user.len = user32.len;
				user.data = compat_ptr(user32.data);
			} else
#endif
			if (copy_from_user(&user, argp, sizeof(user)))
				return -EFAULT;

			down_read(&server->auth_rwsem);
			outl = user.len;
			user.len = server->priv.len;
			if (outl > user.len) outl = user.len;
			result = 0;
			if (outl) {
				if (copy_to_user(user.data,
						 server->priv.data,
						 outl))
					result = -EFAULT;
			}
			up_read(&server->auth_rwsem);
			if (result)
				return result;
#ifdef CONFIG_COMPAT
			if (cmd == NCP_IOC_GETPRIVATEDATA_32) {
				struct compat_ncp_privatedata_ioctl user32;
				user32.len = user.len;
				user32.data = (unsigned long) user.data;
				if (copy_to_user(argp, &user32, sizeof(user32)))
					return -EFAULT;
			} else
#endif
			if (copy_to_user(argp, &user, sizeof(user)))
				return -EFAULT;
			return 0;
		}

#ifdef CONFIG_COMPAT
	case NCP_IOC_SETPRIVATEDATA_32:
#endif
	case NCP_IOC_SETPRIVATEDATA:
		{
			struct ncp_privatedata_ioctl user;
			void* new;
			void* old;
			size_t oldlen;

#ifdef CONFIG_COMPAT
			if (cmd == NCP_IOC_SETPRIVATEDATA_32) {
				struct compat_ncp_privatedata_ioctl user32;
				if (copy_from_user(&user32, argp, sizeof(user32)))
					return -EFAULT;
				user.len = user32.len;
				user.data = compat_ptr(user32.data);
			} else
#endif
			if (copy_from_user(&user, argp, sizeof(user)))
				return -EFAULT;

			if (user.len > NCP_PRIVATE_DATA_MAX_LEN)
				return -ENOMEM;
			if (user.len) {
				new = memdup_user(user.data, user.len);
				if (IS_ERR(new))
					return PTR_ERR(new);
			} else {
				new = NULL;
			}
			down_write(&server->auth_rwsem);
			old = server->priv.data;
			oldlen = server->priv.len;
			server->priv.len = user.len;
			server->priv.data = new;
			up_write(&server->auth_rwsem);
			kfree(old);
			return 0;
		}

#ifdef CONFIG_NCPFS_NLS
	case NCP_IOC_SETCHARSETS:
		return ncp_set_charsets(server, argp);

	case NCP_IOC_GETCHARSETS:
		return ncp_get_charsets(server, argp);

#endif /* CONFIG_NCPFS_NLS */

	/* dentry cache time-to-live, supplied in milliseconds */
	case NCP_IOC_SETDENTRYTTL:
		{
			u_int32_t user;

			if (copy_from_user(&user, argp, sizeof(user)))
				return -EFAULT;
			/* 20 secs at most... */
			if (user > 20000)
				return -EINVAL;
			/* convert ms -> jiffies for internal storage */
			user = (user * HZ) / 1000;
			atomic_set(&server->dentry_ttl, user);
			return 0;
		}

	case NCP_IOC_GETDENTRYTTL:
		{
			/* convert jiffies back to milliseconds for userspace */
			u_int32_t user = (atomic_read(&server->dentry_ttl) * 1000) / HZ;
			if (copy_to_user(argp, &user, sizeof(user)))
				return -EFAULT;
			return 0;
		}

	}
	return -EINVAL;
}
/*
 * ioctl entry point: enforce capability and ownership checks per command
 * class, acquire mnt-write access where historically required, then dispatch
 * to __ncp_ioctl().
 */
long ncp_ioctl(struct file *filp, unsigned int cmd, unsigned long arg)
{
	struct inode *inode = file_inode(filp);
	struct ncp_server *server = NCP_SERVER(inode);
	kuid_t uid = current_uid();
	int need_drop_write = 0;
	long ret;

	/* administrative commands always require CAP_SYS_ADMIN */
	switch (cmd) {
	case NCP_IOC_SETCHARSETS:
	case NCP_IOC_CONN_LOGGED_IN:
	case NCP_IOC_SETROOT:
		if (!capable(CAP_SYS_ADMIN)) {
			ret = -EPERM;
			goto out;
		}
		break;
	}
	/* everything below only applies when the caller is NOT the mount owner */
	if (!uid_eq(server->m.mounted_uid, uid)) {
		switch (cmd) {
		/*
		 * Only mount owner can issue these ioctls. Information
		 * necessary to authenticate to other NDS servers are
		 * stored here.
		 */
		case NCP_IOC_GETOBJECTNAME:
		case NCP_IOC_SETOBJECTNAME:
		case NCP_IOC_GETPRIVATEDATA:
		case NCP_IOC_SETPRIVATEDATA:
#ifdef CONFIG_COMPAT
		case NCP_IOC_GETOBJECTNAME_32:
		case NCP_IOC_SETOBJECTNAME_32:
		case NCP_IOC_GETPRIVATEDATA_32:
		case NCP_IOC_SETPRIVATEDATA_32:
#endif
			ret = -EACCES;
			goto out;
		/*
		 * These require write access on the inode if user id
		 * does not match. Note that they do not write to the
		 * file...  But old code did mnt_want_write, so I keep
		 * it as is.  Of course not for mountpoint owner, as
		 * that breaks read-only mounts altogether as ncpmount
		 * needs working NCP_IOC_NCPREQUEST and
		 * NCP_IOC_GET_FS_INFO.  Some of these codes (setdentryttl,
		 * signinit, setsignwanted) should be probably restricted
		 * to owner only, or even more to CAP_SYS_ADMIN).
		 */
		case NCP_IOC_GET_FS_INFO:
		case NCP_IOC_GET_FS_INFO_V2:
		case NCP_IOC_NCPREQUEST:
		case NCP_IOC_SETDENTRYTTL:
		case NCP_IOC_SIGN_INIT:
		case NCP_IOC_LOCKUNLOCK:
		case NCP_IOC_SET_SIGN_WANTED:
#ifdef CONFIG_COMPAT
		case NCP_IOC_GET_FS_INFO_V2_32:
		case NCP_IOC_NCPREQUEST_32:
#endif
			ret = mnt_want_write_file(filp);
			if (ret)
				goto out;
			need_drop_write = 1;
			ret = inode_permission(inode, MAY_WRITE);
			if (ret)
				goto outDropWrite;
			break;
		/*
		 * Read access required.
		 */
		case NCP_IOC_GETMOUNTUID16:
		case NCP_IOC_GETMOUNTUID32:
		case NCP_IOC_GETMOUNTUID64:
		case NCP_IOC_GETROOT:
		case NCP_IOC_SIGN_WANTED:
			ret = inode_permission(inode, MAY_READ);
			if (ret)
				goto out;
			break;
		/*
		 * Anybody can read these.
		 */
		case NCP_IOC_GETCHARSETS:
		case NCP_IOC_GETDENTRYTTL:
		default:
		/* Three codes below are protected by CAP_SYS_ADMIN above. */
		case NCP_IOC_SETCHARSETS:
		case NCP_IOC_CONN_LOGGED_IN:
		case NCP_IOC_SETROOT:
			break;
		}
	}
	ret = __ncp_ioctl(inode, cmd, arg);
outDropWrite:
	if (need_drop_write)
		mnt_drop_write_file(filp);
out:
	return ret;
}
#ifdef CONFIG_COMPAT
/*
 * Compat ioctl entry point: widen the 32-bit user pointer with compat_ptr()
 * and defer everything else to the native handler.
 */
long ncp_compat_ioctl(struct file *file, unsigned int cmd, unsigned long arg)
{
	return ncp_ioctl(file, cmd, (unsigned long) compat_ptr(arg));
}
#endif
| {
"pile_set_name": "Github"
} |
// AMD module with no dependencies; exports the Foo constructor.
// (Leading ';' guards against unterminated statements in concatenated files.)
;define([], function () {
    'use strict';

    // Empty constructor; instances are created with `new Foo()`.
    function Foo() {}

    return Foo;
});
| {
"pile_set_name": "Github"
} |
{
"locale": "to-TO",
"date": {
"ca": [
"gregory",
"generic"
],
"hourNo0": true,
"hour12": true,
"formats": {
"short": "{1} {0}",
"medium": "{1}, {0}",
"full": "{1}, {0}",
"long": "{1}, {0}",
"availableFormats": {
"d": "d",
"E": "ccc",
"Ed": "d E",
"Ehm": "E h:mm a",
"EHm": "E HH:mm",
"Ehms": "E h:mm:ss a",
"EHms": "E HH:mm:ss",
"Gy": "y G",
"GyMMM": "MMM y G",
"GyMMMd": "d MMM y G",
"GyMMMEd": "E d MMM y G",
"h": "h a",
"H": "HH",
"hm": "h:mm a",
"Hm": "HH:mm",
"hms": "h:mm:ss a",
"Hms": "HH:mm:ss",
"hmsv": "h:mm:ss a v",
"Hmsv": "HH:mm:ss v",
"hmv": "h:mm a v",
"Hmv": "HH:mm v",
"M": "L",
"Md": "d/M",
"MEd": "E d/M",
"MMM": "LLL",
"MMMd": "d MMM",
"MMMEd": "E d MMM",
"MMMMd": "d MMMM",
"MMMMEd": "E d MMMM",
"ms": "mm:ss",
"y": "y",
"yM": "M/y",
"yMd": "d/M/y",
"yMEd": "E d/M/y",
"yMM": "MM-y",
"yMMM": "MMM y",
"yMMMd": "d MMM y",
"yMMMEd": "E d MMM y",
"yMMMM": "MMMM y",
"yQQQ": "y QQQ",
"yQQQQ": "y QQQQ"
},
"dateFormats": {
"yMMMMEEEEd": "EEEE d MMMM y",
"yMMMMd": "d MMMM y",
"yMMMd": "d MMM y",
"yMd": "d/M/yy"
},
"timeFormats": {
"hmmsszzzz": "h:mm:ss a zzzz",
"hmsz": "h:mm:ss a z",
"hms": "h:mm:ss a",
"hm": "h:mm a"
}
},
"calendars": {
"generic": {
"months": {
"narrow": [
"1",
"2",
"3",
"4",
"5",
"6",
"7",
"8",
"9",
"10",
"11",
"12"
],
"short": [
"M01",
"M02",
"M03",
"M04",
"M05",
"M06",
"M07",
"M08",
"M09",
"M10",
"M11",
"M12"
],
"long": [
"M01",
"M02",
"M03",
"M04",
"M05",
"M06",
"M07",
"M08",
"M09",
"M10",
"M11",
"M12"
]
},
"days": {
"narrow": [
"S",
"M",
"T",
"P",
"T",
"F",
"T"
],
"short": [
"Sāp",
"Mōn",
"Tūs",
"Pul",
"Tuʻa",
"Fal",
"Tok"
],
"long": [
"Sāpate",
"Mōnite",
"Tūsite",
"Pulelulu",
"Tuʻapulelulu",
"Falaite",
"Tokonaki"
]
},
"eras": {
"narrow": [
"ERA0",
"ERA1"
],
"short": [
"ERA0",
"ERA1"
],
"long": [
"ERA0",
"ERA1"
]
},
"dayPeriods": {
"am": "AM",
"pm": "PM"
}
},
"gregory": {
"months": {
"narrow": [
"S",
"F",
"M",
"E",
"M",
"S",
"S",
"A",
"S",
"O",
"N",
"T"
],
"short": [
"Sān",
"Fēp",
"Maʻa",
"ʻEpe",
"Mē",
"Sun",
"Siu",
"ʻAok",
"Sep",
"ʻOka",
"Nōv",
"Tīs"
],
"long": [
"Sānuali",
"Fēpueli",
"Maʻasi",
"ʻEpeleli",
"Mē",
"Sune",
"Siulai",
"ʻAokosi",
"Sepitema",
"ʻOkatopa",
"Nōvema",
"Tīsema"
]
},
"days": {
"narrow": [
"S",
"M",
"T",
"P",
"T",
"F",
"T"
],
"short": [
"Sāp",
"Mōn",
"Tūs",
"Pul",
"Tuʻa",
"Fal",
"Tok"
],
"long": [
"Sāpate",
"Mōnite",
"Tūsite",
"Pulelulu",
"Tuʻapulelulu",
"Falaite",
"Tokonaki"
]
},
"eras": {
"narrow": [
"KM",
"TS",
"BCE",
"CE"
],
"short": [
"KM",
"TS",
"BCE",
"CE"
],
"long": [
"ki muʻa",
"taʻu ʻo Sīsū",
"KM",
"TS"
]
},
"dayPeriods": {
"am": "AM",
"pm": "PM"
}
}
}
},
"number": {
"nu": [
"latn"
],
"patterns": {
"decimal": {
"positivePattern": "{number}",
"negativePattern": "{minusSign}{number}"
},
"currency": {
"positivePattern": "{currency} {number}",
"negativePattern": "{minusSign}{currency} {number}"
},
"percent": {
"positivePattern": "{number}{percentSign}",
"negativePattern": "{minusSign}{number}{percentSign}"
}
},
"symbols": {
"latn": {
"decimal": ".",
"group": ",",
"nan": "TF",
"plusSign": "+",
"minusSign": "-",
"percentSign": "%",
"infinity": "∞"
}
},
"currencies": {
"AUD": "AUD$",
"BRL": "R$",
"CAD": "CA$",
"CNY": "CN¥",
"EUR": "€",
"GBP": "£",
"HKD": "HK$",
"ILS": "₪",
"INR": "₹",
"JPY": "JP¥",
"KRW": "₩",
"MXN": "MX$",
"NZD": "NZD$",
"TOP": "T$",
"TWD": "NT$",
"USD": "US$",
"VND": "₫",
"XAF": "FCFA",
"XCD": "EC$",
"XOF": "CFA",
"XPF": "CFPF"
}
}
} | {
"pile_set_name": "Github"
} |
[36m# HG changeset patch
[0m[36m# User Dan Kenigsberg <[email protected]>
[0m[36m# Date 1317492169 -10800
[0m[36m# Node ID a9a87f0e7c509ec6768379c08a0cf56f43d71b4a
[0m[36m# Parent b0ef6a5a6dccab0089d287bf6b9bcb8132bdbd0d
[0m[36mxml.dom.minidom toprettyxml: omit whitespace for Text nodes
[0m[36m
[0m[36mhttp://bugs.python.org/issue4147
[0m[36m
[0m[36mThis patch was very lightly tested, but I think it is nicer than the former one,
[0m[36mas Text.writexml() should better know not to wrap its data with whitespace.
[0m[36mEver.
[0m[36m
[0m[36mdiff -r b0ef6a5a6dcc -r a9a87f0e7c50 Lib/test/test_minidom.py
[0m[33m--- a/Lib/test/test_minidom.py Fri Sep 30 08:46:25 2011 +0300
[0m[33m+++ b/Lib/test/test_minidom.py Sat Oct 01 21:02:49 2011 +0300
[0m[1;34m@@ -467,6 +467,13 @@
[0m[0m dom.unlink()
[0m[0m self.confirm(domstr == str.replace("\n", "\r\n"))
[0m[32m+
[0m[32m+ def testPrettyTextNode(self):
[0m[32m+ str = '<A>B</A>'
[0m[32m+ dom = parseString(str)
[0m[32m+ dom2 = parseString(dom.toprettyxml())
[0m[32m+ self.confirm(dom.childNodes[0].childNodes[0].toxml()==
[0m[32m+ dom2.childNodes[0].childNodes[0].toxml())
[0m[0m
[0m[0m def testProcessingInstruction(self):
[0m[0m dom = parseString('<e><?mypi \t\n data \t\n ?></e>')
[0m[0m pi = dom.documentElement.firstChild
[0m[36mdiff -r b0ef6a5a6dcc -r a9a87f0e7c50 Lib/xml/dom/minidom.py
[0m[33m--- a/Lib/xml/dom/minidom.py Fri Sep 30 08:46:25 2011 +0300
[0m[33m+++ b/Lib/xml/dom/minidom.py Sat Oct 01 21:02:49 2011 +0300
[0m[1;34m@@ -836,7 +836,9 @@
[0m[0m _write_data(writer, attrs[a_name].value)
[0m[0m writer.write("\"")
[0m[0m if self.childNodes:
[0m[1;31m-[0m[31m writer.write(">[7m[31m%s[0m[31m"[7m[31m%(newl)[0m[31m)
[0m[32m+[0m[32m writer.write(">")
[0m[32m+ if self.childNodes[0].nodeType != Node.TEXT_NODE:
[0m[32m+ writer.write(newl)
[0m[0m for node in self.childNodes:
[0m[0m node.writexml(writer,indent+addindent,addindent,newl)
[0m[0m writer.write("%s</%s>%s" % (indent,self.tagName,newl))
[0m[1;34m@@ -1061,7 +1063,7 @@
[0m[0m return newText
[0m[0m
[0m[0m def writexml(self, writer, indent="", addindent="", newl=""):
[0m[1;31m-[0m[31m _write_data(writer, [7m[31m"%s%s%s"%(indent, [0m[31mself.data[7m[31m, newl)[0m[31m)
[0m[32m+[0m[32m _write_data(writer, self.data)
[0m[0m
[0m[0m # DOM Level 3 (WD 9 April 2002)
[0m[0m
[0m | {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef AAPT_TRACEBUFFER_H
#define AAPT_TRACEBUFFER_H
#include <string>
#include <vector>
#include <androidfw/StringPiece.h>
namespace aapt {
// Record timestamps for beginning and end of a task and generate systrace json fragments.
// This is an in-process ftrace which has the advantage of being platform independent.
// These methods are NOT thread-safe since aapt2 is not multi-threaded.
// Convenience RAII object to automatically finish an event when the object goes out of scope.
class Trace {
public:
Trace(const std::string& tag);
Trace(const std::string& tag, const std::vector<android::StringPiece>& args);
~Trace();
};
// Manual markers.
void BeginTrace(const std::string& tag);
void EndTrace();
// A master trace is required to flush events to disk. Events are formatted in systrace
// json format.
class FlushTrace {
public:
explicit FlushTrace(const std::string& basepath, const std::string& tag);
explicit FlushTrace(const std::string& basepath, const std::string& tag,
const std::vector<android::StringPiece>& args);
explicit FlushTrace(const std::string& basepath, const std::string& tag,
const std::vector<std::string>& args);
~FlushTrace();
private:
std::string basepath_;
};
#define TRACE_CALL() Trace __t(__func__)
#define TRACE_NAME(tag) Trace __t(tag)
#define TRACE_NAME_ARGS(tag, args) Trace __t(tag, args)
#define TRACE_FLUSH(basename, tag) FlushTrace __t(basename, tag)
#define TRACE_FLUSH_ARGS(basename, tag, args) FlushTrace __t(basename, tag, args)
} // namespace aapt
#endif //AAPT_TRACEBUFFER_H
| {
"pile_set_name": "Github"
} |
# Eventbrite React Testing Best Practices
Guidelines and best practices used by Eventbrite to provide consistency and prevent errors in testing React components. This does not cover testing utility/helper functions (including Redux reducers) that are used in conjunction with React as those follow general testing guidelines.
## Table of Contents
0. [Testing environment](#testing-environment)
0. [Testing philosophy](#testing-philosophy)
0. [Writing a test case](#writing-a-test-case)
0. [Finding nodes](#finding-nodes)
0. [Finding components](#finding-components)
0. [Testing existence](#testing-existence)
0. [Assertion helpers](#assertion-helpers)
0. [Types of renderers](#types-of-renderers)
0. [Testing render](#testing-render)
0. [Testing events](#testing-events)
0. [Testing state](#testing-state)
0. [Testing updated props](#testing-updated-props)
## Testing environment
Eventbrite uses [Jest](http://facebook.github.io/jest/) and [`enzyme`](http://airbnb.io/enzyme/) for unit testing React components. We also leverage [`jest-enzyme`](https://github.com/blainekasten/enzyme-matchers) assertion helpers. Enzyme wraps [`ReactTestUtils`](https://facebook.github.io/react/docs/test-utils.html), which contains a bunch of primitives for testing components.
Don't use `ReactTestUtils` directly; use Enzyme!
**[⬆ back to top](#table-of-contents)**
## Testing philosophy
Unit testing React components can be a little tricky compared to testing the input/output of traditional JavaScript functions. But it's still doable! Just like with "normal" unit testing, we want to test all of the logic within the component via its public interface. The public _input_ to a component is its props. The public _output_ of a component is the combination of the elements it specifically renders (see [Testing render](#testing-render)) as well as the callback handlers it invokes (see [Testing events](#testing-events)). The goal is to render components with various configurations of their props, so that we can assert that what is rendered and what callbacks are invoked is as expected.
**[⬆ back to top](#table-of-contents)**
## Writing a test case
Use [arrow functions](https://www.eventbrite.com/engineering/learning-es6-arrow-functions/) to force functional test cases:
```js
it('does what it is supposed to do', () => {
});
```
Using arrow functions prevents being able to use `beforeEach` & `afterEach` because `this` is now lexically scoped. In the past, data common to each test case was stored on `this` in `beforeEach` (and cleaned up in `afterEach`) so that each individual test case didn't have to generate the data itself. However, `beforeEach` devolved into a dumping ground for _anything_ that _may_ get used by more than one test case. As such way more data was generated than was needed, unnecessarily slowing down test execution.
Instead, factor out helper data generation functions and call them as needed in the test cases:
```js
const generateComponent = (additionalProps={}) => (
<Component {...additionalProps} />
);
it('does what it is supposed to do', () => {
let wrapper = mount(generateComponent());
});
```
**[⬆ back to top](#table-of-contents)**
## Finding nodes
Search for nodes within a component by adding `data-spec` attributes to them. In the past, Eventbrite used special `js-*` CSS classes for references to nodes in JavaScript code. These `js-*` classes were used when testing as well. Now with React testing, instead of using special CSS classes, [refs](https://github.com/eventbrite/javascript/tree/master/react#refs), or attempting to traverse the DOM with Enzyme's [`find`](http://airbnb.io/enzyme/docs/api/ReactWrapper/find.html) helper, we use `data-spec` attributes.
The `data-spec` attribute is specific to testing and not tied to presentation like CSS classes would be. If we decide to rename or remove a CSS class, the tests should not be impacted because there is no implicit link between styles and tests. We leverage a helper, `getSpecWrapper`, to find nodes with the `data-spec` attribute. Suppose we had the following (simplified) generated markup for a `Notification` component:
```html
<div class="notification">
<button class="notification__close" data-spec="notification-close">X</button>
<p class="notification__message">
You have successfully registered for this event!
</p>
<a href="https://www.eventbrite.com/d/" class="notification__more-link" data-spec="notification-more-link">Browse all events</a>
</div>
```
Tests using `getSpecWrapper` would look like:
```js
// good
it('has more link pointing to browse URL when `type` is browse', () => {
let onMoreAction = jest.fn();
let wrapper = mount(<Notification type="browse" onMoreAction={onMoreAction} />);
let moreLinkWrapper = getSpecWrapper(wrapper, 'notification-more-link');
moreLinkWrapper.simulate('click');
expect(onMoreAction).toHaveBeenCalled();
});
// bad (searches by tag name and CSS class)
it('has more link pointing to browse URL when `type` is browse', () => {
let onMoreAction = jest.fn();
let wrapper = mount(<Notification type="browse" onMoreAction={onMoreAction} />);
let moreLinkWrapper = wrapper.find('a.notification__more-link');
moreLinkWrapper.simulate('click');
expect(onMoreAction).toHaveBeenCalled();
});
```
As a reference, here are the implementations for `getSpecWrapper`:
```js
// utils/unitTest.js
export const DATA_SPEC_ATTRIBUTE_NAME = 'data-spec';
/**
* Finds all instances of components in the rendered `componentWrapper` that are DOM components
* with the `data-spec` attribute matching `name`.
* @param {ReactWrapper} componentWrapper - Rendered componentWrapper (result of mount, shallow, or render)
* @param {string} specName - Name of `data-spec` attribute value to find
* @param {string|Function} typeFilter - (Optional) Expected type of the wrappers (defaults to all HTML tags)
* @returns {ReactComponent[]} All matching DOM components
*/
export const getSpecWrapper = (componentWrapper, specName, typeFilter) => {
let specWrappers;
if (!typeFilter) {
specWrappers = componentWrapper.find(`[${DATA_SPEC_ATTRIBUTE_NAME}="${specName}"]`);
} else {
specWrappers = componentWrapper.findWhere((wrapper) => (
wrapper.prop(DATA_SPEC_ATTRIBUTE_NAME) === specName && wrapper.type() === typeFilter
));
}
return specWrappers;
};
```
**[⬆ back to top](#table-of-contents)**
## Finding components
You can find a component simply by using Enzyme's [`find`](http://airbnb.io/enzyme/docs/api/ReactWrapper/find.html) and passing the component class:
```js
it('should render a checked checkbox if it is selected', () => {
let wrapper = mount(<Component isSelected={true} />);
let checkboxWrapper = wrapper.find(Checkbox);
expect(checkboxWrapper).toHaveProp('isChecked', true);
});
```
This works as long as there's only one `Checkbox` rendered within `Component`. If there are multiple `Checkbox` components within `Component`, `checkboxWrapper` would have multiple elements in it. Instead you can add a `data-spec` attribute to the specific `Checkbox` and use `getSpecWrapper`:
```js
// good
it('should render a checked checkbox if it is selected', () => {
let wrapper = mount(<Component isSelected={true} />);
// pass the component class as the third parameter to `getSpecWrapper`
let selectAllCheckboxWrapper = getSpecWrapper(wrapper, 'component-selectAll', Checkbox);
expect(selectAllCheckboxWrapper).toHaveProp('isChecked', true);
});
// bad (finds the appropriate Checkbox based on source order)
it('should render a checked checkbox if it is selected', () => {
let wrapper = mount(<Component isSelected={true} />);
let selectAllCheckboxWrapper = wrapper.find(Checkbox).at(2);
expect(selectAllCheckboxWrapper).toHaveProp('isChecked', true);
});
```
The key in the "good" example is the third parameter passed to `getSpecWrapper`. By default `getSpecWrapper` will try to find a node with the specified `data-spec`. But if you specify the component class (`Checkbox` in this case), it'll return a reference to the component wrapper.
**[⬆ back to top](#table-of-contents)**
## Testing existence
### Testing node existence
To [find nodes](#finding-nodes) you use the `getSpecWrapper` helper and use the `jest-enzyme` [`.toBePresent`](https://github.com/blainekasten/enzyme-matchers#tobepresent) and [`.toBeEmpty`](https://github.com/blainekasten/enzyme-matchers#tobeempty) assertion matchers:
```js
let wrapper = mount(<Spinner />);
// assert that node exists (doesn't throw an Error)
expect(wrapper).toBePresent();
// assert that node doesn't exist (throws an Error)
expect(wrapper).toBeEmpty();
```
**[⬆ back to top](#table-of-contents)**
### Testing component existence
Typically, you'll [find components](#finding-components) by using Enzyme's `find` method which returns an Enzyme [`ReactWrapper`](https://github.com/airbnb/enzyme/tree/master/docs/api/ReactWrapper) and the `jest-enzyme` [`.toBePresent`](https://github.com/blainekasten/enzyme-matchers#tobepresent) and [`.toBeEmpty`](https://github.com/blainekasten/enzyme-matchers#tobeempty) assertion matchers:
```js
let wrapper = mount(<Select values={dummyValues} />);
let selectOptionWrappers = wrapper.find(SelectOption);
// assert that there are no found nodes
expect(selectOptionWrappers).toBeEmpty();
// assert that there are more than zero found nodes
expect(selectOptionWrappers).toBePresent();
// assert there to be a specific number of found nodes
expect(selectOptionWrappers).toHaveLength(dummyValues.length);
```
**[⬆ back to top](#table-of-contents)**
## Assertion helpers
Whenever possible, use `jest-enzyme` assertion helpers in favor of the normal assertion helpers that just come with `jest`:
```js
// good (leverages `.prop` from `jest-enzyme`)
it('should render a checked checkbox if it is selected', () => {
let wrapper = mount(<Component isSelected={true} />);
let checkboxWrapper = wrapper.find(Checkbox);
expect(checkboxWrapper).toHaveProp('isChecked', true);
});
// bad (just uses `enzyme` with vanilla `jest`)
it('should render a checked checkbox if it is selected', () => {
let wrapper = mount(<Component isSelected={true} />);
let checkboxWrapper = wrapper.find(Checkbox);
expect(checkboxWrapper.prop('isChecked')).toBe(true);
});
```
Functionally the "good" and "bad" assertions are the same. The assertions will both pass when the `isChecked` prop is `true` and both fail when it's `false`. The difference is in the reported error when they fail.
When the "good" assertion (using `jest-enzyme`'s [`.toHaveProp`](https://github.com/blainekasten/enzyme-matchers#tohaveprop) helper) fails, you'll receive an error such as:
```console
AssertionError: expected the node in <div /> to have a 'isChecked' prop with the value true, but the value was false
```
However, when the "bad" assertion fails, you'll receive a more cryptic (and less helpful) error such as:
```console
AssertionError: expected false to equal true
```
The "good" example has significantly more context and should be significantly more helpful when looking through failed test logs.
**[⬆ back to top](#table-of-contents)**
## Types of renderers
Enzyme provides three types of renderers for testing React components:
- [`mount`](http://airbnb.io/enzyme/docs/api/mount.html) - for components that may interact with DOM APIs, or may require the full lifecycle in order to fully test the component (i.e., `componentDidMount` etc.)
- [`shallow`](http://airbnb.io/enzyme/docs/api/shallow.html) - performant renderer because it renders only single level of children (no descendants of those children) in order to ensure that tests aren't indirectly asserting on behavior of child components
- [`render`](http://airbnb.io/enzyme/docs/api/render.html) - renders the components to traversable static HTML markup
Eventbrite uses `mount` for rendering **all** components when testing.
For components that just render markup ([atoms](http://bradfrost.com/blog/post/atomic-web-design/#atoms) in atomic web design), rendering with `mount` makes the most sense because they are the most likely to access the DOM API. Shallow rendering (via `shallow`) would be of little to no use.
For components that are a mix of markup and small components ([molecules](http://bradfrost.com/blog/post/atomic-web-design/#molecules) in atomic web design), rendering with `mount` also makes the most sense because of all the markup that still exists. It's simpler to stay consistent within the test file and use `mount` for all tests.
For components that are basically a composite of other components ([organisms](http://bradfrost.com/blog/post/atomic-web-design/#organisms) in atomic web design), we would ideally render with `shallow` because you're basically just testing that the child components are receiving the correct props. Furthermore, it's faster to just render one level than render the entire markup tree, especially when the component is big. But in practice we make heavy use of [helper components](README.md#helper-components) in order to keep `render()` lean. As a result, what ends up being shallow rendered is not the actual child component, but an intermediary helper component. This means that if you wrote a test using `shallow` and then refactored the code to use helper components, your tests will break when the resultant render is actually still the same. Because of this nuance of when and where `shallow` can work, we've chosen to opt for `mount` because it always works. The trade-off is performance, which for now hasn't been a big enough issue.
**[⬆ back to top](#table-of-contents)**
## Testing render
When testing what a React component is rendering, _only test what the component itself renders_. Therefore if a parent component renders child components, such as a `TextFormField` component rendering `Label` & `TextInput` components, only test that the parent renders those child components and passes the appropriate props to them. **Do not** test the markup rendered by the child components because the tests for that child component will cover that.
```js
// good
it('displays label when `labelText` is specified', () => {
let wrapper = mount(<TextFormField labelText="Name" />);
let labelWrapper = wrapper.find(Label);
// assert that when `labelText` is specified
// the Label component is rendered
expect(labelWrapper).toBePresent();
// assuming that `labelText` gets passed like:
// <Label>{labelText}</Label>
// asserts that it's properly passed
expect(labelWrapper).toHaveProp('children', 'Name');
});
// bad (assumes the markup the child component is rendering)
it('displays label when `labelText` is specified', () => {
    let wrapper = mount(<TextFormField labelText="Name" />);
    let labelWrapper = wrapper.find(Label);
    expect(labelWrapper).toBePresent();
    expect(labelWrapper).toHaveText('Name');
});
```
The "bad" example assumes that the `Label` component is rendering a `<label>` tag, but the `TextFormField` component shouldn't really know or care what `Label` renders. It treats `Label` as a black box in its implementation so the test should do the same. Imagine if the `Label` component changed to render a `<div>` instead of a `<label>`. All of the tests for components using a `Label` component would now unnecessarily fail. On the other hand, the "good" example tests that the `TextFormField` properly renders the `<Label>` component and that the `labelText` prop is passed as its content (the `children` prop).
The easiest way to test HTML elements and their attributes, is to use [Jest snapshots](http://facebook.github.io/jest/docs/snapshot-testing.html):
```js
// good
it('includes the disabled CSS class when `isDisabled` is `true`', () => {
let wrapper = mount(<Spinner isDisabled={true} />);
// assert that the current render matches the saved snapshot
expect(wrapper).toMatchSnapshot();
});
```
While snapshot testing is very simple, that simplicity comes at a cost. The initial snapshot file is generated the first time the test is run, so you need to _visually_ inspect that the generated snapshot is correct, otherwise you could be saving a bad test case. Furthermore, the snapshot does not convey the intent of the test so you need to have a very verbose/descriptive test case title (the `it()`).
Also because we use [`mount`](#types-of-renderers) for rendering, the **entire** component tree is in the snapshot, including any helper components, higher-order components, etc. The larger the component, the larger a snapshot will be. For _atoms_, you can use snapshots liberally because atoms are exclusively markup and are small. _Organisms_ are generally large components composed of several molecules and other smaller organisms; the component itself has very little markup making the snapshots bloated not very meaningful. As such, you should use snapshot testing sparingly and instead test that child components are rendered and get the appropriate props. _Molecules_ are somewhere in between and you should use your best judgment as to when to use snapshot testing.
Lastly, since snapshot files are saved to disk, running the tests are slower than traditional means of unit testing.
**[⬆ back to top](#table-of-contents)**
## Testing events
As mentioned in our [Testing philosophy](#testing-philosophy), part of the output of your component are the callback handlers it invokes. These event callbacks are functions passed as props to your component and need to be tested.
Test event callbacks by triggering the events that in turn will invoke the callback handler. The type of event triggered depends on whether the component contains HTML markup or child components.
### Testing events triggered by DOM
If you are testing an event callback that is triggered by a DOM event (such as `onClick` of an `<button>` node), you will need to simulate that DOM event. You will also need to stub the event callback prop to assert that it is being called with the correct arguments.
Let's say that there is a `TextInput` component that wraps an `<input type="text" />` DOM node. The `TextInput` has an `onChange` prop that gets called whenever the input field value changes. The `onChange` prop is also called with the current value that's in the input field. The test case would be set up like:
```js
it('properly fires `onChange` when input changes', () => {
let onChange = jest.fn();
// pass mock function to component as `onChange` prop
let wrapper = mount(<TextInput onChange={onChange} />);
let inputWrapper = getSpecWrapper(wrapper, 'text-input');
let inputValue = 'Here is a value';
// Create a fake event with the properties needed by the component
let mockEvent = {
target: {
value: inputValue
}
};
// simulate onChange event on input DOM
inputWrapper.simulate('change', mockEvent);
// assert that the stubbed function was called with the
// expected value
expect(onChange).toHaveBeenCalledWith(inputValue);
});
```
The test case above uses [`jest.fn()`](http://facebook.github.io/jest/docs/mock-function-api.html) to create a mock function. The mock is passed as the `TextInput` component's `onChange` prop so that we can make assertions on it at the end. After [finding a reference](#finding-nodes) to the input field, we simulate a fake `onChange` DOM event on the input field (using Enzyme's [`.simulate`](https://github.com/airbnb/enzyme/blob/master/docs/api/ReactWrapper/simulate.md) helper). Because the `TextInput` implementation expects to read `e.target.value` from an actual DOM event when it's running in the browser, we have to mock that event with an object of the same structure. We don't need a full mock DOM event; we only need to mock what the code is actually calling.
Simulating the fake event on the input field will ultimately call our `onChange` with its current value. Therefore, our assertion is that `onChange` was not only called, but also called with the expected input value. This assertion leverages the `.toHaveBeenCalledWith` assertion helper from `jest-enzyme`.
**[⬆ back to top](#table-of-contents)**
### Testing events triggered by child components
More than likely instead of your component adding event handlers directly to DOM nodes, it will be adding handlers to child components. Therefore instead of simulating a DOM event, simulate the child component's event handler being invoked.
Let's say you have an `AutocompleteField` component that has a child `TextInput`. The `AutocompleteField` has an `onChange` prop that is invoked whenever its child `TextInput`'s `onChange` event is invoked. The `AutocompleteField`'s `onChange` prop also passes the current input value. The test case would be set up like:
```js
it('properly fires `onChange` when input changes', () => {
let onChange = jest.fn();
// pass stubbed function to component as `onChange` prop
let wrapper = mount(<AutocompleteField suggestions={[]} onChange={onChange} />);
let textInputWrapper = wrapper.find(TextInput);
let inputValue = 'Here is a value';
// We don't want to make any assumptions about the markup of `TextInput`. The
// `AutocompleteField` component handles `onChange` of `TextInput`, so all we need to
// do is call the prop directly like `TextInput` would and ensure we get the appropriate
// value
textInputWrapper.prop('onChange')(inputValue);
// assert that the stubbed function was called with the
// expected value
expect(onChange).toHaveBeenCalledWith(inputValue);
});
```
The test case above uses [`jest.fn()`](http://facebook.github.io/jest/docs/mock-function-api.html) to create a mock function. The mock is passed as the `AutocompleteField` component's `onChange` prop so that we can make assertions on it at the end. After [finding a reference](#finding-components) to the `TextInput`, we simulate how `TextInput` would invoke its `onChange` callback prop. We get a reference to the prop using Enzyme's [`.prop`](https://github.com/airbnb/enzyme/blob/master/docs/api/ReactWrapper/prop.md) helper and call the function with the `inputValue`. This is exactly how `TextInput` would call it when its DOM input field changes. However, because we don't want to make any assumptions about the markup of `TextInput` we simulate its `onChange` prop instead of digging into it in order to simulate its DOM.
Invoking the `onChange` prop will ultimately call our `onChange` with the value. Therefore, our assertion is that `onChange` was not only called, but also called with the expected input value. This assertion leverages the `.toHaveBeenCalledWith` assertion helper from `jest-enzyme`.
**[⬆ back to top](#table-of-contents)**
## Testing state
Although `jest-enzyme` provides a [`.toHaveState()`](https://github.com/blainekasten/enzyme-matchers#tohavestate) helper method for asserting component state, it shouldn't be used in tests because the component's state is internal (and shouldn't be tested). Based on our [testing philosophy](#testing-philosophy), we only want to test the public API of the component.
When a component's state changes, the component is re-rendered, resulting in a change in markup. By testing only the changed markup (part of the component's public output), instead of the component's internal state, we can refactor the component's internals and have all of our test cases still pass. In sum, our test cases are a little less fragile.
Let's say for instance we had a component that has a `Checkbox` child component that toggles the component between inactive and active states. The active state is publicly represented by an `isActive` class added to the root DOM node. The test case could look something like:
```js
// good (tests internal state *indirectly* via re-rendered markup)
it('toggles active state when checkbox is toggled', () => {
let wrapper = mount(<Component />);
let checkboxWrapper = wrapper.find(Checkbox);
// first assert that by default the active class is *not* present
expect(wrapper).toMatchSnapshot();
// simulate toggling the checkbox on by calling its
// onChange callback handler passing `true` for
// checked state
checkboxWrapper.prop('onChange')(true);
// now assert that the active class *is* present
expect(wrapper).toMatchSnapshot();
// simulate toggling the checkbox back off
checkboxWrapper.prop('onChange')(false);
// finally assert once again that active class is *not*
// present
expect(wrapper).toMatchSnapshot();
});
// bad (tests internal state directly)
it('toggles active state when checkbox is toggled', () => {
let wrapper = mount(<Component />);
let checkboxWrapper = wrapper.find(Checkbox);
// assert that component's `isActive` internal state is
// initially false
expect(wrapper).toHaveState('isActive', false);
// simulate toggling the checkbox on by calling its
// onChange callback handler passing `true` for
// checked state
checkboxWrapper.prop('onChange')(true);
// now assert that the `isActive` internal state is
// true
expect(wrapper).toHaveState('isActive', true);
// simulate toggling the checkbox back off
checkboxWrapper.prop('onChange')(false);
// finally assert once again that `isActive` internal
// state is false
expect(wrapper).toHaveState('isActive', false);
});
```
Both the "good" and "bad" test cases are basically the same. The only difference is what is asserted. Ultimately, what we care about is that the root node has the appropriate CSS class; the changing of the internal `isActive` state just happens to be the mechanism that we accomplish it. This is what makes the "good" example better.
See [Testing events triggered by child components](#testing-events-triggered-by-child-components) for more on simulating child component events.
**[⬆ back to top](#table-of-contents)**
## Testing updated props
Typically components are stateless, meaning that what is rendered by the component is 100% based upon the props that are passed in. In these cases creating a component with initial props when [testing render](#testing-render) and [testing events](#testing-events) as explained above should suffice. There shouldn't be a need to test the re-render of a component receiving new props.
However, when a component leverages internal state and its props are changed, what will be rendered will be based on a combination of those updated props and the existing state. In this case, test that the new markup is as it should be, indirectly verifying that the updated prop(s) either have or have not overridden the existing state.
Let's say we have a `TextInput` component. It has `initialValue` & `value` props (among many others). The `initialValue` prop will initialize the `TextInput` component's underlying `<input>` node's value, but won't override the node if the prop is later updated. However, the `value` prop will both initialize the `<input>` as well as override its value.
To test the `initialValue` prop behavior:
```js
it('does NOT allow `initialValue` to override existing <input> value', () => {
let initialValue = 'react';
let newValue = 'enzyme';
let wrapper = mount(<TextInput initialValue={initialValue} />);
// ensure that the `initialValue` is properly reflected
// by checking the <input> node
expect(wrapper).toMatchSnapshot();
// update the TextInput's props
wrapper.setProps({initialValue: newValue});
// ensure that the <input> node's value hasn't changed
expect(wrapper).toMatchSnapshot();
});
```
To test the `value` prop behavior:
```js
it('DOES allow `value` to override existing <input> value', () => {
let initialValue = 'react';
let newValue = 'enzyme';
let wrapper = mount(<TextInput initialValue={initialValue} />);
// ensure that the `initialValue` is properly reflected
// by checking the <input> node
expect(wrapper).toMatchSnapshot();
// update the TextInput's props
wrapper.setProps({value: newValue});
// ensure that the <input> node's value has changed
expect(wrapper).toMatchSnapshot();
});
```
The key to passing new props to the existing `TextInput` component is the [`setProps`](https://github.com/airbnb/enzyme/blob/master/docs/api/mount.md#setpropsnextprops--reactwrapper) helper method. It will cause a re-render, which will allow us to assert that the new markup is as it should be.
**[⬆ back to top](#table-of-contents)**
| {
"pile_set_name": "Github"
} |
<?php
namespace Royalcms\Component\DirectoryHasher\Hasher;
/**
* Hasher which generates SHA1-Hashes of the files
*/
class SHA1 extends HasherAbstract
{
/**
* {@inheritdoc}
*/
public function getHashsForFile($file) {
return array('sha1' => sha1_file($file));
}
}
| {
"pile_set_name": "Github"
} |
(module "TFML-110-02-L-D"
(layer F.Cu)
(tedit 56B15E0A)
(pad 1 smd rect
(at -5.7150 1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 2 smd rect
(at -5.7150 -1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 3 smd rect
(at -4.4450 1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 4 smd rect
(at -4.4450 -1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 5 smd rect
(at -3.1750 1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 6 smd rect
(at -3.1750 -1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 7 smd rect
(at -1.9050 1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 8 smd rect
(at -1.9050 -1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 9 smd rect
(at -0.6350 1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 10 smd rect
(at -0.6350 -1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 11 smd rect
(at 0.6350 1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 12 smd rect
(at 0.6350 -1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 13 smd rect
(at 1.9050 1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 14 smd rect
(at 1.9050 -1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 15 smd rect
(at 3.1750 1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 16 smd rect
(at 3.1750 -1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 17 smd rect
(at 4.4450 1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 18 smd rect
(at 4.4450 -1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 19 smd rect
(at 5.7150 1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(pad 20 smd rect
(at 5.7150 -1.7150)
(size 0.7400 2.9200)
(layers F.Cu F.Mask F.Paste))
(fp_line
(start -7.9400 -2.8600)
(end 7.9400 -2.8600)
(layer F.Fab)
(width 0.0100))
(fp_line
(start 7.9400 -2.8600)
(end 7.9400 2.8600)
(layer F.Fab)
(width 0.0100))
(fp_line
(start 7.9400 2.8600)
(end -7.9400 2.8600)
(layer F.Fab)
(width 0.0100))
(fp_line
(start -7.9400 2.8600)
(end -7.9400 -2.8600)
(layer F.Fab)
(width 0.0100))
(fp_line
(start -8.6000 -3.4500)
(end 8.6000 -3.4500)
(layer F.CrtYd)
(width 0.0100))
(fp_line
(start 8.6000 -3.4500)
(end 8.6000 3.4500)
(layer F.CrtYd)
(width 0.0100))
(fp_line
(start 8.6000 3.4500)
(end -8.6000 3.4500)
(layer F.CrtYd)
(width 0.0100))
(fp_line
(start -8.6000 3.4500)
(end -8.6000 -3.4500)
(layer F.CrtYd)
(width 0.0100))
(fp_line
(start -6.3650 -2.7850)
(end -7.8650 -2.7850)
(layer F.SilkS)
(width 0.1500))
(fp_line
(start -7.8650 -2.7850)
(end -7.8650 2.7850)
(layer F.SilkS)
(width 0.1500))
(fp_line
(start -7.8650 2.7850)
(end -6.3650 2.7850)
(layer F.SilkS)
(width 0.1500))
(fp_line
(start 6.3650 2.7850)
(end 7.8650 1.2850)
(layer F.SilkS)
(width 0.1500))
(fp_line
(start 7.8650 1.2850)
(end 7.8650 -1.2850)
(layer F.SilkS)
(width 0.1500))
(fp_line
(start 7.8650 -1.2850)
(end 6.3650 -2.7850)
(layer F.SilkS)
(width 0.1500))
(fp_text reference REF**
(at 0 -4.1250)
(layer F.Fab)
(effects
(font
(size 1.0000 1.0000)
(thickness 0.1500))))
(fp_text value "TFML-110-02-L-D"
(at 0 4.1250)
(layer F.Fab)
(effects
(font
(size 1.0000 1.0000)
(thickness 0.1500))))) | {
"pile_set_name": "Github"
} |
// RUN: %clang_cc1 -x objective-c -fsyntax-only -verify -Wno-objc-root-class %s
// RUN: %clang_cc1 -x objective-c++ -fsyntax-only -verify -Wno-objc-root-class %s
// rdar://10593227
// Compiler regression test: referencing a synthesized instance variable from a
// class method must be diagnosed. The expected-error comment below is matched
// by clang's -verify mode and must stay on the same line as the offending code.
@class UIWindow;
@interface CNAppDelegate
@property (strong, nonatomic) UIWindow *window;
@end
@interface CNAppDelegate ()
@property (nonatomic,retain) id foo;
@end
@implementation CNAppDelegate
@synthesize foo;
@synthesize window = _window;
+(void)myClassMethod;
{
foo = 0; // expected-error {{instance variable 'foo' accessed in class method}}
}
@end
| {
"pile_set_name": "Github"
} |
import { check } from 'meteor/check';
// Annotation types that need special upsert handling (all other shapes —
// line, triangle, ellipse, rectangle — go through handleCommonAnnotation).
const ANNOTATION_TYPE_TEXT = 'text';
const ANNOTATION_TYPE_PENCIL = 'pencil';
// Fallback text-box size, in percent of the slide (coordinates run 0..100).
const DEFAULT_TEXT_WIDTH = 30;
const DEFAULT_TEXT_HEIGHT = 20;
// line, triangle, ellipse, rectangle
// Builds the Mongo upsert (selector + modifier) for simple shape annotations.
// The selector matches an existing annotation by meeting, annotation id and
// author; the modifier overwrites the mutable fields, keeps the original
// position on first insert, and bumps the version counter on every write.
function handleCommonAnnotation(meetingId, whiteboardId, userId, annotation) {
  const {
    id, status, annotationType, annotationInfo, wbId, position,
  } = annotation;

  const selector = { meetingId, id, userId };

  const modifier = {
    $set: {
      whiteboardId,
      meetingId,
      id,
      status,
      annotationType,
      annotationInfo,
      wbId,
    },
    $setOnInsert: { position },
    $inc: { version: 1 },
  };

  return { selector, modifier };
}
// Builds the Mongo upsert for a text annotation.
//
// If the client reports a degenerate text box (zero-sized, or smaller than the
// calculated font size), we fall back to a default box, clamped so it does not
// run past the slide edge (all coordinates/sizes are percentages, 0..100).
// Note: mutates annotation.annotationInfo in place (size fields and text).
function handleTextUpdate(meetingId, whiteboardId, userId, annotation) {
  const {
    id, status, annotationType, annotationInfo, wbId, position,
  } = annotation;

  const { textBoxWidth, textBoxHeight, calcedFontSize } = annotationInfo;
  const useDefaultSize = (textBoxWidth === 0 && textBoxHeight === 0)
    || textBoxWidth < calcedFontSize
    || textBoxHeight < calcedFontSize;

  if (useDefaultSize) {
    annotationInfo.textBoxWidth = DEFAULT_TEXT_WIDTH;
    annotationInfo.textBoxHeight = DEFAULT_TEXT_HEIGHT;
    // Clamp the default box to the right/bottom slide edges.
    if (100 - annotationInfo.x < DEFAULT_TEXT_WIDTH) {
      annotationInfo.textBoxWidth = 100 - annotationInfo.x;
    }
    if (100 - annotationInfo.y < DEFAULT_TEXT_HEIGHT) {
      annotationInfo.textBoxHeight = 100 - annotationInfo.y;
    }
  }

  const selector = {
    meetingId,
    id,
    userId,
  };

  // Normalize line endings to a single LF. Fix: the previous /[\r]/g pattern
  // replaced each CR individually, so a CRLF pair ("\r\n") became a doubled
  // newline ("\n\n"); match "\r\n" as one unit first, then bare "\r".
  annotationInfo.text = annotationInfo.text.replace(/\r\n|\r/g, '\n');

  const modifier = {
    $set: {
      whiteboardId,
      meetingId,
      id,
      status,
      annotationType,
      annotationInfo,
      wbId,
    },
    $setOnInsert: {
      position,
    },
    $inc: { version: 1 },
  };

  return { selector, modifier };
}
// Builds the Mongo upsert for a pencil annotation. Pencil strokes arrive in
// chunks, distinguished by status: the first chunk creates the base document,
// intermediate chunks only append points, and the final chunk overwrites the
// document with the complete stroke info.
function handlePencilUpdate(meetingId, whiteboardId, userId, annotation) {
  const DRAW_START = 'DRAW_START';
  const DRAW_UPDATE = 'DRAW_UPDATE';
  const DRAW_END = 'DRAW_END';

  const {
    id, status, annotationType, annotationInfo, wbId, position,
  } = annotation;

  // Pencil documents are additionally matched on the whiteboard id.
  const selector = {
    meetingId,
    id,
    userId,
    whiteboardId,
  };

  let modifier;
  if (status === DRAW_START) {
    // First chunk: create the 'pencil_base' document.
    // TODO: find and remove unused props (chunks, version, etc)
    modifier = {
      $set: {
        id,
        userId,
        meetingId,
        whiteboardId,
        position,
        status,
        annotationType,
        annotationInfo,
        wbId,
        version: 1,
      },
    };
  } else if (status === DRAW_UPDATE) {
    // Intermediate chunk: append the new points to the existing stroke.
    modifier = {
      $push: {
        'annotationInfo.points': { $each: annotationInfo.points },
      },
      $set: {
        status,
      },
      $inc: { version: 1 },
    };
  } else if (status === DRAW_END) {
    // Final chunk: update the main pencil object with the final info.
    modifier = {
      $set: {
        whiteboardId,
        meetingId,
        id,
        status,
        annotationType,
        annotationInfo,
        wbId,
        position,
      },
      $inc: { version: 1 },
    };
  }
  // Unknown status: modifier stays undefined, matching the original switch.

  return { selector, modifier };
}
// Dispatches an incoming annotation to the type-specific upsert builder and
// returns its { selector, modifier } pair.
// Fix: userId was the only argument not validated; add the missing check so
// a malformed caller fails fast like it does for the other ids.
export default function addAnnotation(meetingId, whiteboardId, userId, annotation) {
  check(meetingId, String);
  check(whiteboardId, String);
  check(userId, String);
  check(annotation, Object);

  switch (annotation.annotationType) {
    case ANNOTATION_TYPE_TEXT:
      return handleTextUpdate(meetingId, whiteboardId, userId, annotation);
    case ANNOTATION_TYPE_PENCIL:
      return handlePencilUpdate(meetingId, whiteboardId, userId, annotation);
    default:
      return handleCommonAnnotation(meetingId, whiteboardId, userId, annotation);
  }
}
| {
"pile_set_name": "Github"
} |
//
// DLWMMenu.m
// DLWidgetMenu
//
// Created by Vincent Esche on 05/11/13.
// Copyright (c) 2013 Vincent Esche. All rights reserved.
//
#import "DLWMMenu.h"
#import "DLWMMenuItem.h"
#import "DLWMMenuAnimator.h"
#import "DLWMSpringMenuAnimator.h"
// One full turn in radians; presumably used by circular layouts — TODO confirm
// against the layout implementations (not visible in this file).
const CGFloat DLWMFullCircle = M_PI * 2;
// Posted by layout objects when their parameters change so observing menus can
// re-run layout (see -setLayout: / -layoutDidChange: below).
NSString * const DLWMMenuLayoutChangedNotification = @"DLWMMenuLayoutChangedNotification";
// Class extension: redeclares publicly readonly state as readwrite for
// internal use.
@interface DLWMMenu ()
// Point the items are laid out around while the menu is open.
@property (readwrite, assign, nonatomic) CGPoint centerPointWhileOpen;
// The always-visible item that toggles the menu open/closed.
@property (readwrite, strong, nonatomic) DLWMMenuItem *mainItem;
// Managed menu items (backed by an NSMutableArray, see -addItem:).
@property (readwrite, strong, nonatomic) NSArray *items;
// Fires when the staggered open/close animation has fully finished.
@property (readwrite, strong, nonatomic) NSTimer *timer;
@end
@implementation DLWMMenu
// UIView designated initializer; funnels into the common setup. Returns nil if
// common setup fails.
- (id)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        if (![self commonInit_DLWMMenu]) {
            return nil;
        }
    }
    return self;
}
// Nib/storyboard path; same common setup as -initWithFrame:.
- (id)initWithCoder:(NSCoder *)decoder {
    self = [super initWithCoder:decoder];
    if (self) {
        if (![self commonInit_DLWMMenu]) {
            return nil;
        }
    }
    return self;
}
// Shared initialization for both init paths. Always returns YES; the BOOL
// return exists so the callers can bail out if setup ever becomes fallible.
- (BOOL)commonInit_DLWMMenu {
    self.items = [NSMutableArray array];
    self.enabled = YES;
    // Spring animations only exist on iOS 7+; fall back to the plain animator
    // on iOS 6 and earlier.
    DLWMMenuAnimator * openAnimator = nil;
    if (floor(NSFoundationVersionNumber) <= NSFoundationVersionNumber_iOS_6_1) {
        openAnimator = [DLWMMenuAnimator new];
    } else {
        openAnimator = [DLWMSpringMenuAnimator new];
    }
    self.openAnimator = openAnimator;
    self.closeAnimator = [[DLWMMenuAnimator alloc] init];
    // Stagger between successive items during open/close (seconds).
    self.openAnimationDelayBetweenItems = 0.025;
    self.closeAnimationDelayBetweenItems = 0.025;
    // Taps on the menu view itself (outside any item) are reported to the
    // delegate via -receivedSingleTapOutside:.
    UITapGestureRecognizer *singleTapRecognizer = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(receivedSingleTapOutside:)];
    [self addGestureRecognizer:singleTapRecognizer];
    return YES;
}
// Convenience initializer wiring up all collaborators at once. dataSource,
// itemSource and layout are required; delegate, itemDelegate and
// representedObject may be nil. The menu initially adopts the main item
// view's frame and starts closed.
- (id)initWithMainItemView:(UIView *)mainItemView
                dataSource:(id<DLWMMenuDataSource>)dataSource
                itemSource:(id<DLWMMenuItemSource>)itemSource
                  delegate:(id<DLWMMenuDelegate>)delegate
              itemDelegate:(id<DLWMMenuItemDelegate>)itemDelegate
                    layout:(id<DLWMMenuLayout>)layout
         representedObject:(id)representedObject {
    self = [self initWithFrame:mainItemView.frame];
    if (self) {
        NSAssert(dataSource, @"Method argument 'dataSource' must not be nil.");
        NSAssert(itemSource, @"Method argument 'itemSource' must not be nil.");
        NSAssert(layout, @"Method argument 'layout' must not be nil.");
        self.state = DLWMMenuStateClosed;
        self.representedObject = representedObject;
        self.centerPointWhileOpen = mainItemView.center;
        self.mainItem = [[DLWMMenuItem alloc] initWithContentView:mainItemView representedObject:representedObject];
        // Note: the dataSource/itemSource setters each trigger -reloadData,
        // so data is loaded (and reloaded) during this assignment chain.
        self.dataSource = dataSource;
        self.itemSource = itemSource;
        self.delegate = delegate;
        self.itemDelegate = itemDelegate;
        self.layout = layout;
        [self reloadData];
        [self adjustGeometryForState:DLWMMenuStateClosed];
    }
    return self;
}
// Resizes the menu view for the given state: when closed it shrinks to just
// the main item's bounds (so it doesn't swallow touches); otherwise it expands
// to cover the whole superview so open items remain tappable.
- (void)adjustGeometryForState:(DLWMMenuState)state {
    CGPoint itemLocation;
    if (state == DLWMMenuStateClosed) {
        CGRect itemBounds = self.mainItem.bounds;
        itemLocation = CGPointMake(CGRectGetMidX(itemBounds), CGRectGetMidY(itemBounds));
        CGPoint menuCenter = self.mainItem.center;
        self.bounds = itemBounds;
        self.center = menuCenter;
    } else {
        CGRect menuFrame = self.superview.bounds;
        itemLocation = self.center;
        self.frame = menuFrame;
    }
    self.mainItem.center = itemLocation;
    self.mainItem.layoutLocation = itemLocation;
    self.centerPointWhileOpen = itemLocation;
}
#pragma mark - Custom Accessors
// Swaps the main item, tearing down gesture recognizers on the old one and
// installing them on the new one.
- (void)setMainItem:(DLWMMenuItem *)mainItem {
    NSAssert(mainItem, @"Method argument 'mainItem' must not be nil.");
    if (_mainItem) {
        [_mainItem removeFromSuperview];
        [self removeGestureRecognizersFromMenuItem:_mainItem];
    }
    _mainItem = mainItem;
    [self addGestureRecognizersToMenuItem:mainItem];
    mainItem.userInteractionEnabled = YES;
    mainItem.center = self.centerPointWhileOpen;
    [self addSubview:mainItem];
}
// Setting a new data source immediately reloads all items.
- (void)setDataSource:(id<DLWMMenuDataSource>)dataSource {
    NSAssert(dataSource, @"Method argument 'dataSource' must not be nil.");
    _dataSource = dataSource;
    [self reloadData];
}
// Setting a new item source immediately reloads all items.
- (void)setItemSource:(id<DLWMMenuItemSource>)itemSource {
    NSAssert(itemSource, @"Method argument 'itemSource' must not be nil.");
    _itemSource = itemSource;
    [self reloadData];
}
// Swaps the layout object, re-registering for its change notification and
// animating items into their new positions.
// NOTE(review): the observer added here is never removed in a visible
// -dealloc; on deployment targets below iOS 9 that leaves a dangling observer
// — confirm a -dealloc exists elsewhere or add one.
- (void)setLayout:(id<DLWMMenuLayout>)layout {
    NSAssert(layout, @"Method argument 'layout' must not be nil.");
    if (_layout) {
        [[NSNotificationCenter defaultCenter] removeObserver:self name:DLWMMenuLayoutChangedNotification object:_layout];
    }
    _layout = layout;
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(layoutDidChange:) name:DLWMMenuLayoutChangedNotification object:_layout];
    [UIView animateWithDuration:0.5 delay:0.0 options:UIViewAnimationOptionCurveEaseInOut animations:^{
        [self layoutItemsWithCenter:self.centerPointWhileOpen animated:YES];
    } completion:nil];
}
- (void)setEnabled:(BOOL)enabled {
    [self setEnabled:enabled animated:YES];
}
// Enables/disables the menu, optionally cross-fading the alpha to signal the
// disabled state. Manual KVO notifications keep observers of 'enabled' working
// since the synthesized setter is bypassed.
- (void)setEnabled:(BOOL)enabled animated:(BOOL)animated {
    BOOL oldEnabled = _enabled;
    [self willChangeValueForKey:NSStringFromSelector(@selector(enabled))];
    _enabled = enabled;
    [self didChangeValueForKey:NSStringFromSelector(@selector(enabled))];
    if (enabled != oldEnabled) {
        NSTimeInterval duration = (animated) ? 0.5 : 0.0;
        [UIView animateWithDuration:duration animations:^{
            self.alpha = (enabled) ? 1.0 : 0.33;
        }];
    }
}
// Debug aid: tints the (normally invisible) menu view red.
- (void)setDebuggingEnabled:(BOOL)debuggingEnabled {
    _debuggingEnabled = debuggingEnabled;
    self.backgroundColor = (debuggingEnabled) ? [[UIColor redColor] colorWithAlphaComponent:0.5] : nil;
}
#pragma mark - Observing
// Layout object announced a parameter change; re-run layout around the
// current open-center.
- (void)layoutDidChange:(NSNotification *)notification {
    [self layoutItemsWithCenter:self.centerPointWhileOpen animated:YES];
}
// Syncs the item views with the data source: trims surplus items, updates the
// content/representedObject of the ones that remain, appends any new ones,
// then re-lays everything out.
- (void)reloadData {
    id<DLWMMenuDataSource> dataSource = self.dataSource;
    id<DLWMMenuItemSource> itemSource = self.itemSource;
    NSUInteger itemCount = [dataSource numberOfObjectsInMenu:self];
    NSUInteger currentItemCount = self.items.count;
    NSUInteger minCount = MIN(itemCount, currentItemCount);
    // Remove all items not needed any more:
    if (itemCount < currentItemCount) {
        for (NSUInteger i = 0; i < currentItemCount - itemCount; i++) {
            [self removeLastItem];
        }
    }
    // Update existing items:
    NSIndexSet *indexes = [NSIndexSet indexSetWithIndexesInRange:NSMakeRange(0, minCount)];
    [self.items enumerateObjectsAtIndexes:indexes options:0 usingBlock:^(DLWMMenuItem *item, NSUInteger index, BOOL *stop) {
        id object = [dataSource objectAtIndex:index inMenu:self];
        UIView *contentView = [itemSource viewForObject:object atIndex:index inMenu:self];
        item.contentView = contentView;
        item.representedObject = object;
    }];
    // Add all additional items:
    if (itemCount > currentItemCount) {
        for (NSUInteger i = currentItemCount; i < itemCount; i++) {
            id object = [dataSource objectAtIndex:i inMenu:self];
            UIView *contentView = [itemSource viewForObject:object atIndex:i inMenu:self];
            DLWMMenuItem *item = [[DLWMMenuItem alloc] initWithContentView:contentView representedObject:object];
            // New items start at the menu center so they animate outward.
            item.layoutLocation = [self isClosed] ? self.center : self.centerPointWhileOpen;
            [self addItem:item];
        }
    }
    [self layoutItemsWithCenter:self.centerPointWhileOpen animated:YES];
}
#pragma mark - Opening/Closing
- (void)open {
    [self openAnimated:YES];
}
// Opens the menu: expands the view, lays items out, then animates each item to
// its layout position with a per-item stagger. A timer (not a completion
// block) marks the end of the whole sequence, so it must be invalidated if a
// close starts in between.
- (void)openAnimated:(BOOL)animated {
    if ([self isOpenedOrOpening]) {
        return;
    }
    [self.timer invalidate];
    self.timer = nil;
    self.state = DLWMMenuStateOpening;
    [self adjustGeometryForState:self.state];
    NSArray *items = self.items;
    // Fall back to the shared "instant" animator when none is configured.
    DLWMMenuAnimator *animator = self.openAnimator ?: [DLWMMenuAnimator sharedInstantAnimator];
    NSTimeInterval openAnimationDelayBetweenItems = self.openAnimationDelayBetweenItems;
    NSTimeInterval totalDuration = (items.count - 1) * openAnimationDelayBetweenItems + animator.duration;
    if ([self.delegate respondsToSelector:@selector(willOpenMenu:withDuration:)]) {
        [self.delegate willOpenMenu:self withDuration:totalDuration];
    }
    [self.layout layoutItems:items forCenterPoint:self.centerPointWhileOpen inMenu:self];
    [items enumerateObjectsUsingBlock:^(DLWMMenuItem *item, NSUInteger index, BOOL *stop) {
        // Stagger each item's animation by its index.
        double delayInSeconds = openAnimationDelayBetweenItems * index;
        dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delayInSeconds * NSEC_PER_SEC));
        dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
            if ([self.itemDelegate respondsToSelector:@selector(willOpenItem:inMenu:withDuration:)]) {
                [self.itemDelegate willOpenItem:item inMenu:self withDuration:animator.duration];
            }
            [animator animateItem:item atIndex:index inMenu:self animated:animated completion:^(DLWMMenuItem *menuItem, NSUInteger itemIndex, DLWMMenu *menu, BOOL finished) {
                if ([self.itemDelegate respondsToSelector:@selector(didOpenItem:inMenu:withDuration:)]) {
                    [self.itemDelegate didOpenItem:menuItem inMenu:self withDuration:animator.duration];
                }
            }];
        });
    }];
    self.timer = [NSTimer scheduledTimerWithTimeInterval:totalDuration
                                                  target:self
                                                selector:@selector(handleDidOpenMenu:)
                                                userInfo:nil
                                                 repeats:NO];
}
// Timer callback: the staggered open sequence has finished.
- (void)handleDidOpenMenu:(NSTimer *)timer {
    self.timer = nil;
    self.state = DLWMMenuStateOpened;
    if ([self.delegate respondsToSelector:@selector(didOpenMenu:)]) {
        [self.delegate didOpenMenu:self];
    }
}
- (void)close {
    [self closeAnimated:YES];
}
- (void)closeAnimated:(BOOL)animated {
    [self closeWithSpecialAnimator:nil forItem:nil animated:animated];
}
- (void)closeWithSpecialAnimator:(DLWMMenuAnimator *)itemAnimator forItem:(DLWMMenuItem *)item {
    [self closeWithSpecialAnimator:itemAnimator forItem:item animated:YES];
}
// Closes the menu. One item (e.g. the one the user picked) may be closed with
// a different, "special" animator; it is moved to the front of the animation
// order. The main item never gets special treatment.
- (void)closeWithSpecialAnimator:(DLWMMenuAnimator *)specialAnimator forItem:(DLWMMenuItem *)specialItem animated:(BOOL)animated {
    if ([self isClosedOrClosing]) {
        return;
    }
    [self.timer invalidate];
    self.timer = nil;
    if (specialItem == self.mainItem) {
        specialItem = nil;
    }
    NSArray *items = self.items;
    __block DLWMMenuAnimator *animator = self.closeAnimator ?: [DLWMMenuAnimator sharedInstantAnimator];
    NSTimeInterval closeAnimationDelayBetweenItems = self.closeAnimationDelayBetweenItems;
    NSTimeInterval totalDuration = (items.count - 1) * closeAnimationDelayBetweenItems + animator.duration;
    if ([self.delegate respondsToSelector:@selector(willCloseMenu:withDuration:)]) {
        [self.delegate willCloseMenu:self withDuration:totalDuration];
    }
    self.state = DLWMMenuStateClosing;
    if (specialItem) {
        // Ensure the special item is the first one animated.
        items = [@[specialItem] arrayByAddingObjectsFromArray:[self itemsWithoutItem:specialItem]];
    }
    [items enumerateObjectsUsingBlock:^(DLWMMenuItem *item, NSUInteger index, BOOL *stop) {
        DLWMMenuAnimator *itemAnimator = animator;
        if (item == specialItem) {
            itemAnimator = specialAnimator ?: [DLWMMenuAnimator sharedInstantAnimator];
        }
        double delayInSeconds = closeAnimationDelayBetweenItems * index;
        dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delayInSeconds * NSEC_PER_SEC));
        dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
            if ([self.itemDelegate respondsToSelector:@selector(willCloseItem:inMenu:withDuration:)]) {
                [self.itemDelegate willCloseItem:item inMenu:self withDuration:itemAnimator.duration];
            }
            [itemAnimator animateItem:item atIndex:index inMenu:self animated:animated completion:^(DLWMMenuItem *menuItem, NSUInteger itemIndex, DLWMMenu *menu, BOOL finished) {
                if ([self.itemDelegate respondsToSelector:@selector(didCloseItem:inMenu:withDuration:)]) {
                    [self.itemDelegate didCloseItem:menuItem inMenu:self withDuration:itemAnimator.duration];
                }
            }];
        });
    }];
    self.timer = [NSTimer scheduledTimerWithTimeInterval:totalDuration
                                                  target:self
                                                selector:@selector(handleDidCloseMenu:)
                                                userInfo:nil
                                                 repeats:NO];
}
// Timer callback: the staggered close sequence has finished; shrink back to
// the main item's bounds.
- (void)handleDidCloseMenu:(NSTimer *)timer {
    self.timer = nil;
    self.state = DLWMMenuStateClosed;
    [self adjustGeometryForState:self.state];
    if ([self.delegate respondsToSelector:@selector(didCloseMenu:)]) {
        [self.delegate didCloseMenu:self];
    }
}
// Returns the item list with the given item filtered out (identity compare).
- (NSArray *)itemsWithoutItem:(DLWMMenuItem *)item {
    NSArray *items = self.items;
    if (item) {
        items = [items filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(DLWMMenuItem *menuItem, NSDictionary *bindings) {
            return menuItem != item;
        }]];
    }
    return items;
}
#pragma mark - UIGestureRecognizer Handlers
// Gate every recognizer on the enabled flag.
- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer {
    return self.enabled;
}
// The handlers below all follow the same pattern: bail out when disabled, then
// forward the gesture (with the item view it landed on) to the delegate if it
// implements the matching optional method.
- (void)receivedPinch:(UIPinchGestureRecognizer *)recognizer {
    if (!self.enabled) {
        return;
    }
    if ([self.delegate respondsToSelector:@selector(receivedPinch:onItem:inMenu:)]) {
        [self.delegate receivedPinch:recognizer onItem:(DLWMMenuItem *)recognizer.view inMenu:self];
    }
}
- (void)receivedPan:(UIPanGestureRecognizer *)recognizer {
    if (!self.enabled) {
        return;
    }
    if ([self.delegate respondsToSelector:@selector(receivedPan:onItem:inMenu:)]) {
        [self.delegate receivedPan:recognizer onItem:(DLWMMenuItem *)recognizer.view inMenu:self];
    }
}
- (void)receivedLongPress:(UILongPressGestureRecognizer *)recognizer {
    if (!self.enabled) {
        return;
    }
    if ([self.delegate respondsToSelector:@selector(receivedLongPress:onItem:inMenu:)]) {
        [self.delegate receivedLongPress:recognizer onItem:(DLWMMenuItem *)recognizer.view inMenu:self];
    }
}
- (void)receivedDoubleTap:(UITapGestureRecognizer *)recognizer {
    if (!self.enabled) {
        return;
    }
    if ([self.delegate respondsToSelector:@selector(receivedDoubleTap:onItem:inMenu:)]) {
        [self.delegate receivedDoubleTap:recognizer onItem:(DLWMMenuItem *)recognizer.view inMenu:self];
    }
}
- (void)receivedSingleTap:(UITapGestureRecognizer *)recognizer {
    if (!self.enabled) {
        return;
    }
    if ([self.delegate respondsToSelector:@selector(receivedSingleTap:onItem:inMenu:)]) {
        [self.delegate receivedSingleTap:recognizer onItem:(DLWMMenuItem *)recognizer.view inMenu:self];
    }
}
// Tap landed on the menu background (outside any item).
- (void)receivedSingleTapOutside:(UITapGestureRecognizer *)recognizer {
    if (!self.enabled) {
        return;
    }
    if ([self.delegate respondsToSelector:@selector(receivedSingleTap:outsideOfMenu:)]) {
        [self.delegate receivedSingleTap:recognizer outsideOfMenu:self];
    }
}
#pragma mark - States
// Convenience predicates over the four-valued state machine
// (closed -> opening -> opened -> closing -> closed).
- (BOOL)isClosed {
    return self.state == DLWMMenuStateClosed;
}
- (BOOL)isClosing {
    return self.state == DLWMMenuStateClosing;
}
- (BOOL)isClosedOrClosing {
    return self.state == DLWMMenuStateClosed || self.state == DLWMMenuStateClosing;
}
- (BOOL)isOpened {
    return self.state == DLWMMenuStateOpened;
}
- (BOOL)isOpening {
    return self.state == DLWMMenuStateOpening;
}
- (BOOL)isOpenedOrOpening {
    return self.state == DLWMMenuStateOpened || self.state == DLWMMenuStateOpening;
}
// YES while either transition animation is in flight.
- (BOOL)isAnimating {
    return self.state == DLWMMenuStateOpening || self.state == DLWMMenuStateClosing;
}
#pragma mark - Add/Remove Items
// Installs the full recognizer set on an item: pinch, pan, long-press,
// double-tap and single-tap (single-tap waits for double-tap to fail).
- (void)addGestureRecognizersToMenuItem:(DLWMMenuItem *)menuItem {
    NSAssert(menuItem, @"Method argument 'menuItem' must not be nil.");
    UIPinchGestureRecognizer *pinchRecognizer = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(receivedPinch:)];
    [menuItem addGestureRecognizer:pinchRecognizer];
    UIPanGestureRecognizer *panRecognizer = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(receivedPan:)];
    [menuItem addGestureRecognizer:panRecognizer];
    UILongPressGestureRecognizer *longPressRecognizer = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(receivedLongPress:)];
    [menuItem addGestureRecognizer:longPressRecognizer];
    UITapGestureRecognizer *doubleTapRecognizer = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(receivedDoubleTap:)];
    [doubleTapRecognizer setNumberOfTapsRequired:2];
    [menuItem addGestureRecognizer:doubleTapRecognizer];
    UITapGestureRecognizer *singleTapRecognizer = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(receivedSingleTap:)];
    [singleTapRecognizer requireGestureRecognizerToFail:doubleTapRecognizer];
    [menuItem addGestureRecognizer:singleTapRecognizer];
}
- (void)removeGestureRecognizersFromMenuItem:(DLWMMenuItem *)menuItem {
    for (UIGestureRecognizer *recognizer in menuItem.gestureRecognizers) {
        [menuItem removeGestureRecognizer:recognizer];
    }
}
// Appends an item, wires its recognizers and inserts it below the main item.
// Items added while the menu is open fade in at the menu center.
- (void)addItem:(DLWMMenuItem *)item {
    NSAssert(item, @"Method argument 'menuItem' must not be nil.");
    [((NSMutableArray *)self.items) addObject:item];
    [self addGestureRecognizersToMenuItem:item];
    item.userInteractionEnabled = YES;
    BOOL hidden = [self isClosed];
    item.hidden = hidden;
    if (!hidden) {
        item.alpha = 0.0;
        [UIView animateWithDuration:0.25 delay:0.0 options:UIViewAnimationOptionCurveEaseInOut animations:^{
            item.alpha = 1.0;
        } completion:nil];
    }
    item.center = self.centerPointWhileOpen;
    [self insertSubview:item belowSubview:self.mainItem];
}
- (void)removeItem:(DLWMMenuItem *)item {
    NSAssert(item, @"Method argument 'menuItem' must not be nil.");
    NSAssert(item.superview == self, @"Method argument 'menuItem' must be member of menu.");
    [item removeFromSuperview];
    [self removeGestureRecognizersFromMenuItem:item];
    [((NSMutableArray *)self.items) removeObject:item];
}
- (void)removeLastItem {
    DLWMMenuItem *item = [self.items lastObject];
    if (item) {
        [self removeItem:item];
    }
}
- (void)moveTo:(CGPoint)centerPoint {
    [self moveTo:centerPoint animated:YES];
}
// Moves the menu to a new center. While closed the whole (shrunken) view is
// moved; while open only the layout center is updated and items re-flow.
- (void)moveTo:(CGPoint)centerPoint animated:(BOOL)animated {
    // Moving the items' layoutLocation so that layouts which
    // rely on an item's previous location can be supported.
    // A potential candidate would be a force-directed layout.
    [self layoutItemsWithCenter:centerPoint animated:animated];
    if ([self isClosed]) {
        NSTimeInterval duration = (animated) ? 0.5 : 0.0;
        [UIView animateWithDuration:duration delay:0.0 options:UIViewAnimationOptionCurveEaseInOut animations:^{
            self.center = centerPoint;
        } completion:nil];
    } else {
        self.centerPointWhileOpen = centerPoint;
    }
}
// Asks the layout for fresh item locations around centerPoint; only animates
// the views into place when the menu is open(ing) — while closed the layout
// locations are updated without moving anything visible.
- (void)layoutItemsWithCenter:(CGPoint)centerPoint animated:(BOOL)animated {
    NSArray *items = self.items;
    if (!items.count) {
        return;
    }
    [self.layout layoutItems:items forCenterPoint:centerPoint inMenu:self];
    if ([self isOpenedOrOpening]) {
        NSTimeInterval duration = (animated) ? 0.5 : 0.0;
        [UIView animateWithDuration:duration delay:0.0 options:UIViewAnimationOptionCurveEaseInOut animations:^{
            self.mainItem.center = centerPoint;
            [items enumerateObjectsUsingBlock:^(DLWMMenuItem *item, NSUInteger index, BOOL *stop) {
                item.center = item.layoutLocation;
            }];
        } completion:nil];
    }
}
// Index by identity (pointer equality), not -isEqual:.
- (NSUInteger)indexOfItem:(DLWMMenuItem *)item {
    return [self.items indexOfObjectIdenticalTo:item];
}
@end
| {
"pile_set_name": "Github"
} |
//
// WeatherConditions.cs
//
// Author: Jeffrey Stedfast <[email protected]>
//
// Copyright (c) 2011 Xamarin Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
using System;
namespace WeatherMap
{
    /// <summary>
    /// Weather condition categories reported for a forecast
    /// (presumably used to pick a matching icon — confirm against callers).
    /// </summary>
    public enum WeatherConditions {
        Sunny,
        PartlyCloudy,
        Cloudy,
        Showers,
        ScatteredShowers,
        Thunderstorms,
        Snow
    }
}
| {
"pile_set_name": "Github"
} |
/*
* Scilab ( http://www.scilab.org/ ) - This file is part of Scilab
* Copyright (C) 2008 - DIGITEO - Antoine ELIAS
*
* Copyright (C) 2012 - 2016 - Scilab Enterprises
*
* This file is hereby licensed under the terms of the GNU GPL v2.0,
* pursuant to article 5.3.4 of the CeCILL v.2.1.
* This file was originally licensed under the terms of the CeCILL v2.1,
* and continues to be available under such terms.
* For more information, see the COPYING file which you should have received
* along with this program.
*
*/
#include "basic_functions.h"
#include <string.h>
/*
 * Fills the _iSize x _iSize buffer _pData with the Franck test matrix:
 * zero above the superdiagonal; entry on the superdiagonal of column k-1 and
 * every entry in the lower-triangular part of column k equal (n - k).
 * _pData must hold _iSize * _iSize doubles.
 */
void franck_matrix(int _iSize, double *_pData)
{
    int k = 0;
    int j = 0;

    /* Start from an all-zero matrix; only the lower triangle plus the
     * superdiagonal get written below. */
    memset(_pData, 0, sizeof(double) * _iSize * _iSize);

    _pData[0] = (double)_iSize;

    for (k = 1; k < _iSize; k++)
    {
        double value = (double)(_iSize - k);

        /* Superdiagonal entry of the previous column. */
        _pData[(k - 1) * _iSize + k] = value;

        /* Remaining entries of column k, rows 0..k. */
        for (j = 0; j <= k; j++)
        {
            _pData[k * _iSize + j] = value;
        }
    }
}
/*
 * Fills _pData with the inverse of the _iSize x _iSize Hilbert matrix
 * (integer-valued, symmetric), computed via closed-form recurrences rather
 * than by inverting anything. For _iSize == 2 this produces
 * [[4,-6],[-6,12]]. NOTE(review): despite the name this is the *inverse*
 * Hilbert matrix — consistent with Scilab's testmatrix('hilb', n).
 */
void hilb_matrix(int _iSize, double *_pData)
{
    int iIndex1 = 0;
    int iIndex2 = 0;
    /* dblVal tracks the row-scaling factor; starts at n for row 0. */
    double dblVal = _iSize;
    double dblTemp = 0;
    for (iIndex1 = 0 ; iIndex1 < _iSize ; iIndex1++)
    {
        /* Advance the row factor by the standard recurrence (skip row 0). */
        if (iIndex1 != 0)
        {
            dblVal = ((_iSize - iIndex1) * dblVal * (_iSize + iIndex1)) / pow(iIndex1, 2);
        }
        /* Diagonal entry. */
        dblTemp = dblVal * dblVal;
        _pData[iIndex1 * _iSize + iIndex1] = dblTemp / ( 2 * iIndex1 + 1);
        if (iIndex1 == _iSize - 1)
        {
            break;
        }
        /* Off-diagonal entries of the row, mirrored for symmetry. */
        for (iIndex2 = iIndex1 + 1 ; iIndex2 < _iSize ; iIndex2++)
        {
            dblTemp = -((_iSize - iIndex2) * dblTemp * (_iSize + iIndex2)) / pow(iIndex2, 2);
            _pData[iIndex1 * _iSize + iIndex2] = dblTemp / (iIndex1 + iIndex2 + 1);
            _pData[iIndex2 * _iSize + iIndex1] = _pData[iIndex1 * _iSize + iIndex2];
        }
    }
}
/*
 * Fills _pData with an _iSize x _iSize magic square, using the classic
 * three-case construction:
 *   - odd n:           the "siamese" (de la Loubere) method;
 *   - singly even n (n % 4 == 2): build the odd-order square of size n/2 in
 *     the upper-left quadrant, derive the other three quadrants by adding
 *     multiples of (n/2)^2, then fix up columns with BLAS dswap;
 *   - doubly even n (n % 4 == 0): fill 1..n^2 in order, complementing
 *     entries on the crossing diagonal pattern.
 * Relies on the Fortran BLAS routine dswap via the C2F macro, so it cannot
 * run standalone.
 */
void magic_matrix(int _iSize, double *_pData)
{
    int iNewSize = 0;
    int iIndex1 = 0;
    int iIndex2 = 0;
    int iUn = 1;  /* increment argument (1) for dswap */
    int iTemp1 = 0;
    int iTemp2 = 0;
    if (_iSize % 4 != 0)
    {
        int iRow = 0;
        int iCol = 0;
        /* For singly even n build an odd square of half the size first. */
        if (_iSize % 2 == 0)
        {
            iNewSize = _iSize / 2;
        }
        if (_iSize % 2 != 0)
        {
            iNewSize = _iSize;
        }
        /* Odd order (or upper-left quadrant of a singly even order):
         * siamese method — start mid-top, step up-right, drop down on
         * collision. */
        iRow = 0;
        iCol = iNewSize / 2;
        memset(_pData, 0x00, sizeof(double) * _iSize * _iSize);
        for (iIndex1 = 0 ; iIndex1 < iNewSize * iNewSize ; iIndex1++)
        {
            int iRowTemp = 0;
            int iColTemp = 0;
            _pData[iRow + iCol * _iSize] = iIndex1 + 1;
            /* Tentatively move up-right, wrapping around the quadrant. */
            iRowTemp = iRow - 1;
            iColTemp = iCol + 1;
            if (iRowTemp < 0)
            {
                iRowTemp = iNewSize - 1;
            }
            if (iColTemp >= iNewSize)
            {
                iColTemp = 0;
            }
            /* Occupied cell: step straight down instead. */
            if (_pData[iRowTemp + iColTemp * _iSize] != 0)
            {
                iRowTemp = iRow + 1;
                iColTemp = iCol;
            }
            iRow = iRowTemp;
            iCol = iColTemp;
        }
        if (_iSize % 2 != 0)
        {
            return;
        }
        /* Singly even order: derive the other three quadrants by offsetting
         * the base square. */
        for (iIndex1 = 0 ; iIndex1 < iNewSize ; iIndex1++)
        {
            for (iIndex2 = 0 ; iIndex2 < iNewSize ; iIndex2++)
            {
                int iRow = iIndex1 + iNewSize;
                int iCol = iIndex2 + iNewSize;
                _pData[iIndex1 + iCol * _iSize] = _pData[iIndex1 + iIndex2 * _iSize] + 2 * iNewSize * iNewSize;
                _pData[iRow + iIndex2 * _iSize] = _pData[iIndex1 + iIndex2 * _iSize] + 3 * iNewSize * iNewSize;
                _pData[iRow + iCol * _iSize] = _pData[iIndex1 + iIndex2 * _iSize] + iNewSize * iNewSize;
            }
        }
        if ((iNewSize - 1) / 2 == 0)
        {
            return;
        }
        /* Swap the leftmost (iNewSize-1)/2 half-columns between the upper and
         * lower halves to restore the magic sums. */
        for (iIndex1 = 0 ; iIndex1 < (iNewSize - 1) / 2 ; iIndex1++)
        {
            C2F(dswap)(&iNewSize, &_pData[iIndex1 * _iSize], &iUn, &_pData[iNewSize + iIndex1 * _iSize], &iUn);
        }
        /* Fix up the two special cells in the center column. */
        iTemp1 = (iNewSize + 1) / 2 - 1;
        iTemp2 = iTemp1 + iNewSize;
        C2F(dswap)(&iUn, &_pData[iTemp1], &iUn, &_pData[iTemp2], &iUn);
        C2F(dswap)(&iUn, &_pData[iTemp1 * _iSize + iTemp1], &iUn, &_pData[iTemp1 * _iSize + iTemp2], &iUn);
        /* Swap the rightmost (iNewSize-3)/2 half-columns as well. */
        iTemp1 = _iSize - (iNewSize - 3) / 2;
        if (iTemp1 > _iSize)
        {
            return;
        }
        for (iIndex1 = iTemp1 ; iIndex1 < _iSize ; iIndex1++)
        {
            C2F(dswap)(&iNewSize, &_pData[iIndex1 * _iSize], &iUn, &_pData[iNewSize + iIndex1 * _iSize], &iUn);
        }
    }
    else
    {
        /* Doubly even order: fill 1..n^2 row by row, complementing entries
         * whose (row, col) fall in matching 4x4 diagonal blocks. */
        int iVal = 1;
        for (iIndex1 = 0 ; iIndex1 < _iSize ; iIndex1++)
        {
            for (iIndex2 = 0 ; iIndex2 < _iSize ; iIndex2++)
            {
                _pData[iIndex2 * _iSize + iIndex1] = iVal;
                if (((iIndex1 + 1) % 4) / 2 == ((iIndex2 + 1) % 4) / 2)
                {
                    _pData[iIndex2 * _iSize + iIndex1] = _iSize * _iSize + 1 - iVal;
                }
                iVal++;
            }
        }
    }
}
| {
"pile_set_name": "Github"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.