text
stringlengths 2
100k
| meta
dict |
---|---|
/*
* Copyright (C) 2015 Jakub Kicinski <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2
* as published by the Free Software Foundation
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/usb.h>
#include "mt7601u.h"
#include "usb.h"
#include "trace.h"
/* USB vendor/product IDs claimed by this driver. */
static const struct usb_device_id mt7601u_device_table[] = {
	{ USB_DEVICE(0x0b05, 0x17d3) },
	{ USB_DEVICE(0x0e8d, 0x760a) },
	{ USB_DEVICE(0x0e8d, 0x760b) },
	{ USB_DEVICE(0x13d3, 0x3431) },
	{ USB_DEVICE(0x13d3, 0x3434) },
	{ USB_DEVICE(0x148f, 0x7601) },
	{ USB_DEVICE(0x148f, 0x760a) },
	{ USB_DEVICE(0x148f, 0x760b) },
	{ USB_DEVICE(0x148f, 0x760c) },
	{ USB_DEVICE(0x148f, 0x760d) },
	{ USB_DEVICE(0x2001, 0x3d04) },
	{ USB_DEVICE(0x2717, 0x4106) },
	{ USB_DEVICE(0x2955, 0x0001) },
	{ USB_DEVICE(0x2955, 0x1001) },
	{ USB_DEVICE(0x2a5f, 0x1000) },
	{ USB_DEVICE(0x7392, 0x7710) },
	{ 0, }	/* terminating entry */
};
/* Allocate an URB plus a DMA-coherent buffer of @len bytes into @buf.
 * NOTE the inverted return convention: returns true when EITHER
 * allocation failed (callers treat non-zero as error); partially
 * allocated members are released by mt7601u_usb_free_buf().
 */
bool mt7601u_usb_alloc_buf(struct mt7601u_dev *dev, size_t len,
			   struct mt7601u_dma_buf *buf)
{
	struct usb_device *usb_dev = mt7601u_to_usb_dev(dev);

	buf->len = len;
	buf->urb = usb_alloc_urb(0, GFP_KERNEL);
	buf->buf = usb_alloc_coherent(usb_dev, buf->len, GFP_KERNEL, &buf->dma);

	return !buf->urb || !buf->buf;
}
/* Release the coherent buffer and URB allocated by mt7601u_usb_alloc_buf().
 * Safe to call with NULL members - both USB helpers accept NULL.
 */
void mt7601u_usb_free_buf(struct mt7601u_dev *dev, struct mt7601u_dma_buf *buf)
{
	struct usb_device *usb_dev = mt7601u_to_usb_dev(dev);

	usb_free_coherent(usb_dev, buf->len, buf->buf, buf->dma);
	usb_free_urb(buf->urb);
}
/* Fill @buf's URB for a bulk transfer on endpoint @ep_idx in direction
 * @dir (USB_DIR_IN / USB_DIR_OUT) and submit it.  Returns 0 or the
 * negative errno from usb_submit_urb().
 */
int mt7601u_usb_submit_buf(struct mt7601u_dev *dev, int dir, int ep_idx,
			   struct mt7601u_dma_buf *buf, gfp_t gfp,
			   usb_complete_t complete_fn, void *context)
{
	struct usb_device *usb_dev = mt7601u_to_usb_dev(dev);
	unsigned pipe;
	int ret;

	if (dir == USB_DIR_IN)
		pipe = usb_rcvbulkpipe(usb_dev, dev->in_eps[ep_idx]);
	else
		pipe = usb_sndbulkpipe(usb_dev, dev->out_eps[ep_idx]);

	usb_fill_bulk_urb(buf->urb, usb_dev, pipe, buf->buf, buf->len,
			  complete_fn, context);
	/* Buffer was coherent-allocated - tell USB core not to map it again. */
	buf->urb->transfer_dma = buf->dma;
	buf->urb->transfer_flags |= URB_NO_TRANSFER_DMA_MAP;

	trace_mt_submit_urb(dev, buf->urb);
	ret = usb_submit_urb(buf->urb, gfp);
	if (ret)
		dev_err(dev->dev, "Error: submit URB dir:%d ep:%d failed:%d\n",
			dir, ep_idx, ret);
	return ret;
}
/* Generic URB completion callback: wakes whoever is waiting on the
 * struct completion that was stashed in urb->context at submit time.
 */
void mt7601u_complete_urb(struct urb *urb)
{
	struct completion *done = urb->context;

	complete(done);
}
/* Issue a vendor control request, retrying up to MT_VEND_REQ_MAX_RETRY
 * times with a 5 ms pause between attempts.  -ENODEV marks the device as
 * removed and is returned immediately (no point retrying).  Returns the
 * number of bytes transferred on success or a negative errno.
 */
int mt7601u_vendor_request(struct mt7601u_dev *dev, const u8 req,
			   const u8 direction, const u16 val, const u16 offset,
			   void *buf, const size_t buflen)
{
	int i, ret;
	struct usb_device *usb_dev = mt7601u_to_usb_dev(dev);
	const u8 req_type = direction | USB_TYPE_VENDOR | USB_RECIP_DEVICE;
	const unsigned int pipe = (direction == USB_DIR_IN) ?
		usb_rcvctrlpipe(usb_dev, 0) : usb_sndctrlpipe(usb_dev, 0);

	for (i = 0; i < MT_VEND_REQ_MAX_RETRY; i++) {
		ret = usb_control_msg(usb_dev, pipe, req, req_type,
				      val, offset, buf, buflen,
				      MT_VEND_REQ_TOUT_MS);
		trace_mt_vend_req(dev, pipe, req, req_type, val, offset,
				  buf, buflen, ret);
		/* Device is gone - record it and bail out. */
		if (ret == -ENODEV)
			set_bit(MT7601U_STATE_REMOVED, &dev->state);
		if (ret >= 0 || ret == -ENODEV)
			return ret;

		msleep(5);
	}
	dev_err(dev->dev, "Vendor request req:%02x off:%04x failed:%d\n",
		req, offset, ret);
	return ret;
}
/* Ask the device to reset itself via a vendor DEV_MODE request.
 * The return value is intentionally ignored - nothing useful can be
 * done about a failure here.
 */
void mt7601u_vendor_reset(struct mt7601u_dev *dev)
{
	mt7601u_vendor_request(dev, MT_VEND_DEV_MODE, USB_DIR_OUT,
			       MT_VEND_DEV_MODE_RESET, 0, NULL, 0);
}
/* Read a 32-bit register at @offset over the control pipe.
 * Offsets must fit in 16 bits (control-transfer index field limitation).
 * Returns ~0 when the read fails or comes back with the wrong length.
 */
u32 mt7601u_rr(struct mt7601u_dev *dev, u32 offset)
{
	int ret;
	u32 val = ~0;

	WARN_ONCE(offset > USHRT_MAX, "read high off:%08x", offset);

	mutex_lock(&dev->vendor_req_mutex);

	ret = mt7601u_vendor_request(dev, MT_VEND_MULTI_READ, USB_DIR_IN,
				     0, offset, dev->vend_buf, MT_VEND_BUF);
	if (ret == MT_VEND_BUF)
		val = get_unaligned_le32(dev->vend_buf);
	else if (ret > 0)
		dev_err(dev->dev, "Error: wrong size read:%d off:%08x\n",
			ret, offset);

	mutex_unlock(&dev->vendor_req_mutex);

	trace_reg_read(dev, offset, val);
	return val;
}
/* Write a 32-bit value as two 16-bit vendor requests (the control
 * transfer's value field is only 16 bits wide): low half at @offset,
 * high half at @offset + 2.  Serialized by vendor_req_mutex.
 */
int mt7601u_vendor_single_wr(struct mt7601u_dev *dev, const u8 req,
			     const u16 offset, const u32 val)
{
	int ret;

	mutex_lock(&dev->vendor_req_mutex);

	ret = mt7601u_vendor_request(dev, req, USB_DIR_OUT,
				     val & 0xffff, offset, NULL, 0);
	if (!ret)
		ret = mt7601u_vendor_request(dev, req, USB_DIR_OUT,
					     val >> 16, offset + 2, NULL, 0);

	mutex_unlock(&dev->vendor_req_mutex);

	return ret;
}
/* Write a 32-bit register at @offset; failures are logged by the callee. */
void mt7601u_wr(struct mt7601u_dev *dev, u32 offset, u32 val)
{
	WARN_ONCE(offset > USHRT_MAX, "write high off:%08x", offset);

	mt7601u_vendor_single_wr(dev, MT_VEND_WRITE, offset, val);
	trace_reg_write(dev, offset, val);
}
/* Read-modify-write: clear the @mask bits, OR in @val and write the
 * result back.  Returns the value that was written.
 */
u32 mt7601u_rmw(struct mt7601u_dev *dev, u32 offset, u32 mask, u32 val)
{
	u32 merged = (mt7601u_rr(dev, offset) & ~mask) | val;

	mt7601u_wr(dev, offset, merged);

	return merged;
}
/* Read-modify-write variant that skips the (expensive USB) write when
 * the register already holds the target value.  Returns the new value.
 */
u32 mt7601u_rmc(struct mt7601u_dev *dev, u32 offset, u32 mask, u32 val)
{
	u32 cur = mt7601u_rr(dev, offset);
	u32 next = (cur & ~mask) | val;

	if (cur != next)
		mt7601u_wr(dev, offset, next);

	return next;
}
/* Burst-write @len bytes from @data to registers starting at @offset.
 * Both @offset and @len must be multiples of 4 (words are written).
 */
void mt7601u_wr_copy(struct mt7601u_dev *dev, u32 offset,
		     const void *data, int len)
{
	WARN_ONCE(offset & 3, "unaligned write copy off:%08x", offset);
	WARN_ONCE(len & 3, "short write copy off:%08x", offset);

	mt7601u_burst_write_regs(dev, offset, data, len / 4);
}
/* Write a 6-byte address into two consecutive registers: bytes 0-3
 * (little-endian) at @offset, bytes 4-5 at @offset + 4.
 */
void mt7601u_addr_wr(struct mt7601u_dev *dev, const u32 offset, const u8 *addr)
{
	u32 lo = get_unaligned_le32(addr);
	u32 hi = addr[4] | (addr[5] << 8);

	mt7601u_wr(dev, offset, lo);
	mt7601u_wr(dev, offset + 4, hi);
}
/* Walk the interface's endpoint descriptors and record bulk IN/OUT
 * endpoint numbers (and max packet sizes) into dev->in_eps/out_eps.
 * Fails with -EINVAL unless exactly __MT_EP_IN_MAX IN and
 * __MT_EP_OUT_MAX OUT bulk endpoints are present.
 */
static int mt7601u_assign_pipes(struct usb_interface *usb_intf,
				struct mt7601u_dev *dev)
{
	struct usb_endpoint_descriptor *ep_desc;
	struct usb_host_interface *intf_desc = usb_intf->cur_altsetting;
	unsigned i, ep_i = 0, ep_o = 0;

	BUILD_BUG_ON(sizeof(dev->in_eps) < __MT_EP_IN_MAX);
	BUILD_BUG_ON(sizeof(dev->out_eps) < __MT_EP_OUT_MAX);

	for (i = 0; i < intf_desc->desc.bNumEndpoints; i++) {
		ep_desc = &intf_desc->endpoint[i].desc;

		if (usb_endpoint_is_bulk_in(ep_desc) &&
		    ep_i++ < __MT_EP_IN_MAX) {
			dev->in_eps[ep_i - 1] = usb_endpoint_num(ep_desc);
			dev->in_max_packet = usb_endpoint_maxp(ep_desc);
			/* Note: this is ignored by usb sub-system but vendor
			 * code does it. We can drop this at some point.
			 */
			dev->in_eps[ep_i - 1] |= USB_DIR_IN;
		} else if (usb_endpoint_is_bulk_out(ep_desc) &&
			   ep_o++ < __MT_EP_OUT_MAX) {
			dev->out_eps[ep_o - 1] = usb_endpoint_num(ep_desc);
			dev->out_max_packet = usb_endpoint_maxp(ep_desc);
		}
	}

	if (ep_i != __MT_EP_IN_MAX || ep_o != __MT_EP_OUT_MAX) {
		dev_err(dev->dev, "Error: wrong pipe number in:%d out:%d\n",
			ep_i, ep_o);
		return -EINVAL;
	}

	return 0;
}
/* USB probe: allocate driver state, discover the bulk endpoints, wait
 * for the ASIC, initialize the hardware and register with mac80211.
 * On failure everything acquired so far is released on the err paths.
 */
static int mt7601u_probe(struct usb_interface *usb_intf,
			 const struct usb_device_id *id)
{
	struct usb_device *usb_dev = interface_to_usbdev(usb_intf);
	struct mt7601u_dev *dev;
	u32 asic_rev, mac_rev;
	int ret;

	dev = mt7601u_alloc_device(&usb_intf->dev);
	if (!dev)
		return -ENOMEM;

	usb_dev = usb_get_dev(usb_dev);
	usb_reset_device(usb_dev);

	usb_set_intfdata(usb_intf, dev);

	/* Scratch buffer for vendor register reads (see mt7601u_rr()). */
	dev->vend_buf = devm_kmalloc(dev->dev, MT_VEND_BUF, GFP_KERNEL);
	if (!dev->vend_buf) {
		ret = -ENOMEM;
		goto err;
	}

	ret = mt7601u_assign_pipes(usb_intf, dev);
	if (ret)
		goto err;
	ret = mt7601u_wait_asic_ready(dev);
	if (ret)
		goto err;

	asic_rev = mt7601u_rr(dev, MT_ASIC_VERSION);
	mac_rev = mt7601u_rr(dev, MT_MAC_CSR0);
	dev_info(dev->dev, "ASIC revision: %08x MAC revision: %08x\n",
		 asic_rev, mac_rev);

	/* Note: vendor driver skips this check for MT7601U */
	if (!(mt7601u_rr(dev, MT_EFUSE_CTRL) & MT_EFUSE_CTRL_SEL))
		dev_warn(dev->dev, "Warning: eFUSE not present\n");

	ret = mt7601u_init_hardware(dev);
	if (ret)
		goto err;
	ret = mt7601u_register_device(dev);
	if (ret)
		goto err_hw;

	set_bit(MT7601U_STATE_INITIALIZED, &dev->state);

	return 0;
err_hw:
	mt7601u_cleanup(dev);
err:
	usb_set_intfdata(usb_intf, NULL);
	usb_put_dev(interface_to_usbdev(usb_intf));

	destroy_workqueue(dev->stat_wq);
	ieee80211_free_hw(dev->hw);
	return ret;
}
/* USB disconnect: tear everything down in reverse order of probe. */
static void mt7601u_disconnect(struct usb_interface *usb_intf)
{
	struct mt7601u_dev *dev = usb_get_intfdata(usb_intf);

	ieee80211_unregister_hw(dev->hw);
	mt7601u_cleanup(dev);

	usb_set_intfdata(usb_intf, NULL);
	usb_put_dev(interface_to_usbdev(usb_intf));

	destroy_workqueue(dev->stat_wq);
	ieee80211_free_hw(dev->hw);
}
/* System suspend: stop all device activity; the hardware is fully
 * re-initialized in mt7601u_resume().
 */
static int mt7601u_suspend(struct usb_interface *usb_intf, pm_message_t state)
{
	struct mt7601u_dev *dev = usb_get_intfdata(usb_intf);

	mt7601u_cleanup(dev);

	return 0;
}
/* System resume (also used as reset_resume): bring the hardware back up
 * from scratch, since suspend ran the full cleanup.
 */
static int mt7601u_resume(struct usb_interface *usb_intf)
{
	struct mt7601u_dev *dev = usb_get_intfdata(usb_intf);
	int ret;

	ret = mt7601u_init_hardware(dev);
	if (ret)
		return ret;

	set_bit(MT7601U_STATE_INITIALIZED, &dev->state);

	return 0;
}
MODULE_DEVICE_TABLE(usb, mt7601u_device_table);
MODULE_FIRMWARE(MT7601U_FIRMWARE);
MODULE_LICENSE("GPL");

/* USB driver registration glue. */
static struct usb_driver mt7601u_driver = {
	.name = KBUILD_MODNAME,
	.id_table = mt7601u_device_table,
	.probe = mt7601u_probe,
	.disconnect = mt7601u_disconnect,
	.suspend = mt7601u_suspend,
	.resume = mt7601u_resume,
	.reset_resume = mt7601u_resume,
	.soft_unbind = 1,
	.disable_hub_initiated_lpm = 1,
};
module_usb_driver(mt7601u_driver);
|
{
"pile_set_name": "Github"
}
|
{
"_args": [
[
{
"raw": "wrap-ansi@^2.0.0",
"scope": null,
"escapedName": "wrap-ansi",
"name": "wrap-ansi",
"rawSpec": "^2.0.0",
"spec": ">=2.0.0 <3.0.0",
"type": "range"
},
"/Users/qinliang.ql/Desktop/test/node_modules/yargs/node_modules/cliui"
]
],
"_from": "wrap-ansi@>=2.0.0 <3.0.0",
"_id": "[email protected]",
"_inCache": true,
"_location": "/wrap-ansi",
"_nodeVersion": "4.6.2",
"_npmOperationalInternal": {
"host": "packages-18-east.internal.npmjs.com",
"tmp": "tmp/wrap-ansi-2.1.0.tgz_1480440082575_0.23112521297298372"
},
"_npmUser": {
"name": "sindresorhus",
"email": "[email protected]"
},
"_npmVersion": "2.15.11",
"_phantomChildren": {},
"_requested": {
"raw": "wrap-ansi@^2.0.0",
"scope": null,
"escapedName": "wrap-ansi",
"name": "wrap-ansi",
"rawSpec": "^2.0.0",
"spec": ">=2.0.0 <3.0.0",
"type": "range"
},
"_requiredBy": [
"/sass-graph/cliui",
"/yargs/cliui"
],
"_resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz",
"_shasum": "d8fc3d284dd05794fe84973caecdd1cf824fdd85",
"_shrinkwrap": null,
"_spec": "wrap-ansi@^2.0.0",
"_where": "/Users/qinliang.ql/Desktop/test/node_modules/yargs/node_modules/cliui",
"author": {
"name": "Sindre Sorhus",
"email": "[email protected]",
"url": "sindresorhus.com"
},
"bugs": {
"url": "https://github.com/chalk/wrap-ansi/issues"
},
"dependencies": {
"string-width": "^1.0.1",
"strip-ansi": "^3.0.1"
},
"description": "Wordwrap a string with ANSI escape codes",
"devDependencies": {
"ava": "^0.16.0",
"chalk": "^1.1.0",
"coveralls": "^2.11.4",
"has-ansi": "^2.0.0",
"nyc": "^6.2.1",
"strip-ansi": "^3.0.0",
"xo": "*"
},
"directories": {},
"dist": {
"shasum": "d8fc3d284dd05794fe84973caecdd1cf824fdd85",
"tarball": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz"
},
"engines": {
"node": ">=0.10.0"
},
"files": [
"index.js"
],
"gitHead": "a731af5a3461d92f2af302e81e05ea698a3c8c1a",
"homepage": "https://github.com/chalk/wrap-ansi#readme",
"keywords": [
"wrap",
"break",
"wordwrap",
"wordbreak",
"linewrap",
"ansi",
"styles",
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"string",
"tty",
"escape",
"formatting",
"rgb",
"256",
"shell",
"xterm",
"log",
"logging",
"command-line",
"text"
],
"license": "MIT",
"maintainers": [
{
"name": "bcoe",
"email": "[email protected]"
},
{
"name": "dthree",
"email": "[email protected]"
},
{
"name": "qix",
"email": "[email protected]"
},
{
"name": "sindresorhus",
"email": "[email protected]"
}
],
"name": "wrap-ansi",
"optionalDependencies": {},
"readme": "ERROR: No README data found!",
"repository": {
"type": "git",
"url": "git+https://github.com/chalk/wrap-ansi.git"
},
"scripts": {
"coveralls": "nyc report --reporter=text-lcov | coveralls",
"test": "xo && nyc ava"
},
"version": "2.1.0"
}
|
{
"pile_set_name": "Github"
}
|
{% extends 'notification/email_base.html' %}
{% block subject %}[{{ site_name }}] New keg tapped: Keg {{ keg.id }}: {{ keg.type }}{% endblock %}
{% block body_plain %}
A new keg of {{ keg.type }} was just tapped on {{ site_name }}!
Track it here: {{ url }}
{% endblock %}
{% block body_html %}
<p>
A new keg of <b>{{ keg.type }}</b> was just tapped on
<b><a href="{{ site_url }}">{{ site_name }}</a></b>!
</p>
<p>
Track it <a href="{{ url }}">here</a>.
</p>
{% endblock %}
|
{
"pile_set_name": "Github"
}
|
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.scenes.scene2d.ui;
import com.badlogic.gdx.graphics.g2d.Batch;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.Group;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.utils.Layout;
import com.badlogic.gdx.utils.SnapshotArray;
/** A {@link Group} that participates in layout and provides a minimum, preferred, and maximum size.
* <p>
* The default preferred size of a widget group is 0 and this is almost always overridden by a subclass. The default minimum size
* returns the preferred size, so a subclass may choose to return 0 for minimum size if it wants to allow itself to be sized
* smaller than the preferred size. The default maximum size is 0, which means no maximum size.
* <p>
* See {@link Layout} for details on how a widget group should participate in layout. A widget group's mutator methods should call
* {@link #invalidate()} or {@link #invalidateHierarchy()} as needed. By default, invalidateHierarchy is called when child widgets
* are added and removed.
* @author Nathan Sweet */
public class WidgetGroup extends Group implements Layout {
	// True when layout() must run before the next draw/size query.
	private boolean needsLayout = true;
	// When set, this group sizes itself to its parent (or stage) in validate().
	private boolean fillParent;
	// When false, validate() is a no-op (see setLayoutEnabled).
	private boolean layoutEnabled = true;

	public WidgetGroup () {
	}

	/** Creates a new widget group containing the specified actors. */
	public WidgetGroup (Actor... actors) {
		for (Actor actor : actors)
			addActor(actor);
	}

	/** Minimum size defaults to the preferred size. */
	public float getMinWidth () {
		return getPrefWidth();
	}

	public float getMinHeight () {
		return getPrefHeight();
	}

	/** Preferred size defaults to 0; subclasses are expected to override. */
	public float getPrefWidth () {
		return 0;
	}

	public float getPrefHeight () {
		return 0;
	}

	/** 0 means no maximum size (see the class javadoc). */
	public float getMaxWidth () {
		return 0;
	}

	public float getMaxHeight () {
		return 0;
	}

	/** Enables/disables layout for this group and, recursively, its children. */
	public void setLayoutEnabled (boolean enabled) {
		layoutEnabled = enabled;
		setLayoutEnabled(this, enabled);
	}

	// Recursive helper: propagate the flag to Layout children; descend into
	// plain Groups so nested Layout actors are reached too.
	private void setLayoutEnabled (Group parent, boolean enabled) {
		SnapshotArray<Actor> children = parent.getChildren();
		for (int i = 0, n = children.size; i < n; i++) {
			Actor actor = children.get(i);
			if (actor instanceof Layout)
				((Layout)actor).setLayoutEnabled(enabled);
			else if (actor instanceof Group) //
				setLayoutEnabled((Group)actor, enabled);
		}
	}

	/** Sizes to the parent if fillParent is set, then runs layout() if this
	 * group has been invalidated since the last validate. */
	public void validate () {
		if (!layoutEnabled) return;

		Group parent = getParent();
		if (fillParent && parent != null) {
			float parentWidth, parentHeight;
			Stage stage = getStage();
			if (stage != null && parent == stage.getRoot()) {
				parentWidth = stage.getWidth();
				parentHeight = stage.getHeight();
			} else {
				parentWidth = parent.getWidth();
				parentHeight = parent.getHeight();
			}
			if (getWidth() != parentWidth || getHeight() != parentHeight) {
				setWidth(parentWidth);
				setHeight(parentHeight);
				invalidate();
			}
		}

		if (!needsLayout) return;
		needsLayout = false;
		layout();

		// Widgets may call invalidateHierarchy during layout (eg, a wrapped label). The root-most widget group retries layout a
		// reasonable number of times.
		if (needsLayout) {
			if (parent instanceof WidgetGroup) return; // The parent widget will layout again.
			for (int i = 0; i < 5; i++) {
				needsLayout = false;
				layout();
				if (!needsLayout) break;
			}
		}
	}

	/** Returns true if the widget's layout has been {@link #invalidate() invalidated}. */
	public boolean needsLayout () {
		return needsLayout;
	}

	public void invalidate () {
		needsLayout = true;
	}

	/** Invalidates this group and every Layout ancestor up the hierarchy. */
	public void invalidateHierarchy () {
		invalidate();
		Group parent = getParent();
		if (parent instanceof Layout) ((Layout)parent).invalidateHierarchy();
	}

	protected void childrenChanged () {
		invalidateHierarchy();
	}

	protected void sizeChanged () {
		invalidate();
	}

	/** Sizes this group to its preferred size and validates; done twice because
	 * validating may change the preferred size (eg, a wrapped label). */
	public void pack () {
		setSize(getPrefWidth(), getPrefHeight());
		validate();
		// Validating the layout may change the pref size. Eg, a wrapped label doesn't know its pref height until it knows its
		// width, so it calls invalidateHierarchy() in layout() if its pref height has changed.
		setSize(getPrefWidth(), getPrefHeight());
		validate();
	}

	public void setFillParent (boolean fillParent) {
		this.fillParent = fillParent;
	}

	/** No-op by default; subclasses position and size their children here. */
	public void layout () {
	}

	/** If this method is overridden, the super method or {@link #validate()} should be called to ensure the widget group is laid
	 * out. */
	public void draw (Batch batch, float parentAlpha) {
		validate();
		super.draw(batch, parentAlpha);
	}
}
|
{
"pile_set_name": "Github"
}
|
/* PROJ.4 Wagner III pseudocylindrical projection, spherical form only.
 * PROJ_PARMS__, FORWARD, INVERSE, FREEUP and ENTRY0/ENDENTRY are
 * code-generation macros supplied by projects.h.
 */
#define PROJ_PARMS__ \
	double C_x;
#define PJ_LIB__
# include <projects.h>
PROJ_HEAD(wag3, "Wagner III") "\n\tPCyl., Sph.\n\tlat_ts=";
#define TWOTHIRD 0.6666666666666666666667
FORWARD(s_forward); /* spheroid */
	/* x scales longitude by cos(2/3 * lat); y is latitude unchanged */
	xy.x = P->C_x * lp.lam * cos(TWOTHIRD * lp.phi);
	xy.y = lp.phi;
	return (xy);
}
INVERSE(s_inverse); /* spheroid */
	/* exact inverse of the forward mapping above */
	lp.phi = xy.y;
	lp.lam = xy.x / (P->C_x * cos(TWOTHIRD * lp.phi));
	return (lp);
}
FREEUP; if (P) pj_dalloc(P); }
ENTRY0(wag3)
	double ts;

	/* lat_ts: latitude of true scale (radians), from the projection params */
	ts = pj_param(P->params, "rlat_ts").f;
	P->C_x = cos(ts) / cos(2.*ts/3.);
	P->es = 0.; P->inv = s_inverse; P->fwd = s_forward;
ENDENTRY(P)
|
{
"pile_set_name": "Github"
}
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.ads.googleads.v3.services;
import "google/ads/googleads/v3/resources/feed_placeholder_view.proto";
import "google/api/annotations.proto";
import "google/api/client.proto";
import "google/api/field_behavior.proto";
import "google/api/resource.proto";
option csharp_namespace = "Google.Ads.GoogleAds.V3.Services";
option go_package = "google.golang.org/genproto/googleapis/ads/googleads/v3/services;services";
option java_multiple_files = true;
option java_outer_classname = "FeedPlaceholderViewServiceProto";
option java_package = "com.google.ads.googleads.v3.services";
option objc_class_prefix = "GAA";
option php_namespace = "Google\\Ads\\GoogleAds\\V3\\Services";
option ruby_package = "Google::Ads::GoogleAds::V3::Services";
// Proto file describing the FeedPlaceholderView service.
// Service to fetch feed placeholder views.
service FeedPlaceholderViewService {
  // Default API endpoint host used by generated clients.
  option (google.api.default_host) = "googleads.googleapis.com";

  // Returns the requested feed placeholder view in full detail.
  rpc GetFeedPlaceholderView(GetFeedPlaceholderViewRequest) returns (google.ads.googleads.v3.resources.FeedPlaceholderView) {
    // HTTP/REST binding for this RPC.
    option (google.api.http) = {
      get: "/v3/{resource_name=customers/*/feedPlaceholderViews/*}"
    };
    // Lets generated clients offer a flattened GetFeedPlaceholderView(resource_name) overload.
    option (google.api.method_signature) = "resource_name";
  }
}
// Request message for [FeedPlaceholderViewService.GetFeedPlaceholderView][google.ads.googleads.v3.services.FeedPlaceholderViewService.GetFeedPlaceholderView].
message GetFeedPlaceholderViewRequest {
  // Required. The resource name of the feed placeholder view to fetch.
  // Per the HTTP binding above, names match customers/*/feedPlaceholderViews/*.
  string resource_name = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "googleads.googleapis.com/FeedPlaceholderView"
    }
  ];
}
|
{
"pile_set_name": "Github"
}
|
/* origin: FreeBSD /usr/src/lib/msun/src/e_atan2l.c */
/*
* ====================================================
* Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved.
*
* Developed at SunSoft, a Sun Microsystems, Inc. business.
* Permission to use, copy, modify, and distribute this
* software is freely granted, provided that this notice
* is preserved.
* ====================================================
*
*/
/*
* See comments in atan2.c.
* Converted to long double by David Schultz <[email protected]>.
*/
#include "libm.h"
#if LDBL_MANT_DIG == 53 && LDBL_MAX_EXP == 1024
/* long double is the same format as double here; just delegate. */
long double atan2l(long double y, long double x)
{
	return atan2(y, x);
}
#elif (LDBL_MANT_DIG == 64 || LDBL_MANT_DIG == 113) && LDBL_MAX_EXP == 16384
#include "__invtrigl.h"
long double atan2l(long double y, long double x)
{
	union ldshape ux, uy;
	long double z;
	int m, ex, ey;

	if (isnan(x) || isnan(y))
		return x+y;
	if (x == 1)
		return atanl(y);
	ux.f = x;
	uy.f = y;
	/* biased exponents, sign bits stripped */
	ex = ux.i.se & 0x7fff;
	ey = uy.i.se & 0x7fff;
	/* m encodes the quadrant: bit 1 = sign(x), bit 0 = sign(y) */
	m = 2*(ux.i.se>>15) | uy.i.se>>15;
	if (y == 0) {
		switch(m) {
		case 0:
		case 1: return y;           /* atan(+-0,+anything)=+-0 */
		case 2: return 2*pio2_hi;   /* atan(+0,-anything) = pi */
		case 3: return -2*pio2_hi;  /* atan(-0,-anything) =-pi */
		}
	}
	if (x == 0)
		return m&1 ? -pio2_hi : pio2_hi;
	if (ex == 0x7fff) {             /* x is +-inf */
		if (ey == 0x7fff) {         /* y is +-inf too */
			switch(m) {
			case 0: return pio2_hi/2;     /* atan(+INF,+INF) */
			case 1: return -pio2_hi/2;    /* atan(-INF,+INF) */
			case 2: return 1.5*pio2_hi;   /* atan(+INF,-INF) */
			case 3: return -1.5*pio2_hi;  /* atan(-INF,-INF) */
			}
		} else {
			switch(m) {
			case 0: return 0.0;           /* atan(+...,+INF) */
			case 1: return -0.0;          /* atan(-...,+INF) */
			case 2: return 2*pio2_hi;     /* atan(+...,-INF) */
			case 3: return -2*pio2_hi;    /* atan(-...,-INF) */
			}
		}
	}
	/* |y/x| is huge: the result saturates at +-pi/2 */
	if (ex+120 < ey || ey == 0x7fff)
		return m&1 ? -pio2_hi : pio2_hi;
	/* z = atan(|y/x|) without spurious underflow */
	if ((m&2) && ey+120 < ex)   /* |y/x| < 0x1p-120, x<0 */
		z = 0.0;
	else
		z = atanl(fabsl(y/x));
	switch (m) {
	case 0: return z;               /* atan(+,+) */
	case 1: return -z;              /* atan(-,+) */
	case 2: return 2*pio2_hi-(z-2*pio2_lo); /* atan(+,-) */
	default: /* case 3 */
		return (z-2*pio2_lo)-2*pio2_hi;     /* atan(-,-) */
	}
}
#endif
|
{
"pile_set_name": "Github"
}
|
From d57aecba0cd291e0c28e2c82c3d4bce06c5b5b94 Mon Sep 17 00:00:00 2001
From: Gurchetan Singh <[email protected]>
Date: Mon, 2 Dec 2019 17:36:24 -0800
Subject: [PATCH] udmabuf: use cache_sgt_mapping option
Commit bc7a71da43b48333f84c6534ab43d240e34cf9eb uptream.
The GEM prime helpers do it, so should we. It's also possible to make
it optional later.
Signed-off-by: Gurchetan Singh <[email protected]>
Link: http://patchwork.freedesktop.org/patch/msgid/[email protected]
Signed-off-by: Gerd Hoffmann <[email protected]>
---
drivers/dma-buf/udmabuf.c | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
--- a/drivers/dma-buf/udmabuf.c
+++ b/drivers/dma-buf/udmabuf.c
@@ -94,10 +94,11 @@ static void release_udmabuf(struct dma_b
}
static const struct dma_buf_ops udmabuf_ops = {
- .map_dma_buf = map_udmabuf,
- .unmap_dma_buf = unmap_udmabuf,
- .release = release_udmabuf,
- .mmap = mmap_udmabuf,
+ .cache_sgt_mapping = true,
+ .map_dma_buf = map_udmabuf,
+ .unmap_dma_buf = unmap_udmabuf,
+ .release = release_udmabuf,
+ .mmap = mmap_udmabuf,
};
#define SEALS_WANTED (F_SEAL_SHRINK)
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (c) 2014, Oculus VR, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#include "EightPeerTest.h"
/*
What is being done here is having 8 peers all connect to each other and stay
connected. Then it checks whether they all connect. If so, send data in ordered reliable mode for 100
loops.
Possible ideas for changes:
Possibly use rakpeerinterfaces GetSystemList() for number of
connected peers instead of manually tracking. Would be slower though,
shouldn't be significant at this number, but the receive speed is part of the test.
Success conditions:
Peers connect and receive all packets in order.
No disconnections allowed in this version of the test.
Failure conditions:
If cannot connect to all peers for 20 seconds.
All packets are not recieved.
All packets are not in order.
Disconnection.
*/
/* Connect 8 local peers full-mesh (ports 60000-60007), then have every
 * peer broadcast 100 sequence-numbered RELIABLE_ORDERED messages and
 * verify every peer receives all packets from every other peer, in
 * order.  Returns 0 on success, otherwise an error code translated by
 * ErrorCodeToString().
 */
int EightPeerTest::RunTest(DataStructures::List<RakString> params,bool isVerbose,bool noPauses)
{
	const int peerNum= 8;
	RakPeerInterface *peerList[peerNum];//A list of 8 peers
	int connectionAmount[peerNum];//Counter for me to keep track of connection requests and accepts
	int recievedFromList[peerNum][peerNum];//Counter for me to keep track of packets received
	int lastNumberReceivedFromList[peerNum][peerNum];//Counter for me to keep track of last recieved sequence number
	const int numPackets=100;

	Packet *packet;
	BitStream bitStream;

	destroyList.Clear(false,_FILE_AND_LINE_);

	//Initializations of the arrays
	for (int i=0;i<peerNum;i++)
	{
		peerList[i]=RakPeerInterface::GetInstance();
		destroyList.Push(peerList[i],_FILE_AND_LINE_);
		connectionAmount[i]=0;

		for (int j=0;j<peerNum;j++)
		{
			recievedFromList[i][j]=0;
			lastNumberReceivedFromList[i][j]=0;
		}

		peerList[i]->Startup(peerNum*2, &SocketDescriptor(60000+i,0), 1);
		peerList[i]->SetMaximumIncomingConnections(peerNum);
	}

	//Connect all the peers together
	for (int i=0;i<peerNum;i++)
	{
		for (int j=i+1;j<peerNum;j++)//Start at i+1 so don't connect two of the same together.
		{
			if (peerList[i]->Connect("127.0.0.1", 60000+j, 0,0)!=CONNECTION_ATTEMPT_STARTED)
			{
				if (isVerbose)
				{
					DebugTools::ShowError("Problem while calling connect. \n",!noPauses && isVerbose,__LINE__,__FILE__);
				}
				return 1;//This fails the test, don't bother going on.
			}
		}
	}

	TimeMS entryTime=GetTimeMS();//Loop entry time
	TimeMS finishTimer=GetTimeMS();
	bool initialConnectOver=false;//Our initial connect all has been done.

	for (int k=0;k<numPackets||GetTimeMS()-finishTimer<5000;)//Quit after we send 100 messages while connected, if not all connected and not failure, otherwise fail after 20 seconds and exit
	{
		bool allConnected=true;//Start true, only one failed case makes it all fail
		for (int i=0;i<peerNum;i++)//Make sure all peers are connected to eachother
		{
			if (connectionAmount[i]<peerNum-1)
			{
				allConnected=false;
			}
		}

		if (GetTimeMS()-entryTime>20000 &&!initialConnectOver &&!allConnected)//failed for 20 seconds
		{
			if (isVerbose)
				DebugTools::ShowError("Failed to connect to all peers after 20 seconds",!noPauses && isVerbose,__LINE__,__FILE__);
			return 2;
			break; // NOTE(review): unreachable after the return above
		}

		if (allConnected)
		{
			if(!initialConnectOver)
				initialConnectOver=true;
			if (k<numPackets)
			{
				for (int i=0;i<peerNum;i++)//Have all peers send a message to all peers
				{
					bitStream.Reset();
					bitStream.Write((unsigned char) (ID_USER_PACKET_ENUM+1));
					bitStream.Write(k);
					bitStream.Write(i);
					peerList[i]->Send(&bitStream, HIGH_PRIORITY, RELIABLE_ORDERED ,0, UNASSIGNED_SYSTEM_ADDRESS, true);
				}
			}
			k++;
		}

		if (k>=numPackets-3)//This is our last 3 packets, give it time to send packet and arrive on interface, 2 seconds is more than enough
		{
			RakSleep(300);
			if (k==numPackets)
			{
				finishTimer=GetTimeMS();
			}
		}

		for (int i=0;i<peerNum;i++)//Receive for all peers
		{
			if (allConnected)//If all connected try to make the data more visually appealing by bunching it in one receive
			{
				int waittime=0;
				do
				{
					packet=peerList[i]->Receive();
					waittime++;
					if (!packet)
					{
						RakSleep(1);
					}
					if (waittime>1000)//Check for packet every millisec and if one second has passed move on, don't block execution
					{
						break;
					}
				}
				while(!packet);//For testing purposes wait for packet a little while, go if not recieved
			}
			else//Otherwise just keep recieving quickly until connected
			{
				packet=peerList[i]->Receive();
			}

			if (isVerbose)
				printf("For peer %i with %i connected peers.\n",i,connectionAmount[i]);

			while(packet)
			{
				switch (packet->data[0])
				{
				case ID_REMOTE_DISCONNECTION_NOTIFICATION:
					if (isVerbose)
					{
						printf("Another client has disconnected.\n");
						DebugTools::ShowError("Test failed.\n",!noPauses && isVerbose,__LINE__,__FILE__);
					}
					return 3;
					break;
				case ID_REMOTE_CONNECTION_LOST:
					if (isVerbose)
					{
						printf("Another client has lost the connection.\n");
						DebugTools::ShowError("Test failed.\n",!noPauses && isVerbose,__LINE__,__FILE__);
					}
					return 3;
					break;
				case ID_REMOTE_NEW_INCOMING_CONNECTION:
					if (isVerbose)
						printf("Another client has connected.\n");
					break;
				case ID_CONNECTION_REQUEST_ACCEPTED:
					if (isVerbose)
						printf("Our connection request has been accepted.\n");
					connectionAmount[i]++;
					break;
				case ID_CONNECTION_ATTEMPT_FAILED:
					if (isVerbose)
						DebugTools::ShowError("A connection has failed.\n Test failed.\n",!noPauses && isVerbose,__LINE__,__FILE__);
					return 2;
					break;
				case ID_NEW_INCOMING_CONNECTION:
					if (isVerbose)
						printf("A connection is incoming.\n");
					connectionAmount[i]++;//For this test assume connection. Test will fail if connection fails.
					break;
				case ID_NO_FREE_INCOMING_CONNECTIONS://Should not happend
					if (isVerbose)
					{
						printf("The server is full. This shouldn't happen in this test ever.\n");
						DebugTools::ShowError("Test failed.\n",!noPauses && isVerbose,__LINE__,__FILE__);
					}
					return 2;
					break;
				case ID_ALREADY_CONNECTED:
					if (isVerbose)
						printf("Already connected\n");//Shouldn't happen
					break;
				case ID_DISCONNECTION_NOTIFICATION:
					if (isVerbose)
					{
						printf("We have been disconnected.\n");
						DebugTools::ShowError("Test failed.\n",!noPauses && isVerbose,__LINE__,__FILE__);
					}
					return 3;
					break;
				case ID_CONNECTION_LOST:
					allConnected=false;
					connectionAmount[i]--;
					if (isVerbose)
					{
						printf("Connection lost.\n");
						DebugTools::ShowError("Test failed.\n",!noPauses && isVerbose,__LINE__,__FILE__);
					}
					return 3;
					break;
				default:
					// User payload: [msg id][sequence number][sender index]
					if (packet->data[0]==ID_USER_PACKET_ENUM+1)
					{
						int thePeerNum;
						int sequenceNum;
						bitStream.Reset();
						bitStream.Write((char*)packet->data, packet->length);
						bitStream.IgnoreBits(8);
						bitStream.Read(sequenceNum);
						bitStream.Read(thePeerNum);
						if (isVerbose)
							printf("Message %i from %i\n",sequenceNum,thePeerNum );
						if (thePeerNum>=0&&thePeerNum<peerNum)
						{
							// Ordered delivery means the next number must be exactly the expected one.
							if (lastNumberReceivedFromList[i][thePeerNum]==sequenceNum)
							{
								lastNumberReceivedFromList[i][thePeerNum]++;
							}
							else
							{
								if (isVerbose)
								{
									printf("Packets out of order");
									DebugTools::ShowError("Test failed.\n",!noPauses && isVerbose,__LINE__,__FILE__);
								}
								return 4;
							}
							recievedFromList[i][thePeerNum]++;}
					}
					break;
				}
				peerList[i]->DeallocatePacket(packet);
				// Stay in the loop as long as there are more packets.
				packet = peerList[i]->Receive();
			}
		}
		RakSleep(0);//If needed for testing
	}

	// Final tally: every peer must have all numPackets from every other peer.
	for (int i=0;i<peerNum;i++)
	{
		for (int j=0;j<peerNum;j++)
		{
			if (i!=j)
			{
				if (isVerbose)
					printf("%i recieved %i packets from %i\n",i,recievedFromList[i][j],j);
				if (recievedFromList[i][j]!=numPackets)
				{
					if (isVerbose)
					{
						printf("Not all packets recieved. it was in reliable ordered mode so that means test failed or wait time needs increasing\n");
						DebugTools::ShowError("Test failed.\n",!noPauses && isVerbose,__LINE__,__FILE__);
					}
					return 5;
				}
			}
		}
	}

	printf("All packets recieved in order,pass\n");
	return 0;
}
// Human-readable name used by the test framework.
RakString EightPeerTest::GetTestName()
{
	return "EightPeerTest";
}
/* Map a RunTest() return code to a human-readable description.
 * Unreachable `break` statements after each `return` were removed.
 */
RakString EightPeerTest::ErrorCodeToString(int errorCode)
{
	switch (errorCode)
	{
	case 0:
		return "No error";
	case 1:
		return "Connect function returned failure.";
	case 2:
		return "Peers failed to connect.";
	case 3:
		return "There was a disconnection.";
	case 4:
		return "Not ordered.";
	case 5:
		return "Not reliable.";
	default:
		return "Undefined Error";
	}
}
// Nothing to initialize here; peers are created inside RunTest().
EightPeerTest::EightPeerTest(void)
{
}

// Note: does not release peers; DestroyPeers() must be called explicitly.
EightPeerTest::~EightPeerTest(void)
{
}
// Release every RakPeerInterface instance RunTest() pushed onto destroyList.
void EightPeerTest::DestroyPeers()
{
	const int count = destroyList.Size();

	for (int idx = 0; idx < count; idx++)
		RakPeerInterface::DestroyInstance(destroyList[idx]);
}
|
{
"pile_set_name": "Github"
}
|
/**
* Freenet encryption support code. Some of this will be replaced with JCA code, but JCA has export
* restrictions issues (which are a massive configuration/maintenance problem for end users even if
* they are US citizens). Some of it wraps JCA code to provide convenient APIs, boost performance
* by e.g. precomputing stuff, or support higher level functionality not provided by JCA (e.g. JFK
* connection setup). Much of it is a bit too low level.
*/
package freenet.crypt;
|
{
"pile_set_name": "Github"
}
|
# When building under ESP-IDF, register LVGL as an IDF component.
if(ESP_PLATFORM)
    # Collect every LVGL core source file recursively.
    file(GLOB_RECURSE SOURCES lvgl/src/*.c)
    idf_component_register(SRCS ${SOURCES}
                           INCLUDE_DIRS . lvgl)
    # Propagate LV_CONF_INCLUDE_SIMPLE to consumers of this component
    # (INTERFACE scope: consumers only, not the library itself).
    target_compile_definitions(${COMPONENT_LIB} INTERFACE LV_CONF_INCLUDE_SIMPLE=1)
endif()
|
{
"pile_set_name": "Github"
}
|
-*- coding: utf-8 -*-
Files: blink.xpm braindamaged.xpm cry.xpm dead.xpm evil.xpm forced.xpm
frown.xpm grin.xpm indifferent.xpm reverse-smile.xpm sad.xpm
smile.xpm wry.xpm
Author: Adam Sjøgren
Copyright (C) 2007-2018 Free Software Foundation, Inc.
License: GNU General Public License version 3 or later (see COPYING)
|
{
"pile_set_name": "Github"
}
|
# -*- coding: utf-8 -*-
import smtplib
import email.message
def send(
    sender, to,
    subject='None',
    body='None',
    server='localhost'
):
    """Send a plain-text email message via SMTP.

    Args:
        sender: Envelope/From address.
        to: Recipient address (or whatever smtplib.sendmail accepts).
        subject: Subject header text.
        body: Message payload.
        server: Hostname of the SMTP server to relay through.

    Returns:
        The dict returned by ``smtplib.SMTP.sendmail``: one entry per
        recipient that was refused, empty on full success.
    """
    message = email.message.Message()
    message['To'] = to
    message['From'] = sender
    message['Subject'] = subject
    message.set_payload(body)
    # Use a distinct name for the connection so the ``server`` host
    # argument is not shadowed (the original rebound ``server`` here).
    smtp = smtplib.SMTP(server)
    try:
        return smtp.sendmail(sender, to, message.as_string())
    finally:
        # Always close the SMTP session, even if sendmail raises.
        smtp.quit()
|
{
"pile_set_name": "Github"
}
|
// Copyright 2017 Team 254. All Rights Reserved.
// Author: [email protected] (Patrick Fairbank)
package plc
import (
"github.com/stretchr/testify/assert"
"testing"
)
// TestByteToBool verifies that byteToBool expands a byte slice into the
// requested number of boolean flags, least-significant bit first
// (7 == 0b00000111 yields three leading trues).
func TestByteToBool(t *testing.T) {
	bytes := []byte{7, 254, 3}
	bools := byteToBool(bytes, 17)
	if assert.Equal(t, 17, len(bools)) {
		expectedBools := []bool{true, true, true, false, false, false, false, false, false, true, true, true, true,
			true, true, true, true}
		assert.Equal(t, expectedBools, bools)
	}
}
// TestByteToUint verifies that byteToUint packs consecutive byte pairs into
// big-endian 16-bit values (1,77 -> 1*256+77 == 333).
func TestByteToUint(t *testing.T) {
	bytes := []byte{1, 77, 2, 253, 21, 179}
	uints := byteToUint(bytes, 3)
	if assert.Equal(t, 3, len(uints)) {
		assert.Equal(t, []uint16{333, 765, 5555}, uints)
	}
}
// TestBoolToByte verifies the inverse packing of boolToByte (LSB-first per
// byte) and that a byteToBool round trip reproduces the original flags.
func TestBoolToByte(t *testing.T) {
	bools := []bool{true, true, false, false, true, false, false, false, false, true}
	bytes := boolToByte(bools)
	if assert.Equal(t, 2, len(bytes)) {
		assert.Equal(t, []byte{19, 2}, bytes)
		assert.Equal(t, bools, byteToBool(bytes, len(bools)))
	}
}
// TestGetArmorBlockStatuses checks that each bit of the field I/O connection
// register maps to the matching ArmorBlock status flag.
func TestGetArmorBlockStatuses(t *testing.T) {
	var plc Plc
	// check asserts the statuses produced by the register value that was
	// written just before the call.
	check := func(redDs, blueDs, shieldGenerator, controlPanel bool) {
		expected := map[string]bool{
			"RedDs":           redDs,
			"BlueDs":          blueDs,
			"ShieldGenerator": shieldGenerator,
			"ControlPanel":    controlPanel,
		}
		assert.Equal(t, expected, plc.GetArmorBlockStatuses())
	}
	plc.registers[fieldIoConnection] = 0
	check(false, false, false, false)
	plc.registers[fieldIoConnection] = 1
	check(true, false, false, false)
	plc.registers[fieldIoConnection] = 2
	check(false, true, false, false)
	plc.registers[fieldIoConnection] = 4
	check(false, false, true, false)
	plc.registers[fieldIoConnection] = 8
	check(false, false, false, true)
	plc.registers[fieldIoConnection] = 5
	check(true, false, true, false)
	plc.registers[fieldIoConnection] = 10
	check(false, true, false, true)
	plc.registers[fieldIoConnection] = 15
	check(true, true, true, true)
}
|
{
"pile_set_name": "Github"
}
|
module Cisco-IOS-XE-object-group {
namespace "http://cisco.com/ns/yang/Cisco-IOS-XE-object-group";
prefix ios-og;
import cisco-semver {
prefix cisco-semver;
}
import Cisco-IOS-XE-types {
prefix ios-types;
}
import Cisco-IOS-XE-native {
prefix ios;
}
import ietf-inet-types {
prefix inet;
}
organization
"Cisco Systems, Inc.";
contact
"Cisco Systems, Inc.
Customer Service
Postal: 170 W Tasman Drive
San Jose, CA 95134
Tel: +1 1800 553-NETS
E-mail: [email protected]";
description
"Cisco XE Native Object Group Yang model.
Copyright (c) 2016, 2018 by Cisco Systems, Inc.
All rights reserved.";
// =========================================================================
// REVISION
// =========================================================================
revision 2019-07-01 {
description "Establish semantic version baseline";
cisco-semver:module-version "1.0.0";
}
revision 2018-11-07 {
description
"Cleanup spelling errors in description statements";
}
revision 2018-10-03 {
description
"Fix protocol icmp, tcp and udp able to standalone";
}
revision 2018-10-01 {
description
"Fix not being able to configure multiple protocol numbers and names";
}
revision 2018-06-24 {
description
"Refactor source port service object-group support";
}
revision 2018-06-20 {
description
"Add source port support to service object-group";
}
revision 2017-07-31 {
description
"Support multiple-host under object group network";
}
revision 2017-02-07 {
description
"Initial revision";
}
typedef object-group-port-operator {
type enumeration {
enum "eq";
enum "gt";
enum "lt";
}
}
grouping object-group-service-grouping {
leaf description {
description
"Service object group description";
type string {
length "1..200";
}
}
leaf group-object {
description
"Nested object group";
type string;
}
// NOTE(review): the node name misspells "protocol", but it is part of the
// module's published schema; renaming it would break existing clients.
leaf-list protocal-number {
  description
    "An IP protocol number";
  type uint8;
}
leaf ahp {
description
"Authentication Header Protocol";
type empty;
}
leaf eigrp {
description
"Cisco's EIGRP routing protocol";
type empty;
}
leaf esp {
description
"Encapsulation Security Payload";
type empty;
}
leaf gre {
description
"Cisco's GRE tunneling";
type empty;
}
leaf icmp {
description
"Internet Control Message Protocol";
type empty;
}
container icmp-conf {
container icmp {
description
"Internet Control Message Protocol";
leaf port-number {
type uint8;
}
leaf alternate-address {
description
"Alternate address";
type empty;
}
leaf conversion-error {
description
"Datagram conversion";
type empty;
}
leaf echo {
description
"Echo (ping)";
type empty;
}
leaf echo-reply {
description
"Echo reply";
type empty;
}
leaf information-reply {
description
"Information replies";
type empty;
}
leaf information-request {
description
"Information requests";
type empty;
}
leaf mask-reply {
description
"Mask replies";
type empty;
}
leaf mask-request {
description
"Mask requests";
type empty;
}
leaf mobile-redirect {
description
"Mobile host redirect";
type empty;
}
leaf parameter-problem {
description
"All parameter problems";
type empty;
}
leaf redirect {
description
"All redirects";
type empty;
}
leaf router-advertisement {
description
"Router discovery advertisements";
type empty;
}
leaf router-solicitation {
description
"Router discovery solicitations";
type empty;
}
leaf source-quench {
description
"Source quenches";
type empty;
}
leaf time-exceeded {
description
"All time exceeded";
type empty;
}
leaf timestamp-reply {
description
"Timestamp replies";
type empty;
}
leaf timestamp-request {
description
"Timestamp requests";
type empty;
}
leaf traceroute {
description
"Traceroute";
type empty;
}
leaf unreachable {
description
"All unreachables";
type empty;
}
}
}
leaf igmp {
description
"Internet Gateway Message Protocol";
type empty;
}
leaf ip {
description
"Any Internet Protocol";
type empty;
}
leaf ipinip {
description
"IP in IP tunneling";
type empty;
}
leaf nos {
description
"KA9Q NOS compatible IP over IP tunneling";
type empty;
}
leaf ospf {
description
"OSPF routing protocol";
type empty;
}
leaf pcp {
description
"Payload Compression Protocol";
type empty;
}
leaf pim {
description
"Protocol Independent Multicast";
type empty;
}
leaf tcp {
type empty;
}
container tcp-conf {
container tcp {
description
"Transmission Control Protocol";
list tcp-port-list {
key "tcp-port";
leaf tcp-port {
type ios-types:acl-tcp-port-type;
}
leaf operator {
type object-group-port-operator;
}
}
container tcp-range-port-list {
list range {
description
"Match only packets in the range of port numbers";
key "tcp-min-port tcp-max-port";
leaf tcp-min-port {
type ios-types:acl-tcp-port-type;
}
leaf tcp-max-port {
type ios-types:acl-tcp-port-type;
}
}
}
container tcp-src-port-list {
list source {
description
"Source port number";
key "tcp-port";
leaf tcp-port {
type ios-types:acl-tcp-port-type;
}
leaf operator {
type object-group-port-operator;
}
}
}
container tcp-src-range-port-list {
container source {
description
"Source port number";
list range {
description
"Match only packets in the range of port numbers";
key "tcp-min-port tcp-max-port";
leaf tcp-min-port {
type ios-types:acl-tcp-port-type;
}
leaf tcp-max-port {
type ios-types:acl-tcp-port-type;
}
}
}
}
container tcp-src-dst-port-list {
list source {
description
"Source port number";
key "tcp-src-port tcp-dst-port";
leaf src-operator {
type object-group-port-operator;
}
leaf tcp-src-port {
type ios-types:acl-tcp-port-type;
}
leaf dst-operator {
type object-group-port-operator;
}
leaf tcp-dst-port {
type ios-types:acl-tcp-port-type;
}
}
}
container tcp-src-range-dst-port-list {
list source {
description
"Source port number";
key "tcp-src-min-port tcp-src-max-port tcp-dst-port";
leaf tcp-src-min-port {
type ios-types:acl-tcp-port-type;
}
leaf tcp-src-max-port {
type ios-types:acl-tcp-port-type;
}
leaf operator {
type object-group-port-operator;
}
leaf tcp-dst-port {
type ios-types:acl-tcp-port-type;
}
}
}
container tcp-src-dst-range-port-list {
list source {
description
"Source port number";
key "tcp-src-port tcp-dst-min-port tcp-dst-max-port";
leaf operator {
type object-group-port-operator;
}
leaf tcp-src-port {
type ios-types:acl-tcp-port-type;
}
leaf tcp-dst-min-port {
type ios-types:acl-tcp-port-type;
}
leaf tcp-dst-max-port {
type ios-types:acl-tcp-port-type;
}
}
}
container tcp-src-range-dst-range-port-list {
list source {
description
"Source port number";
key "tcp-src-min-port tcp-src-max-port tcp-dst-min-port tcp-dst-max-port";
leaf tcp-src-min-port {
type ios-types:acl-tcp-port-type;
}
leaf tcp-src-max-port {
type ios-types:acl-tcp-port-type;
}
leaf tcp-dst-min-port {
type ios-types:acl-tcp-port-type;
}
leaf tcp-dst-max-port {
type ios-types:acl-tcp-port-type;
}
}
}
}
}
container tcp-udp {
description
"TCP or UDP protocol";
list tcp-udp-port-list {
key "tcp-udp-port";
leaf tcp-udp-port {
type ios-types:acl-tcp-udp-port-type;
}
leaf operator {
type object-group-port-operator;
}
}
container tcp-udp-range-port-list {
list range {
description
"Match only packets in the range of port numbers";
key "tcp-udp-min-port tcp-udp-max-port";
leaf tcp-udp-min-port {
type ios-types:acl-tcp-udp-port-type;
}
leaf tcp-udp-max-port {
type ios-types:acl-tcp-udp-port-type;
}
}
}
container tcp-udp-src-port-list {
list source {
description
"Source port number";
key "tcp-udp-port";
leaf tcp-udp-port {
type ios-types:acl-tcp-udp-port-type;
}
leaf operator {
type object-group-port-operator;
}
}
}
container tcp-udp-src-range-port-list {
container source {
description
"Source port number";
list range {
description
"Match only packets in the range of port numbers";
key "tcp-udp-min-port tcp-udp-max-port";
leaf tcp-udp-min-port {
type ios-types:acl-tcp-udp-port-type;
}
leaf tcp-udp-max-port {
type ios-types:acl-tcp-udp-port-type;
}
}
}
}
container tcp-udp-src-dst-port-list {
list source {
description
"Source port number";
key "tcp-udp-src-port tcp-udp-dst-port";
leaf src-operator {
type object-group-port-operator;
}
leaf tcp-udp-src-port {
type ios-types:acl-tcp-udp-port-type;
}
leaf dst-operator {
type object-group-port-operator;
}
leaf tcp-udp-dst-port {
type ios-types:acl-tcp-udp-port-type;
}
}
}
container tcp-udp-src-range-dst-port-list {
list source {
description
"Source port number";
key "tcp-udp-src-min-port tcp-udp-src-max-port tcp-udp-dst-port";
leaf tcp-udp-src-min-port {
type ios-types:acl-tcp-udp-port-type;
}
leaf tcp-udp-src-max-port {
type ios-types:acl-tcp-udp-port-type;
}
leaf operator {
type object-group-port-operator;
}
leaf tcp-udp-dst-port {
type ios-types:acl-tcp-udp-port-type;
}
}
}
container tcp-udp-src-dst-range-port-list {
list source {
description
"Source port number";
key "tcp-udp-src-port tcp-udp-dst-min-port tcp-udp-dst-max-port";
leaf operator {
type object-group-port-operator;
}
leaf tcp-udp-src-port {
type ios-types:acl-tcp-udp-port-type;
}
leaf tcp-udp-dst-min-port {
type ios-types:acl-tcp-udp-port-type;
}
leaf tcp-udp-dst-max-port {
type ios-types:acl-tcp-udp-port-type;
}
}
}
container tcp-udp-src-range-dst-range-port-list {
list source {
description
"Source port number";
key "tcp-udp-src-min-port tcp-udp-src-max-port tcp-udp-dst-min-port tcp-udp-dst-max-port";
leaf tcp-udp-src-min-port {
type ios-types:acl-tcp-udp-port-type;
}
leaf tcp-udp-src-max-port {
type ios-types:acl-tcp-udp-port-type;
}
leaf tcp-udp-dst-min-port {
type ios-types:acl-tcp-udp-port-type;
}
leaf tcp-udp-dst-max-port {
type ios-types:acl-tcp-udp-port-type;
}
}
}
}
leaf udp {
type empty;
}
container udp-conf {
container udp {
description
"User Datagram Protocol";
list udp-port-list {
key "udp-port";
leaf udp-port {
type ios-types:acl-udp-port-type;
}
leaf operator {
type object-group-port-operator;
}
}
container udp-range-port-list {
list range {
description
"Match only packets in the range of port numbers";
key "udp-min-port udp-max-port";
leaf udp-min-port {
type ios-types:acl-udp-port-type;
}
leaf udp-max-port {
type ios-types:acl-udp-port-type;
}
}
}
container udp-src-port-list {
list source {
description
"Source port number";
key "udp-port";
leaf udp-port {
type ios-types:acl-udp-port-type;
}
leaf operator {
type object-group-port-operator;
}
}
}
container udp-src-range-port-list {
container source {
description
"Source port number";
list range {
description
"Match only packets in the range of port numbers";
key "udp-min-port udp-max-port";
leaf udp-min-port {
type ios-types:acl-udp-port-type;
}
leaf udp-max-port {
type ios-types:acl-udp-port-type;
}
}
}
}
container udp-src-dst-port-list {
list source {
description
"Source port number";
key "udp-src-port udp-dst-port";
leaf src-operator {
type object-group-port-operator;
}
leaf udp-src-port {
type ios-types:acl-udp-port-type;
}
leaf dst-operator {
type object-group-port-operator;
}
leaf udp-dst-port {
type ios-types:acl-udp-port-type;
}
}
}
container udp-src-range-dst-port-list {
list source {
description
"Source port number";
key "udp-src-min-port udp-src-max-port udp-dst-port";
leaf udp-src-min-port {
type ios-types:acl-udp-port-type;
}
leaf udp-src-max-port {
type ios-types:acl-udp-port-type;
}
leaf operator {
type object-group-port-operator;
}
leaf udp-dst-port {
type ios-types:acl-udp-port-type;
}
}
}
container udp-src-dst-range-port-list {
list source {
description
"Source port number";
key "udp-src-port udp-dst-min-port udp-dst-max-port";
leaf operator {
type object-group-port-operator;
}
leaf udp-src-port {
type ios-types:acl-udp-port-type;
}
leaf udp-dst-min-port {
type ios-types:acl-udp-port-type;
}
leaf udp-dst-max-port {
type ios-types:acl-udp-port-type;
}
}
}
container udp-src-range-dst-range-port-list {
list source {
description
"Source port number";
key "udp-src-min-port udp-src-max-port udp-dst-min-port udp-dst-max-port";
leaf udp-src-min-port {
type ios-types:acl-udp-port-type;
}
leaf udp-src-max-port {
type ios-types:acl-udp-port-type;
}
leaf udp-dst-min-port {
type ios-types:acl-udp-port-type;
}
leaf udp-dst-max-port {
type ios-types:acl-udp-port-type;
}
}
}
}
}
}
grouping config-object-group-grouping {
list network {
description
"network group";
key "name";
leaf name {
description
"WORD;;object-group name";
type string {
length "1..64";
}
}
container obj-Mode-config-network-group {
list network_address {
key "ipv4_addr ipv4_mask";
leaf ipv4_addr {
description
"A.B.C.D;;Network address of the group members";
type inet:ipv4-address;
}
leaf ipv4_mask {
description
"A.B.C.D;;Network mask";
type inet:ipv4-address;
}
}
leaf description {
description
"Network object group description";
type string {
length "1..200";
}
}
leaf group-object {
description
"Nested object group";
type string;
}
list host {
description
"Host address of the object-group member";
key "ipv4-host";
leaf ipv4-host {
description
"Host address of the object-group member";
type inet:host;
}
}
container range {
leaf ip-addr-min {
description
"Starting IPv4 Address";
type inet:ipv4-address;
}
leaf ip-addr-max {
description
"Ending IPv4 Address";
type inet:ipv4-address;
}
}
}
}
list security {
description
"security group";
key "name";
leaf name {
type string {
length "1..64";
}
}
container security-group {
leaf description {
description
"Network object group description";
type string {
length "1..200";
}
}
leaf group-object {
description
"Nested object group";
type string;
}
container security-group {
description
"Security Group Tag";
leaf tag {
type uint16 {
range "1..65535";
}
}
}
}
}
list service {
description
"service group";
key "name";
leaf name {
type string {
length "1..64";
}
}
uses object-group-service-grouping;
}
}
/////////////////////////////////////////////////////////
// native / object-group
/////////////////////////////////////////////////////////
augment "/ios:native/ios:object-group" {
uses config-object-group-grouping;
}
} //module
|
{
"pile_set_name": "Github"
}
|
function [err_rel,err_abs] = derivcheck(f,x,flag)
%DERIVCHECK Check analytical vs numerical differentiation for a function
%   [ERR_REL,ERR_ABS] = DERIVCHECK(F,X,FLAG) compares the gradient that F
%   returns as its second output against a numerically estimated gradient
%   at the point X. F must be callable as [Y,DY] = F(X). If FLAG is true,
%   the numerical gradient uses FGRAD with five-point differences,
%   otherwise GRADEST is used. Timing of both evaluations and the error
%   vectors are echoed to the console.

% Default to the GRADEST path when FLAG is omitted or empty.
if nargin < 3 || isempty(flag); flag = false; end
tic
if flag
    dy_num = fgrad(f,x,'five-points');
else
    dy_num = gradest(f,x);
end
toc
tic
[y,dy_ana] = f(x);   % second output is the analytical gradient; y is unused
toc
% Collapse a square (Jacobian-style) result by summing its rows --
% NOTE(review): assumes fgrad/gradest return per-row contributions; confirm.
if size(dy_num,1) == size(dy_num,2)
    dy_num = sum(dy_num,1);
end
% Reshape to row vectors
dy_num = dy_num(:)';
dy_ana = dy_ana(:)';
fprintf('Relative errors:\n');
err_rel = (dy_num(:)' - dy_ana(:)')./dy_num(:)'   % no semicolon: echo to console
fprintf('Absolute errors:\n');
err_abs = dy_num(:)' - dy_ana(:)'                 % no semicolon: echo to console
end
|
{
"pile_set_name": "Github"
}
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: skip-file
import numpy as np
import mxnet as mx
import time
import logging
from collections import namedtuple
from mxnet import optimizer as opt
from mxnet.optimizer import get_updater
from mxnet import metric
# Parameter to pass to batch_end_callback
BatchEndParam = namedtuple('BatchEndParams', ['epoch', 'nbatch', 'eval_metric'])
class Solver(object):
    """Minimal training-loop driver for a symbolic MXNet network.

    Re-binds the symbol for every batch (so variable batch/label shapes
    work), runs forward/backward, applies optimizer updates, logs a metric
    per epoch, and optionally runs an evaluation pass.
    """

    def __init__(self, symbol, ctx=None,
                 begin_epoch=0, num_epoch=None,
                 arg_params=None, aux_params=None,
                 optimizer='sgd', **kwargs):
        # symbol: network to train; ctx defaults to CPU when not given.
        self.symbol = symbol
        if ctx is None:
            ctx = mx.cpu()
        self.ctx = ctx
        self.begin_epoch = begin_epoch
        self.num_epoch = num_epoch
        self.arg_params = arg_params
        self.aux_params = aux_params
        self.optimizer = optimizer
        # Copy so later mutation by the caller cannot change optimizer args.
        self.kwargs = kwargs.copy()

    def fit(self, train_data, eval_data=None,
            eval_metric='acc',
            grad_req='write',
            epoch_end_callback=None,
            batch_end_callback=None,
            kvstore='local',
            logger=None):
        """Train for epochs [begin_epoch, num_epoch) over ``train_data``.

        Args:
            train_data: iterable of batches; must expose ``provide_data``,
                ``data_name``, ``label_name``, ``get_batch_size`` and ``reset``.
            eval_data: optional iterable with the same interface, evaluated
                once per epoch.
            eval_metric: metric name or instance for ``mx.metric.create``.
            grad_req: gradient request passed to ``bind`` ('write'/'null'/...).
            epoch_end_callback: called as f(epoch, symbol, arg_params,
                aux_params) after every epoch, if given.
            batch_end_callback: called with a BatchEndParam after every
                batch, if given.
            kvstore: accepted for API compatibility; currently unused.
            logger: logging-like object; defaults to the ``logging`` module.
        """
        if logger is None:
            logger = logging
        # BUGFIX: previously called logging.info(...) directly, which
        # bypassed a caller-supplied logger.
        logger.info('Start training with %s', str(self.ctx))
        arg_shapes, _, aux_shapes = self.symbol.infer_shape(data=train_data.provide_data[0][1])
        arg_names = self.symbol.list_arguments()
        if grad_req != 'null':
            # Gradient buffers for every learnable argument (not data/label).
            self.grad_params = {}
            for name, shape in zip(arg_names, arg_shapes):
                if not (name.endswith('data') or name.endswith('label')):
                    self.grad_params[name] = mx.nd.zeros(shape, self.ctx)
        else:
            self.grad_params = None
        aux_names = self.symbol.list_auxiliary_states()
        # BUGFIX: was ``nd.zeros(s)`` -- ``nd`` is never imported (NameError);
        # allocate on the training context like the gradient arrays above.
        self.aux_params = {k: mx.nd.zeros(s, self.ctx) for k, s in zip(aux_names, aux_shapes)}
        data_name = train_data.data_name
        label_name = train_data.label_name
        self.optimizer = opt.create(self.optimizer,
                                    rescale_grad=(1.0 / train_data.get_batch_size()),
                                    **(self.kwargs))
        self.updater = get_updater(self.optimizer)
        eval_metric = metric.create(eval_metric)
        # begin training
        for epoch in range(self.begin_epoch, self.num_epoch):
            nbatch = 0
            train_data.reset()
            eval_metric.reset()
            for data in train_data:
                nbatch += 1
                label_shape = data[label_name].shape
                self.arg_params[data_name] = mx.nd.array(data[data_name], self.ctx)
                # Flatten the spatial label dims to (batch, H*W) for softmax.
                self.arg_params[label_name] = mx.nd.array(
                    data[label_name].reshape(label_shape[0],
                                             label_shape[1] * label_shape[2]), self.ctx)
                self.exector = self.symbol.bind(self.ctx, self.arg_params,
                                                args_grad=self.grad_params,
                                                grad_req=grad_req,
                                                aux_states=self.aux_params)
                assert len(self.symbol.list_arguments()) == len(self.exector.grad_arrays)
                update_dict = {name: grad for name, grad
                               in zip(self.symbol.list_arguments(), self.exector.grad_arrays)
                               if grad is not None}
                output_dict = {}
                output_buff = {}
                for key, arr in zip(self.symbol.list_outputs(), self.exector.outputs):
                    output_dict[key] = arr
                    # CPU-side staging buffer for metric evaluation.
                    output_buff[key] = mx.nd.empty(arr.shape, ctx=mx.cpu())
                self.exector.forward(is_train=True)
                for key in output_dict:
                    output_dict[key].copyto(output_buff[key])
                self.exector.backward()
                for key, arr in update_dict.items():
                    if key != "bigscore_weight":
                        self.updater(key, arr, self.arg_params[key])
                pred_shape = self.exector.outputs[0].shape
                label = mx.nd.array(data[label_name].reshape(label_shape[0],
                                                             label_shape[1] * label_shape[2]))
                pred = mx.nd.array(output_buff["softmax_output"].asnumpy().reshape(
                    pred_shape[0], pred_shape[1], pred_shape[2] * pred_shape[3]))
                eval_metric.update([label], [pred])
                self.exector.outputs[0].wait_to_read()
                # BUGFIX: guard the callback like epoch_end_callback below;
                # it used to be invoked unconditionally and crashed when None.
                if batch_end_callback is not None:
                    batch_end_params = BatchEndParam(epoch=epoch, nbatch=nbatch,
                                                     eval_metric=eval_metric)
                    batch_end_callback(batch_end_params)
            if epoch_end_callback is not None:
                epoch_end_callback(epoch, self.symbol, self.arg_params, self.aux_params)
            name, value = eval_metric.get()
            logger.info(" --->Epoch[%d] Train-%s=%f", epoch, name, value)
            # evaluation
            if eval_data:
                logger.info(" in eval process...")
                nbatch = 0
                eval_data.reset()
                eval_metric.reset()
                for data in eval_data:
                    nbatch += 1
                    label_shape = data[label_name].shape
                    self.arg_params[data_name] = mx.nd.array(data[data_name], self.ctx)
                    self.arg_params[label_name] = mx.nd.array(
                        data[label_name].reshape(label_shape[0],
                                                 label_shape[1] * label_shape[2]), self.ctx)
                    exector = self.symbol.bind(self.ctx, self.arg_params,
                                               args_grad=self.grad_params,
                                               grad_req=grad_req,
                                               aux_states=self.aux_params)
                    cpu_output_array = mx.nd.zeros(exector.outputs[0].shape)
                    exector.forward(is_train=False)
                    exector.outputs[0].copyto(cpu_output_array)
                    pred_shape = cpu_output_array.shape
                    label = mx.nd.array(data[label_name].reshape(label_shape[0],
                                                                 label_shape[1] * label_shape[2]))
                    pred = mx.nd.array(cpu_output_array.asnumpy().reshape(
                        pred_shape[0], pred_shape[1], pred_shape[2] * pred_shape[3]))
                    eval_metric.update([label], [pred])
                    exector.outputs[0].wait_to_read()
                name, value = eval_metric.get()
                logger.info('batch[%d] Validation-%s=%f', nbatch, name, value)
|
{
"pile_set_name": "Github"
}
|
// Service-container dependency graph (Symfony DI container dump).
digraph sc {
  ratio="compress"
  // One record node per service, labelled "service_id\nclass".
  node [fontsize="11" fontname="Arial" shape="record"];
  edge [fontsize="9" fontname="Arial" color="grey" arrowhead="open" arrowsize="0.5"];
  node_service_container [label="service_container\nSymfony\\Component\\DependencyInjection\\ContainerInterface\n", shape=record, fillcolor="#eeeeee", style="filled"];
  node_foo [label="foo\nFooClass\n", shape=record, fillcolor="#eeeeee", style="filled"];
  node_bar [label="bar\nBarClass\n", shape=record, fillcolor="#eeeeee", style="filled"];
  // Edge: foo depends on bar.
  node_foo -> node_bar [label="" style="filled"];
}
|
{
"pile_set_name": "Github"
}
|
import json
class User(object):
    """A signed-in user account plus the flags flask-login expects.

    NOTE(review): this class uses Python 2-only constructs (``unicode`` in
    get_id, ``dict.iteritems`` in to_json_obj); it will not run on Python 3
    without porting.
    """
    def __init__(self, user_id, email, username, picture_url, ratings, join_time, last_online, current_game):
        self.user_id = user_id
        self.email = email
        self.username = username
        self.picture_url = picture_url
        self.ratings = ratings
        self.join_time = join_time
        self.last_online = last_online
        self.current_game = current_game
        # for flask-login
        self.is_authenticated = True
        self.is_active = True
        self.is_anonymous = False

    # for flask-login
    def get_id(self):
        # flask-login requires a unicode id (Python 2 ``unicode`` builtin).
        return unicode(self.user_id)

    def to_json_obj(self, with_key=False):
        """Return a JSON-serializable dict; ``with_key`` controls whether the
        private ``playerKey`` entry of the current game is exposed."""
        return {
            'userId': str(self.user_id),
            'email': self.email,
            'username': self.username,
            'pictureUrl': self.picture_url,
            'ratings': self.ratings,
            'joinTime': str(self.join_time),
            # None stays None; otherwise the timestamp is stringified.
            'lastOnline': self.last_online and str(self.last_online),
            'currentGame': {
                key: value
                for key, value in self.current_game.iteritems()
                if with_key or key != 'playerKey'
            } if self.current_game is not None else None,
        }

    def __str__(self):
        return json.dumps(self.to_json_obj())

    @staticmethod
    def from_row(row):
        """Build a User from a database row object with matching attributes."""
        return User(
            row.id,
            row.email,
            row.username,
            row.picture_url,
            row.ratings,
            row.join_time,
            row.last_online,
            row.current_game
        )
class UserGameHistory(object):
    """Record of one finished game for a single user."""

    def __init__(self, history_id, user_id, game_time, game_info):
        self.history_id = history_id
        self.user_id = user_id
        self.game_time = game_time
        self.game_info = game_info

    def to_json_obj(self):
        """Return a JSON-serializable dict; ids/times are stringified."""
        return dict(
            historyId=self.history_id,
            userId=str(self.user_id),
            gameTime=str(self.game_time),
            gameInfo=self.game_info,
        )

    @staticmethod
    def from_row(row):
        """Build an instance from a database row object."""
        return UserGameHistory(row.id, row.user_id, row.game_time, row.game_info)
class ActiveGame(object):
    """A game currently in progress on some game server."""

    def __init__(self, active_id, server, game_id, game_info):
        self.active_id = active_id
        self.server = server
        self.game_id = game_id
        self.game_info = game_info

    def to_json_obj(self):
        """Return a JSON-serializable dict representation."""
        return dict(
            activeId=self.active_id,
            server=self.server,
            gameId=self.game_id,
            gameInfo=self.game_info,
        )

    @staticmethod
    def from_row(row):
        """Build an instance from a database row object."""
        return ActiveGame(row.id, row.server, row.game_id, row.game_info)
class GameHistory(object):
    """Archived replay data for one finished game."""

    def __init__(self, history_id, replay):
        self.history_id = history_id
        self.replay = replay

    def to_json_obj(self):
        """Return a JSON-serializable dict representation."""
        return dict(historyId=self.history_id, replay=self.replay)

    @staticmethod
    def from_row(row):
        """Build an instance from a database row object."""
        return GameHistory(row.id, row.replay)
class CampaignProgress(object):
    """Per-user campaign completion state (levels and belts)."""

    def __init__(self, levels_completed, belts_completed):
        self.levels_completed = levels_completed
        self.belts_completed = belts_completed

    def to_json_obj(self):
        """Return a JSON-serializable dict representation."""
        return dict(
            levelsCompleted=self.levels_completed,
            beltsCompleted=self.belts_completed,
        )

    @staticmethod
    def from_row(row):
        """Build an instance from a row's ``progress`` dict; a missing row
        yields empty progress."""
        if row is None:
            return CampaignProgress({}, {})
        progress = row.progress
        return CampaignProgress(
            progress.get('levelsCompleted', {}),
            progress.get('beltsCompleted', {}),
        )
|
{
"pile_set_name": "Github"
}
|
<!-- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
<!ENTITY window.title "例外">
<!ENTITY window.width "45em">
<!ENTITY treehead.sitename.label "站点">
<!ENTITY treehead.status.label "状态">
<!ENTITY removepermission.label "移除此站点">
<!ENTITY removepermission.accesskey "R">
<!ENTITY removeallpermissions.label "移除全部站点">
<!ENTITY removeallpermissions.accesskey "e">
<!ENTITY address.label "网站地址:">
<!ENTITY address.accesskey "d">
<!ENTITY block.label "阻止">
<!ENTITY block.accesskey "B">
<!ENTITY session.label "在当前会话中允许">
<!ENTITY session.accesskey "S">
<!ENTITY allow.label "允许">
<!ENTITY allow.accesskey "A">
<!ENTITY windowClose.key "w">
<!ENTITY button.cancel.label "取消">
<!ENTITY button.cancel.accesskey "C">
<!ENTITY button.ok.label "保存更改">
<!ENTITY button.ok.accesskey "S">
|
{
"pile_set_name": "Github"
}
|
# Path of the shared usb.ids database, substituted at configure time.
libusbip_la_CPPFLAGS = -DUSBIDS_FILE='"@USBIDS_DIR@/usb.ids"'
libusbip_la_CFLAGS = @EXTRA_CFLAGS@
# libtool version-info triple comes from configure.
libusbip_la_LDFLAGS = -version-info @LIBUSBIP_VERSION@
lib_LTLIBRARIES := libusbip.la
# Sources for the host-side driver, the vhci (client) driver and helpers.
libusbip_la_SOURCES := names.c names.h usbip_host_driver.c usbip_host_driver.h \
		       usbip_common.c usbip_common.h vhci_driver.c vhci_driver.h \
		       sysfs_utils.c sysfs_utils.h
|
{
"pile_set_name": "Github"
}
|
/*!
* KeyboardJS
*
* Copyright 2011, Robert William Hurst
* Licenced under the BSD License.
* See https://raw.github.com/RobertWHurst/KeyboardJS/master/license.txt
*/
(function (context, factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define(factory);
} else {
// Browser globals
context.k = context.KeyboardJS = factory();
}
}(this, function() {
//polyfills for ms's peice o' shit browsers
// Cross-browser event attach: standard addEventListener where available,
// otherwise the legacy IE attachEvent API (normalising `this` to target).
function bind(target, type, handler) {
	if (target.addEventListener) {
		target.addEventListener(type, handler, false);
	} else {
		target.attachEvent("on" + type, function (event) {
			return handler.call(target, event);
		});
	}
}
[].indexOf||(Array.prototype.indexOf=function(a,b,c){for(c=this.length,b=(c+~~b)%c;b<c&&(!(b in this)||this[b]!==a);b++);return b^c?b:-1;});
//locals
var locals = {
'us': {
"backspace": 8,
"tab": 9,
"enter": 13,
"shift": 16,
"ctrl": 17,
"alt": 18,
"pause": 19, "break": 19,
"capslock": 20,
"escape": 27, "esc": 27,
"space": 32, "spacebar": 32,
"pageup": 33,
"pagedown": 34,
"end": 35,
"home": 36,
"left": 37,
"up": 38,
"right": 39,
"down": 40,
"insert": 45,
"delete": 46,
"0": 48, "1": 49, "2": 50, "3": 51, "4": 52, "5": 53, "6": 54, "7": 55, "8": 56, "9": 57,
"a": 65, "b": 66, "c": 67, "d": 68, "e": 69, "f": 70, "g": 71, "h": 72, "i": 73, "j": 74, "k": 75, "l": 76, "m": 77, "n": 78, "o": 79, "p": 80, "q": 81, "r": 82, "s": 83, "t": 84, "u": 85, "v": 86, "w": 87, "x": 88, "y": 89, "z": 90,
"meta": 91, "command": 91, "windows": 91, "win": 91,
"_91": 92,
"select": 93,
"num0": 96, "num1": 97, "num2": 98, "num3": 99, "num4": 100, "num5": 101, "num6": 102, "num7": 103, "num8": 104, "num9": 105,
"multiply": 106,
"add": 107,
"subtract": 109,
"decimal": 110,
"divide": 111,
"f1": 112, "f2": 113, "f3": 114, "f4": 115, "f5": 116, "f6": 117, "f7": 118, "f8": 119, "f9": 120, "f10": 121, "f11": 122, "f12": 123,
"numlock": 144, "num": 144,
"scrolllock": 145, "scroll": 145,
"semicolon": 186,
"equal": 187, "equalsign": 187,
"comma": 188,
"dash": 189,
"period": 190,
"slash": 191, "forwardslash": 191,
"graveaccent": 192,
"openbracket": 219,
"backslash": 220,
"closebracket": 221,
"singlequote": 222
}
//If you create a new local please submit it as a pull request or post it in the issue tracker at
// http://github.com/RobertWhurst/KeyboardJS/issues/
}
//keys
var keys = locals['us'],
activeKeys = [],
activeBindings = {},
keyBindingGroups = [];
//adds keys to the active keys array
bind(document, "keydown", function(event) {
	//lookup the key pressed and save it to the active keys array
	for (var key in keys) {
		if(keys.hasOwnProperty(key) && event.keyCode === keys[key]) {
			//several names can map to one keyCode (aliases), so guard
			//against pushing the same name twice
			if(activeKeys.indexOf(key) < 0) {
				activeKeys.push(key);
			}
		}
	}
	//execute the first callback the longest key binding that matches the active keys
	return executeActiveKeyBindings(event);
});
//removes keys from the active array
bind(document, "keyup", function (event) {
	//lookup the key released and prune it from the active keys array
	for(var key in keys) {
		if(keys.hasOwnProperty(key) && event.keyCode === keys[key]) {
			var iAK = activeKeys.indexOf(key);
			//only splice when the key is actually tracked as active
			if(iAK > -1) {
				activeKeys.splice(iAK, 1);
			}
		}
	}
	//execute the end callback on the active key binding
	return pruneActiveKeyBindings(event);
});
//bind to the window blur event and clear all pressed keys.
//keyup events are lost while the window is unfocused, so the active key
//list must be reset here or keys would appear permanently stuck down.
bind(window, "blur", function(event) {
    activeKeys = [];
    //fire the end callback of every binding that was active.
    //Bug fix: 'event' was referenced without being declared as a parameter,
    //which throws a ReferenceError in browsers that do not expose a global
    //'event' object (e.g. Firefox); the handler now accepts it explicitly.
    return pruneActiveKeyBindings(event);
});
/**
 * Generates an array of the key bindings whose required keys are all
 * currently pressed. Groups are scanned from the largest combo size down
 * to the smallest, so longer combos appear first in the result and take
 * precedence in executeActiveKeyBindings.
 * @returns {Array} bindings satisfied by the current activeKeys.
 */
function queryActiveBindings() {
    var bindingStack = [];
    //loop through the key binding groups from the most keys to the fewest
    //(index keyBindingGroups.length is always undefined; the guard skips it).
    for(var keyCount = keyBindingGroups.length; keyCount > -1; keyCount -= 1) {
        if(keyBindingGroups[keyCount]) {
            var keyBindingGroup = keyBindingGroups[keyCount];
            //loop through the key bindings that require the same number of keys.
            for(var bindingIndex = 0; bindingIndex < keyBindingGroup.length; bindingIndex += 1) {
                var binding = keyBindingGroup[bindingIndex],
                    //assume the binding is active until a required key is unsatisfied
                    keyBindingActive = true;
                //loop through each key required by the binding.
                for(var keyIndex = 0; keyIndex < binding.keys.length; keyIndex += 1) {
                    //if a required key is not held down the binding is inactive;
                    //perf fix: stop checking the remaining keys immediately.
                    if(activeKeys.indexOf(binding.keys[keyIndex]) < 0) {
                        keyBindingActive = false;
                        break;
                    }
                }
                //if every required key is pressed then collect the binding
                if(keyBindingActive) {
                    bindingStack.push(binding);
                }
            }
        }
    }
    return bindingStack;
}
/**
 * Collects the active key bindings and fires their key-down callbacks.
 * Each pressed key is consumed by at most one binding; because
 * queryActiveBindings returns larger combos first, the longest matching
 * combo wins.
 * @param event the originating keyboard event, forwarded to callbacks.
 * @returns {Boolean|undefined} false when a binding fired (so the caller can
 *          cancel the browser default) or a callback returned falsy;
 *          true when no keys are active; otherwise undefined.
 */
function executeActiveKeyBindings(event) {
    //Bug fix: the original tested 'activeKeys < 1', which coerces the array
    //to a string and gives wrong results for numeric key names
    //(e.g. ['0'] < 1 is true); compare the length instead.
    if(activeKeys.length < 1) {
        return true;
    }
    var bindingStack = queryActiveBindings(),
        spentKeys = [],
        output;
    //loop through each active binding
    for(var bindingIndex = 0; bindingIndex < bindingStack.length; bindingIndex += 1) {
        var binding = bindingStack[bindingIndex],
            usesSpentKey = false;
        //check each required key; skip the binding if another (longer) binding
        //has already consumed one of its keys
        for(var keyIndex = 0; keyIndex < binding.keys.length; keyIndex += 1) {
            if(spentKeys.indexOf(binding.keys[keyIndex]) > -1) {
                usesSpentKey = true;
                break;
            }
        }
        //if the binding does not use a key that has been spent then execute it
        if(!usesSpentKey) {
            //fire the on-key-down callback
            if(typeof binding.callback === "function") {
                if(!binding.callback(event, binding.keys, binding.keyCombo)) {
                    output = false;
                }
            }
            //remember the binding so key-up can fire its end callback later
            if(!activeBindings[binding.keyCombo]) {
                activeBindings[binding.keyCombo] = binding;
            }
            //mark this binding's keys as spent so shorter combos cannot reuse them
            //(fix: use a distinct index name instead of redeclaring keyIndex)
            for(var spentIndex = 0; spentIndex < binding.keys.length; spentIndex += 1) {
                var key = binding.keys[spentIndex];
                if(spentKeys.indexOf(key) < 0) {
                    spentKeys.push(key);
                }
            }
        }
    }
    //if any keys were spent then a binding fired; return false so the caller
    //can prevent the default action / event bubbling.
    if(spentKeys.length) {
        return false;
    }
    return output;
}
/**
 * Removes no longer active keys and fires the on key up callbacks for associated active bindings.
 * @param event the originating keyboard event, forwarded to end callbacks.
 * @returns {Boolean|undefined} false if any end callback returned falsy; otherwise undefined.
 */
function pruneActiveKeyBindings(event) {
    var bindingStack = queryActiveBindings();
    var output;
    //loop through the combos whose key-down callback fired earlier
    for(var bindingCombo in activeBindings) {
        if(activeBindings.hasOwnProperty(bindingCombo)) {
            var binding = activeBindings[bindingCombo],
                active = false;
            //loop through the currently satisfied bindings
            for(var bindingIndex = 0; bindingIndex < bindingStack.length; bindingIndex += 1) {
                var activeCombo = bindingStack[bindingIndex].keyCombo;
                //check to see if the combo is still active
                if(activeCombo === bindingCombo) {
                    active = true;
                    break;
                }
            }
            //if the combo is no longer active then fire its end callback and remove it
            //(deleting during for-in is safe here: each property is visited once)
            if(!active) {
                if(typeof binding.endCallback === "function") {
                    if(!binding.endCallback(event, binding.keys, binding.keyCombo)) {
                        output = false
                    }
                }
                delete activeBindings[bindingCombo];
            }
        }
    }
    return output;
}
/**
 * Binds an on key down and on key up callback to a key or key combo. Accepts a string containing the name of each
 * key you want to bind to, comma separated. If you want to bind a combo then use the plus sign to link keys together.
 * Example: 'ctrl + x, ctrl + c' will fire if Control and x or Control and c are pressed at the same time.
 * @param keyCombo comma separated list of combos; keys within a combo are joined by '+'.
 * @param callback fired when a combo becomes active (key down).
 * @param endCallback fired when the combo is released (key up).
 * @returns {Object} an object with a 'clear' method that unbinds every combo created by this call.
 */
function bindKey(keyCombo, callback, endCallback) {
    //every binding created by this call, so clear() can remove them all.
    //Bug fix: the original clear() closed over the hoisted loop variables
    //'keys'/'keyBinding' and therefore only removed the LAST combo's binding
    //when the combo string contained several comma-separated combos.
    var createdBindings = [];
    function clear() {
        for(var clearIndex = 0; clearIndex < createdBindings.length; clearIndex += 1) {
            var binding = createdBindings[clearIndex],
                group = keyBindingGroups[binding.keys.length];
            if(group) {
                var index = group.indexOf(binding);
                if(index > -1) {
                    group.splice(index, 1);
                }
            }
        }
        createdBindings = [];
    }
    //create an array of combos from the first argument
    var bindSets = keyCombo.toLowerCase().replace(/\s/g, '').split(',');
    //create a binding for each key combo
    for(var i = 0; i < bindSets.length; i += 1) {
        //split up the keys
        var keys = bindSets[i].split('+');
        //if there are keys in the current combo
        if(keys.length) {
            if(!keyBindingGroups[keys.length]) { keyBindingGroups[keys.length] = []; }
            //define the binding
            var keyBinding = {
                "callback": callback,
                "endCallback": endCallback,
                "keyCombo": bindSets[i],
                "keys": keys
            };
            //save the binding, grouped by the number of keys it requires
            keyBindingGroups[keys.length].push(keyBinding);
            createdBindings.push(keyBinding);
        }
    }
    return {
        "clear": clear
    };
}
/**
 * Binds keys or key combos to an axis. The keys should be in the following order: up, down, left, right. If any
 * of the bound keys or key combos are active the callback will fire. The callback is passed an array containing
 * two numbers, each with a possible value of -1, 0, or 1 depending on the axis direction.
 * NOTE(review): up/down write axis[0] and left/right write axis[1]; the original
 * documentation described the pair as [x, y] -- confirm the intended orientation.
 * @param up key or combo string for the negative first-axis direction
 * @param down key or combo string for the positive first-axis direction
 * @param left key or combo string for the negative second-axis direction
 * @param right key or combo string for the positive second-axis direction
 * @param callback fired roughly every millisecond while the axis is deflected
 * @returns {Object|false} an object with a 'clear' method, or false if callback is not a function.
 */
function bindAxis(up, down, left, right, callback) {
    function clear() {
        if(typeof clearUp === 'function') { clearUp(); }
        if(typeof clearDown === 'function') { clearDown(); }
        if(typeof clearLeft === 'function') { clearLeft(); }
        if(typeof clearRight === 'function') { clearRight(); }
        //Bug fix: 'timer' holds an interval id (a number), so the original
        //check 'typeof timer === "function"' was always false and the
        //interval was never cleared -- it kept firing after clear().
        if(timer !== undefined) { clearInterval(timer); }
    }
    var axis = [0, 0];
    if(typeof callback !== 'function') {
        return false;
    }
    //up
    var clearUp = bindKey(up, function () {
        if(axis[0] === 0) {
            axis[0] = -1;
        }
    }, function() {
        axis[0] = 0;
    }).clear;
    //down
    var clearDown = bindKey(down, function () {
        if(axis[0] === 0) {
            axis[0] = 1;
        }
    }, function() {
        axis[0] = 0;
    }).clear;
    //left
    var clearLeft = bindKey(left, function () {
        if(axis[1] === 0) {
            axis[1] = -1;
        }
    }, function() {
        axis[1] = 0;
    }).clear;
    //right
    var clearRight = bindKey(right, function () {
        if(axis[1] === 0) {
            axis[1] = 1;
        }
    }, function() {
        axis[1] = 0;
    }).clear;
    //poll the axis and report whenever it is deflected
    var timer = setInterval(function(){
        //no change; skip the callback
        if(axis[0] === 0 && axis[1] === 0) {
            return;
        }
        //run the callback
        callback(axis);
    }, 1);
    return {
        "clear": clear
    };
}
/**
 * Clears all key and key combo binds containing a given key or keys.
 * @param keys comma separated key names, or the string 'all' to remove every binding.
 */
function unbindKey(keys) {
    //'all' wipes every binding group at once
    if(keys === 'all') {
        keyBindingGroups = [];
        return;
    }
    keys = keys.replace(/\s/g, '').split(',');
    //loop through the key binding groups (largest combo size first;
    //index keyBindingGroups.length is undefined and skipped by the guard).
    for(var iKCL = keyBindingGroups.length; iKCL > -1; iKCL -= 1) {
        if(keyBindingGroups[iKCL]) {
            var KeyBindingGroup = keyBindingGroups[iKCL];
            //loop through the key bindings.
            for(var iB = 0; iB < KeyBindingGroup.length; iB += 1) {
                var keyBinding = KeyBindingGroup[iB],
                    remove = false;
                //loop through the current key binding keys.
                for(var iKB = 0; iKB < keyBinding.keys.length; iKB += 1) {
                    var key = keyBinding.keys[iKB];
                    //loop through the keys to be removed
                    for(var iKR = 0; iKR < keys.length; iKR += 1) {
                        var keyToRemove = keys[iKR];
                        //a binding is removed if it uses ANY of the given keys
                        if(keyToRemove === key) {
                            remove = true;
                            break;
                        }
                    }
                    if(remove) { break; }
                }
                if(remove) {
                    //splice and step the index back so the next binding is not skipped
                    keyBindingGroups[iKCL].splice(iB, 1); iB -= 1;
                    //drop the group entirely once it is empty
                    if(keyBindingGroups[iKCL].length < 1) {
                        delete keyBindingGroups[iKCL];
                    }
                }
            }
        }
    }
}
/**
 * Gets the array of currently pressed key names.
 * Note: this returns the live internal array, not a copy; callers must not
 * mutate it or the key tracking state will be corrupted.
 */
function getActiveKeys() {
    return activeKeys;
}
/**
 * Adds a new keyboard locale not natively supported by KeyboardJS.
 * @param local the locale name, used later with setLocale.
 * @param keys map of key name -> keyCode for that locale.
 *             (This parameter shadows the module-level 'keys' map inside this function.)
 */
function addLocale(local, keys) {
    locals[local] = keys;
}
/**
 * Changes the active keyboard locale. Unknown locale names are silently ignored.
 * @param local the name of a registered locale (e.g. 'us' or one added via addLocale).
 */
function setLocale(local) {
    if(locals[local]) {
        keys = locals[local];
    }
}
//public API of the KeyboardJS module
return {
    "bind": {
        "key": bindKey,    //bind callbacks to a key or key combo
        "axis": bindAxis   //bind four keys/combos to a two-element axis
    },
    "activeKeys": getActiveKeys,
    "unbind": {
        "key": unbindKey
    },
    "locale": {
        "add": addLocale,  //register a new locale key map
        "set": setLocale   //switch the active locale
    }
}
}));
|
{
"pile_set_name": "Github"
}
|
# Look'n'Feel configuration file
# $Revision: 8034 $
# the theme to use, none specified or empty means the system default one
#Theme=javax.swing.plaf.metal.MetalLookAndFeel
#Theme=com.sun.java.swing.plaf.gtk.GTKLookAndFeel
#Theme=com.sun.java.swing.plaf.motif.MotifLookAndFeel
#Theme=com.sun.java.swing.plaf.windows.WindowsLookAndFeel
#Theme=com.sun.java.swing.plaf.windows.WindowsClassicLookAndFeel
|
{
"pile_set_name": "Github"
}
|
// Created by cgo -godefs - DO NOT EDIT
// cgo -godefs types_darwin.go
package process
const (
sizeofPtr = 0x8
sizeofShort = 0x2
sizeofInt = 0x4
sizeofLong = 0x8
sizeofLongLong = 0x8
)
type (
_C_short int16
_C_int int32
_C_long int64
_C_long_long int64
)
type Timespec struct {
Sec int64
Nsec int64
}
type Timeval struct {
Sec int64
Usec int32
Pad_cgo_0 [4]byte
}
type Rusage struct {
Utime Timeval
Stime Timeval
Maxrss int64
Ixrss int64
Idrss int64
Isrss int64
Minflt int64
Majflt int64
Nswap int64
Inblock int64
Oublock int64
Msgsnd int64
Msgrcv int64
Nsignals int64
Nvcsw int64
Nivcsw int64
}
type Rlimit struct {
Cur uint64
Max uint64
}
type UGid_t uint32
type KinfoProc struct {
Proc ExternProc
Eproc Eproc
}
type Eproc struct {
Paddr *uint64
Sess *Session
Pcred Upcred
Ucred Uucred
Pad_cgo_0 [4]byte
Vm Vmspace
Ppid int32
Pgid int32
Jobc int16
Pad_cgo_1 [2]byte
Tdev int32
Tpgid int32
Pad_cgo_2 [4]byte
Tsess *Session
Wmesg [8]int8
Xsize int32
Xrssize int16
Xccount int16
Xswrss int16
Pad_cgo_3 [2]byte
Flag int32
Login [12]int8
Spare [4]int32
Pad_cgo_4 [4]byte
}
type Proc struct{}
type Session struct{}
type ucred struct {
Link _Ctype_struct___0
Ref uint64
Posix Posix_cred
Label *Label
Audit Au_session
}
type Uucred struct {
Ref int32
UID uint32
Ngroups int16
Pad_cgo_0 [2]byte
Groups [16]uint32
}
type Upcred struct {
Pc_lock [72]int8
Pc_ucred *ucred
P_ruid uint32
P_svuid uint32
P_rgid uint32
P_svgid uint32
P_refcnt int32
Pad_cgo_0 [4]byte
}
type Vmspace struct {
Dummy int32
Pad_cgo_0 [4]byte
Dummy2 *int8
Dummy3 [5]int32
Pad_cgo_1 [4]byte
Dummy4 [3]*int8
}
type Sigacts struct{}
type ExternProc struct {
P_un [16]byte
P_vmspace uint64
P_sigacts uint64
Pad_cgo_0 [3]byte
P_flag int32
P_stat int8
P_pid int32
P_oppid int32
P_dupfd int32
Pad_cgo_1 [4]byte
User_stack uint64
Exit_thread uint64
P_debugger int32
Sigwait int32
P_estcpu uint32
P_cpticks int32
P_pctcpu uint32
Pad_cgo_2 [4]byte
P_wchan uint64
P_wmesg uint64
P_swtime uint32
P_slptime uint32
P_realtimer Itimerval
P_rtime Timeval
P_uticks uint64
P_sticks uint64
P_iticks uint64
P_traceflag int32
Pad_cgo_3 [4]byte
P_tracep uint64
P_siglist int32
Pad_cgo_4 [4]byte
P_textvp uint64
P_holdcnt int32
P_sigmask uint32
P_sigignore uint32
P_sigcatch uint32
P_priority uint8
P_usrpri uint8
P_nice int8
P_comm [17]int8
Pad_cgo_5 [4]byte
P_pgrp uint64
P_addr uint64
P_xstat uint16
P_acflag uint16
Pad_cgo_6 [4]byte
P_ru uint64
}
type Itimerval struct {
Interval Timeval
Value Timeval
}
type Vnode struct{}
type Pgrp struct{}
type UserStruct struct{}
type Au_session struct {
Aia_p *AuditinfoAddr
Mask AuMask
}
type Posix_cred struct {
UID uint32
Ruid uint32
Svuid uint32
Ngroups int16
Pad_cgo_0 [2]byte
Groups [16]uint32
Rgid uint32
Svgid uint32
Gmuid uint32
Flags int32
}
type Label struct{}
type AuditinfoAddr struct {
Auid uint32
Mask AuMask
Termid AuTidAddr
Asid int32
Flags uint64
}
type AuMask struct {
Success uint32
Failure uint32
}
type AuTidAddr struct {
Port int32
Type uint32
Addr [4]uint32
}
type UcredQueue struct {
Next *ucred
Prev **ucred
}
|
{
"pile_set_name": "Github"
}
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !go1.7
package http2
import (
"crypto/tls"
"errors"
"net"
"net/http"
"time"
)
// contextContext is a minimal stand-in for context.Context; this file only
// builds on Go < 1.7 (see the !go1.7 build tag above).
type contextContext interface {
	Done() <-chan struct{}
	Err() error
}
// errCanceled mirrors a cancellation error for this pre-1.7 shim.
var errCanceled = errors.New("canceled")
// fakeContext is a contextContext that can never be cancelled.
type fakeContext struct{}
// Done returns nil, so receiving from it blocks forever (never cancelled).
func (fakeContext) Done() <-chan struct{} { return nil }
// Err must never be called, since Done never becomes ready.
func (fakeContext) Err() error { panic("should not be called") }
// reqContext returns the no-op context for a request.
func reqContext(r *http.Request) fakeContext {
	return fakeContext{}
}
// setResponseUncompressed is a no-op in this build.
func setResponseUncompressed(res *http.Response) {
	// Nothing.
}
// clientTrace is an empty placeholder; the trace* helpers below are all
// no-ops in this pre-Go1.7 build.
type clientTrace struct{}
func requestTrace(*http.Request) *clientTrace { return nil }
func traceGetConn(*http.Request, string) {}
func traceGotConn(*http.Request, *ClientConn) {}
func traceFirstResponseByte(*clientTrace) {}
func traceWroteHeaders(*clientTrace) {}
func traceWroteRequest(*clientTrace, error) {}
func traceGot100Continue(trace *clientTrace) {}
func traceWait100Continue(trace *clientTrace) {}
// nop is a reusable no-op cancel function.
func nop() {}
// serverConnBaseContext returns no context and a no-op cancel.
func serverConnBaseContext(c net.Conn, opts *ServeConnOpts) (ctx contextContext, cancel func()) {
	return nil, nop
}
// contextWithCancel returns the context unchanged with a no-op cancel.
func contextWithCancel(ctx contextContext) (_ contextContext, cancel func()) {
	return ctx, nop
}
// requestWithContext returns the request unchanged (requests carry no
// context in this build).
func requestWithContext(req *http.Request, ctx contextContext) *http.Request {
	return req
}
// temporary copy of Go 1.6's private tls.Config.clone:
// cloneTLSConfig returns a new tls.Config with the listed exported fields
// copied from c. NOTE(review): fields added to tls.Config in later Go
// releases are not copied here; acceptable only because this file builds
// exclusively for Go < 1.7.
func cloneTLSConfig(c *tls.Config) *tls.Config {
	return &tls.Config{
		Rand: c.Rand,
		Time: c.Time,
		Certificates: c.Certificates,
		NameToCertificate: c.NameToCertificate,
		GetCertificate: c.GetCertificate,
		RootCAs: c.RootCAs,
		NextProtos: c.NextProtos,
		ServerName: c.ServerName,
		ClientAuth: c.ClientAuth,
		ClientCAs: c.ClientCAs,
		InsecureSkipVerify: c.InsecureSkipVerify,
		CipherSuites: c.CipherSuites,
		PreferServerCipherSuites: c.PreferServerCipherSuites,
		SessionTicketsDisabled: c.SessionTicketsDisabled,
		SessionTicketKey: c.SessionTicketKey,
		ClientSessionCache: c.ClientSessionCache,
		MinVersion: c.MinVersion,
		MaxVersion: c.MaxVersion,
		CurvePreferences: c.CurvePreferences,
	}
}
// Ping forwards to the unexported ping implementation.
func (cc *ClientConn) Ping(ctx contextContext) error {
	return cc.ping(ctx)
}
// Shutdown forwards to the unexported shutdown implementation.
func (cc *ClientConn) Shutdown(ctx contextContext) error {
	return cc.shutdown(ctx)
}
// idleConnTimeout returns 0 (idle timeout not supported in this build).
func (t *Transport) idleConnTimeout() time.Duration { return 0 }
|
{
"pile_set_name": "Github"
}
|
<template>
<svg :width="size" :height="size * 1.142" viewBox="0 0 21 24" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
<g transform="translate(-17.000000, -13.000000)" :fill="color">
<polygon points="17.2886 13.0896 37.4846 24.7496 17.2886 36.4096"></polygon>
</g>
</g>
</svg>
</template>
<script>
export default {
props: ['color', 'size']
}
</script>
|
{
"pile_set_name": "Github"
}
|
{
"keyboard": "kudox/columner",
"keymap": "default",
"commit": "bb87bdec82433053eef68caa5fe4a03083a13e9c",
"layout": "LAYOUT",
"layers": [
[
"KC_ESC", "KC_1", "KC_2", "KC_3", "KC_4", "KC_5", "KC_LBRC", "KC_RBRC", "KC_6", "KC_7", "KC_8", "KC_9", "KC_0", "KC_BSPC",
"KC_TAB", "KC_Q", "KC_W", "KC_E", "KC_R", "KC_T", "KC_MINS", "KC_EQL", "KC_Y", "KC_U", "KC_I", "KC_O", "KC_P", "KC_BSLS",
"KC_LCTL", "KC_A", "KC_S", "KC_D", "KC_F", "KC_G", "KC_H", "KC_J", "KC_K", "KC_L", "KC_SCLN", "KC_ENT",
"KC_LSFT", "KC_Z", "KC_X", "KC_C", "KC_V", "KC_B", "KC_N", "KC_M", "KC_COMM", "KC_DOT", "KC_SLSH", "KC_RSFT",
"LALT_T(KC_ESC)", "KC_QUOT", "KC_GRV", "KC_DEL", "LT(1,KC_LANG2)", "MT(KC_LGUI,KC_ENT)", "KC_SPC", "KC_SPC", "MT(KC_RGUI,KC_ENT)", "LT(2,KC_LANG1)", "KC_LEFT", "KC_DOWN", "KC_UP", "KC_RGHT"
],
[
"_______", "KC_F1", "KC_F2", "KC_F3", "KC_F4", "KC_F5", "KC_F11", "KC_F12", "KC_F6", "KC_F7", "KC_F8", "KC_F9", "KC_F10", "_______",
"_______", "KC_DQUO", "KC_QUOT", "KC_TILD", "KC_ASTR", "KC_PLUS", "_______", "_______", "_______", "_______", "_______", "KC_TILD", "KC_GRV", "KC_JYEN",
"_______", "KC_AT", "KC_COLN", "KC_GRV", "_______", "KC_MINS", "_______", "_______", "_______", "KC_DQUO", "KC_QUOT", "_______",
"_______", "XXXXXXX", "XXXXXXX", "XXXXXXX", "KC_DOT", "KC_SLSH", "_______", "_______", "_______", "_______", "_______", "_______",
"_______", "_______", "_______", "_______", "_______", "_______", "_______", "_______", "_______", "_______", "KC_LT", "KC_UNDS", "KC_CIRC", "KC_GT"
],
[
"_______", "_______", "_______", "_______", "_______", "_______", "_______", "_______", "_______", "_______", "_______", "_______", "_______", "_______",
"_______", "KC_7", "KC_8", "KC_9", "KC_ASTR", "KC_PLUS", "_______", "_______", "XXXXXXX", "XXXXXXX", "XXXXXXX", "XXXXXXX", "XXXXXXX", "_______",
"_______", "KC_4", "KC_5", "KC_6", "KC_0", "KC_MINS", "XXXXXXX", "XXXXXXX", "XXXXXXX", "XXXXXXX", "XXXXXXX", "_______",
"_______", "KC_1", "KC_2", "KC_3", "KC_DOT", "KC_SLSH", "XXXXXXX", "XXXXXXX", "XXXXXXX", "XXXXXXX", "XXXXXXX", "_______",
"_______", "_______", "KC_JYEN", "KC_0", "_______", "_______", "_______", "_______", "_______", "_______", "_______", "_______", "_______", "_______"
]
]
}
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright 2016 Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <stdexcept>
#include <folly/Conv.h>
#include <folly/Likely.h>
#include <folly/Portability.h>
#include <folly/Range.h>
namespace folly {
/**
 * Exception thrown when a format argument string is malformed or fails
 * validation; thrown by FormatArg::error / FormatArg::enforce.
 */
class BadFormatArg : public std::invalid_argument {
 public:
  explicit BadFormatArg(const std::string& msg)
    : std::invalid_argument(msg) {}
};
/**
* Parsed format argument.
*/
struct FormatArg {
/**
* Parse a format argument from a string. Keeps a reference to the
* passed-in string -- does not copy the given characters.
*/
explicit FormatArg(StringPiece sp)
: fullArgString(sp),
fill(kDefaultFill),
align(Align::DEFAULT),
sign(Sign::DEFAULT),
basePrefix(false),
thousandsSeparator(false),
trailingDot(false),
width(kDefaultWidth),
widthIndex(kNoIndex),
precision(kDefaultPrecision),
presentation(kDefaultPresentation),
nextKeyMode_(NextKeyMode::NONE) {
if (!sp.empty()) {
initSlow();
}
}
enum class Type {
INTEGER,
FLOAT,
OTHER
};
/**
* Validate the argument for the given type; throws on error.
*/
void validate(Type type) const;
/**
* Throw an exception if the first argument is false. The exception
* message will contain the argument string as well as any passed-in
* arguments to enforce, formatted using folly::to<std::string>.
*/
template <typename... Args>
void enforce(bool v, Args&&... args) const {
if (UNLIKELY(!v)) {
error(std::forward<Args>(args)...);
}
}
template <typename... Args>
std::string errorStr(Args&&... args) const;
template <typename... Args>
[[noreturn]] void error(Args&&... args) const;
/**
* Full argument string, as passed in to the constructor.
*/
StringPiece fullArgString;
/**
* Fill
*/
static constexpr char kDefaultFill = '\0';
char fill;
/**
* Alignment
*/
enum class Align : uint8_t {
DEFAULT,
LEFT,
RIGHT,
PAD_AFTER_SIGN,
CENTER,
INVALID
};
Align align;
/**
* Sign
*/
enum class Sign : uint8_t {
DEFAULT,
PLUS_OR_MINUS,
MINUS,
SPACE_OR_MINUS,
INVALID
};
Sign sign;
/**
* Output base prefix (0 for octal, 0x for hex)
*/
bool basePrefix;
/**
* Output thousands separator (comma)
*/
bool thousandsSeparator;
/**
* Force a trailing decimal on doubles which could be rendered as ints
*/
bool trailingDot;
/**
* Field width and optional argument index
*/
static constexpr int kDefaultWidth = -1;
static constexpr int kDynamicWidth = -2;
static constexpr int kNoIndex = -1;
int width;
int widthIndex;
/**
* Precision
*/
static constexpr int kDefaultPrecision = -1;
int precision;
/**
* Presentation
*/
static constexpr char kDefaultPresentation = '\0';
char presentation;
/**
* Split a key component from "key", which must be non-empty (an exception
* is thrown otherwise).
*/
template <bool emptyOk=false>
StringPiece splitKey();
/**
* Is the entire key empty?
*/
bool keyEmpty() const {
return nextKeyMode_ == NextKeyMode::NONE && key_.empty();
}
/**
* Split an key component from "key", which must be non-empty and a valid
* integer (an exception is thrown otherwise).
*/
int splitIntKey();
void setNextIntKey(int val) {
assert(nextKeyMode_ == NextKeyMode::NONE);
nextKeyMode_ = NextKeyMode::INT;
nextIntKey_ = val;
}
void setNextKey(StringPiece val) {
assert(nextKeyMode_ == NextKeyMode::NONE);
nextKeyMode_ = NextKeyMode::STRING;
nextKey_ = val;
}
private:
void initSlow();
template <bool emptyOk>
StringPiece doSplitKey();
StringPiece key_;
int nextIntKey_;
StringPiece nextKey_;
enum class NextKeyMode {
NONE,
INT,
STRING,
};
NextKeyMode nextKeyMode_;
};
/**
 * Build an error message containing the full argument string plus any
 * caller-supplied context, joined with folly::to<std::string>.
 */
template <typename... Args>
inline std::string FormatArg::errorStr(Args&&... args) const {
  return to<std::string>(
    "invalid format argument {", fullArgString, "}: ",
    std::forward<Args>(args)...);
}
/**
 * Throw BadFormatArg with the message produced by errorStr. Never returns.
 */
template <typename... Args>
[[noreturn]] inline void FormatArg::error(Args&&... args) const {
  throw BadFormatArg(errorStr(std::forward<Args>(args)...));
}
/**
 * Split a string key component. Enforces that no pending key was set as an
 * integer via setNextIntKey, then defers to doSplitKey.
 */
template <bool emptyOk>
inline StringPiece FormatArg::splitKey() {
  enforce(nextKeyMode_ != NextKeyMode::INT, "integer key expected");
  return doSplitKey<emptyOk>();
}
/**
 * Split the next key component. A pending string key set via setNextKey is
 * consumed first; otherwise key_ is split at the first '.' separator, or at
 * the matching '[' when the key ends with ']'. key_ is updated to hold the
 * remainder. When emptyOk is false an empty component is an error.
 */
template <bool emptyOk>
inline StringPiece FormatArg::doSplitKey() {
  // consume a pending string key, if one was injected
  if (nextKeyMode_ == NextKeyMode::STRING) {
    nextKeyMode_ = NextKeyMode::NONE;
    if (!emptyOk) {  // static
      enforce(!nextKey_.empty(), "non-empty key required");
    }
    return nextKey_;
  }
  if (key_.empty()) {
    if (!emptyOk) {  // static
      error("non-empty key required");
    }
    return StringPiece();
  }
  const char* b = key_.begin();
  const char* e = key_.end();
  const char* p;
  // 'key[sub]' form: strip the trailing ']' and split at the matching '['
  if (e[-1] == ']') {
    --e;
    p = static_cast<const char*>(memchr(b, '[', e - b));
    enforce(p, "unmatched ']'");
  } else {
    // 'key.sub' form: split at the first '.'
    p = static_cast<const char*>(memchr(b, '.', e - b));
  }
  if (p) {
    // keep everything after the separator for the next split
    key_.assign(p + 1, e);
  } else {
    // no separator: this was the last component
    p = e;
    key_.clear();
  }
  if (!emptyOk) {  // static
    enforce(b != p, "non-empty key required");
  }
  return StringPiece(b, p);
}
/**
 * Split an integer key component. Returns a pending integer key injected via
 * setNextIntKey if present; otherwise parses the next key component as int.
 * NOTE(review): only std::out_of_range is caught here -- confirm that
 * folly::to<int>'s failure exception derives from it on this revision.
 */
inline int FormatArg::splitIntKey() {
  if (nextKeyMode_ == NextKeyMode::INT) {
    nextKeyMode_ = NextKeyMode::NONE;
    return nextIntKey_;
  }
  try {
    return to<int>(doSplitKey<true>());
  } catch (const std::out_of_range& e) {
    error("integer key required");
    return 0; // unreached
  }
}
} // namespace folly
|
{
"pile_set_name": "Github"
}
|
# University repository
This repository has been created to collect the required material to set up DevTest Labs in universities. This is useful both for IT admins and students: the former won't have to maintain physical machines, and the latter will always have fresh machines available both for classes and self-service usage.
## Scenario Scripts Folder
This folder contains:
- [Powershell scripts file which needs to be run either via Console or via Automation account on Azure to set up the environments for the imagined scenarios.](ScenarioScripts)
- [Add-AzureDtlVM](ScenarioScripts/Add-AzureDtlVM.ps1): This script adds the specified number of Azure virtual machines to a DevTest Lab.
- [Add-AzureDtlVMAutoVar](ScenarioScripts/Add-AzureDtlVMAutoVar.ps1): This script adds the number of Azure virtual machines in the DevTest Lab by reading some parameters from AutomationVariable.
- [Add-GroupPermissionsDevTestLab](ScenarioScripts/Add-GroupPermissionsDevTestLab.ps1): This script adds the specified role to the AD Group in the DevTest Lab.
- [Common](ScenarioScripts/Common.ps1): This script contains many useful functions for the other scripts.
- [DeallocateStoppedVM](ScenarioScripts/DeallocateStoppedVM.ps1): This script deallocates every stopped Azure virtual machines.
- [Manage-AzureDtlFixedPool](ScenarioScripts/Manage-AzureDtlFixedPool.ps1): This script guarantees that the Virtual Machine pool of the Lab equals to the PoolSize specified as Azure Tag of Lab.
- [Remove-AzureDtlLabVMs](ScenarioScripts/Remove-AzureDtlLabVMs.ps1): This script guarantees that the Virtual Machine pool of the Lab equals to the PoolSize specified as Azure Tag of Lab.
- [Remove-AzureDtlVM](ScenarioScripts/Remove-AzureDtlVM.ps1): This script deletes every Azure virtual machines in the DevTest Lab.
- [Remove-GroupPermissionsDevTestLab](ScenarioScripts/Remove-GroupPermissionsDevTestLab.ps1): This script removes the specified role from the AD Group in the DevTest Lab.
- [Test-AzureDtlVMs](ScenarioScripts/Test-AzureDtlVMs.ps1): Given LabName and LabSize, this script verifies how many Azure virtual machines are inside the DevTest Lab and throws an error inside the logs when the number is greater or lower than size +/- VMDelta.
## Roles Folder
- [Roles folder which contains the json file which specifies the actions that a University user can take on a VM](Roles)
## Shutdown Scripts folder
- [Shutdown scripts folder which contains the scripts to automatically shutdown a VM if it's not used for a certain period of time](Shutdown%20Scripts)
- [LoadIdleScript](Shutdown%20Scripts/LoadIdleScript.ps1): This script creates a task inside Windows Task Scheduler getting a file script from a blob storage.
- [ShutdownOnIdleV2](Shutdown%20Scripts/ShutdownOnIdleV2.ps1): This script shutdowns the machine if the user hasn't been active.
## Simplified JS portal folder:
- [The Simplified JS portal folder contains the files needed to set up a simplified portal for the students to claim a VM in an easier way](SimplifiedJSPortal)
## Creating the appropriate Azure credential file to run the scripts from command line
In 'powershell' do the following:
Login-AzureRmAccount
Set-AzureRmContext -SubscriptionId "XXXXX-XXXX-XXXX"
Save-AzureRMProfile -Path "$env:APPDATA\AzProfile.txt"
This saves the credentials file in the location on disk where the scripts look for it by default.
|
{
"pile_set_name": "Github"
}
|
MANIFEST-000002
|
{
"pile_set_name": "Github"
}
|
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
// Allocation stress: create 1000 large (10 MB) ArrayBuffers so the engine
// must reclaim them; the test passes if it completes without OOM or crash.
// NOTE(review): 'i' is an undeclared (implicit global) loop variable --
// presumably deliberate for this engine test; confirm before changing.
for (i = 0; i < 1000; i++) {
    var a = new ArrayBuffer(10000000);
}
WScript.Echo("pass");
|
{
"pile_set_name": "Github"
}
|
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": {
"0": "AM",
"1": "PM"
},
"DAY": {
"0": "Sunday",
"1": "Monday",
"2": "Tuesday",
"3": "Wednesday",
"4": "Thursday",
"5": "Friday",
"6": "Saturday"
},
"MONTH": {
"0": "January",
"1": "February",
"2": "March",
"3": "April",
"4": "May",
"5": "June",
"6": "July",
"7": "August",
"8": "September",
"9": "October",
"10": "November",
"11": "December"
},
"SHORTDAY": {
"0": "Sun",
"1": "Mon",
"2": "Tue",
"3": "Wed",
"4": "Thu",
"5": "Fri",
"6": "Sat"
},
"SHORTMONTH": {
"0": "Jan",
"1": "Feb",
"2": "Mar",
"3": "Apr",
"4": "May",
"5": "Jun",
"6": "Jul",
"7": "Aug",
"8": "Sep",
"9": "Oct",
"10": "Nov",
"11": "Dec"
},
"fullDate": "EEEE, MMMM d, y",
"longDate": "MMMM d, y",
"medium": "MMM d, y h:mm:ss a",
"mediumDate": "MMM d, y",
"mediumTime": "h:mm:ss a",
"short": "M/d/yy h:mm a",
"shortDate": "M/d/yy",
"shortTime": "h:mm a"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "$",
"DECIMAL_SEP": ".",
"GROUP_SEP": ",",
"PATTERNS": {
"0": {
"gSize": 3,
"lgSize": 3,
"macFrac": 0,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
"1": {
"gSize": 3,
"lgSize": 3,
"macFrac": 0,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "(\u00a4",
"negSuf": ")",
"posPre": "\u00a4",
"posSuf": ""
}
}
},
"id": "en-na",
"pluralCat": function (n) { if (n == 1) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]);
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android" android:width="48.0dip" android:height="48.0dip"
android:viewportHeight="48.0"
android:viewportWidth="48.0">
<path android:pathData="M0,0 L48,0 L48,48 L0,48 Z"/>
<path android:fillColor="@android:color/holo_blue_bright"
android:pathData="M24,4 C12.9499998093,4,4,12.9499998093,4,24 S12.9499998093,44,24,44 S44,35.0499992371,44,24 S35.0499992371,4,24,4 Z M22,39.8600006104 C14.1099996567,38.8800010681,8,32.1599998474,8,24 C8,22.7700004578,8.14999961853,21.5699996948,8.42000007629,20.4200000763 L18,30 L18,32 C18,34.2099990845,19.7900009155,36,22,36 L22,39.8600006103 Z M35.7900009155,34.7900009156 C35.2799987793,33.1699981689,33.7799987793,32,32,32 L30,32 L30,26 C30,24.8999996185,29.1000003815,24,28,24 L16,24 L16,20 L20,20 C21.1000003815,20,22,19.1000003815,22,18 L22,14 L26,14 C28.2099990845,14,30,12.2100000381,30,10 L30,9.17000007629 C35.8600006103,11.5399999619,40,17.2800006866,40,24 C40,28.1599998474,38.4000015259,31.9400005341,35.7900009155,34.7900009155 Z"/>
</vector>
|
{
"pile_set_name": "Github"
}
|
var classgts_1_1_macro_scheduler =
[
[ "MacroScheduler", "classgts_1_1_macro_scheduler.html#a3edbcda7dd4761fe3ac2db4e94f2ede6", null ],
[ "~MacroScheduler", "classgts_1_1_macro_scheduler.html#ae100b9da87053a4170100670070f6109", null ],
[ "_allocateWorkload", "classgts_1_1_macro_scheduler.html#a4508f5b4cbe74392ec37fe99d659f5bf", null ],
[ "_freeWorkload", "classgts_1_1_macro_scheduler.html#ae1c067282ea2bcbc79d22d495604330f", null ],
[ "allocateNode", "classgts_1_1_macro_scheduler.html#a45a80bf2775a677ad1e6adeef6bfd66a", null ],
[ "buildSchedule", "classgts_1_1_macro_scheduler.html#a674d8bde40dd9b40652028facb9eb797", null ],
[ "computeResources", "classgts_1_1_macro_scheduler.html#a8822478e3a255da7d3f13b3eb50bf7dc", null ],
[ "destroyNode", "classgts_1_1_macro_scheduler.html#abf287c3c45901e67738e369d0dce0be5", null ],
[ "executeSchedule", "classgts_1_1_macro_scheduler.html#ab08462eae1949fed2aa6fdc973638438", null ],
[ "findComputeResource", "classgts_1_1_macro_scheduler.html#aedb95d1cc4dfff1001ab6e6f9eab310f", null ],
[ "freeSchedule", "classgts_1_1_macro_scheduler.html#a7ffe0c5d6d65396f9c617e0fe0e41d4d", null ],
[ "init", "classgts_1_1_macro_scheduler.html#a9bd283ec7970ba5789e497474ad9450d", null ],
[ "Node", "classgts_1_1_macro_scheduler.html#a6db9d28bd448a131448276ee03de1e6d", null ],
[ "m_computeResources", "classgts_1_1_macro_scheduler.html#af9a05cdf5fb14b54f2d747c076842ace", null ]
];
|
{
"pile_set_name": "Github"
}
|
/* A Bison parser, made by GNU Bison 2.7. */
/* Locations for Bison parsers in C++
Copyright (C) 2002-2007, 2009-2012 Free Software Foundation, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
/* As a special exception, you may create a larger work that contains
part or all of the Bison parser skeleton and distribute that work
under terms of your choice, so long as that work isn't itself a
parser generator using the skeleton or a modified version thereof
as a parser skeleton. Alternatively, if you modify or redistribute
the parser skeleton itself, you may (at your option) remove this
special exception, which will cause the skeleton and the resulting
Bison output files to be licensed under the GNU General Public
License without this special exception.
This special exception was added by the Free Software Foundation in
version 2.2 of Bison. */
/**
** \file location.hh
** Define the yy::location class.
*/
#ifndef YY_YY_LOCATION_HH_INCLUDED
# define YY_YY_LOCATION_HH_INCLUDED
# include "position.hh"
namespace yy {
/* Line 166 of location.cc */
#line 47 "location.hh"
/// Abstract a location.
/// A source-text region: the half-open range [begin, end) of positions.
/// Generated by GNU Bison; the interface mirrors position.hh.
class location
{
public:
  /// Construct a location from \a b to \a e.
  location (const position& b, const position& e)
    : begin (b)
    , end (e)
  {
  }
  /// Construct a 0-width location in \a p (begin == end).
  explicit location (const position& p = position ())
    : begin (p)
    , end (p)
  {
  }
  /// Construct a 0-width location in \a f, \a l, \a c
  /// (filename, 1-based line, 1-based column).
  explicit location (std::string* f,
                     unsigned int l = 1u,
                     unsigned int c = 1u)
    : begin (f, l, c)
    , end (f, l, c)
  {
  }
  /// Initialization: reset both ends to (\a f, \a l, \a c).
  void initialize (std::string* f = YY_NULL,
                   unsigned int l = 1u,
                   unsigned int c = 1u)
  {
    begin.initialize (f, l, c);
    end = begin;
  }
  /** \name Line and Column related manipulators
   ** \{ */
public:
  /// Reset initial location to final location (start a new token).
  void step ()
  {
    begin = end;
  }
  /// Extend the current location to the COUNT next columns.
  /// Only \c end moves; \c begin is left in place.
  void columns (unsigned int count = 1)
  {
    end += count;
  }
  /// Extend the current location to the COUNT next lines.
  void lines (unsigned int count = 1)
  {
    end.lines (count);
  }
  /** \} */
public:
  /// Beginning of the located region.
  position begin;
  /// End of the located region (one past the last character).
  position end;
};
/// Join two location objects: the result spans from the start of
/// \a begin to the end of \a end.
inline const location operator+ (const location& begin, const location& end)
{
  return location (begin.begin, end.end);
}
/// Widen a location by \a width columns; \a begin itself is unchanged.
inline const location operator+ (const location& begin, unsigned int width)
{
  location widened = begin;
  widened.columns (width);
  return widened;
}
/// Widen \a loc in place by \a width columns and return it.
inline location& operator+= (location& loc, unsigned int width)
{
  loc.columns (width);
  return loc;
}
/// Two locations are equal when both endpoints coincide.
inline bool operator== (const location& lhs, const location& rhs)
{
  return (lhs.begin == rhs.begin) && (lhs.end == rhs.end);
}
/// Inequality, defined as the negation of operator==.
inline bool operator!= (const location& lhs, const location& rhs)
{
  return !(lhs == rhs);
}
/** \brief Intercept output stream redirection.
 ** \param ostr the destination output stream
 ** \param loc a reference to the location to redirect
 **
 ** Avoid duplicate information: print the begin position in full,
 ** then only the parts of the (inclusive) end position that differ,
 ** e.g. "foo.y:3.1-4.7", "foo.y:3.1-7", or just "foo.y:3.1".
 */
template <typename YYChar>
inline std::basic_ostream<YYChar>&
operator<< (std::basic_ostream<YYChar>& ostr, const location& loc)
{
  // loc.end is one past the region, so back up one column for display.
  position last = loc.end - 1;
  ostr << loc.begin;
  if (last.filename
      && (!loc.begin.filename
          || *loc.begin.filename != *last.filename))
    ostr << '-' << last;            // different file: print end in full
  else if (loc.begin.line != last.line)
    ostr << '-' << last.line << '.' << last.column;  // same file, new line
  else if (loc.begin.column != last.column)
    ostr << '-' << last.column;     // same line: column only
  return ostr;
}
} // yy
/* Line 296 of location.cc */
#line 180 "location.hh"
#endif /* !YY_YY_LOCATION_HH_INCLUDED */
|
{
"pile_set_name": "Github"
}
|
<div class="like-buttons d-print-none btn-toolbar" style="margin-bottom :4px;">
<% if logged_in_as(['admin', 'moderator']) || (current_user && (current_user.uid == node.uid || node.has_tag("with:#{current_user.username}"))) %>
<% if node.type == 'note' %>
<a aria-label="Edit post" <% if current_user && current_user.uid != node.uid %>data-confirm='Please be aware that you are editing a post by another contributor, a privilege you have only because you are an admin or co-author.' <% end %> class='btn btn-outline-secondary btn-circle btn-sm' href='/notes/edit/<%= node.id %>?t=<%= Time.now.to_i %><% if params[:controller] == "questions" %>&redirect=question&template=question<% end %>'>
<i class='ml-1 ff fa fa-pencil'></i>
</a>
<% end %>
<% end %>
<% if node.type == 'page' %>
<% if node.has_tag('locked') && (current_user ? (current_user.role != "moderator" && current_user.role != "admin") : true) %>
<a href="/wiki/locked" class='btn btn-outline-secondary btn-circle btn-sm'>
<i class="ml-1 ff fa fa-lock"></i>
</a>
<% elsif current_user && current_user.first_time_poster %>
<a class='btn btn-outline-secondary btn-circle btn-sm' rel="tooltip" title="Please post a question or other content before editing the wiki. Click to learn why." data-placement="top" href="https://publiclab.org/notes/tester/04-23-2016/new-moderation-system-for-first-time-posters">
<i class="ml-1 ff fa fa-lock"></i>
</a>
<% else %>
<a class="btn-circle btn btn-outline-secondary" id="edit-btn" href='<%= node.edit_path %>?t=<%= Time.now.to_i %>'>
<i class='ml-1 ff fa fa-pencil'></i>
</a>
<% end %>
<% end %>
<span data-toggle="tooltip" data-placement="top" rel="tooltip" title="Liked by <%= node.likers.size %> people" class="btn btn-outline-secondary btn-circle btn-like" node-id="<%= node.id %>" id="like-button-<%= node.id %>">
<% if !current_user %>
<a id="open-login-like" data-hashparams="like" data-toggle="modal" data-target="#loginModal">
<span id="like-star-<%= node.id %>" class="ff fa fa-star"></span>
</a>
<% else %>
<span id="like-star-<%= node.id %>" class="ff fa fa-star<% if !node.liked_by(current_user.uid) %>-o<% end %>"></span>
<% end %>
</span>
<% if @node %>
<% subpage_count = Tag.find_pages("parent:#{@node.slug}", 100).count %>
<% if subpage_count > 0 %>
<span class="btn btn-outline-secondary btn-circle" data-toggle="tooltip" data-placement="top" rel="tooltip" title="<%= subpage_count %> sub-pages">
<a href="/wiki/tag/parent:<%= @node.slug %>">
<i class="ff fa fa-book"></i>
</a>
</span>
<% end %>
<% end %>
<span data-toggle="tooltip" data-placement="bottom" title="Tools" id="menu-btn" class="btn btn-outline-secondary btn-circle btn-sm" rel="popover" data-placement="left" data-html="true" style="overflow: auto; max-height: 500px" data-content="
<p><b>Users who liked this</b></p>
<% node.likers.each do |user| %>
<i class='fa fa-star-o'></i> <a href='/profile/<%= user.username %>/'><%= user.username %></a><br />
<% end %>
<% if node.likers.size == 0 %><i>None</i><% end %>
<% if node.type == 'note' && current_user && node.uid != current_user.uid%>
<hr style='margin-bottom:3px;' />
<p><b>Award a Barnstar:</b></p>
<form class='form' action='/barnstar/give'>
<select name='star' class='form-control'>
<option value='basic'>The basic barnstar</option>
<option value='photo-documentation'>The photo documentation barnstar</option>
<option value='video-documentation'>The video documentation barnstar</option>
<option value='watchdog'>The watchdog barnstar</option>
<option value='empiricism'>The empiricism barnstar</option>
<option value='excessive-enthusiasm'>The enthusiasm barnstar</option>
</select><br />
<input type='hidden' name='nid' value='<%= node.id %>'/>
<input type='submit' class='btn btn-primary' value='Give'/> <a _target='blank' href='/wiki/barnstars'>Learn more</a>
</form>
<br>
<% end %>
<% if node.type == "page" %>
<p><a class='btn btn-sm btn-outline-secondary' rel='tooltip' title='Try the beta inline Rich Wiki editor.' data-placement='top' href='<%= node.path %>?raw=true'>
<i style='color:#0c8;' class='fa fa-pencil'></i>
</a></p>
<% end %>
<% if logged_in_as(['admin', 'moderator']) || (current_user && current_user.uid == node.uid) %>
<hr />
<div class='btn-group'>
<a rel='tooltip' title='Flag as spam' class='btn btn-sm btn-outline-secondary btn-flag-spam-<%= node.id %>' href='/moderate/flag_node/<%= node.id %>'>
<i class='fa fa-flag'></i>
</a>
<li data-toggle='tooltip' data-placement='top' title='Follow by tag or author' id='menu-follow-btn' class='btn btn-outline-secondary btn-sm requireLogin nestedPopover' data-html='true' rel='popover' data-placement='left' data-content=" <%= "No tags" if tagnames.nil? || tagnames.size == 0 %> <% if tagnames %><% tagnames.each do |tagname| %><p style='margin-bottom:3px; overflow: hidden; text-overflow: ellipsis;'><a href='/subscribe/tag/<%= tagname %>' class='btn btn-outline-secondary btn-sm'><%= tagname %></a></p><% end %><% end %><hr /><i class='fa fa-user'></i><% if current_user && !current_user.following?(node.author) && node.author != current_user %> <a class='btn btn-sm' href='/relationships?followed_id=<%= node.author.id %>' data-method='post' > <%= node.author.name %></a><% else %> <%= node.author.name %><% end %>">
<i class='fa fa-user-plus' aria-hidden='true'></i>
</li>
<a href='/notes/delete/<%= node.id %>' class='btn btn-outline-secondary btn-sm' id='menu-delete-btn' data-confirm='All revisions will be lost, and you cannot undo this action. If this is a spam page, be sure that it did not overwrite valid content before deleting the entire page and the history.'>
<i class='fa fa-trash'></i><span class='d-none d-lg-inline'></span>
</a>
<a href='/notes/raw/<%= node.id %>' class='btn btn-outline-secondary btn-sm'>
<i class='fa fa-code'></i><span class='d-none d-lg-inline'> Raw</span>
</a>
<% if logged_in_as(['admin', 'moderator']) %>
<% if node.type == 'note' %>
<a class='btn btn-outline-secondary btn-sm' href='/moderate/spam/<%= node.id %>'><i class='fa fa-ban-circle'></i> Spam</a>
<% elsif node.type == 'page' %>
<a class='btn btn-outline-secondary btn-sm' href='/moderate/revision/spam/<%= @revision.vid %>'><i class='fa fa-ban-circle'></i> Spam revision</a>
<% end %>
<% end %>
</div>
<% end %>
<br />
<hr style='clear:both;' />
<div>
<span>
<% if node.type == 'note' %>
<%= number_with_delimiter(node.views) %> <%= translation('notes.show.views') %>
<% if node.comments %>
| <a href='#comments'><%= node.comments.size %> <%= translation('notes.show.comments') %></a>
<% end %>
| <%= raw translation('wiki.show.last_edited', url1: "/profile/" + node.latest.author.name, author: node.latest.author.name, time: time_ago_in_words(node.latest.created_at)) %>
<% end %>
<% if node.type == 'page' %>
<%= number_with_delimiter(node.views) %> <%= translation('wiki.show.views') %>
<% if node.comments %>
| <a href='<%= node.path %>/comments'><%= node.comments.length %> Comments</a>
<% end %>
| <%= raw translation('wiki.show.last_edited', url1: "/profile/" + @revision.author.name, author: @revision.author.name, time: time_ago_in_words(@revision.created_at)) %>
<% end %>
</span>
</div>
<br />
<hr style='clear:both;' />
<div>
<p><a id='print-new' style='cursor:pointer;'><i class='fa fa-print'></i> Printable format (beta) </a></p>
<p><a id='print-command-no-links'><i class='fa fa-print'></i> Print without page links</a></p>
<p><a id='print-command-3-col'><i class='fa fa-print'></i> Print in 3-column layout</a></p>
</div>
">
<span style="margin: 5px !important;" class="ff fa fa-ellipsis-h"></span>
</span>
<style type="text/css">
/* Styling the links to remove underline on hover and setting pointer as cursor */
#print-command-3-col , #print-command-no-links {
text-decoration : none;
cursor : pointer;
}
.bs-popover-bottom {
overflow: visible;
max-width: 1000px;
}
.like-buttons {
margin-top:5px;
}
like-buttons a .fa, .popover .btn-group a .fa {
color: inherit;
}
@media (max-width: 992px) {
.like-buttons {
justify-content: center !important;
margin-top: 30px;
}
}
</style>
<script>
$('[rel="tooltip"]').on('click', function () {
$(this).tooltip('hide')
})
function print_three_column() {
$('body').css('column-count', 3)
.css('column-gap', '50px');
$('.popover').popover('hide');
window.print();
$('body').css('column-count', 1)
.css('column-gap', 0);
}
function print_linkless() {
window.print();
}
function print_new() {
var node_type = '<%= node.type %>';
if (node_type == 'page'){
<% if @revision %>
window.open(
"/wikis/print/<%= @node.id %>",
'_blank' // <- This is what makes it open in a new window.
);
<% end %>
}
else if (node_type == 'note'){
window.open(
"/notes/print/" + <%= node.id %>,
'_blank' // <- This is what makes it open in a new window.
);
}
}
// Event listener for CLICK events on the print links
$(document).on("click", "#print-command-3-col",print_three_column);
$(document).on("click", "#print-command-no-links",print_linkless);
$(document).on("click", "#print-new",print_new);
jQuery(document).ready(function() {
// 304 cached response yields no data to insert, which is not useful
jQuery.ajaxSetup({
cache: false
});
// opens modal to login before you can like;
$("#open-login-like").click(function() {
$("input[name='hash_params']").val($(this).attr("data-hashparams"));
});
// triggers liking and unliking
<% if current_user %>
$('#like-button-<%= node.id %>').on('click', debounce(<% if node.liked_by(current_user.uid) %>clickliked<% else %>clicknotliked<% end %>, 200, true));
// leaving the block below commented out for now - possibly useful for implementing comment-like functionality?
// if(window.location.href.includes("#like")) {
// $('#like-button-<%= node.id %>').click();
// }
<% end %>
});
$('body').on('click', '.nestedPopover', function() {
$(this).popover({
container: 'body',
trigger: 'focus click'
});
$(this).popover('show');
$(this).removeClass("nestedPopover");
});
$('body').on('click', function (e) { // If there's a click event in the body
$('[rel=popover]').each(function () { // For each popover
/*
* The popover will only `hide` if all the conditions below are satisfied :
* 1. The target triggering the `click` event is not `this`
* 2. `this` doesn't have a child triggering the `click` event
* 3. Any element with `.popover` class doesn't have a child triggering the `click` event
*/
if (!$(this).is(e.target) && $(this).has(e.target).length === 0 && $('.popover').has(e.target).length === 0) {
$(this).popover('hide');
}
});
});
</script>
</div>
|
{
"pile_set_name": "Github"
}
|
//===- AMDGPUIntrinsicInfo.h - AMDGPU Intrinsic Information ------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//==-----------------------------------------------------------------------===//
//
/// \file
/// \brief Interface for the AMDGPU Implementation of the Intrinsic Info class.
//
//===-----------------------------------------------------------------------===//
#ifndef LLVM_LIB_TARGET_R600_AMDGPUINTRINSICINFO_H
#define LLVM_LIB_TARGET_R600_AMDGPUINTRINSICINFO_H
#include "llvm/IR/Intrinsics.h"
#include "llvm/Target/TargetIntrinsicInfo.h"
namespace llvm {
class TargetMachine;
namespace AMDGPUIntrinsic {
// Target intrinsic IDs, numbered after the last generic LLVM intrinsic.
// The enumerators themselves come from the TableGen-generated .inc file.
enum ID {
  last_non_AMDGPU_intrinsic = Intrinsic::num_intrinsics - 1,
#define GET_INTRINSIC_ENUM_VALUES
#include "AMDGPUGenIntrinsics.inc"
#undef GET_INTRINSIC_ENUM_VALUES
  , num_AMDGPU_intrinsics
};
} // end namespace AMDGPUIntrinsic

// AMDGPU implementation of TargetIntrinsicInfo: maps between intrinsic
// names and IDs and materializes declarations in a Module.
class AMDGPUIntrinsicInfo : public TargetIntrinsicInfo {
public:
  AMDGPUIntrinsicInfo();
  // Mangled name for intrinsic IntrId, specialized over Tys if overloaded.
  std::string getName(unsigned IntrId, Type **Tys = nullptr,
                      unsigned numTys = 0) const override;
  // Reverse lookup: intrinsic ID for Name (Len bytes), 0 if unknown.
  unsigned lookupName(const char *Name, unsigned Len) const override;
  bool isOverloaded(unsigned IID) const override;
  // Get-or-create the Function declaration for intrinsic ID in M.
  Function *getDeclaration(Module *M, unsigned ID,
                           Type **Tys = nullptr,
                           unsigned numTys = 0) const override;
};
} // end namespace llvm
#endif
|
{
"pile_set_name": "Github"
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.tree;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import static java.util.Objects.requireNonNull;
/**
 * A type argument of a parameterized SQL data type in the AST
 * (for example the {@code varchar} inside {@code array(varchar)}).
 * Immutable; equality is based solely on the wrapped {@link DataType}.
 */
public class TypeParameter
        extends DataTypeParameter
{
    private final DataType type;

    /**
     * @param type the wrapped data type; never null
     */
    public TypeParameter(DataType type)
    {
        super(Optional.empty());
        this.type = requireNonNull(type, "value is null");
    }

    /** Returns the wrapped data type. */
    public DataType getValue()
    {
        return type;
    }

    @Override
    public String toString()
    {
        return type.toString();
    }

    @Override
    public List<? extends Node> getChildren()
    {
        // The wrapped type is this node's only child.
        return ImmutableList.of(type);
    }

    @Override
    protected <R, C> R accept(AstVisitor<R, C> visitor, C context)
    {
        return visitor.visitTypeParameter(this, context);
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) {
            return true;
        }
        if (o == null) {
            return false;
        }
        if (getClass() != o.getClass()) {
            return false;
        }
        return type.equals(((TypeParameter) o).type);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(type);
    }

    @Override
    public boolean shallowEquals(Node other)
    {
        // Shallow comparison ignores children; class identity is enough.
        return sameClass(this, other);
    }
}
|
{
"pile_set_name": "Github"
}
|
// Unit tests for Tone.Master: the global output bus singleton.
define(["Test", "Tone/core/Master", "Tone/core/Tone", "helper/Offline", "helper/PassAudio"],
	function (Test, Master, Tone, Offline, PassAudio) {

	describe("Master", function(){

		// the singleton is created on library load
		it ("exists", function(){
			expect(Tone.Master).to.exist;
		});

		// toMaster is patched onto both Tone and native AudioNode prototypes
		it ("provides a toMaster method", function(){
			expect(Tone.prototype.toMaster).is.a("function");
			expect(AudioNode.prototype.toMaster).is.a("function");
		});

		// mute is a plain read/write boolean property
		it ("can be muted and unmuted", function(){
			Tone.Master.mute = false;
			expect(Tone.Master.mute).to.be.false;
			Tone.Master.mute = true;
			expect(Tone.Master.mute).to.be.true;
		});

		// PassAudio verifies a signal routed to master reaches the output
		it ("passes audio through", function(done){
			PassAudio(function(input){
				input.toMaster();
			}, done);
		});

		// with mute on, the offline render must be all-zero samples
		it ("passes no audio when muted", function(done){
			var offline = new Offline();
			offline.before(function(){
				Tone.Master.mute = true;
			});
			offline.test(function(sample){
				expect(sample).to.equal(0);
			});
			offline.after(done);
			offline.run();
		});

		// volume is expressed in decibels
		it ("has a master volume control", function(){
			Tone.Master.volume.value = -20;
			expect(Tone.Master.volume.value).to.be.closeTo(-20, 0.1);
		});

		// chain() inserts nodes between the master input and the destination
		it ("can pass audio through chained nodes", function(done){
			var gain;
			PassAudio(function(input){
				gain = Tone.context.createGain();
				input.connect(gain);
				Tone.Master.chain(gain);
			}, function(){
				gain.disconnect();
				done();
			});
		});
	});
});
|
{
"pile_set_name": "Github"
}
|
<component name="InspectionProjectProfileManager">
<profile version="1.0" is_locked="false">
<option name="myName" value="Project Default" />
<option name="myLocal" value="false" />
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredIdentifiers">
<list>
<option value="testRunner.runner.findElement" />
<option value="testDriver.weChatHome.*" />
</list>
</option>
</inspection_tool>
</profile>
</component>
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="utf-8"?>
<ScrollView
android:layout_width="match_parent"
android:layout_height="match_parent"
xmlns:android="http://schemas.android.com/apk/res/android">
<LinearLayout
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent">
<com.hankkin.library.NoScrollListView
android:divider="@color/grey_normal_bg"
android:dividerHeight="@dimen/spacing_tiny"
android:id="@+id/nlv_good_detial_imgs"
android:layout_width="match_parent"
android:layout_height="wrap_content"/>
<TextView
android:background="@drawable/list_gray_item"
android:drawableRight="@drawable/rightarrow"
android:text="安全交易"
android:padding="@dimen/spacing_small"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
<include layout="@layout/layout_line"/>
<TextView
android:background="@drawable/list_gray_item"
android:drawableRight="@drawable/rightarrow"
android:text="免责声明"
android:padding="@dimen/spacing_small"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
<include layout="@layout/layout_line"/>
<TextView
android:background="@drawable/list_gray_item"
android:drawableRight="@drawable/rightarrow"
android:text="交易担保"
android:padding="@dimen/spacing_small"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
<include layout="@layout/layout_line"/>
<TextView
android:background="@drawable/list_gray_item"
android:drawableRight="@drawable/rightarrow"
android:text="我要入驻"
android:padding="@dimen/spacing_small"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
<include layout="@layout/layout_line"/>
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:background="@color/grey_normal_bg"
android:padding="@dimen/spacing_smallest">
<View
android:layout_width="wrap_content"
android:layout_height="0.5dp"
android:layout_centerVertical="true"
android:layout_marginRight="@dimen/spacing_normal"
android:layout_toLeftOf="@+id/tv_good_detail_daodi"
android:background="@color/deep_transparent" />
<TextView
android:id="@+id/tv_good_detail_daodi"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerInParent="true"
android:text="已经最到底啦!"
android:textSize="@dimen/font_small" />
<View
android:layout_width="wrap_content"
android:layout_height="0.5dp"
android:layout_centerVertical="true"
android:layout_marginLeft="@dimen/spacing_normal"
android:layout_toRightOf="@+id/tv_good_detail_daodi"
android:background="@color/deep_transparent" />
</RelativeLayout>
</LinearLayout>
</ScrollView>
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<div xmlns:field="urn:jsptagdir:/WEB-INF/tags/form/fields" xmlns:form="urn:jsptagdir:/WEB-INF/tags/form" xmlns:jsp="http://java.sun.com/JSP/Page" version="2.0">
<jsp:directive.page contentType="text/html;charset=UTF-8"/>
<jsp:output omit-xml-declaration="yes"/>
<form:update id="fu_nl_bzk_brp_model_data_kern_Verdrag" modelAttribute="verdrag" path="/verdrags" versionField="none" z="5gMV8uWTueeJcZYk7wa6yNjD64E=">
<field:simple field="acties" id="c_nl_bzk_brp_model_data_kern_Verdrag_acties" messageCode="entity_reference_not_managed" messageCodeAttribute="Actie" z="dlTgfytVGm9j74Mf4IppP079SpY="/>
<field:input field="oms" id="c_nl_bzk_brp_model_data_kern_Verdrag_oms" required="true" z="4u91nInJIWUSWkByLF35jEqgSY4="/>
<field:input field="dataanvgel" id="c_nl_bzk_brp_model_data_kern_Verdrag_dataanvgel" validationMessageCode="field_invalid_integer" z="xtLHrtcDf0MJ5929aSnQ+dMX/94="/>
<field:input field="dateindegel" id="c_nl_bzk_brp_model_data_kern_Verdrag_dateindegel" validationMessageCode="field_invalid_integer" z="kZxKOzMhKKBjhBNst+uEXpRNsvE="/>
</form:update>
</div>
|
{
"pile_set_name": "Github"
}
|
import { Injectable, ErrorHandler } from '@angular/core';
import { Subject, BehaviorSubject } from 'rxjs';
import { IDictionary } from 'app/interfaces/shared';
// Names of the events plugins can emit/subscribe to via PluginEventService.
// '*-ready' events are replayed to late subscribers (see getSubject).
export type PluginEvent =
  | 'app-ready'
  | 'new-window'
  ;
@Injectable({
providedIn: 'root'
})
export class PluginEventService {
private subjects: IDictionary<Subject<any>> = {};
emit(eventName: PluginEvent, data: any) {
this.getSubject(eventName).next(data);
}
on(eventName: PluginEvent, handler: (value: any) => void) {
return this.getSubject(eventName).subscribe((data) => {
if (data !== null) {
return handler(data);
}
});
}
unsubscribe() {
Object.values(this.subjects).forEach(subject => subject.unsubscribe());
}
private getSubject(eventName: string) {
const subjectName = this.createSubjectName(eventName);
if (!this.subjects[subjectName]) {
if (subjectName.endsWith('-ready')) {
this.subjects[subjectName] = new BehaviorSubject(null);
} else {
this.subjects[subjectName] = new Subject();
}
}
return this.subjects[subjectName];
}
private createSubjectName(eventName: string) {
return `$ ${eventName}`;
}
}
|
{
"pile_set_name": "Github"
}
|
StartChar: numbersign
Encoding: 35 35 93
Width: 1060
VWidth: 6
Flags: W
HStem: -19 21G<94 262 430 610.5> 385 158<79.4172 206 421 550 753 900> 795 160<166.791 334 546 679 881 985> 1312 20G<578 606.5 827 1010>
VStem: 485 162<1157.32 1331>
LayerCount: 2
Fore
SplineSet
497 795 m 1
475 715 446 623 421 541 c 1
462 543 494 543 528 543 c 2
602 543 l 1
629 627 655 720 679 798 c 1
608 798 565 796 497 795 c 1
262 -19 m 1
94 -18 l 1
123 95 155 208 206 375 c 1
153 374 108 365 78 356 c 1
57 503 l 1
117 523 190 529 257 534 c 1
289 614 310 711 334 790 c 1
320 790 l 1
254 786 203 781 165 768 c 1
141 915 l 1
211 940 302 944 387 949 c 1
445 1133 450 1191 485 1331 c 1
514 1331 559 1332 597 1332 c 0
616 1332 633 1332 647 1331 c 1
616 1210 589 1099 546 955 c 1
728 955 l 1
779 1118 806 1207 848 1326 c 1
1010 1324 l 1
961 1203 931 1114 881 956 c 1
1016 956 l 1
985 795 l 1
957 796 924 799 832 799 c 1
809 720 780 629 753 544 c 1
932 544 l 1
900 383 l 1
873 384 824 386 710 386 c 1
650 189 627 99 594 -22 c 1
430 -21 l 1
463 91 496 209 550 385 c 1
526 385 l 2
472 385 422 383 371 380 c 1
327 230 262 -19 262 -19 c 1
EndSplineSet
Validated: 1
EndChar
|
{
"pile_set_name": "Github"
}
|
load("@io_bazel_rules_go//go:def.bzl", "go_library")
go_library(
name = "go_default_library",
srcs = [
"client.go",
"docker.go",
"factory.go",
"handler.go",
],
importmap = "k8s.io/kubernetes/vendor/github.com/google/cadvisor/container/docker",
importpath = "github.com/google/cadvisor/container/docker",
visibility = ["//visibility:public"],
deps = [
"//vendor/github.com/blang/semver:go_default_library",
"//vendor/github.com/docker/docker/api/types:go_default_library",
"//vendor/github.com/docker/docker/api/types/container:go_default_library",
"//vendor/github.com/docker/docker/client:go_default_library",
"//vendor/github.com/docker/go-connections/tlsconfig:go_default_library",
"//vendor/github.com/google/cadvisor/container:go_default_library",
"//vendor/github.com/google/cadvisor/container/common:go_default_library",
"//vendor/github.com/google/cadvisor/container/libcontainer:go_default_library",
"//vendor/github.com/google/cadvisor/devicemapper:go_default_library",
"//vendor/github.com/google/cadvisor/fs:go_default_library",
"//vendor/github.com/google/cadvisor/info/v1:go_default_library",
"//vendor/github.com/google/cadvisor/machine:go_default_library",
"//vendor/github.com/google/cadvisor/manager/watcher:go_default_library",
"//vendor/github.com/google/cadvisor/utils/docker:go_default_library",
"//vendor/github.com/google/cadvisor/zfs:go_default_library",
"//vendor/github.com/opencontainers/runc/libcontainer/cgroups/fs:go_default_library",
"//vendor/github.com/opencontainers/runc/libcontainer/configs:go_default_library",
"//vendor/golang.org/x/net/context:go_default_library",
"//vendor/k8s.io/klog:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)
|
{
"pile_set_name": "Github"
}
|
#-*- coding:utf-8 -*-
import torch
import numpy as np
def point_form(boxes):
    """Convert center-size boxes (cx, cy, w, h) to corner form.

    Args:
        boxes: (tensor) [N, 4] center-size boxes from priorbox layers.
    Return:
        (tensor) [N, 4] boxes as (xmin, ymin, xmax, ymax).
    """
    centers = boxes[:, 0:2]
    half_sizes = boxes[:, 2:4] / 2
    return torch.cat((centers - half_sizes, centers + half_sizes), 1)
def center_size(boxes):
    """Convert corner-form boxes (xmin, ymin, xmax, ymax) to center-size form.

    Bug fix: the original called ``torch.cat(tensor_a, tensor_b, 1)`` with the
    tuple parentheses misplaced, which raises a TypeError at runtime because
    ``torch.cat`` expects a sequence of tensors as its first argument.

    Args:
        boxes: (tensor) [N, 4] point_form boxes.
    Return:
        boxes: (tensor) [N, 4] boxes as (cx, cy, w, h).
    """
    return torch.cat(((boxes[:, 2:4] + boxes[:, 0:2]) / 2,   # cx, cy
                      boxes[:, 2:4] - boxes[:, 0:2]), 1)     # w, h
def intersect(box_a, box_b):
    """Pairwise intersection areas between two sets of corner-form boxes.

    Uses broadcasting ([A,1,2] against [1,B,2]) instead of explicit
    ``expand`` calls; the result is identical and no new memory is
    allocated for the aligned views.

    Args:
        box_a: (tensor) bounding boxes, Shape: [A, 4].
        box_b: (tensor) bounding boxes, Shape: [B, 4].
    Return:
        (tensor) intersection area, Shape: [A, B].
    """
    upper = torch.min(box_a[:, None, 2:4], box_b[None, :, 2:4])
    lower = torch.max(box_a[:, None, 0:2], box_b[None, :, 0:2])
    wh = (upper - lower).clamp(min=0)
    return wh[:, :, 0] * wh[:, :, 1]
def jaccard(box_a, box_b):
    """Pairwise Jaccard overlap (IoU) between two sets of corner-form boxes.

    IoU = |A ∩ B| / (area(A) + area(B) - |A ∩ B|).  The intersection is
    computed inline via broadcasting rather than through a helper call;
    results are identical.

    Args:
        box_a: (tensor) Ground truth bounding boxes, Shape: [A, 4].
        box_b: (tensor) Prior boxes from priorbox layers, Shape: [B, 4].
    Return:
        (tensor) jaccard overlap, Shape: [A, B].
    """
    upper = torch.min(box_a[:, None, 2:4], box_b[None, :, 2:4])
    lower = torch.max(box_a[:, None, 0:2], box_b[None, :, 0:2])
    wh = (upper - lower).clamp(min=0)
    inter = wh[:, :, 0] * wh[:, :, 1]
    area_a = ((box_a[:, 2] - box_a[:, 0]) * (box_a[:, 3] - box_a[:, 1]))[:, None]
    area_b = ((box_b[:, 2] - box_b[:, 0]) * (box_b[:, 3] - box_b[:, 1]))[None, :]
    return inter / (area_a + area_b - inter)
def matrix_iou(a, b):
    """Pairwise IoU of corner-form boxes; numpy version for data augmentation.

    Args:
        a: ndarray [N, >=4] boxes as (xmin, ymin, xmax, ymax, ...).
        b: ndarray [M, >=4] boxes in the same layout.
    Return:
        ndarray [N, M] of IoU values (degenerate pairs contribute 0 overlap).
    """
    tl = np.maximum(a[:, np.newaxis, 0:2], b[:, 0:2])
    br = np.minimum(a[:, np.newaxis, 2:4], b[:, 2:4])
    # zero out pairs whose "intersection" is empty on either axis
    overlap = np.prod(br - tl, axis=2) * (tl < br).all(axis=2)
    area_a = np.prod(a[:, 2:4] - a[:, 0:2], axis=1)[:, np.newaxis]
    area_b = np.prod(b[:, 2:4] - b[:, 0:2], axis=1)
    return overlap / (area_a + area_b - overlap)
def matrix_iof(a, b):
    """Pairwise intersection-over-foreground; numpy version for data augmentation.

    Like IoU but normalized only by the area of the boxes in ``a``
    (clamped to at least 1 to avoid division by zero).

    Args:
        a: ndarray [N, >=4] corner-form boxes (the "foreground" boxes).
        b: ndarray [M, >=4] corner-form boxes.
    Return:
        ndarray [N, M] of IoF values.
    """
    tl = np.maximum(a[:, np.newaxis, 0:2], b[:, 0:2])
    br = np.minimum(a[:, np.newaxis, 2:4], b[:, 2:4])
    overlap = np.prod(br - tl, axis=2) * (tl < br).all(axis=2)
    area_a = np.prod(a[:, 2:4] - a[:, 0:2], axis=1)
    return overlap / np.maximum(area_a[:, np.newaxis], 1)
def match(threshold, truths, priors, variances, labels, loc_t, conf_t, idx):
    """Match each prior box with the ground truth box of highest jaccard
    overlap, encode the bounding boxes + landmarks, and write the results
    into ``loc_t[idx]`` / ``conf_t[idx]`` in place (returns None).

    Args:
        threshold: (float) The overlap threshold used when matching boxes;
            priors below it are labeled background (conf 0).
        truths: (tensor) Ground truth boxes, Shape: [num_obj, 14]
            (4 corner coords + 5 landmark points, per encode()).
        priors: (tensor) Prior boxes from priorbox layers, center-size form,
            Shape: [n_priors, 4].
        variances: (tensor) Variances corresponding to each prior coord,
            Shape: [num_priors, 4].
        labels: (tensor) All the class labels for the image, Shape: [num_obj].
        loc_t: (tensor) Tensor to be filled w/ encoded location targets.
        conf_t: (tensor) Tensor to be filled w/ matched indices for conf preds.
        idx: (int) current batch index
    Return:
        None; side effect is filling loc_t[idx] and conf_t[idx].
    """
    # jaccard index between ground truths and priors (converted to corners)
    overlaps = jaccard(
        truths,
        point_form(priors)
    )
    # (Bipartite Matching)
    # [num_objects, 1] best prior for each ground truth
    best_prior_overlap, best_prior_idx = overlaps.max(1, keepdim=True)
    # ignore hard gt: drop ground truths whose best prior IoU < 0.2
    valid_gt_idx = best_prior_overlap[:, 0] >= 0.2
    best_prior_idx_filter = best_prior_idx[valid_gt_idx, :]
    if best_prior_idx_filter.shape[0] <= 0:
        # no usable ground truth: everything in this image is background
        loc_t[idx] = 0
        conf_t[idx] = 0
        return
    # [1, num_priors] best ground truth for each prior
    best_truth_overlap, best_truth_idx = overlaps.max(0, keepdim=True)
    best_truth_idx.squeeze_(0)
    best_truth_overlap.squeeze_(0)
    best_prior_idx.squeeze_(1)
    best_prior_idx_filter.squeeze_(1)
    best_prior_overlap.squeeze_(1)
    # force overlap 2 (> any IoU) so each gt keeps its best prior
    best_truth_overlap.index_fill_(0, best_prior_idx_filter, 2)  # ensure best prior
    # TODO refactor: index best_prior_idx with long tensor
    # ensure every gt matches with its prior of max overlap
    for j in range(best_prior_idx.size(0)):
        best_truth_idx[best_prior_idx[j]] = j
    matches = truths[best_truth_idx]            # Shape: [num_priors, 14]
    conf = labels[best_truth_idx]               # Shape: [num_priors]
    conf[best_truth_overlap < threshold] = 0    # label as background
    loc = encode(matches, priors, variances)
    loc_t[idx] = loc    # [num_priors, 14] encoded offsets to learn
    conf_t[idx] = conf  # [num_priors] top class label for each prior
def encode(matched, priors, variances):
"""Encode the variances from the priorbox layers into the ground truth boxes
we have matched (based on jaccard overlap) with the prior boxes.
Args:
matched: (tensor) Coords of ground truth for each prior in point-form
Shape: [num_priors, 4].
priors: (tensor) Prior boxes in center-offset form
Shape: [num_priors,4].
variances: (list[float]) Variances of priorboxes
Return:
encoded boxes and landmarks (tensor), Shape: [num_priors, 14]
"""
# dist b/t match center and prior's center
g_cxcy = (matched[:, 0:2] + matched[:, 2:4])/2 - priors[:, 0:2]
# encode variance
g_cxcy /= (variances[0] * priors[:, 2:4])
# match wh / prior wh
g_wh = (matched[:, 2:4] - matched[:, 0:2]) / priors[:, 2:4]
g_wh = torch.log(g_wh) / variances[1]
# landmarks
g_xy1 = (matched[:, 4:6] - priors[:, 0:2]) / (variances[0] * priors[:, 2:4])
g_xy2 = (matched[:, 6:8] - priors[:, 0:2]) / (variances[0] * priors[:, 2:4])
g_xy3 = (matched[:, 8:10] - priors[:, 0:2]) / (variances[0] * priors[:, 2:4])
g_xy4 = (matched[:, 10:12] - priors[:, 0:2]) / (variances[0] * priors[:, 2:4])
g_xy5 = (matched[:, 12:14] - priors[:, 0:2]) / (variances[0] * priors[:, 2:4])
# return target for loss
return torch.cat([g_cxcy, g_wh, g_xy1, g_xy2, g_xy3, g_xy4, g_xy5], 1) # [num_priors,14]
# Adapted from https://github.com/Hakuyume/chainer-ssd
def decode(loc, priors, variances):
    """Decode box and landmark predictions back into image coordinates,
    undoing the encoding applied for offset regression at train time.

    Args:
        loc (tensor): location predictions from the loc layers,
            Shape: [num_priors, 14]
        priors (tensor): Prior boxes in center-offset form,
            Shape: [num_priors, 4].
        variances: (list[float]) Variances of the priorboxes.
    Return:
        decoded box (point-form) and landmark predictions, [num_priors, 14]
    """
    prior_cxcy = priors[:, 0:2]
    prior_wh = priors[:, 2:4]
    scale = variances[0] * prior_wh

    center = prior_cxcy + loc[:, 0:2] * scale
    wh = prior_wh * torch.exp(loc[:, 2:4] * variances[1])
    points = [prior_cxcy + loc[:, k:k + 2] * scale for k in range(4, 14, 2)]
    boxes = torch.cat([center, wh] + points, 1)

    # convert (cx, cy, w, h) in the first four columns to (xmin, ymin, xmax, ymax)
    boxes[:, 0:2] -= boxes[:, 2:4] / 2
    boxes[:, 2:4] += boxes[:, 0:2]
    return boxes
def log_sum_exp(x):
    """Numerically stable log(sum(exp(x), dim=1)).

    Used to compute the unaveraged confidence loss across all
    examples in a batch.

    Args:
        x (tensor): conf_preds from the conf layers, Shape: [N, num_classes]
    Return:
        (tensor) Shape: [N, 1]
    """
    # shift by the global max so exp() cannot overflow
    shift = x.data.max()
    return torch.log(torch.exp(x - shift).sum(1, keepdim=True)) + shift
# Original author: Francisco Massa:
# https://github.com/fmassa/object-detection.torch
# Ported to PyTorch by Max deGroot (02/01/2017)
def nms(boxes, scores, overlap=0.5, top_k=200):
    """Apply non-maximum suppression at test time to avoid detecting too many
    overlapping bounding boxes for a given object.

    Args:
        boxes: (tensor) The location preds for the img, Shape: [num_priors,4].
        scores: (tensor) The class predscores for the img, Shape:[num_priors].
        overlap: (float) The overlap thresh for suppressing unnecessary boxes.
        top_k: (int) The Maximum number of box preds to consider.
    Return:
        (keep, count): `keep` holds indices of kept boxes with respect to
        num_priors; only the first `count` entries are meaningful.
    """
    keep = torch.Tensor(scores.size(0)).fill_(0).long()
    if boxes.numel() == 0:
        # Bug fix: always return a (keep, count) pair.  The original returned
        # a bare tensor here, which broke callers that unpack two values.
        return keep, 0
    x1 = boxes[:, 0]
    y1 = boxes[:, 1]
    x2 = boxes[:, 2]
    y2 = boxes[:, 3]
    area = torch.mul(x2 - x1, y2 - y1)
    v, idx = scores.sort(0)  # sort in ascending order
    idx = idx[-top_k:]  # indices of the top-k largest vals
    # scratch buffers reused across iterations via index_select(..., out=)
    xx1 = boxes.new()
    yy1 = boxes.new()
    xx2 = boxes.new()
    yy2 = boxes.new()
    w = boxes.new()
    h = boxes.new()

    count = 0
    while idx.numel() > 0:
        i = idx[-1]  # index of current largest val
        keep[count] = i
        count += 1
        if idx.size(0) == 1:
            break
        idx = idx[:-1]  # remove kept element from view
        # load bboxes of next highest vals
        torch.index_select(x1, 0, idx, out=xx1)
        torch.index_select(y1, 0, idx, out=yy1)
        torch.index_select(x2, 0, idx, out=xx2)
        torch.index_select(y2, 0, idx, out=yy2)
        # clip remaining boxes against the kept box (intersection rectangle)
        xx1 = torch.clamp(xx1, min=x1[i])
        yy1 = torch.clamp(yy1, min=y1[i])
        xx2 = torch.clamp(xx2, max=x2[i])
        yy2 = torch.clamp(yy2, max=y2[i])
        w.resize_as_(xx2)
        h.resize_as_(yy2)
        w = xx2 - xx1
        h = yy2 - yy1
        # negative extents mean no overlap; clamp them to zero
        w = torch.clamp(w, min=0.0)
        h = torch.clamp(h, min=0.0)
        inter = w * h
        # IoU = i / (area(a) + area(b) - i)
        rem_areas = torch.index_select(area, 0, idx)  # load remaining areas
        union = (rem_areas - inter) + area[i]
        IoU = inter / union
        # keep only elements with an IoU <= overlap
        idx = idx[IoU.le(overlap)]
    return keep, count
|
{
"pile_set_name": "Github"
}
|
.. _console:
*************
Debug console
*************
The debug console can be used to follow the boot process:
1. FSBL (if debug mode is enabled)
The serial console can also be used to see the output
of other bare metal applications, for example the memory test.
2. U-Boot
During the boot process U-Boot will show status and debug information.
After FSBL starts U-Boot, there is a 3 second delay
before U-Boot starts the Linux kernel.
If during this time a key is pressed,
U-Boot will stop the boot process
and give the user access to its shell.
3. Linux console
During the boot process Linux will show status and debug information.
When ``systemd`` reaches ``multi-user.target`` a login prompt will appear.
User name: ``root``
Password: ``root``
==============
Hardware setup
==============
.. note::
For STEMlab 125-14 you need additional USB to microUSB cable,
for STEMlab 125-10 additional serial to USB adapter.
.. image:: console-connector.png
Connect your Red Pitaya and PC with micro USB B to USB A cable and follow the instructions for your OS.
.. image:: pitaya-USB-connection-300x164.png
-------
Windows
-------
Download and install the `FTDI driver <http://www.ftdichip.com/Drivers/VCP.htm>`_ to your PC. After installation, a
new COM port will appear in the Device Manager you can use in Hyperterminal or another terminal utility to connect to
Red Pitaya.
-----
Linux
-----
There is broad support for USB to serial converters in the Linux kernel,
so in most cases the converter will be detected soon after connecting it.
You can see the driver output in the kernel log using ``dmesg``:
.. code-block:: none
:emphasize-lines: 11
$ dmesg
...
[95074.784075] usb 1-2.4.3: new full-speed USB device number 20 using ehci-pci
[95074.885386] usb 1-2.4.3: New USB device found, idVendor=0403, idProduct=6015
[95074.885399] usb 1-2.4.3: New USB device strings: Mfr=1, Product=2, SerialNumber=3
[95074.885406] usb 1-2.4.3: Product: FT231X USB UART
[95074.885411] usb 1-2.4.3: Manufacturer: FTDI
[95074.885416] usb 1-2.4.3: SerialNumber: DN003P0Q
[95074.890105] ftdi_sio 1-2.4.3:1.0: FTDI USB Serial Device converter detected
[95074.890228] usb 1-2.4.3: Detected FT-X
[95074.891157] usb 1-2.4.3: FTDI USB Serial Device converter now attached to ttyUSB0
The first board connected to your PC will create a device named ``/dev/ttyUSB0``.
If **N** USB to serial devices are connected, they will appear as
``/dev/ttyUSBn`` where **n** is in **{0, 1, ..., N-1}**.
To access these devices, programs should be run with ``sudo``.
~~~~~~~~~~~
``minicom``
~~~~~~~~~~~
Minicom is a text-based modem control and terminal emulation program.
It is commonly used for setting up a remote serial console.
To configure ``minicom`` use the ``-s`` option.
.. code-block:: shell-session
sudo minicom -s
A configuration menu will open.
.. code-block:: none
+-----[configuration]------+
| Filenames and paths |
| File transfer protocols |
| Serial port setup |
| Modem and dialing |
| Screen and keyboard |
| Save setup as dfl |
| Save setup as.. |
| Exit |
| Exit from Minicom |
+--------------------------+
Go to ``Serial port setup``, press **Enter** and setup the next options:
* Serial Device: ``/dev/ttyUSB0`` (device index ``0`` or a higher number)
* Bps/Par/Bits: ``115200 8N1`` (baud rate, byte length, parity and stop bits)
* Hardware/Software Flow Control: No (flow control should be disabled)
.. code-block:: none
+-----------------------------------------------------------------------+
| A - Serial Device : /dev/ttyUSB0 |
| B - Lockfile Location : /var/lock |
| C - Callin Program : |
| D - Callout Program : |
| E - Bps/Par/Bits : 115200 8N1 |
| F - Hardware Flow Control : No |
| G - Software Flow Control : No |
| |
| Change which setting? |
+-----------------------------------------------------------------------+
``minicom`` requires some special ``Control+a`` key sequences to operate.
Please see the `minicom manual <https://linux.die.net/man/1/minicom>`_ for details.
~~~~~~~~~~
``screen``
~~~~~~~~~~
GNU ``screen`` is in general a terminal multiplexer.
It also supports connecting to a serial console,
and provides syntax to configure the serial connection
baud rate, byte length, parity and flow control, ...
Compared to ``minicom`` it provides better fonts,
better support for terminal window re-sizing, ...
.. code-block:: shell-session
$ sudo screen /dev/ttyUSB1 115200 cs8
Similar to ``minicom``, ``screen`` requires some special ``Control+a`` key sequences to operate.
Please see the `screen manual <https://www.gnu.org/software/screen/manual/screen.html>`_ for details.
=======================
Reference boot sequence
=======================
You can compare this reference boot sequences against yours.
------
U-Boot
------
.. code-block:: none
U-Boot 2016.01 (Nov 16 2016 - 12:23:28 +0100), Build: jenkins-redpitaya-master-156
Model: Red Pitaya Board
Board: Xilinx Zynq
I2C: ready
DRAM: ECC disabled 480 MiB
I2C:EEPROM selection failed
MMC: sdhci@e0100000: 0
In: serial@e0000000
Out: serial@e0000000
Err: serial@e0000000
Model: Red Pitaya Board
Board: Xilinx Zynq
Net: ZYNQ GEM: e000b000, phyaddr 1, interface rgmii-id
eth0: ethernet@e000b000
Hit any key to stop autoboot: 0
Running script from SD...
Device: sdhci@e0100000
Manufacturer ID: 19
OEM: 4459
Name: 00000
Tran Speed: 25000000
Rd Block Len: 512
SD version 1.0
High Capacity: Yes
Capacity: 3.7 GiB
Bus Width: 4-bit
Erase Group Size: 512 Bytes
reading u-boot.scr
1203 bytes read in 17 ms (68.4 KiB/s)
## Executing script at 02000000
Set devicetree and ramdisk high loading address to 0x20000000
Loading from SD card (FAT file system) to memory
Device: sdhci@e0100000
Manufacturer ID: 19
OEM: 4459
Name: 00000
Tran Speed: 25000000
Rd Block Len: 512
SD version 1.0
High Capacity: Yes
Capacity: 3.7 GiB
Bus Width: 4-bit
Erase Group Size: 512 Bytes
reading u-boot.scr
1203 bytes read in 17 ms (68.4 KiB/s)
## Executing script at 02000000
Set devicetree and ramdisk high loading address to 0x20000000
Loading from SD card (FAT file system) to memory
Device: sdhci@e0100000
Manufacturer ID: 19
OEM: 4459
Name: 00000
Tran Speed: 25000000
Rd Block Len: 512
SD version 1.0
High Capacity: Yes
Capacity: 3.7 GiB
Bus Width: 4-bit
Erase Group Size: 512 Bytes
reading uImage
4590664 bytes read in 404 ms (10.8 MiB/s)
reading devicetree.dtb
17342 bytes read in 19 ms (890.6 KiB/s)
Booting Linux kernel with ramdisk and devicetree
## Booting kernel from Legacy Image at 02004000 ...
Image Name: Linux-4.4.0-xilinx
Image Type: ARM Linux Kernel Image (uncompressed)
Data Size: 4590600 Bytes = 4.4 MiB
Load Address: 00008000
Entry Point: 00008000
Verifying Checksum ... OK
## Flattened Device Tree blob at 04000000
Booting using the fdt blob at 0x4000000
Loading Kernel Image ... OK
Loading Device Tree to 1d33c000, end 1d3433bd ... OK
|
{
"pile_set_name": "Github"
}
|
<?php
// +----------------------------------------------------------------------
// | SMS (short message) configuration
// +----------------------------------------------------------------------
return [
    // default SMS provider/driver
    'default' => 'yunxin',
    // daily send limit per phone number
    'maxPhoneCount' => 10,
    // per-minute send limit for verification codes
    'maxMinuteCount' => 20,
    // daily send limit per IP address
    'maxIpCount' => 50,
    // driver definitions
    'stores' => [
        // Yunxin provider
        'yunxin' => [
            // SMS template ids
            'template_id' => [
                // verification code
                'VERIFICATION_CODE' => 518076,
                // payment succeeded
                'PAY_SUCCESS_CODE' => 520268,
                // shipment notice
                'DELIVER_GOODS_CODE' => 520269,
                // delivery-confirmation notice
                'TAKE_DELIVERY_CODE' => 520271,
                // admin: new order notice
                'ADMIN_PLACE_ORDER_CODE' => 520272,
                // admin: goods-return notice
                'ADMIN_RETURN_GOODS_CODE' => 520274,
                // admin: payment-success notice
                'ADMIN_PAY_SUCCESS_CODE' => 520273,
                // admin: delivery confirmed
                'ADMIN_TAKE_DELIVERY_CODE' => 520422,
                // price-change notice
                'PRICE_REVISION_CODE' => 528288,
                // order not paid
                'ORDER_PAY_FALSE' => 528116,
            ],
        ],
        // Aliyun provider (no templates configured yet)
        'aliyun' => [
            'template_id' => [
            ]
        ]
    ]
];
|
{
"pile_set_name": "Github"
}
|
/* Convert string for NaN payload to corresponding NaN. Wide strings,
long double.
Copyright (C) 2015-2020 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
The GNU C Library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with the GNU C Library; if not, see
<https://www.gnu.org/licenses/>. */
#include <math.h>
/* This function is unused if long double and double have the same
representation. */
#ifndef __NO_LONG_DOUBLE_MATH
# include "../stdlib/strtod_nan_wide.h"
# include <math-type-macros-ldouble.h>
# define STRTOD_NAN __wcstold_nan
# include "../stdlib/strtod_nan_main.c"
#endif
|
{
"pile_set_name": "Github"
}
|
/* MtCoder.c -- Multi-thread Coder
2010-09-24 : Igor Pavlov : Public domain */
#include "Precomp.h"
#include <stdio.h>
#include "MtCoder.h"
/* Initialize a loop-thread wrapper: the thread handle and both
   events start out unconstructed (no OS resources acquired yet). */
void LoopThread_Construct(CLoopThread *p)
{
  Thread_Construct(&p->thread);
  Event_Construct(&p->startEvent);
  Event_Construct(&p->finishedEvent);
}

/* Release the thread handle and both events. */
void LoopThread_Close(CLoopThread *p)
{
  Thread_Close(&p->thread);
  Event_Close(&p->startEvent);
  Event_Close(&p->finishedEvent);
}
/* Body of the worker OS thread: block on startEvent, then either exit
   (stop flag set) or run the stored callback and signal finishedEvent.
   Returns SZ_ERROR_THREAD on any event failure. */
static THREAD_FUNC_RET_TYPE THREAD_FUNC_CALL_TYPE LoopThreadFunc(void *pp)
{
  CLoopThread *p = (CLoopThread *)pp;
  for (;;)
  {
    if (Event_Wait(&p->startEvent) != 0)
      return SZ_ERROR_THREAD;
    if (p->stop)
      return 0;
    /* the result is stored before finishedEvent is signalled */
    p->res = p->func(p->param);
    if (Event_Set(&p->finishedEvent) != 0)
      return SZ_ERROR_THREAD;
  }
}
/* Create both auto-reset events and start the worker thread (idle). */
WRes LoopThread_Create(CLoopThread *p)
{
  p->stop = 0;
  RINOK(AutoResetEvent_CreateNotSignaled(&p->startEvent));
  RINOK(AutoResetEvent_CreateNotSignaled(&p->finishedEvent));
  return Thread_Create(&p->thread, LoopThreadFunc, p);
}

/* Ask the worker loop to exit and wait for the OS thread to finish. */
WRes LoopThread_StopAndWait(CLoopThread *p)
{
  p->stop = 1;
  if (Event_Set(&p->startEvent) != 0)
    return SZ_ERROR_THREAD;
  return Thread_Wait(&p->thread);
}

/* Wake the worker to run one iteration of its callback. */
WRes LoopThread_StartSubThread(CLoopThread *p) { return Event_Set(&p->startEvent); }

/* Wait until the worker has finished the current iteration. */
WRes LoopThread_WaitSubThread(CLoopThread *p) { return Event_Wait(&p->finishedEvent); }
/* Forward the running totals to the user progress callback.
   A NULL callback means "no progress reporting" and always succeeds;
   a callback result other than SZ_OK is mapped to SZ_ERROR_PROGRESS. */
static SRes Progress(ICompressProgress *p, UInt64 inSize, UInt64 outSize)
{
  if (!p)
    return SZ_OK;
  return (p->Progress(p, inSize, outSize) == SZ_OK) ? SZ_OK : SZ_ERROR_PROGRESS;
}
/* Reset all per-thread progress counters and bind the user callback. */
static void MtProgress_Init(CMtProgress *p, ICompressProgress *progress)
{
  unsigned i;
  for (i = 0; i < NUM_MT_CODER_THREADS_MAX; i++)
    p->inSizes[i] = p->outSizes[i] = 0;
  p->totalInSize = p->totalOutSize = 0;
  p->progress = progress;
  p->res = SZ_OK;
}

/* Clear the counters of one thread slot before it starts a new block. */
static void MtProgress_Reinit(CMtProgress *p, unsigned index)
{
  p->inSizes[index] = 0;
  p->outSizes[index] = 0;
}
/* Merge an absolute per-thread size into the shared total; the value
   (UInt64)(Int64)-1 means "unknown" and is ignored. */
#define UPDATE_PROGRESS(size, prev, total) \
  if (size != (UInt64)(Int64)-1) { total += size - prev; prev = size; }

/* Thread-safe progress update: fold the sizes reported by one thread
   into the totals and invoke the user callback under the lock.
   The first non-SZ_OK result from the callback is latched in p->res. */
SRes MtProgress_Set(CMtProgress *p, unsigned index, UInt64 inSize, UInt64 outSize)
{
  SRes res;
  CriticalSection_Enter(&p->cs);
  UPDATE_PROGRESS(inSize, p->inSizes[index], p->totalInSize)
  UPDATE_PROGRESS(outSize, p->outSizes[index], p->totalOutSize)
  if (p->res == SZ_OK)
    p->res = Progress(p->progress, p->totalInSize, p->totalOutSize);
  res = p->res;
  CriticalSection_Leave(&p->cs);
  return res;
}
/* Latch the first error into the shared progress state (thread-safe). */
static void MtProgress_SetError(CMtProgress *p, SRes res)
{
  CriticalSection_Enter(&p->cs);
  if (p->res == SZ_OK)
    p->res = res;
  CriticalSection_Leave(&p->cs);
}

/* Latch the first error into the coder state (thread-safe). */
static void MtCoder_SetError(CMtCoder* p, SRes res)
{
  CriticalSection_Enter(&p->cs);
  if (p->res == SZ_OK)
    p->res = res;
  CriticalSection_Leave(&p->cs);
}
/* ---------- MtThread ---------- */

/* Initialize one coder-thread slot; buffers and events are created
   later, in CMtThread_Prepare(). */
void CMtThread_Construct(CMtThread *p, CMtCoder *mtCoder)
{
  p->mtCoder = mtCoder;
  p->outBuf = 0;
  p->inBuf = 0;
  Event_Construct(&p->canRead);
  Event_Construct(&p->canWrite);
  LoopThread_Construct(&p->thread);
}

/* Map a nonzero OS result to SZ_ERROR_THREAD and return from the caller. */
#define RINOK_THREAD(x) { if((x) != 0) return SZ_ERROR_THREAD; }

/* Close the pair of pipeline hand-off events of one thread slot. */
static void CMtThread_CloseEvents(CMtThread *p)
{
  Event_Close(&p->canRead);
  Event_Close(&p->canWrite);
}
/* Tear down one coder-thread slot: stop its worker thread (if it was
   created), close its events and free both block buffers. */
static void CMtThread_Destruct(CMtThread *p)
{
  CMtThread_CloseEvents(p);

  if (Thread_WasCreated(&p->thread.thread))
  {
    LoopThread_StopAndWait(&p->thread);
    LoopThread_Close(&p->thread);
  }

  if (p->mtCoder->alloc)
    IAlloc_Free(p->mtCoder->alloc, p->outBuf);
  p->outBuf = 0;
  if (p->mtCoder->alloc)
    IAlloc_Free(p->mtCoder->alloc, p->inBuf);
  p->inBuf = 0;
}

/* (Re)allocate buf to newSize bytes; keeps the old buffer when the size
   is unchanged.  On allocation failure the caller returns SZ_ERROR_MEM. */
#define MY_BUF_ALLOC(buf, size, newSize) \
  if (buf == 0 || size != newSize) \
  { IAlloc_Free(p->mtCoder->alloc, buf); \
    size = newSize; buf = (Byte *)IAlloc_Alloc(p->mtCoder->alloc, size); \
    if (buf == 0) return SZ_ERROR_MEM; }
/* Per-run setup for one thread slot: (re)allocate the input/output
   buffers and create fresh auto-reset hand-off events. */
static SRes CMtThread_Prepare(CMtThread *p)
{
  MY_BUF_ALLOC(p->inBuf, p->inBufSize, p->mtCoder->blockSize)
  MY_BUF_ALLOC(p->outBuf, p->outBufSize, p->mtCoder->destBlockSize)

  p->stopReading = False;
  p->stopWriting = False;
  RINOK_THREAD(AutoResetEvent_CreateNotSignaled(&p->canRead));
  RINOK_THREAD(AutoResetEvent_CreateNotSignaled(&p->canWrite));
  return SZ_OK;
}
/* Read up to *processedSize bytes, retrying partial reads until the
   requested amount is read or the stream reports end-of-data (a zero
   chunk).  On return *processedSize holds the number of bytes read. */
static SRes FullRead(ISeqInStream *stream, Byte *data, size_t *processedSize)
{
  size_t remaining = *processedSize;
  *processedSize = 0;
  for (;;)
  {
    size_t chunk;
    SRes res;
    if (remaining == 0)
      return SZ_OK;
    chunk = remaining;
    res = stream->Read(stream, data, &chunk);
    *processedSize += chunk;
    data += chunk;
    remaining -= chunk;
    RINOK(res);
    if (chunk == 0)
      return SZ_OK; /* end of stream */
  }
}
/* Next slot in the ring of coder threads (wraps around at numThreads). */
#define GET_NEXT_THREAD(p) &p->mtCoder->threads[p->index == p->mtCoder->numThreads - 1 ? 0 : p->index + 1]

/* One pipeline iteration for a coder thread: wait for the read token,
   read one input block, pass the read token to the next thread, encode
   the block, then wait for the write token so output blocks are written
   in input order.  *stop is set when the final (short) block was read. */
static SRes MtThread_Process(CMtThread *p, Bool *stop)
{
  CMtThread *next;
  *stop = True;
  if (Event_Wait(&p->canRead) != 0)
    return SZ_ERROR_THREAD;

  next = GET_NEXT_THREAD(p);

  if (p->stopReading)
  {
    /* propagate shutdown around the ring */
    next->stopReading = True;
    return Event_Set(&next->canRead) == 0 ? SZ_OK : SZ_ERROR_THREAD;
  }

  {
    size_t size = p->mtCoder->blockSize;
    size_t destSize = p->outBufSize;

    RINOK(FullRead(p->mtCoder->inStream, p->inBuf, &size));

    /* a short read marks the end of the input stream */
    next->stopReading = *stop = (size != p->mtCoder->blockSize);
    if (Event_Set(&next->canRead) != 0)
      return SZ_ERROR_THREAD;

    RINOK(p->mtCoder->mtCallback->Code(p->mtCoder->mtCallback, p->index,
        p->outBuf, &destSize, p->inBuf, size, *stop));

    MtProgress_Reinit(&p->mtCoder->mtProgress, p->index);

    if (Event_Wait(&p->canWrite) != 0)
      return SZ_ERROR_THREAD;
    if (p->stopWriting)
      return SZ_ERROR_FAIL;
    if (p->mtCoder->outStream->Write(p->mtCoder->outStream, p->outBuf, destSize) != destSize)
      return SZ_ERROR_WRITE;
    return Event_Set(&next->canWrite) == 0 ? SZ_OK : SZ_ERROR_THREAD;
  }
}
/* Worker callback run by each loop thread: process blocks until the
   input ends or an error occurs.  On error, latch it in the shared
   state and release the next thread so the whole ring can shut down. */
static THREAD_FUNC_RET_TYPE THREAD_FUNC_CALL_TYPE ThreadFunc(void *pp)
{
  CMtThread *p = (CMtThread *)pp;
  for (;;)
  {
    Bool stop;
    CMtThread *next = GET_NEXT_THREAD(p);
    SRes res = MtThread_Process(p, &stop);
    if (res != SZ_OK)
    {
      MtCoder_SetError(p->mtCoder, res);
      MtProgress_SetError(&p->mtCoder->mtProgress, res);
      next->stopReading = True;
      next->stopWriting = True;
      /* unblock the next thread so the failure propagates */
      Event_Set(&next->canRead);
      Event_Set(&next->canWrite);
      return res;
    }
    if (stop)
      return 0;
  }
}
/* Initialize the coder and all thread slots; the only OS resources
   acquired here are the two critical sections. */
void MtCoder_Construct(CMtCoder* p)
{
  unsigned i;
  p->alloc = 0;
  for (i = 0; i < NUM_MT_CODER_THREADS_MAX; i++)
  {
    CMtThread *t = &p->threads[i];
    t->index = i;
    CMtThread_Construct(t, p);
  }
  CriticalSection_Init(&p->cs);
  CriticalSection_Init(&p->mtProgress.cs);
}

/* Destroy all thread slots and delete the critical sections. */
void MtCoder_Destruct(CMtCoder* p)
{
  unsigned i;
  for (i = 0; i < NUM_MT_CODER_THREADS_MAX; i++)
    CMtThread_Destruct(&p->threads[i]);
  CriticalSection_Delete(&p->cs);
  CriticalSection_Delete(&p->mtProgress.cs);
}
/* Run one multi-threaded coding pass: prepare every thread slot, lazily
   create the worker threads, start them, kick off the pipeline by giving
   thread 0 both tokens, and wait for all started threads to finish.

   Returns the first error, or p->res (set by the workers) on success. */
SRes MtCoder_Code(CMtCoder *p)
{
  unsigned i, numThreads = p->numThreads;
  SRes res = SZ_OK;
  p->res = SZ_OK;

  MtProgress_Init(&p->mtProgress, p->progress);

  for (i = 0; i < numThreads; i++)
  {
    RINOK(CMtThread_Prepare(&p->threads[i]));
  }

  for (i = 0; i < numThreads; i++)
  {
    CMtThread *t = &p->threads[i];
    CLoopThread *loopThread = &t->thread;
    /* Bug fix: the source had the corrupted expression
       "Thread_WasCreated(<->thread)" (an HTML-escaping artifact of
       "&lt->thread"), which is not valid C.  Take the address of the
       loop thread's OS thread handle explicitly. */
    if (!Thread_WasCreated(&loopThread->thread))
    {
      loopThread->func = ThreadFunc;
      loopThread->param = t;
      if (LoopThread_Create(loopThread) != SZ_OK)
      {
        res = SZ_ERROR_THREAD;
        break;
      }
    }
  }

  if (res == SZ_OK)
  {
    unsigned j;
    for (i = 0; i < numThreads; i++)
    {
      CMtThread *t = &p->threads[i];
      if (LoopThread_StartSubThread(&t->thread) != SZ_OK)
      {
        res = SZ_ERROR_THREAD;
        p->threads[0].stopReading = True;
        break;
      }
    }

    /* prime the pipeline: thread 0 may both read and write first */
    Event_Set(&p->threads[0].canWrite);
    Event_Set(&p->threads[0].canRead);

    /* wait only for the threads that were actually started */
    for (j = 0; j < i; j++)
      LoopThread_WaitSubThread(&p->threads[j].thread);
  }

  for (i = 0; i < numThreads; i++)
    CMtThread_CloseEvents(&p->threads[i]);
  return (res == SZ_OK) ? p->res : res;
}
|
{
"pile_set_name": "Github"
}
|
# This test checks aliases support based on the list in the
# WHATWG Encoding Living Standard
#
# https://encoding.spec.whatwg.org/
#
# The input of this test is the file whatwg-aliases.json downloaded from
# https://encoding.spec.whatwg.org/encodings.json
#
# To run:
# AUTHOR_TESTING=1 prove -l t/whatwg-aliases.t
use Test::More
    ($ENV{AUTHOR_TESTING} || $ENV{RELEASE_TESTING})
    ? 'no_plan'
    : (skip_all => 'For maintainers only');
use Encode 'find_encoding';
use JSON::PP 'decode_json';
use File::Spec;
use FindBin;

# Slurp and parse the WHATWG encodings list shipped next to this test.
my $encodings = decode_json(do {
    # https://encoding.spec.whatwg.org/encodings.json
    open my $f, '<', File::Spec->catdir($FindBin::Bin, 'whatwg-aliases.json');
    local $/;
    <$f>
});

# WHATWG names that intentionally have no Encode counterpart.
my %IGNORE = map { $_ => '' } qw(
    replacement
    utf8
);

# Known-unsupported names: their failures are reported as TODO, not FAIL.
my %TODO = (
    'ISO-8859-8-I' => 'Not supported',
    'gb18030' => 'Not supported',
    '866' => 'Not supported',
    'x-user-defined' => 'Not supported',
    # ...
);

# Every WHATWG label must resolve to the same Encode encoding as its
# canonical name.
for my $section (@$encodings) {
    for my $enc (@{$section->{encodings}}) {
        my $name = $enc->{name};
        next if exists $IGNORE{$name};
        local $TODO = $TODO{$name} if exists $TODO{$name};
        my $encoding = find_encoding($name);
        isa_ok($encoding, 'Encode::Encoding', $name);
        for my $label (@{$enc->{labels}}) {
            local $TODO = $TODO{$label} if exists $TODO{$label};
            my $e = find_encoding($label);
            if (isa_ok($e, 'Encode::Encoding', $label)) {
                next if exists $IGNORE{$label};
                is($e->name, $encoding->name, "$label ->name is $name")
            }
        }
    }
}
done_testing;
|
{
"pile_set_name": "Github"
}
|
<?php
/**
* This file has been @generated by a phing task from CLDR version 34.0.0.
* See [README.md](README.md#generating-data) for more information.
*
* @internal Please do not require this file directly.
* It may change location/format between versions
*
* Do not modify this file directly!
*/
return array (
'AC' => 'Motu ʻAsenisini',
'AD' => 'ʻAnitola',
'AE' => 'ʻAlepea Fakatahataha',
'AF' => 'ʻAfikānisitani',
'AG' => 'Anitikua mo Palaputa',
'AI' => 'Anikuila',
'AL' => 'ʻAlipania',
'AM' => 'ʻĀmenia',
'AO' => 'ʻAngikola',
'AQ' => 'ʻAnitātika',
'AR' => 'ʻAsenitina',
'AS' => 'Haʻamoa ʻAmelika',
'AT' => 'ʻAositulia',
'AU' => 'ʻAositelēlia',
'AW' => 'ʻAlupa',
'AX' => 'ʻOtumotu ʻAlani',
'AZ' => 'ʻAsapaisani',
'BA' => 'Posinia mo Hesikōvina',
'BB' => 'Pāpeitosi',
'BD' => 'Pengilātesi',
'BE' => 'Pelesiume',
'BF' => 'Pekano Faso',
'BG' => 'Pulukalia',
'BH' => 'Paleini',
'BI' => 'Puluniti',
'BJ' => 'Penini',
'BL' => 'Sā Patēlemi',
'BM' => 'Pēmuta',
'BN' => 'Pulunei',
'BO' => 'Polīvia',
'BQ' => 'Kalipiane fakahōlani',
'BR' => 'Palāsili',
'BS' => 'Pahama',
'BT' => 'Pūtani',
'BW' => 'Potisiuana',
'BY' => 'Pelalusi',
'BZ' => 'Pelise',
'CA' => 'Kānata',
'CC' => 'ʻOtumotu Koko',
'CD' => 'Kongo - Kinisasa',
'CF' => 'Lepupelika ʻAfilika Lotoloto',
'CG' => 'Kongo - Palasavila',
'CH' => 'Suisilani',
'CI' => 'Matafonua ʻAivolī',
'CK' => 'ʻOtumotu Kuki',
'CL' => 'Sili',
'CM' => 'Kameluni',
'CN' => 'Siaina',
'CO' => 'Kolomipia',
'CR' => 'Kosita Lika',
'CU' => 'Kiupa',
'CV' => 'Muiʻi Vēte',
'CW' => 'Kulasao',
'CX' => 'Motu Kilisimasi',
'CY' => 'Saipalesi',
'CZ' => 'Sēkia',
'DE' => 'Siamane',
'DG' => 'Tieko Kāsia',
'DJ' => 'Siputi',
'DK' => 'Tenimaʻake',
'DM' => 'Tominika',
'DO' => 'Lepupelika Tominika',
'DZ' => 'ʻAlisilia',
'EA' => 'Siuta mo Melila',
'EC' => 'ʻEkuetoa',
'EE' => 'ʻEsitōnia',
'EG' => 'ʻIsipite',
'EH' => 'Sahala fakahihifo',
'ER' => 'ʻElitulia',
'ES' => 'Sipeini',
'ET' => 'ʻĪtiōpia',
'FI' => 'Finilani',
'FJ' => 'Fisi',
'FK' => 'ʻOtumotu Fokulani',
'FM' => 'Mikolonīsia',
'FO' => 'ʻOtumotu Faloe',
'FR' => 'Falanisē',
'GA' => 'Kaponi',
'GB' => 'Pilitānia',
'GD' => 'Kelenatā',
'GE' => 'Seōsia',
'GF' => 'Kuiana fakafalanisē',
'GG' => 'Kuenisī',
'GH' => 'Kana',
'GI' => 'Sipalālitā',
'GL' => 'Kulinilani',
'GM' => 'Kamipia',
'GN' => 'Kini',
'GP' => 'Kuatalupe',
'GQ' => 'ʻEkueta Kini',
'GR' => 'Kalisi',
'GS' => 'ʻOtumotu Seōsia-tonga mo Saniuisi-tonga',
'GT' => 'Kuatamala',
'GU' => 'Kuamu',
'GW' => 'Kini-Pisau',
'GY' => 'Kuiana',
'HK' => 'Hongi Kongi SAR Siaina',
'HN' => 'Honitulasi',
'HR' => 'Kuloisia',
'HT' => 'Haiti',
'HU' => 'Hungakalia',
'IC' => 'ʻOtumotu Kaneli',
'ID' => 'ʻInitonēsia',
'IE' => 'ʻAealani',
'IL' => 'ʻIsileli',
'IM' => 'Motu Mani',
'IN' => 'ʻInitia',
'IO' => 'Potu fonua moana ʻInitia fakapilitānia',
'IQ' => 'ʻIlaaki',
'IR' => 'ʻIlaani',
'IS' => 'ʻAisilani',
'IT' => 'ʻĪtali',
'JE' => 'Selusī',
'JM' => 'Samaika',
'JO' => 'Soatane',
'JP' => 'Siapani',
'KE' => 'Keniā',
'KG' => 'Kīkisitani',
'KH' => 'Kamipōtia',
'KI' => 'Kilipasi',
'KM' => 'Komolosi',
'KN' => 'Sā Kitisi mo Nevisi',
'KP' => 'Kōlea tokelau',
'KR' => 'Kōlea tonga',
'KW' => 'Kueiti',
'KY' => 'ʻOtumotu Keimeni',
'KZ' => 'Kasakitani',
'LA' => 'Lau',
'LB' => 'Lepanoni',
'LC' => 'Sā Lūsia',
'LI' => 'Likitenisiteini',
'LK' => 'Sīlangikā',
'LR' => 'Laipelia',
'LS' => 'Lesoto',
'LT' => 'Lituania',
'LU' => 'Lakisimipeki',
'LV' => 'Lativia',
'LY' => 'Līpia',
'MA' => 'Moloko',
'MC' => 'Monako',
'MD' => 'Molotova',
'ME' => 'Monitenikalo',
'MF' => 'Sā Mātini (fakafalanisē)',
'MG' => 'Matakasika',
'MH' => 'ʻOtumotu Māsolo',
'MK' => 'Masetōnia',
'ML' => 'Māli',
'MM' => 'Mianimā (Pema)',
'MN' => 'Mongokōlia',
'MO' => 'Makau SAR Siaina',
'MP' => 'ʻOtumotu Maliana tokelau',
'MQ' => 'Mātiniki',
'MR' => 'Maulitenia',
'MS' => 'Moʻungaselati',
'MT' => 'Malita',
'MU' => 'Maulitiusi',
'MV' => 'Malativisi',
'MW' => 'Malaui',
'MX' => 'Mekisikou',
'MY' => 'Malēsia',
'MZ' => 'Mosēmipiki',
'NA' => 'Namipia',
'NC' => 'Niu Kaletōnia',
'NE' => 'Nisia',
'NF' => 'Motu Nōfoliki',
'NG' => 'Naisilia',
'NI' => 'Nikalakua',
'NL' => 'Hōlani',
'NO' => 'Noauē',
'NP' => 'Nepali',
'NR' => 'Naulu',
'NU' => 'Niuē',
'NZ' => 'Nuʻusila',
'OM' => 'ʻOmani',
'PA' => 'Panamā',
'PE' => 'Pelū',
'PF' => 'Polinisia fakafalanisē',
'PG' => 'Papuaniukini',
'PH' => 'Filipaini',
'PK' => 'Pākisitani',
'PL' => 'Polani',
'PM' => 'Sā Piea mo Mikeloni',
'PN' => 'ʻOtumotu Pitikeni',
'PR' => 'Puēto Liko',
'PS' => 'Potu Palesitaine',
'PT' => 'Potukali',
'PW' => 'Palau',
'PY' => 'Palakuai',
'QA' => 'Katā',
'RE' => 'Lēunioni',
'RO' => 'Lomēnia',
'RS' => 'Sēpia',
'RU' => 'Lūsia',
'RW' => 'Luanitā',
'SA' => 'Saute ʻAlepea',
'SB' => 'ʻOtumotu Solomone',
'SC' => 'ʻOtumotu Seiseli',
'SD' => 'Sūteni',
'SE' => 'Suēteni',
'SG' => 'Singapoa',
'SH' => 'Sā Helena',
'SI' => 'Silōvenia',
'SJ' => 'Sivolopāti mo Sani Maieni',
'SK' => 'Silōvakia',
'SL' => 'Siela Leone',
'SM' => 'Sā Malino',
'SN' => 'Senekalo',
'SO' => 'Sōmalia',
'SR' => 'Suliname',
'SS' => 'Sūtani fakatonga',
'ST' => 'Sao Tomē mo Pilinisipe',
'SV' => 'ʻEle Salavatoa',
'SX' => 'Sā Mātini (fakahōlani)',
'SY' => 'Sīlia',
'SZ' => 'Suasilani',
'TA' => 'Tulisitani ta Kunuha',
'TC' => 'ʻOtumotu Tuki mo Kaikosi',
'TD' => 'Sāti',
'TF' => 'Potu fonua tonga fakafalanisē',
'TG' => 'Toko',
'TH' => 'Tailani',
'TJ' => 'Tasikitani',
'TK' => 'Tokelau',
'TL' => 'Timoa hahake',
'TM' => 'Tūkimenisitani',
'TN' => 'Tunīsia',
'TO' => 'Tonga',
'TR' => 'Toake',
'TT' => 'Tilinitati mo Topako',
'TV' => 'Tūvalu',
'TW' => 'Taiuani',
'TZ' => 'Tenisānia',
'UA' => 'ʻŪkalaʻine',
'UG' => 'ʻIukanitā',
'UM' => 'ʻOtumotu siʻi ʻo ʻAmelika',
'US' => 'Puleʻanga fakatahataha ʻAmelika',
'UY' => 'ʻUlukuai',
'UZ' => 'ʻUsipekitani',
'VA' => 'Kolo Vatikani',
'VC' => 'Sā Viniseni mo Kulenatini',
'VE' => 'Venesuela',
'VG' => 'ʻOtumotu Vilikini fakapilitānia',
'VI' => 'ʻOtumotu Vilikini fakaʻamelika',
'VN' => 'Vietinami',
'VU' => 'Vanuatu',
'WF' => 'ʻUvea mo Futuna',
'WS' => 'Haʻamoa',
'XK' => 'Kōsovo',
'YE' => 'Iemeni',
'YT' => 'Maiote',
'ZA' => 'ʻAfilika tonga',
'ZM' => 'Semipia',
'ZW' => 'Simipapuei',
);
|
{
"pile_set_name": "Github"
}
|
<template>
<div class="group-nav">
<div class="group-top">
<div class="actions">
<search @query="onQuery"
v-model="query"
placeholder="请输入分组名称"
size="small">
</search>
<el-button size="small" icon="el-icon-plus" @click="handleClickShowDialog"></el-button>
</div>
</div>
<div class="group-list">
<div class="block-title">
我的收藏 <small>{{ myGroupList.length }}</small>
</div>
<div v-for="group in myGroupList"
class="group-item"
@click="handleClickGroup(group)"
:key="group._id">
<div class="group-item-wrap">
<div :class="[group._id === groupId ? 'active': '']">{{group.name}}</div>
<el-rate :value="1" @click.native.stop="handleRemoveFavorite(group._id)" :max="1"></el-rate>
</div>
</div>
<div class="block-title">更多分组</div>
<div v-for="group in moreGroupList"
class="group-item"
@click="handleClickGroup(group)"
:key="group._id">
<div class="group-item-wrap">
<div :class="[group._id === groupId ? 'active': '']">{{group.name}}</div>
<el-rate @click.native.stop="handleAddFavorite(group._id)" :max="1"></el-rate>
</div>
</div>
</div>
<create-group-dialog
v-if="showCreateDialog"
:visible.sync="showCreateDialog"
@action="handleClickCreateGroup"
@close="handleClickClose"/>
</div>
</template>
<script>
import CreateGroupDialog from '@/components/common/CreateGroup'
import Search from '@/components/common/SearchInput'
import { mapActions, mapState } from 'vuex'

// Sidebar navigation listing bookmark groups, split into the user's
// favorite groups and the remaining ("more") groups.
export default {
  components: {
    Search,
    CreateGroupDialog
  },
  computed: {
    ...mapState(['user', 'groups']),
    // ids of the groups the user has favorited
    favorites () {
      return this.user.favorites
    },
    // all groups, filtered by the case-insensitive search query
    groupList () {
      return this.groups.filter(g => g.name.toLowerCase().indexOf(this.query.toLowerCase()) >= 0)
    },
    // favorited groups
    myGroupList () {
      return this.groupList.filter(item => this.favorites.includes(item._id))
    },
    // remaining (non-favorited) groups
    moreGroupList () {
      return this.groupList.filter(item => !this.favorites.includes(item._id))
    },
    // currently selected group id, taken from the route
    groupId () {
      return this.$route.params.groupId
    }
  },
  data () {
    return {
      showCreateDialog: false,
      query: ''
    }
  },
  methods: {
    ...mapActions([
      'addFavorite',
      'removeFavorite'
    ]),
    // add a group to the user's favorites
    async handleAddFavorite (groupId) {
      try {
        await this.addFavorite(groupId)
        this.$message.success('收藏成功')
      } catch (err) {
        this.$message.error('收藏失败')
      }
    },
    // remove a group from the user's favorites
    async handleRemoveFavorite (groupId) {
      try {
        await this.removeFavorite(groupId)
        this.$message.success('取消收藏成功')
      } catch (err) {
        this.$message.error('取消收藏失败')
      }
    },
    // search input callback
    onQuery (val) {
      this.query = val
    },
    // navigate to the clicked group, or home when the group is invalid
    handleClickGroup (group) {
      if (group && group._id) {
        this.$router.replace(`/list/${group._id}`)
      } else {
        this.$router.replace('/')
      }
    },
    // create a new group, then close the dialog on success
    handleClickCreateGroup (groupName) {
      this.$store.dispatch('createGroup', { name: groupName }).then(() => {
        this.showCreateDialog = false
      }).catch(e => this.$message.error(e.msg))
    },
    handleClickClose () {
      this.showCreateDialog = false
    },
    handleClickShowDialog () {
      this.showCreateDialog = true
    }
  }
}
</script>
<style lang="less">
.group-top {
margin-top: 15px;
}
.group-nav {
padding: 0 15px;
}
.group-list {
.group-item {
margin: 0 -15px;
padding: 10px 20px;
padding-left: 20px;
font-size: 15px;
cursor: pointer;
color: #303133;
&:hover {
background: #ddd;
}
}
.group-item-wrap {
.active {
color: #409EFF;
}
display: flex;
justify-content: space-between;
}
}
.block-title {
display: block;
font-size: 16px;
font-weight: bold;
color: #606266;
margin: 10px 0;
small {
color: #909399;
}
}
.actions {
display: flex;
justify-content: space-between;
align-items: center;
.el-button {
margin-left: 10px;
}
}
</style>
|
{
"pile_set_name": "Github"
}
|
# Jekyll Resources
* [Jekyll By Example Tutorial](https://www.andrewmunsell.com/tutorials/jekyll-by-example) (super comprehensive)
* [Jekyll Variables](http://jekyllrb.com/docs/variables/)
* [Hosting on Github Pages](https://pages.github.com/)
* [Using Jekyll with Pages](https://help.github.com/articles/using-jekyll-with-pages/)
|
{
"pile_set_name": "Github"
}
|
//
//  STDFeedItem.h
//  STKitDemo
//
//  Created by SunJiangting on 14-9-26.
//  Copyright (c) 2014年 SunJiangting. All rights reserved.
//

#import <STKit/STKit.h>

/// One feed entry in the demo: a titled image with its display size.
@interface STDFeedItem : STObject

@property (nonatomic, copy) NSString *title;
// thumbnail image URL
@property (nonatomic, copy) NSString *thumbURLString;
// full-size image URL
@property (nonatomic, copy) NSString *imageURLString;
@property (nonatomic, assign) CGFloat width;
@property (nonatomic, assign) CGFloat height;
// rating score
@property (nonatomic, assign) CGFloat rate;

- (STImageItem *)imageItem;

@end
|
{
"pile_set_name": "Github"
}
|
import numpy as np
from nitime.lazy import scipy_stats as stats
from nitime import descriptors as desc
from nitime import utils as tsu
from nitime import algorithms as tsa
from nitime import timeseries as ts
class EventRelatedAnalyzer(desc.ResetMixin):
    """Analyzer object for reverse-correlation/event-related analysis.
    Note: right now, this class assumes the input time series is only
    two-dimensional. If your input data is something like
    (nchannels, nsubjects, ...) with more dimensions, things are likely to break
    in hard to understand ways.
    """
    def __init__(self, time_series, events, len_et, zscore=False,
                 correct_baseline=False, offset=0):
        """
        Parameters
        ----------
        time_series : a time-series object
            A time-series with data on which the event-related analysis proceeds
        events : a TimeSeries object or an Events object
            The events which occurred in tandem with the time-series in the
            EventRelatedAnalyzer. This object's data has to have the same
            dimensions as the data in the EventRelatedAnalyzer object. In each
            sample in the time-series, there is an integer, which denotes the
            kind of event which occurred at that time. In time-bins in which no
            event occurred, a 0 should be entered. The data in this time series
            object needs to have the same dimensionality as the data in the
            data time-series
        len_et : int
            The expected length of the event-triggered quantity (in the same
            time-units as the events are represented (presumably number of TRs,
            for fMRI data). For example, the size of the block dedicated in the
            fir_matrix to each type of event
        zscore : a flag to return the result in zscore (where relevant)
        correct_baseline : a flag to correct the baseline according to the first
            point in the event-triggered average (where possible)
        offset : the offset of the beginning of the event-related time-series,
            relative to the event occurrence
        """
        #XXX Change so that the offset and length of the eta can be given in
        #units of time
        #Make sure that the offset and the len_et values can be used, by
        #padding with zeros before and after:
        if isinstance(events, ts.TimeSeries):
            #Set a flag to indicate the input is a time-series object:
            self._is_ts = True
            s = time_series.data.shape
            e_data = np.copy(events.data)
            #If the input is a one-dimensional (instead of an n-channel
            #dimensional) time-series, we will need to broadcast to make the
            #data assume the same number of dimensions as the time-series
            #input:
            if len(events.shape) == 1 and len(s) > 1:
                e_data = e_data + np.zeros((s[0], 1))
            # NOTE(review): this branch uses int(offset) while the Events
            # branch below uses abs(offset) — a negative offset would make
            # np.zeros fail here; verify intended behavior.
            zeros_before = np.zeros((s[:-1] + (int(offset),)))
            zeros_after = np.zeros((s[:-1] + (int(len_et),)))
            time_series_data = np.hstack([zeros_before,
                                          time_series.data,
                                          zeros_after])
            events_data = np.hstack([zeros_before,
                                     e_data,
                                     zeros_after])
            #If the events and the time_series have more than 1-d, the analysis
            #can traverse their first dimension
            if time_series.data.ndim - 1 > 0:
                self._len_h = time_series.data.shape[0]
                self.events = events_data
                self.data = time_series_data
            #Otherwise, in order to extract the array from the first dimension,
            #we wrap it in a list
            else:
                self._len_h = 1
                self.events = [events_data]
                self.data = [time_series_data]
        elif isinstance(events, ts.Events):
            self._is_ts = False
            s = time_series.data.shape
            zeros_before = np.zeros((s[:-1] + (abs(offset),)))
            zeros_after = np.zeros((s[:-1] + (abs(len_et),)))
            #If the time_series has more than 1-d, the analysis can traverse
            #the first dimension
            if time_series.data.ndim - 1 > 0:
                self._len_h = time_series.shape[0]
                self.data = time_series
                self.events = events
            #Otherwise, in order to extract the array from the first dimension,
            #we wrap it in a list
            else:
                self._len_h = 1
                self.data = [time_series]
                #No need to do that for the Events object:
                self.events = events
        else:
            err = ("Input 'events' to EventRelatedAnalyzer must be of type "
                   "Events or of type TimeSeries, %r given" % events)
            raise ValueError(err)
        self.sampling_rate = time_series.sampling_rate
        self.sampling_interval = time_series.sampling_interval
        self.len_et = int(len_et)
        self._zscore = zscore
        self._correct_baseline = correct_baseline
        self.offset = offset
        self.time_unit = time_series.time_unit

    @desc.setattr_on_read
    def FIR(self):
        """Calculate the FIR event-related estimate of the HRFs for different
        kinds of events
        Returns
        -------
        A time-series object, shape[:-2] are dimensions corresponding to the
        shape[:-2] of the EventRelatedAnalyzer data, shape[-2] corresponds to
        the different kinds of events used (ordered according to the sorted
        order of the unique components in the events time-series). shape[-1]
        corresponds to time, and has length = len_et
        """
        # XXX code needs to be changed to use flattening (see 'eta' below)
        #Make a list to put the outputs in:
        h = [0] * self._len_h
        for i in range(self._len_h):
            #XXX Check that the offset makes sense (there can't be an event
            #happening within one offset duration of the beginning of the
            #time-series:
            #Get the design matrix (roll by the offset, in order to get the
            #right thing):
            roll_events = np.roll(self.events[i], self.offset)
            design = tsu.fir_design_matrix(roll_events, self.len_et)
            #Compute the fir estimate, in linear form:
            this_h = tsa.fir(self.data[i], design)
            #Reshape the linear fir estimate into a event_types*hrf_len array
            u = np.unique(self.events[i])
            event_types = u[np.unique(self.events[i]) != 0]
            h[i] = np.reshape(this_h, (event_types.shape[0], self.len_et))
        h = np.array(h).squeeze()
        return ts.TimeSeries(data=h,
                             sampling_rate=self.sampling_rate,
                             t0=self.offset * self.sampling_interval,
                             time_unit=self.time_unit)

    @desc.setattr_on_read
    def FIR_estimate(self):
        """Calculate back the LTI estimate of the time-series, from FIR"""
        raise NotImplementedError

    @desc.setattr_on_read
    def xcorr_eta(self):
        """Compute the normalized cross-correlation estimate of the HRFs for
        different kinds of events
        Returns
        -------
        A time-series object, shape[:-2] are dimensions corresponding to the
        shape[:-2] of the EventRelatedAnalyzer data, shape[-2] corresponds to
        the different kinds of events used (ordered according to the sorted
        order of the unique components in the events time-series). shape[-1]
        corresponds to time, and has length = len_et (xcorr looks both back
        and forward for half of this length)
        """
        #Make a list to put the outputs in:
        h = [0] * self._len_h
        for i in range(self._len_h):
            data = self.data[i]
            u = np.unique(self.events[i])
            event_types = u[np.unique(self.events[i]) != 0]
            h[i] = np.empty((event_types.shape[0],
                             self.len_et // 2),
                            dtype=complex)
            for e_idx in range(event_types.shape[0]):
                this_e = (self.events[i] == event_types[e_idx]) * 1.0
                if self._zscore:
                    this_h = tsa.freq_domain_xcorr_zscored(
                        data,
                        this_e,
                        -self.offset + 1,
                        self.len_et - self.offset - 2)
                else:
                    this_h = tsa.freq_domain_xcorr(
                        data,
                        this_e,
                        -self.offset + 1,
                        self.len_et - self.offset - 2)
                h[i][e_idx] = this_h
        h = np.array(h).squeeze()
        ## t0 for the object returned here needs to be the central time, not
        ## the first time point, because the functions 'look' back and forth
        ## for len_et bins
        return ts.TimeSeries(data=h,
                             sampling_rate=self.sampling_rate,
                             t0=-1 * self.len_et * self.sampling_interval,
                             time_unit=self.time_unit)

    @desc.setattr_on_read
    def et_data(self):
        """The event-triggered data (all occurrences).
        This gets the time-series corresponding to the individual event
        occurrences. Returns a list of lists of time-series. The first dimension
        is the different channels in the original time-series data and the
        second dimension is each type of event in the event time series
        The time-series itself has the first dimension of the data being the
        specific occurrence, with time 0 locked to that occurrence
        of the event and the last dimension is time.
        This complicated structure is so that it can deal with situations where
        each channel has different events and different events have different #
        of occurrences
        """
        #Make a list for the output
        h = [0] * self._len_h
        for i in range(self._len_h):
            data = self.data[i]
            u = np.unique(self.events[i])
            event_types = u[np.unique(self.events[i]) != 0]
            #Make a list in here as well:
            this_list = [0] * event_types.shape[0]
            for e_idx in range(event_types.shape[0]):
                idx = np.where(self.events[i] == event_types[e_idx])
                idx_w_len = np.array([idx[0] + count + self.offset for count
                                      in range(self.len_et)])
                event_trig = data[idx_w_len].T
                this_list[e_idx] = ts.TimeSeries(data=event_trig,
                                                 sampling_interval=self.sampling_interval,
                                                 t0=self.offset * self.sampling_interval,
                                                 time_unit=self.time_unit)
            h[i] = this_list
        return h

    @desc.setattr_on_read
    def eta(self):
        """The event-triggered average activity.
        """
        #Make a list for the output
        h = [0] * self._len_h
        if self._is_ts:
            # Loop over channels
            for i in range(self._len_h):
                data = self.data[i]
                u = np.unique(self.events[i])
                event_types = u[np.unique(self.events[i]) != 0]
                h[i] = np.empty((event_types.shape[0], self.len_et),
                                dtype=complex)
                # This offset is used to pull the event indices below, but we
                # have to broadcast it so the shape of the resulting idx+offset
                # operation below gives us the (nevents, len_et) array we want,
                # per channel.
                offset = np.arange(self.offset,
                                   self.offset + self.len_et)[:, np.newaxis]
                # Loop over event types
                for e_idx in range(event_types.shape[0]):
                    idx = np.where(self.events[i] == event_types[e_idx])[0]
                    event_trig = data[idx + offset]
                    #Correct baseline by removing the first point in the series
                    #for each channel:
                    if self._correct_baseline:
                        event_trig -= event_trig[0]
                    h[i][e_idx] = np.mean(event_trig, -1)
        #In case the input events are an Events:
        else:
            #Get the indices necessary for extraction of the eta:
            add_offset = np.arange(self.offset,
                                   self.offset + self.len_et)[:, np.newaxis]
            idx = (self.events.time / self.sampling_interval).astype(int)
            #Make a list for the output
            h = [0] * self._len_h
            # Loop over channels
            for i in range(self._len_h):
                #If this is a list with one element:
                if self._len_h == 1:
                    event_trig = self.data[0][idx + add_offset]
                #Otherwise, you need to index straight into the underlying data
                #array:
                else:
                    event_trig = self.data.data[i][idx + add_offset]
                h[i] = np.mean(event_trig, -1)
        h = np.array(h).squeeze()
        return ts.TimeSeries(data=h,
                             sampling_interval=self.sampling_interval,
                             t0=self.offset * self.sampling_interval,
                             time_unit=self.time_unit)

    @desc.setattr_on_read
    def ets(self):
        """The event-triggered standard error of the mean """
        #Make a list for the output
        h = [0] * self._len_h
        if self._is_ts:
            # Loop over channels
            for i in range(self._len_h):
                data = self.data[i]
                u = np.unique(self.events[i])
                event_types = u[np.unique(self.events[i]) != 0]
                h[i] = np.empty((event_types.shape[0], self.len_et),
                                dtype=complex)
                # This offset is used to pull the event indices below, but we
                # have to broadcast it so the shape of the resulting idx+offset
                # operation below gives us the (nevents, len_et) array we want,
                # per channel.
                offset = np.arange(self.offset,
                                   self.offset + self.len_et)[:, np.newaxis]
                # Loop over event types
                for e_idx in range(event_types.shape[0]):
                    idx = np.where(self.events[i] == event_types[e_idx])[0]
                    event_trig = data[idx + offset]
                    #Correct baseline by removing the first point in the series
                    #for each channel:
                    if self._correct_baseline:
                        event_trig -= event_trig[0]
                    h[i][e_idx] = stats.sem(event_trig, -1)
        #In case the input events are an Events:
        else:
            #Get the indices necessary for extraction of the eta:
            add_offset = np.arange(self.offset,
                                   self.offset + self.len_et)[:, np.newaxis]
            idx = (self.events.time / self.sampling_interval).astype(int)
            #Make a list for the output
            h = [0] * self._len_h
            # Loop over channels
            for i in range(self._len_h):
                #If this is a list with one element:
                if self._len_h == 1:
                    event_trig = self.data[0][idx + add_offset]
                #Otherwise, you need to index straight into the underlying data
                #array:
                else:
                    event_trig = self.data.data[i][idx + add_offset]
                h[i] = stats.sem(event_trig, -1)
        h = np.array(h).squeeze()
        return ts.TimeSeries(data=h,
                             sampling_interval=self.sampling_interval,
                             t0=self.offset * self.sampling_interval,
                             time_unit=self.time_unit)
|
{
"pile_set_name": "Github"
}
|
// @ts-nocheck
'use strict';
const atRuleParamIndex = require('../../utils/atRuleParamIndex');
const declarationValueIndex = require('../../utils/declarationValueIndex');
const getUnitFromValueNode = require('../../utils/getUnitFromValueNode');
const report = require('../../utils/report');
const ruleMessages = require('../../utils/ruleMessages');
const validateOptions = require('../../utils/validateOptions');
const valueParser = require('postcss-value-parser');
const ruleName = 'unit-case';

// Violation message: reports the unit as found and its expected casing.
const messages = ruleMessages(ruleName, {
	expected: (actual, expected) => `Expected "${actual}" to be "${expected}"`,
});
/**
 * stylelint `unit-case` rule: enforces lower- or upper-case units in values.
 * `expectation` is 'lower' or 'upper'; with `context.fix` the casing is
 * rewritten in place instead of reported.
 */
function rule(expectation, options, context) {
	return (root, result) => {
		const validOptions = validateOptions(result, ruleName, {
			actual: expectation,
			possible: ['lower', 'upper'],
		});

		if (!validOptions) {
			return;
		}

		// Checks one node's value string; getIndex maps the node to its
		// source offset (param vs. declaration value).
		function check(node, value, getIndex) {
			const violations = [];

			// Records a violation (or returns false) for a single value node.
			function processValue(valueNode) {
				const unit = getUnitFromValueNode(valueNode);

				if (!unit) {
					return false;
				}

				const expectedUnit = expectation === 'lower' ? unit.toLowerCase() : unit.toUpperCase();

				if (unit === expectedUnit) {
					return false;
				}

				violations.push({
					index: getIndex(node) + valueNode.sourceIndex,
					message: messages.expected(unit, expectedUnit),
				});

				return true;
			}

			const parsedValue = valueParser(value).walk((valueNode) => {
				// Ignore wrong units within `url` function
				let needFix = false;
				// NOTE(review): this inner `value` shadows the outer `value`
				// parameter of check() — rename candidate.
				const value = valueNode.value;

				if (valueNode.type === 'function' && value.toLowerCase() === 'url') {
					return false;
				}

				// Values such as `10PX*2EM` are split on `*` and each segment
				// checked separately.
				if (value.includes('*')) {
					value.split('*').some((val) => {
						// NOTE(review): indexOf finds only the first occurrence
						// of `val`, so repeated segments may report a wrong
						// sourceIndex — verify against upstream.
						return processValue({
							...valueNode,
							sourceIndex: value.indexOf(val) + val.length + 1,
							value: val,
						});
					});
				}

				needFix = processValue(valueNode);

				if (needFix && context.fix) {
					valueNode.value = expectation === 'lower' ? value.toLowerCase() : value.toUpperCase();
				}
			});

			if (violations.length) {
				if (context.fix) {
					// Write the (possibly mutated) parsed value back to the
					// right property for at-rules vs. declarations.
					if (node.name === 'media') {
						node.params = parsedValue.toString();
					} else {
						node.value = parsedValue.toString();
					}
				} else {
					violations.forEach((err) => {
						report({
							index: err.index,
							message: err.message,
							node,
							result,
							ruleName,
						});
					});
				}
			}
		}

		// Only @media params and custom-property at-rules are unit-checked.
		root.walkAtRules((atRule) => {
			if (!/^media$/i.test(atRule.name) && !atRule.variable) {
				return;
			}

			check(atRule, atRule.params, atRuleParamIndex);
		});

		root.walkDecls((decl) => check(decl, decl.value, declarationValueIndex));
	};
}
// Standard stylelint rule exports.
rule.ruleName = ruleName;
rule.messages = messages;
module.exports = rule;
|
{
"pile_set_name": "Github"
}
|
//
// TimeInfo.swift
// CoinbaseTests
//
// Copyright © 2018 Coinbase, Inc. All rights reserved.
//
import Foundation
/// Represents server time.
///
open class TimeInfo: Decodable {
    /// Time.
    public let iso: Date
    /// Time in Unix Epoch format: number of seconds elapsed from UTC 00:00:00, January 1 1970.
    public let epoch: Double
    private enum CodingKeys: String, CodingKey {
        case iso, epoch
    }
    /// Decodes both representations of the server time from the payload.
    /// - Throws: `DecodingError` if either key is missing or has the wrong type.
    public required init(from decoder: Decoder) throws {
        let values = try decoder.container(keyedBy: CodingKeys.self)
        iso = try values.decode(Date.self, forKey: .iso)
        epoch = try values.decode(Double.self, forKey: .epoch)
    }
}
|
{
"pile_set_name": "Github"
}
|
class CfgSkeletons
{
    // Base skeleton: discrete (non-blended), no inherited skeleton, no bones.
    class Default
    {
        isDiscrete = 1;
        skeletonInherit = "";
        skeletonBones[] = {};
    };
    // Static (boneless) skeleton for the fadak model.
    class TFAR_fadak_Skeleton: Default {};
};
class CfgModels
{
    // Base model definition with no sections and no skeleton.
    class Default
    {
        sectionsInherit="";
        sections[] = {};
        skeletonName = "";
    };
    // Binds the fadak model to its skeleton defined in CfgSkeletons.
    class tfr_fadak : Default
    {
        skeletonName = "TFAR_fadak_Skeleton";
    };
};
|
{
"pile_set_name": "Github"
}
|
# -*- coding: utf-8 -*-
import sys
import unittest
from algoliasearch.exceptions import RequestException, ObjectNotFoundException
from algoliasearch.responses import MultipleResponse
from algoliasearch.search_index import SearchIndex
from tests.helpers.factory import Factory as F
from tests.helpers.misc import Unicode, rule_without_metadata
class TestSearchIndex(unittest.TestCase):
def setUp(self):
    # Fresh client and a per-test index named after the running test method.
    self.client = F.search_client()
    self.index = F.index(self.client, self._testMethodName)
def tearDown(self):
    # Release the client's underlying transport/session.
    self.client.close()
def test_tasks(self):
    """A task id that does not exist yet must report status `notPublished`."""
    task_id = self.index.save_object(F.obj()).raw_responses[0]["taskID"]
    task = self.index.get_task(task_id + 1000000)
    self.assertEqual(task["status"], "notPublished")
def test_indexing(self):
    """End-to-end indexing: save (with/without ids), browse, partial update,
    and the various delete flavors."""
    responses = []
    # adding a object with object id
    obj1 = F.obj()
    responses.append(self.index.save_object(obj1))
    # adding a object w/o object id
    obj2 = F.obj(object_id=False)
    opts = {"autoGenerateObjectIDIfNotExist": True}
    responses.append(self.index.save_object(obj2, opts))
    # adding two objects with object id
    obj3 = F.obj({"_tags": ["algolia"]})
    obj4 = F.obj({"_tags": ["algolia"]})
    responses.append(self.index.save_objects([obj3, obj4]))
    # adding two objects w/o object id
    obj5 = F.obj(object_id=False)
    obj6 = F.obj(object_id=False)
    opts = {"autoGenerateObjectIDIfNotExist": True}
    responses.append(self.index.save_objects([obj5, obj6], opts))
    object1_id = self.get_object_id(responses[0])
    object2_id = self.get_object_id(responses[1])
    object3_id = self.get_object_id(responses[2])
    object4_id = self.get_object_id(responses[2], 1)
    object5_id = self.get_object_id(responses[3])
    object6_id = self.get_object_id(responses[3], 1)
    # adding 1000 objects with object id
    objects = []
    for i in range(1000):
        object_id = i
        objects.append({"objectID": str(object_id), "name": object_id})
    # Force chunked batching (10 batches of 100) for the 1000-record save.
    self.index._config.batch_size = 100
    responses.append(self.index.save_objects(objects))
    # waiting for all responses
    MultipleResponse(responses).wait()
    # Check 6 first records with get_object
    self.assertEqual(obj1["name"], self.index.get_object(object1_id)["name"])
    self.assertEqual(obj2["name"], self.index.get_object(object2_id)["name"])
    self.assertEqual(obj3["name"], self.index.get_object(object3_id)["name"])
    self.assertEqual(obj4["name"], self.index.get_object(object4_id)["name"])
    self.assertEqual(obj5["name"], self.index.get_object(object5_id)["name"])
    self.assertEqual(obj6["name"], self.index.get_object(object6_id)["name"])
    # Check 1000 remaining records with get_objects
    results = self.index.get_objects(range(1000))["results"]
    for obj in results:
        self.assertIn(obj, objects)
    self.assertEqual(len(results), len(objects))
    # Browse all records with browse_objects
    results = []
    for obj in self.index.browse_objects():
        results.append(obj)
    for obj in objects:
        self.assertIn(obj, results)
    for obj in [obj1, obj3, obj4]:
        self.assertIn(obj, results)
    self.assertEqual(len(results), 1006)
    responses = []
    # Alter 1 record with partial_update_object
    obj1["name"] = "This is an altered name"
    responses.append(self.index.partial_update_object(obj1))
    # Alter 2 records with partial_update_objects
    obj3["bar"] = 40
    obj4["foo"] = 30
    responses.append(self.index.partial_update_objects([obj3, obj4]))
    MultipleResponse(responses).wait()
    self.assertEqual(self.index.get_object(object1_id), obj1)
    self.assertEqual(self.index.get_object(object3_id), obj3)
    self.assertEqual(self.index.get_object(object4_id), obj4)
    responses = []
    # Delete the 6 first records with delete_object
    responses.append(self.index.delete_object(object1_id))
    responses.append(self.index.delete_object(object2_id))
    responses.append(self.index.delete_by({"tagFilters": ["algolia"]}))
    responses.append(self.index.delete_objects([object5_id, object6_id]))
    # Delete the 1000 remaining records with delete_objects
    responses.append(self.index.clear_objects())
    MultipleResponse(responses).wait()
    objects = [obj for obj in self.index.browse_objects()]
    self.assertEqual(len(objects), 0)
def test_settings(self):
    """Round-trips a settings payload through set_settings/get_settings and
    then again after changing the polymorphic settings values.

    Fix: removed the unused local `found = self.index.get_settings()` that
    was dead code and performed a redundant network round-trip right before
    the assertion that fetches the settings again.
    """
    self.index.save_object(F.obj()).wait()
    settings = {
        "searchableAttributes": [
            "attribute1",
            "attribute2",
            "attribute3",
            "ordered(attribute4)",
            "unordered(attribute5)",
        ],
        "attributesForFaceting": [
            "attribute1",
            "filterOnly(attribute2)",
            "searchable(attribute3)",
        ],
        "unretrievableAttributes": ["attribute1", "attribute2"],
        "attributesToRetrieve": ["attribute3", "attribute4"],
        "ranking": [
            "asc(attribute1)",
            "desc(attribute2)",
            "attribute",
            "custom",
            "exact",
            "filters",
            "geo",
            "proximity",
            "typo",
            "words",
        ],
        "customRanking": ["asc(attribute1)", "desc(attribute1)"],
        "replicas": [self.index.name + "_replica1", self.index.name + "_replica2"],
        "maxValuesPerFacet": 100,
        "sortFacetValuesBy": "count",
        "attributesToHighlight": ["attribute1", "attribute2"],
        "attributesToSnippet": ["attribute1:10", "attribute2:8"],
        "highlightPreTag": "<strong>",
        "highlightPostTag": "</strong>",
        "snippetEllipsisText": " and so on.",
        "restrictHighlightAndSnippetArrays": True,
        "hitsPerPage": 42,
        "paginationLimitedTo": 43,
        "minWordSizefor1Typo": 2,
        "minWordSizefor2Typos": 6,
        "typoTolerance": "false",
        "allowTyposOnNumericTokens": False,
        "ignorePlurals": True,
        "disableTypoToleranceOnAttributes": ["attribute1", "attribute2"],
        "disableTypoToleranceOnWords": ["word1", "word2"],
        "separatorsToIndex": "()[]",
        "queryType": "prefixNone",
        "removeWordsIfNoResults": "allOptional",
        "advancedSyntax": True,
        "optionalWords": ["word1", "word2"],
        "removeStopWords": True,
        "disablePrefixOnAttributes": ["attribute1", "attribute2"],
        "disableExactOnAttributes": ["attribute1", "attribute2"],
        "exactOnSingleWordQuery": "word",
        "enableRules": False,
        "numericAttributesForFiltering": ["attribute1", "attribute2"],
        "allowCompressionOfIntegerArray": True,
        "attributeForDistinct": "attribute1",
        "distinct": 2,
        "replaceSynonymsInHighlight": False,
        "minProximity": 7,
        "responseFields": ["hits", "hitsPerPage"],
        "maxFacetHits": 100,
        "camelCaseAttributes": ["attribute1", "attribute2"],
        "decompoundedAttributes": {
            "de": ["attribute1", "attribute2"],
            "fi": ["attribute3"],
        },
        "keepDiacriticsOnCharacters": "øé",
        "queryLanguages": ["en", "fr"],
        "alternativesAsExact": ["ignorePlurals"],
        "advancedSyntaxFeatures": ["exactPhrase"],
        "userData": {"customUserData": 42.0},
        "indexLanguages": ["ja"],
    }
    self.index.set_settings(settings).wait()
    # Because the response settings dict contains the extra version key, we
    # also add it to the expected settings dict to prevent the test to fail
    # for a missing key.
    settings["version"] = 2
    # In case something is wrong, we disable the maximum diff length to be
    # able to see which setting is incorrect.
    self.maxDiff = None
    self.assertEqual(
        self.index.get_settings(), Unicode.convert_dict_to_unicode(settings),
    )
    settings["typoTolerance"] = "min"
    settings["ignorePlurals"] = ["en", "fr"]
    settings["removeStopWords"] = ["en", "fr"]
    settings["distinct"] = True
    self.index.set_settings(settings).wait()
    self.assertEqual(
        self.index.get_settings(), Unicode.convert_dict_to_unicode(settings),
    )
    # To prevent issues in other test, we reset the maximum diff length to
    # its default value
    self.maxDiff = 80 * 8
def test_search(self):
    """Exercises search, get_object_position, find_object (with and without
    pagination), clickAnalytics, faceted/filtered search, and
    search_for_facet_values."""
    responses = MultipleResponse()
    responses.push(
        self.index.save_objects(
            [
                {
                    "company": "Algolia",
                    "name": "Julien Lemoine",
                    "objectID": "julien-lemoine",
                },  # noqa: E501
                {
                    "company": "Algolia",
                    "name": "Nicolas Dessaigne",
                    "objectID": "nicolas-dessaigne",
                },  # noqa: E501
                {"company": "Amazon", "name": "Jeff Bezos"},
                {"company": "Apple", "name": "Steve Jobs"},
                {"company": "Apple", "name": "Steve Wozniak"},
                {"company": "Arista Networks", "name": "Jayshree Ullal"},
                {"company": "Google", "name": "Larry Page"},
                {"company": "Google", "name": "Rob Pike"},
                {"company": "Google", "name": "Serguey Brin"},
                {"company": "Microsoft", "name": "Bill Gates"},
                {"company": "SpaceX", "name": "Elon Musk"},
                {"company": "Tesla", "name": "Elon Musk"},
                {"company": "Yahoo", "name": "Marissa Mayer"},
            ],
            {"autoGenerateObjectIDIfNotExist": True},
        )
    )
    responses.push(
        self.index.set_settings({"attributesForFaceting": ["searchable(company)"]})
    )
    responses.wait()
    # Perform a search query using search with the query `algolia` and no
    # parameter and check that the number of returned hits is equal to 2
    result = self.index.search("algolia")
    self.assertEqual(result["nbHits"], 2)
    self.assertEqual(
        SearchIndex.get_object_position(result, "nicolas-dessaigne"), 0
    )
    self.assertEqual(SearchIndex.get_object_position(result, "julien-lemoine"), 1)
    self.assertEqual(SearchIndex.get_object_position(result, ""), -1)
    # Call find_object with the following parameters and check that
    # no object is found
    with self.assertRaises(ObjectNotFoundException):
        self.index.find_object(lambda _: False)
    # Call find_object with the following parameters and check that
    # the first object is returned with a `position=0` and `page=0`
    found = self.index.find_object(lambda _: True)
    self.assertEqual(found["position"], 0)
    self.assertEqual(found["page"], 0)

    def callback(obj):
        # type: (dict) -> bool
        return obj.get("company") == "Apple"

    # Call find_object with the following parameters and check that
    # no object is found
    with self.assertRaises(ObjectNotFoundException):
        self.index.find_object(callback, {"query": "algolia"})
    # Call find_object with the following parameters and check that
    # no object is found
    with self.assertRaises(ObjectNotFoundException):
        self.index.find_object(
            callback, {"query": "", "paginate": False, "hitsPerPage": 5}
        )
    # Call find_object with the following parameters and check that
    # the first object is returned with a `position=0` and `page=2`
    found = self.index.find_object(
        callback, {"query": "", "paginate": True, "hitsPerPage": 5}
    )
    self.assertEqual(found["position"], 0)
    self.assertEqual(found["page"], 2)
    # Perform a search using search with the query `elon` and the
    # following parameter and check that the queryID field from
    # the response is not empty
    result = self.index.search("elon", {"clickAnalytics": True})
    self.assertIn("queryID", result)
    # Perform a faceted search using search with the query `elon` and the
    # following parameters and check that the number of returned hits is
    # equal to 1
    result = self.index.search(
        "elon", {"facets": "*", "facetFilters": "company:tesla"}
    )
    self.assertEqual(result["nbHits"], 1)
    # Perform a filtered search using search with the query `elon` and the
    # following parameters and check that the number of returned hits is
    # equal to 2
    result = self.index.search(
        "elon", {"facets": "*", "filters": "(company:tesla OR company:spacex)"}
    )
    self.assertEqual(result["nbHits"], 2)
    result = self.index.search_for_facet_values("company", "a")["facetHits"]
    values = list(map(lambda facet: facet["value"], result))
    self.assertIn("Algolia", values)
    self.assertIn("Amazon", values)
    self.assertIn("Apple", values)
    self.assertIn("Arista Networks", values)
def test_synonyms(self):
    """Covers every synonym type (regular, one-way, placeholder, both
    alt-corrections), then get/browse/delete/clear round-trips."""
    responses = MultipleResponse()
    responses.push(
        self.index.save_objects(
            [
                {"console": "Sony PlayStation <PLAYSTATIONVERSION>"},
                {"console": "Nintendo Switch"},
                {"console": "Nintendo Wii U"},
                {"console": "Nintendo Game Boy Advance"},
                {"console": "Microsoft Xbox"},
                {"console": "Microsoft Xbox 360"},
                {"console": "Microsoft Xbox One"},
            ],
            {"autoGenerateObjectIDIfNotExist": True},
        )
    )
    responses.push(
        self.index.save_synonym(
            F.synonym(
                {"synonyms": ["gba", "gameboy advance", "game boy advance"]}, "gba"
            )
        )
    )
    synonym1 = {
        "objectID": "wii_to_wii_u",
        "type": "onewaysynonym",
        "input": "wii",
        "synonyms": ["wii U"],
    }
    synonym2 = {
        "objectID": "playstation_version_placeholder",
        "type": "placeholder",
        "placeholder": "<PLAYSTATIONVERSION>",
        "replacements": ["1", "One", "2", "3", "4", "4 Pro"],
    }
    synonym3 = {
        "objectID": "ps4",
        "type": "altcorrection1",
        "word": "ps4",
        "corrections": ["playstation4"],
    }
    synonym4 = {
        "objectID": "psone",
        "type": "altcorrection2",
        "word": "psone",
        "corrections": ["playstationone"],
    }
    responses.push(
        self.index.save_synonyms([synonym1, synonym2, synonym3, synonym4])
    )
    responses.wait()
    self.assertEqual(self.index.get_synonym(synonym1["objectID"]), synonym1)
    self.assertEqual(self.index.get_synonym(synonym2["objectID"]), synonym2)
    self.assertEqual(self.index.get_synonym(synonym3["objectID"]), synonym3)
    self.assertEqual(self.index.get_synonym(synonym4["objectID"]), synonym4)
    self.assertEqual(self.index.search_synonyms("")["nbHits"], 5)
    # Browse all records with browse_objects
    results = []
    for obj in self.index.browse_synonyms():
        results.append(obj)
    synonyms = [synonym1, synonym2, synonym3, synonym4]
    for synonym in synonyms:
        self.assertIn(synonym, results)
    self.index.delete_synonym("gba").wait()
    # Try to get the synonym with getSynonym with objectID `gba` and
    # check that the synonym does not exist anymore
    with self.assertRaises(RequestException) as _:
        self.index.get_synonym("gba")
    # Clear all the synonyms using clear_synonyms
    self.index.clear_synonyms().wait()
    # Perform a synonym search using searchSynonyms with an empty query
    # and check that the number of returned synonyms is equal to 0
    self.assertEqual(self.index.search_synonyms("")["nbHits"], 0)
def test_rules(self):
    """Query-rule lifecycle: save (single/batch), ruleContexts search,
    get/search/browse/delete/clear."""
    responses = MultipleResponse()
    responses.push(
        self.index.save_objects(
            [
                {"objectID": "iphone_7", "brand": "Apple", "model": "7"},
                {"objectID": "iphone_8", "brand": "Apple", "model": "8"},
                {"objectID": "iphone_x", "brand": "Apple", "model": "X"},
                {"objectID": "one_plus_one", "brand": "OnePlus", "model": "One"},
                {"objectID": "one_plus_two", "brand": "OnePlus", "model": "Two"},
            ]
        )
    )
    responses.push(
        self.index.set_settings({"attributesForFaceting": ["brand", "model"]})
    )
    rule1 = {
        "objectID": "brand_automatic_faceting",
        "enabled": False,
        "condition": {"anchoring": "is", "pattern": "{facet:brand}"},
        "consequence": {
            "params": {
                "automaticFacetFilters": [
                    {"facet": "brand", "disjunctive": True, "score": 42},
                ]
            }
        },
        "validity": [
            {"from": 1532439300, "until": 1532525700},
            {"from": 1532612100, "until": 1532698500},
        ],
        "description": "Automatic apply the faceting on `brand` if a",
    }
    responses.push(self.index.save_rule(rule1))
    rule2 = {
        "objectID": "query_edits",
        "conditions": [
            {"anchoring": "is", "pattern": "mobile phone", "alternatives": True}
        ],
        "consequence": {
            "params": {
                "query": {
                    "edits": [
                        {"type": "remove", "delete": "mobile"},
                        {"type": "replace", "delete": "phone", "insert": "iphone"},
                    ]
                }
            }
        },
    }
    rule3 = {
        "objectID": "query_promo",
        "consequence": {"params": {"filters": "brand:OnePlus"}},
    }
    rule4 = {
        "objectID": "query_promo_only_summer",
        "condition": {"context": "summer"},
        "consequence": {"params": {"filters": "model:One"}},
    }
    responses.push(self.index.save_rules([rule2, rule3, rule4]))
    responses.wait()
    # Should be only the One Plus model One
    self.assertEqual(
        self.index.search("", {"ruleContexts": ["summer"]})["nbHits"], 1
    )
    self.assertEqual(
        rule_without_metadata(self.index.get_rule(rule1["objectID"])), rule1
    )
    self.assertEqual(
        rule_without_metadata(self.index.get_rule(rule2["objectID"])), rule2
    )
    self.assertEqual(
        rule_without_metadata(self.index.get_rule(rule3["objectID"])), rule3
    )
    self.assertEqual(
        rule_without_metadata(self.index.get_rule(rule4["objectID"])), rule4
    )
    self.assertEqual(self.index.search_rules("")["nbHits"], 4)
    # Browse all records with browse_rules
    results = []
    for obj in self.index.browse_rules():
        if isinstance(obj, dict):
            results.append(rule_without_metadata(obj))
    rules = [rule1, rule2, rule3, rule4]
    for rule in rules:
        self.assertIn(rule, results)
    self.index.delete_rule(rule1["objectID"]).wait()
    # Try to get the first rule with get_rule and check
    # that the rule does not exist anymore
    with self.assertRaises(RequestException) as _:
        self.index.get_rule(rule1["objectID"])
    # Clear all the rules using clear_rules
    self.index.clear_rules().wait()
    # Perform a rule search using search_rule with an empty query
    # and check that the number of returned nbHits is equal to 0
    self.assertEqual(self.index.search_rules("")["nbHits"], 0)
def test_batching(self):
    """Raw batch API: add/update/partial-update(+NoCreate)/delete actions in
    one call, then verify the resulting records."""
    responses = MultipleResponse()
    responses.push(
        self.index.save_objects(
            [
                {"objectID": "one", "key": "value"},
                {"objectID": "two", "key": "value"},
                {"objectID": "three", "key": "value"},
                {"objectID": "four", "key": "value"},
                {"objectID": "five", "key": "value"},
            ]
        )
    )
    responses.push(
        self.index.batch(
            [
                {
                    "action": "addObject",
                    "body": {"objectID": "zero", "key": "value"},
                },
                {"action": "updateObject", "body": {"objectID": "one", "k": "v"}},
                {
                    "action": "partialUpdateObject",
                    "body": {"objectID": "two", "k": "v"},
                },
                {
                    "action": "partialUpdateObject",
                    "body": {"objectID": "two_bis", "key": "value"},
                },
                {
                    "action": "partialUpdateObjectNoCreate",
                    "body": {"objectID": "three", "k": "v"},
                },
                {"action": "deleteObject", "body": {"objectID": "four"}},
            ]
        )
    )
    responses.wait()
    # Expected state: "one" fully replaced, "two"/"three" merged,
    # "two_bis" created by partial update, "four" deleted.
    objects = [
        {"objectID": "zero", "key": "value"},
        {"objectID": "one", "k": "v"},
        {"objectID": "two", "key": "value", "k": "v"},
        {"objectID": "two_bis", "key": "value"},
        {"objectID": "three", "key": "value", "k": "v"},
        {"objectID": "five", "key": "value"},
    ]
    results = [obj for obj in self.index.browse_objects()]
    for obj in objects:
        self.assertIn(obj, results)
    def test_replacing(self):
        """replace_all_* must atomically swap objects, rules and synonyms.

        Saves one record, one rule and one synonym (all objectID "one"),
        replaces each collection with a "two" counterpart, then asserts the
        old entries are gone and the new ones are retrievable.
        """
        responses = MultipleResponse()
        # Seed one entity of each kind under objectID "one".
        responses.push(self.index.save_object({"objectID": "one"}))
        responses.push(self.index.save_rule(F.rule(object_id="one")))
        responses.push(
            self.index.save_synonym(
                {"objectID": "one", "type": "synonym", "synonyms": ["one", "two"]}
            )
        )
        responses.wait()
        # Replace each collection wholesale with objectID "two" entries.
        responses.push(self.index.replace_all_objects([{"objectID": "two"}]))
        responses.push(
            self.index.replace_all_rules(
                [
                    {
                        "objectID": "two",
                        "condition": {"anchoring": "is", "pattern": "pattern"},
                        "consequence": {
                            "params": {
                                "query": {
                                    "edits": [{"type": "remove", "delete": "pattern"}]
                                }
                            }
                        },
                    }
                ]
            )
        )
        responses.push(
            self.index.replace_all_synonyms(
                [{"objectID": "two", "type": "synonym", "synonyms": ["one", "two"]}]
            )
        )
        responses.wait()
        # Check that record with objectID=`one` does not exist
        with self.assertRaises(RequestException) as _:
            self.index.get_object("one")
        # Check that record with objectID=`two` does exist
        self.assertEqual(self.index.get_object("two")["objectID"], "two")
        # Check that rule with objectID=`one` does not exist
        with self.assertRaises(RequestException) as _:
            self.index.get_rule("one")
        # Check that rule with objectID=`two` does exist
        self.assertEqual(self.index.get_rule("two")["objectID"], "two")
        # Check that synonym with objectID=`one` does not exist
        with self.assertRaises(RequestException) as _:
            self.index.get_synonym("one")
        # Check that synonym with objectID="two" does exist using getSynonym
        self.assertEqual(self.index.get_synonym("two")["objectID"], "two")
def test_safe_replacing(self):
# Adds dummy object
self.index.save_object(F.obj()).wait()
# Calls replace all objects with an object without
# object id, and with the safe parameter
self.index.replace_all_objects(
[{"name": "two"}], {"autoGenerateObjectIDIfNotExist": True, "safe": True}
)
response = self.index.search("")
self.assertEqual(response["nbHits"], 1)
hit = response["hits"][0]
self.assertEqual(hit["name"], "two")
self.assertIn("objectID", hit)
self.assertIn("_highlightResult", hit)
    def test_exists(self):
        """exists() must flip with index creation and deletion."""
        # A never-written index does not exist yet.
        self.assertFalse(self.index.exists())
        # Writing the first record implicitly creates the index.
        self.index.save_object(F.obj()).wait()
        self.assertTrue(self.index.exists())
        # Deleting the index makes exists() report False again.
        self.index.delete().wait()
        self.assertFalse(self.index.exists())
def test_url_encoding(self):
objects = [
# unicode
"中文",
# bytestring
"celery-task-meta-c4f1201f-eb7b-41d5-9318-a75a8cfbdaa0",
b"celery-task-meta-c4f1201f-eb7b-41d5-9318-a75a8cfbdaa0",
# misc
"àlgol?a",
b"\xe4\xb8\xad\xe6\x96\x87",
"$^^`",
]
if sys.version_info >= (3, 0):
objects.append("中文".encode("utf-8"))
self.index.save_objects(
[{"objectID": object_id} for object_id in objects]
).wait()
for obj in objects:
self.index.get_object(obj)
def get_object_id(self, indexing_response, index=0):
return indexing_response.raw_responses[0]["objectIDs"][index]
|
{
"pile_set_name": "Github"
}
|
every once in a while you see a film that is so effective in delivering the goods that it is easy to forget , and forgive , its glaring imperfections .
such is the case with " good will hunting " , a subtle character study about a socially inept mathematics genius who struggles to find his path in life .
despite some serious character problems , this is still a very good film .
you probably know about the plot so i'll make it quick .
will hunting ( damon ) is a janitor at m . i . t . , he ? s really smarter than einstein but nobody knows it .
he likes to go out with his friend chuckie ( affleck ) and their other working-class buddies and drink beer .
he ? s good-looking , charismatic , and witty but has a terrible time with authority and stiff college folk .
after getting into a tiff with the law , a distinguished professor ( skarsg ? rd ) discovers will ? s genius and offers him a chance to clean up his record and avoid jail time , at a price : he must attend weekly sessions with a therapist and work on various mathematical problems ( that have stumped the academic elite ) with the professor .
after outsmarting and scaring the hell out of a couple of different psychologists he meets his match when hooked up with a once-promising therapist named sean maguire ( williams ) who has his own problems .
in the meantime will meets a british medical school student ( driver ) and they begin to fall in love .
the story starts out well enough and is a pretty original basis for a film .
even though we ? ve seen movies about misunderstood , erratic prodigies before ( ? shine ? ring a bell ? ) , the script here creates a complex narrative that doesn ? t just focus solely on one character .
alas though , this is not a perfect film , as much as you feel like it could ? ve been while watching it .
the one real problem i had with it is the unrealistic nature of the main character .
is it possible for a lowly janitor to be this intelligent ?
of course .
is it possible for him to be estranged from any deep , human relationships ?
usually , yes .
but , is it possible for him to also be so handsome , funny , quick with the tongue , and city-street tough ?
not very likely .
come on , usually these guys are total nerds who can ? t even buy their own shirts , much less talk down a harvard student in a hip pub while picking up phone numbers from pretty med . -school
girls .
will is just a little too perfect , and in order to accept the character your disbelief suspension needs to be in excellent working condition .
the heavy-handed , anti-war statement made by will at a government job interview late in the film is also boorish , overlong , pompous , and completely unnecessary .
all this sounds pretty bad , but the film somehow makes up for it in other ways .
damon's acting overshadows the fact that the character is slightly unbelievable ; his performance is truly extraordinary .
which leads me to the really good part of the review .
the strength of this movie can be summed up in one single word : acting .
i can ? t recall seeing a film recently that was so well-acted from top to bottom .
from minnie driver ? s frustrated lover to ben affleck ? s laid-back best friend , and all the small roles in between , the performances are magnificent .
robin williams' skill is a given as a bereaved psychologist who could've had a legendary career but was knocked off the path somewhere down the line .
the real gem though is stellan skarsg ? rd ? s turn as professor lambeau , an award-winning mathematician who feels reduced in comparison to a younger , smarter will hunting .
the scenes between williams and skarsg ? rd , as two old college pals who ? ve been brought back together by this enigmatic kid , display some of the best acting i ? ve ever seen .
when i say delivering the goods , this is what i ? m talking about .
watching these two work is what going to see movies is all about .
gus van sant ? s ( to die for , drugstore cowboy ) cold , urban direction is right on , as well as danny elfman ? s sauntering musical score .
i highly recommend " good will hunting " .
despite its faults , it is still an intriguing and fascinating film and you are not likely to see a better acted one this year .
|
{
"pile_set_name": "Github"
}
|
category:刑事
title:侯光辉故意杀人死刑复核刑事裁定书
publictime:2015-12-21
content:中华人民共和国最高人民法院
刑事裁定书
被告人侯光辉,男,汉族,1975年6月10日出生于四川省三台县,初中文化,农民,住三台县xx镇xxxx村x组x号。2013年6月14日被逮捕。现在押。
四川省绵阳市中级人民法院审理绵阳市人民检察院指控被告人侯光辉犯故意杀人罪一案,于2014年2月20日以(2013)绵刑初字第38号刑事附带民事判决,认定被告人侯光辉犯故意杀人罪,判处死刑,剥夺政治权利终身。宣判后,侯光辉提出上诉。四川省高级人民法院经依法开庭审理,于2014年7月15日以(2014)川刑终字第354号刑事裁定,驳回上诉,维持原判,并依法报请本院核准。本院依法组成合议庭,对本案进行了复核,依法讯问了被告人。现已复核终结。
经复核确认:被告人侯光辉在上海市与魏某同居期间发生矛盾,魏某于2013年5月24日回到四川省三台县老家,侯光辉于次日回到三台县。魏某以回家与丈夫离婚为由离开侯光辉,侯光辉寻找魏某无果后,于当月29日购买两把尖刀并用胶带固定在大腿上,来到三台县xx镇xxxx村xx组魏某的娘家寻找魏某,并与魏某的父亲魏某甲(被害人,殁年64岁)发生口角。当晚,侯光辉与魏某甲同住。次日1时许,侯光辉取下尖刀朝魏某甲腹部、头颈部猛刺;魏某甲之妻林某某(被害人,殁年65岁)闻声起床查看、呼救,侯光辉持尖刀朝林某某颈、胸、腹等部位猛刺;魏某甲的外孙女张某某(被害人,殁年9岁)在床边呼救,侯光辉又持尖刀朝张某某颈部等部位猛刺,致魏某甲、林某某、张某某三人呼吸循环衰竭死亡。侯光辉逃离现场后于当日4时许向公安机关投案自首。
上述事实,有第一审、第二审开庭审理中经质证确认的根据被告人侯光辉的供述和指认提取的尖刀、在被害人魏某甲家中提取的侯光辉扔弃在现场的白色短袖衬衣、运动鞋以及在侯光辉家中提取的侯光辉作案时所穿的深蓝色休闲裤等物证,证人侯某甲、侯某乙、魏某、魏某乙、王某某、何某某、魏某丙等的证言,证明尖刀、白色短袖衬衣、运动鞋、深蓝色休闲裤上留有被害人血迹以及白色短袖衬衣、运动鞋上留有侯光辉的DNA分型的DNA鉴定意见、尸体鉴定意见、活体鉴定意见,现场勘验、检查笔录和侯光辉的归案证明等证据证实。被告人侯光辉亦供认。足以认定。
本院认为,被告人侯光辉购买尖刀,持尖刀故意杀死他人,其行为构成故意杀人罪。侯光辉因与非法同居的女友魏某发生矛盾而迁怒于无辜,杀死三人,犯罪手段残忍,后果特别严重,罪行极其严重。虽有投案自首的情节,亦不足以从轻处罚。第一审判决、第二审裁定认定的事实清楚,证据确实、充分,定罪准确,量刑适当。审判程序合法。依照《中华人民共和国刑事诉讼法》第二百三十五条、第二百三十九条和《最高人民法院关于适用〈中华人民共和国刑事诉讼法〉的解释》第三百五十条第(一)项的规定,裁定如下:
核准四川省高级人民法院(2014)川刑终字第354号维持第一审以故意杀人罪判处被告人侯光辉死刑,剥夺政治权利终身的刑事裁定。
本裁定自宣告之日起发生法律效力。
审判长李广海
代理审判员彭锐
代理审判员曹吴清
二〇一四年十二月二十六日
书记员杜健康
|
{
"pile_set_name": "Github"
}
|
/**
 * Exercises conditionals guarded by compile-time constant booleans.
 *
 * NOTE(review): the shape of this class (constant-condition ifs calling
 * otherwise-unused private methods) suggests it is a fixture for a
 * dead-branch-elimination optimization test — presumably the tool under
 * test must remove the Debug branches and keep the Trace ones. Confirm
 * before "simplifying" the constant conditions by hand.
 */
public class DeadBranchElimination1 {
    // Constant conditions: every `if (Debug)` branch is statically dead,
    // every `if (Trace)` branch is statically live.
    public static final boolean Debug = false;
    public static final boolean Trace = true;
    public static void run() {
        // Dead: Debug is false.
        if (Debug) {
            debug();
        }
        // Live: Trace is true.
        if (Trace) {
            trace();
        }
        // Dead then-branch, live else-branch.
        if (Debug) {
            debug();
        } else {
            trace();
        }
        // Negated constants: dead (Trace is true).
        if (!Trace) {
            debug2();
        }
        // Negated constants: live (Debug is false).
        if (!Debug) {
            trace2();
        }
    }
    // Empty bodies: only reachability matters for the optimization check.
    private static void debug() {
    }
    private static void trace() {
    }
    private static void debug2() {
    }
    private static void trace2() {
    }
}
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0"?>
<!--
/**
* Copyright © Magento, Inc. All rights reserved.
* See COPYING.txt for license details.
*/
-->
<page xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" layout="1column" xsi:noNamespaceSchemaLocation="urn:magento:framework:View/Layout/etc/page_configuration.xsd">
    <body>
        <!-- Override the checkout success block with the in-store-pickup
             template; the injected object argument presumably lets the
             template detect store-pickup orders (confirm in success.phtml). -->
        <referenceBlock name="checkout.success" template="Magento_InventoryInStorePickupFrontend::success.phtml">
            <arguments>
                <argument name="is_order_store_pickup" xsi:type="object">Magento\InventoryInStorePickupFrontend\Block\Checkout\Onepage\Success\IsOrderStorePickup</argument>
            </arguments>
        </referenceBlock>
    </body>
</page>
|
{
"pile_set_name": "Github"
}
|
/* Pager bar docked under the grid: full width, borderless on top so it
   visually continues the grid, with the shared header background image. */
.slick-pager {
  width: 100%;
  height: 26px;
  border: 1px solid gray;
  border-top: 0;
  background: url('../images/header-columns-bg.gif') repeat-x center bottom;
  vertical-align: middle;
}
/* "Showing x of y" status text. */
.slick-pager .slick-pager-status {
  display: inline-block;
  padding: 6px;
}
/* Square icon buttons (jQuery UI icons) inside the pager. */
.slick-pager .ui-icon-container {
  display: inline-block;
  margin: 2px;
  border-color: gray;
}
/* Prev/next navigation cluster, pinned to the left edge. */
.slick-pager .slick-pager-nav {
  display: inline-block;
  float: left;
  padding: 2px;
}
/* Settings cluster, pinned to the right edge. */
.slick-pager .slick-pager-settings {
  display: block;
  float: right;
  padding: 2px;
}
.slick-pager .slick-pager-settings * {
  vertical-align: middle;
}
/* Page-size links styled as clickable text. */
.slick-pager .slick-pager-settings a {
  padding: 2px;
  text-decoration: underline;
  cursor: pointer;
}
|
{
"pile_set_name": "Github"
}
|
/* Copyright (C) 2012-2020 by László Nagy
This file is part of Bear.
Bear is a tool to generate compilation database for clang tooling.
Bear is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Bear is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#pragma once
// Names of the environment variables through which the intercept machinery
// is configured; presumably set by the supervising Bear process and read by
// the injected library (namespace `el`) — confirm against the reader side.
namespace el::env {
    // Command to run for reporting an intercepted process execution.
    constexpr char KEY_REPORTER[] = "INTERCEPT_REPORT_COMMAND";
    // Destination (path/endpoint) the execution reports are sent to.
    constexpr char KEY_DESTINATION[] = "INTERCEPT_REPORT_DESTINATION";
    // Toggles verbose logging of the interception.
    constexpr char KEY_VERBOSE[] = "INTERCEPT_VERBOSE";
}
|
{
"pile_set_name": "Github"
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.web.common.ui.cssprep;
import java.awt.EventQueue;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JComponent;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.netbeans.modules.web.common.ui.api.CssPreprocessorUI;
import org.netbeans.modules.web.common.ui.spi.CssPreprocessorUIImplementation;
import org.netbeans.spi.options.OptionsPanelController;
import org.openide.util.HelpCtx;
import org.openide.util.Lookup;
import org.openide.util.Parameters;
/**
 * Options controller that aggregates the option panels of all registered
 * CSS preprocessors into a single IDE options page.
 *
 * Threading: most methods assert they run on the AWT event-dispatch thread
 * (EDT); {@code applyChanges} defers its work to the EDT itself via
 * {@code SwingUtilities.invokeLater}.
 */
public final class CssPrepOptionsPanelController extends OptionsPanelController implements ChangeListener {
    private static final Logger LOGGER = Logger.getLogger(CssPrepOptionsPanelController.class.getName());
    private final PropertyChangeSupport propertyChangeSupport = new PropertyChangeSupport(this);
    // Per-preprocessor Options delegates; populated lazily in
    // getCssPrepOptionsPanel(). CopyOnWriteArrayList so iteration from the
    // deferred applyChanges runnable is safe against concurrent mutation.
    private final List<CssPreprocessorUIImplementation.Options> allOptions = new CopyOnWriteArrayList<>();
    // @GuardedBy("EDT")
    private volatile CssPrepOptionsPanel cssPrepOptionsPanel = null;
    private volatile boolean changed = false;
    // Reloads every delegate's stored values and resets the dirty flag.
    @Override
    public void update() {
        assert EventQueue.isDispatchThread();
        for (CssPreprocessorUIImplementation.Options options : allOptions) {
            options.update();
        }
        changed = false;
    }
    // Persists all delegates. Runs asynchronously on the EDT; save failures
    // are logged per-delegate rather than aborting the whole save.
    @Override
    public void applyChanges() {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                for (CssPreprocessorUIImplementation.Options options : allOptions) {
                    assert options.isValid() : "Saving invalid options: " + options.getDisplayName() + " (error: " + options.getErrorMessage() + ")";
                    try {
                        options.save();
                    } catch (IOException ex) {
                        LOGGER.log(Level.WARNING, "Error while saving CSS preprocessor: " + options.getDisplayName(), ex);
                    }
                }
                changed = false;
            }
        });
    }
    @Override
    public void cancel() {
    }
    // Valid only when every delegate is valid; shows the first error, or
    // the first warning when all are valid.
    @Override
    public boolean isValid() {
        assert EventQueue.isDispatchThread();
        CssPrepOptionsPanel panel = getCssPrepOptionsPanel();
        String warning = null;
        for (CssPreprocessorUIImplementation.Options options : allOptions) {
            if (!options.isValid()) {
                String error = options.getErrorMessage();
                Parameters.notNull("error", error); // NOI18N
                panel.setError(error);
                return false;
            }
            if (warning == null) {
                warning = options.getWarningMessage();
            }
        }
        if (warning != null) {
            panel.setWarning(warning);
        } else {
            // everything ok
            panel.setError(" "); // NOI18N
        }
        return true;
    }
    // Dirty when any delegate reports a change. Deliberately queries every
    // delegate (no short-circuit) via |=.
    @Override
    public boolean isChanged() {
        boolean isChanged = false;
        for (CssPreprocessorUIImplementation.Options options : allOptions) {
            isChanged |= options.changed();
        }
        return isChanged;
    }
    @Override
    public JComponent getComponent(Lookup masterLookup) {
        assert EventQueue.isDispatchThread();
        return getCssPrepOptionsPanel();
    }
    @Override
    public HelpCtx getHelpCtx() {
        return new HelpCtx("org.netbeans.modules.web.common.cssprep.CssPrepOptionsPanelController"); // NOI18N
    }
    @Override
    public void addPropertyChangeListener(PropertyChangeListener l) {
        propertyChangeSupport.addPropertyChangeListener(l);
    }
    @Override
    public void removePropertyChangeListener(PropertyChangeListener l) {
        propertyChangeSupport.removePropertyChangeListener(l);
    }
    // Delegate state changed: fire PROP_CHANGED once per dirty cycle, and
    // PROP_VALID on every change so the dialog revalidates.
    @Override
    public void stateChanged(ChangeEvent e) {
        if (!changed) {
            changed = true;
            propertyChangeSupport.firePropertyChange(OptionsPanelController.PROP_CHANGED, false, true);
        }
        propertyChangeSupport.firePropertyChange(OptionsPanelController.PROP_VALID, null, null);
    }
    // Lazily builds the panel and the delegate list (EDT-confined), and
    // subscribes this controller to every delegate's change events.
    private CssPrepOptionsPanel getCssPrepOptionsPanel() {
        assert EventQueue.isDispatchThread();
        if (cssPrepOptionsPanel == null) {
            for (CssPreprocessorUI preprocessor : CssPreprocessorsAccessor.getDefault().getPreprocessors()) {
                CssPreprocessorUIImplementation.Options options = CssPreprocessorAccessor.getDefault().createOptions(preprocessor);
                if (options != null) {
                    allOptions.add(options);
                }
            }
            cssPrepOptionsPanel = new CssPrepOptionsPanel(new ArrayList<>(allOptions));
            for (CssPreprocessorUIImplementation.Options options : allOptions) {
                options.addChangeListener(this);
            }
        }
        return cssPrepOptionsPanel;
    }
}
|
{
"pile_set_name": "Github"
}
|
import re
import gettext
_ = gettext.gettext
from BeautifulSoup import BeautifulSoup, Declaration, Comment, Tag
from html5lib.constants import namespaces
import _base
class TreeWalker(_base.NonRecursiveTreeWalker):
    """html5lib tree walker over a BeautifulSoup (3.x) tree.

    NOTE(review): Python 2 only — relies on the ``unicode`` builtin and the
    old ``BeautifulSoup`` package layout; confirm before porting.
    """
    # Picks apart the single doctype string BeautifulSoup stores into the
    # name / publicId / systemId pieces html5lib needs.
    doctype_regexp = re.compile(
        r'DOCTYPE\s+(?P<name>[^\s]*)(\s*PUBLIC\s*"(?P<publicId>.*)"\s*"(?P<systemId1>.*)"|\s*SYSTEM\s*"(?P<systemId2>.*)")?')
    def getNodeDetails(self, node):
        """Classify ``node`` into an html5lib token tuple by its soup type."""
        if isinstance(node, BeautifulSoup): # Document or DocumentFragment
            return (_base.DOCUMENT,)
        elif isinstance(node, Declaration): # DocumentType
            string = unicode(node.string)
            # Slice needed to remove markup added during unicode conversion,
            # but only in some versions of BeautifulSoup/Python.
            if string.startswith('<!') and string.endswith('>'):
                string = string[2:-1]
            m = self.doctype_regexp.match(string)
            # This regexp approach seems wrong and fragile, but BeautifulSoup
            # stores the doctype as a single string and we want the separate
            # bits. It should work as long as the tree was created by
            # html5lib itself, but may be wrong if it has been modified.
            # We could just feed it to an html5lib tokenizer instead.
            assert m is not None, "DOCTYPE did not match expected format"
            name = m.group('name')
            publicId = m.group('publicId')
            if publicId is not None:
                systemId = m.group('systemId1')
            else:
                systemId = m.group('systemId2')
            return _base.DOCTYPE, name, publicId or "", systemId or ""
        elif isinstance(node, Comment):
            string = unicode(node.string)
            # Strip the literal comment delimiters if they leaked through.
            if string.startswith('<!--') and string.endswith('-->'):
                string = string[4:-3]
            return _base.COMMENT, string
        elif isinstance(node, unicode): # TextNode
            return _base.TEXT, node
        elif isinstance(node, Tag): # Element
            # All elements are reported in the HTML namespace.
            return (_base.ELEMENT, namespaces["html"], node.name,
                    dict(node.attrs).items(), node.contents)
        else:
            return _base.UNKNOWN, node.__class__.__name__
    def getFirstChild(self, node):
        # NOTE(review): raises IndexError on childless nodes — presumably
        # only called when children exist; confirm against the base class.
        return node.contents[0]
    def getNextSibling(self, node):
        return node.nextSibling
    def getParentNode(self, node):
        return node.parent
|
{
"pile_set_name": "Github"
}
|
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/browser/compositor/image_transport_factory.h"
#include "base/run_loop.h"
#include "build/build_config.h"
#include "components/viz/common/gpu/context_lost_observer.h"
#include "components/viz/common/gpu/context_provider.h"
#include "content/browser/gpu/gpu_data_manager_impl.h"
#include "content/public/test/browser_test.h"
#include "content/public/test/content_browser_test.h"
#include "gpu/GLES2/gl2extchromium.h"
#include "gpu/command_buffer/client/gles2_interface.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "ui/compositor/compositor.h"
namespace content {
namespace {

using ImageTransportFactoryBrowserTest = ContentBrowserTest;

// Observer mock used to assert that context-loss notifications fire.
class MockContextLostObserver : public viz::ContextLostObserver {
 public:
  MOCK_METHOD0(OnContextLost, void());
};

// Flaky on ChromeOS: crbug.com/394083
#if defined(OS_CHROMEOS)
#define MAYBE_TestLostContext DISABLED_TestLostContext
#else
#define MAYBE_TestLostContext TestLostContext
#endif

// Checks that upon context loss the observer is notified.
IN_PROC_BROWSER_TEST_F(ImageTransportFactoryBrowserTest,
                       MAYBE_TestLostContext) {
  ImageTransportFactory* factory = ImageTransportFactory::GetInstance();

  // This test doesn't make sense in software compositing mode.
  if (GpuDataManagerImpl::GetInstance()->IsGpuCompositingDisabled())
    return;

  scoped_refptr<viz::ContextProvider> context_provider =
      factory->GetContextFactory()->SharedMainThreadContextProvider();
  MockContextLostObserver observer;
  context_provider->AddObserver(&observer);

  // Quit the run loop once the (asynchronous) loss notification arrives.
  base::RunLoop run_loop;
  EXPECT_CALL(observer, OnContextLost())
      .WillOnce(testing::Invoke(&run_loop, &base::RunLoop::Quit));

  // Force a GPU context loss from the client side.
  gpu::gles2::GLES2Interface* gl = context_provider->ContextGL();
  gl->LoseContextCHROMIUM(GL_GUILTY_CONTEXT_RESET_ARB,
                          GL_INNOCENT_CONTEXT_RESET_ARB);

  // We have to flush to make sure that the client side gets a chance to notice
  // the context is gone.
  gl->Flush();
  run_loop.Run();

  context_provider->RemoveObserver(&observer);
}

}  // anonymous namespace
}  // namespace content
|
{
"pile_set_name": "Github"
}
|
// ==========================================================================
// SeqAn - The Library for Sequence Analysis
// ==========================================================================
// Copyright (c) 2006-2016, Knut Reinert, FU Berlin
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of Knut Reinert or the FU Berlin nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL KNUT REINERT OR THE FU BERLIN BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
// OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
//
// ==========================================================================
#ifndef SEQAN_HEADER_GRAPH_ITERATOR_VERTEX_H
#define SEQAN_HEADER_GRAPH_ITERATOR_VERTEX_H
namespace seqan
{
//////////////////////////////////////////////////////////////////////////////
// Graph VertexIterator
//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/*!
* @class VertexIterator
* @extends Iter
* @brief Vertex iterator for @link Graph @endlink.
*
* @signature Iterator<TGraph, VertexIterator>::Type;
* @signature template <typename TGraph, typename TSpec>
* class Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >;
*
* @tparam TGraph The graph to iterate the edges of.
*
* The first signature is the signature of the corresponding @link ContainerConcept#Iterator graph's Iterator @endlink
* metafunction call. The second call is the internal definition of the type. You should always get this type using
* the metafunction call from the first signature.
*
*
* @fn VertexIterator::VertexIterator
* @brief Constructor
*
* @signature Iter::Iter();
* @signature Iter::Iter(iter);
* @signature Iter::Iter(graph);
*
* @param[in] iter Other OutEdgeIterator to copy from.
* @param[in] graph The @link Graph @endlink to iterate vertices of.
*/
// Vertex iterator over a graph: stores a pointer to the host graph and the
// current vertex id (data_pos). Iteration skips unused ids via the graph's
// vertex id manager (see goNext/goPrevious below).
template<typename TGraph, typename TSpec>
class Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >
{
public:
    typedef typename VertexDescriptor<TGraph>::Type TVertexDescriptor_;
    // Host graph; never owned by the iterator.
    TGraph const* data_host;
    // Current vertex descriptor (position).
    TVertexDescriptor_ data_pos;

    // NOTE(review): leaves data_host/data_pos uninitialized — a
    // default-constructed iterator must be assigned before use.
    Iter()
    {
    }

    // Positions the iterator at the lowest vertex id of the graph.
    Iter(TGraph const& _graph) :
        data_host(&_graph),
        data_pos(getIdLowerBound(_getVertexIdManager(*data_host)))
    {
    }

    Iter(Iter const& _iter) :
        data_host(_iter.data_host),
        data_pos(_iter.data_pos)
    {
    }

    ~Iter() {
    }

    Iter const& operator = (Iter const & _other) {
        if (this == &_other) return *this;
        data_host = _other.data_host;
        data_pos = _other.data_pos;
        return *this;
    }
//____________________________________________________________________________
};
//////////////////////////////////////////////////////////////////////////////
// Graph InternalVertexIterator - Metafunctions
//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
// Iterator<> metafunction: maps (graph, VertexIterator tag) to the concrete
// iterator type above; const and non-const graph variants.
template<typename TGraph>
struct Iterator<TGraph, VertexIterator>
{
    typedef Iter<TGraph, GraphIterator<InternalVertexIterator<VertexIterator> > > Type;
};

template<typename TGraph>
struct Iterator<TGraph const, VertexIterator>
{
    typedef Iter<TGraph const, GraphIterator<InternalVertexIterator<VertexIterator> > > Type;
};

// Value<>: dereferencing a vertex iterator yields a vertex descriptor.
template<typename TGraph, typename TIteratorSpec>
struct Value<Iter<TGraph, GraphIterator<InternalVertexIterator<TIteratorSpec> > > >
{
    typedef typename VertexDescriptor<TGraph>::Type Type;
};

template<typename TGraph, typename TIteratorSpec>
struct Value<Iter<TGraph const, GraphIterator<InternalVertexIterator<TIteratorSpec> > > >
{
    typedef typename VertexDescriptor<TGraph const>::Type Type;
};

// Reference<>: reference to the stored vertex descriptor.
template<typename TGraph, typename TIteratorSpec>
struct Reference<Iter<TGraph, GraphIterator<InternalVertexIterator<TIteratorSpec> > > >
{
    typedef typename Value<Iter<TGraph, GraphIterator<InternalVertexIterator<TIteratorSpec> > > >::Type& Type;
};

template<typename TGraph, typename TIteratorSpec>
struct Reference<Iter<TGraph const, GraphIterator<InternalVertexIterator<TIteratorSpec> > > >
{
    typedef typename Value<Iter<TGraph const, GraphIterator<InternalVertexIterator<TIteratorSpec> > > >::Type& Type;
};

// GetValue<>: by-value access to the vertex descriptor.
template<typename TGraph, typename TIteratorSpec>
struct GetValue<Iter<TGraph, GraphIterator<InternalVertexIterator<TIteratorSpec> > > >
{
    typedef typename Value<Iter<TGraph, GraphIterator<InternalVertexIterator<TIteratorSpec> > > >::Type Type;
};

template<typename TGraph, typename TIteratorSpec>
struct GetValue<Iter<TGraph const, GraphIterator<InternalVertexIterator<TIteratorSpec> > > >
{
    typedef typename Value<Iter<TGraph const, GraphIterator<InternalVertexIterator<TIteratorSpec> > > >::Type Type;
};

// Spec<>: recovers the iterator's specialization tag.
template<typename TGraph, typename TIteratorSpec>
struct Spec<Iter<TGraph, GraphIterator<InternalVertexIterator<TIteratorSpec> > > >
{
    typedef TIteratorSpec Type;
};

template<typename TGraph, typename TIteratorSpec>
struct Spec<Iter<TGraph const, GraphIterator<InternalVertexIterator<TIteratorSpec> > > >
{
    typedef TIteratorSpec Type;
};
//////////////////////////////////////////////////////////////////////////////
// Graph InternalVertexIterator - FUNCTIONS
//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
// Returns the current vertex descriptor by value.
template<typename TGraph, typename TSpec>
inline typename GetValue<Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > > >::Type
getValue(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it)
{
    return it.data_pos;
}

// Returns a reference to the current vertex descriptor (mutable position).
template<typename TGraph, typename TSpec>
inline typename Reference<Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > > >::Type
value(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it)
{
    return it.data_pos;
}

// Dereference forwards to value().
template<typename TGraph, typename TSpec>
inline typename Reference<Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > > >::Type
operator * (Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it)
{
    return value(it);
}

// Returns the graph this iterator walks over.
// TODO(holtgrew): Add GraphIterator class that has hostGraph() function?
template<typename TGraph, typename TSpec>
inline typename Host<Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > > >::Type const&
hostGraph(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it)
{
    return *it.data_host;
}

// True when positioned at the lowest vertex id of the host graph.
template<typename TGraph, typename TSpec>
inline bool
atBegin(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it)
{
    return (getValue(it) == getIdLowerBound(_getVertexIdManager(*it.data_host)));
}

// Repositions the iterator at the lowest vertex id.
template<typename TGraph, typename TSpec>
inline void
goBegin(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it)
{
    it.data_pos = getIdLowerBound(_getVertexIdManager(*it.data_host));
}

// True when positioned at or beyond the one-past-the-last vertex id.
template<typename TGraph, typename TSpec>
inline bool
atEnd(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it)
{
    return (getValue(it) >= getIdUpperBound(_getVertexIdManager(*it.data_host)));
}

// Repositions the iterator at the one-past-the-last vertex id.
template<typename TGraph, typename TSpec>
inline void
goEnd(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it)
{
    it.data_pos = getIdUpperBound(_getVertexIdManager(*it.data_host));
}
//////////////////////////////////////////////////////////////////////////////
// Advances to the next vertex id that is actually in use, skipping ids
// released by the id manager; stops at the end position.
template<typename TGraph, typename TSpec>
inline void
goNext(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it)
{
    if (!atEnd(it)) ++it.data_pos;
    while ((!atEnd(it)) && (!idInUse(_getVertexIdManager(*it.data_host), it.data_pos))) ++it.data_pos;
}

// Pre-increment: forwards to goNext.
template<typename TGraph, typename TSpec>
inline Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >&
operator ++(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it)
{
    goNext(it);
    return it;
}

// Post-increment: returns the pre-advance copy.
template<typename TGraph, typename TSpec>
inline Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >
operator ++(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it, int)
{
    Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > > ret = it;
    goNext(it);
    return ret;
}

// Steps back to the previous in-use vertex id; stops at the begin position.
template<typename TGraph, typename TSpec>
inline void
goPrevious(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it)
{
    if (!atBegin(it)) --it.data_pos;
    while ((!atBegin(it)) && (!idInUse(_getVertexIdManager(*it.data_host), it.data_pos))) --it.data_pos;
}

// Pre-decrement: forwards to goPrevious.
template<typename TGraph, typename TSpec>
inline Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >&
operator --(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it)
{
    goPrevious(it);
    return it;
}

// Post-decrement: returns the pre-step copy.
template<typename TGraph, typename TSpec>
inline Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >
operator --(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it, int)
{
    Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > > ret = it;
    goPrevious(it);
    return ret;
}
//////////////////////////////////////////////////////////////////////////////
template<typename TGraph, typename TSpec>
inline bool
operator ==(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it1,
Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it2)
{
return (it1.data_pos==it2.data_pos);
}
//////////////////////////////////////////////////////////////////////////////
template<typename TGraph, typename TSpec>
inline bool
operator !=(Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it1,
Iter<TGraph, GraphIterator<InternalVertexIterator<TSpec> > >& it2)
{
return (it1.data_pos!=it2.data_pos);
}
}// namespace seqan
#endif //#ifndef SEQAN_HEADER_...
|
{
"pile_set_name": "Github"
}
|
---
archs: [ armv7, armv7s, arm64, arm64e ]
platform: ios
install-name: /System/Library/PrivateFrameworks/HealthRecordServices.framework/HealthRecordServices
current-version: 1
compatibility-version: 1
exports:
- archs: [ armv7, armv7s, arm64, arm64e ]
symbols: [ _HDCIErrorDomain,
_HDHRSClinicalItemPropertyKeyExtractionFailureInfo,
_HDHealthRecordsXPCServiceInterface,
_HRSAccumulatedIngestionErrorDomain,
_HRSAccumulatedIngestionErrorUserInfoKeyAuthorizationFailures,
_HRSAccumulatedIngestionErrorUserInfoKeyOtherErrors,
_HRSAccumulatedIngestionErrorUserInfoKeyResourceFetchFailures,
_HRSAccumulatedIngestionErrorUserInfoKeyUnsatisfiedConditions,
_HRSPrivateErrorDomain,
_HealthRecordServicesVersionNumber,
_HealthRecordServicesVersionString,
_NSStringFromHDClinicalAnalyticsExtractionFailureCode ]
objc-classes: [ _HDClinicalDataCollectionExtractionResult,
_HDExtractionRequest, _HDExtractionResult,
_HDExtractionResultItem, _HDFHIRJSONObject,
_HDFHIRResourceData, _HDFHIRResourceLastSeenObject,
_HDFHIRResourceObject, _HDHRSExtractionFailureInfo,
_HDHRSExtractionFailureRecord,
_HDHRSOriginInformation,
_HDHealthRecordsXPCServiceClient,
_HDOriginalFHIRResourceObject,
_HDRedactedFHIRResourceObject,
_HDReferenceExtractionRequest,
_HDReferenceExtractionResult,
_HDUnresolvedReferenceSet ]
...
|
{
"pile_set_name": "Github"
}
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_65) on Tue Sep 30 00:45:13 PDT 2014 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Class org.glyptodon.guacamole.properties.GuacamoleProperties (guacamole-ext 0.9.3 API)</title>
<meta name="date" content="2014-09-30">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.glyptodon.guacamole.properties.GuacamoleProperties (guacamole-ext 0.9.3 API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../org/glyptodon/guacamole/properties/GuacamoleProperties.html" title="class in org.glyptodon.guacamole.properties">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/glyptodon/guacamole/properties/class-use/GuacamoleProperties.html" target="_top">Frames</a></li>
<li><a href="GuacamoleProperties.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.glyptodon.guacamole.properties.GuacamoleProperties" class="title">Uses of Class<br>org.glyptodon.guacamole.properties.GuacamoleProperties</h2>
</div>
<div class="classUseContainer">No usage of org.glyptodon.guacamole.properties.GuacamoleProperties</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../org/glyptodon/guacamole/properties/GuacamoleProperties.html" title="class in org.glyptodon.guacamole.properties">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/glyptodon/guacamole/properties/class-use/GuacamoleProperties.html" target="_top">Frames</a></li>
<li><a href="GuacamoleProperties.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2014. All rights reserved.</small></p>
<!-- Google Analytics -->
<script type="text/javascript">
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-75289145-1', 'auto');
ga('send', 'pageview');
</script>
<!-- End Google Analytics -->
</body>
</html>
|
{
"pile_set_name": "Github"
}
|
2017.4:
* Version 6.2 (Rev. 5)
* General: Updated ILA to handle XDC warnings
2017.3:
* Version 6.2 (Rev. 4)
* Added new virtexuplusHBM device support
2017.2:
* Version 6.2 (Rev. 3)
* Added new AZYNQUPLUS device support
2017.1:
* Version 6.2 (Rev. 2)
* Updated ILA and Debug Hub IPs to handle CDC warnings
* Revision change in one or more subcores
2016.4:
* Version 6.2 (Rev. 1)
* Updated ILA and Debug Hub IPs to handle CDC warnings
* Revision change in one or more subcores
2016.3:
* Version 6.2
* Updated DRC to set individual probe MU count value based on all_probe_same_mu_cnt parameter
* Source HDL files are concatenated into a single file to speed up synthesis and simulation. No changes required by the user
* Revision change in one or more subcores
2016.2:
* Version 6.1
* No changes
2016.1:
* Version 6.1
* Updated the IP to support 2 Windows & 1 Sample count configuration
* Number of comparator increased from 1 to 16 in basic mode and 4 to 16 in advanced mode
* Updated probe data width register
2015.4.2:
* Version 6.0 (Rev. 1)
* No changes
2015.4.1:
* Version 6.0 (Rev. 1)
* No changes
2015.4:
* Version 6.0 (Rev. 1)
* No change
2015.3:
* Version 6.0
* Fixed Timing10 DRC violations in ILA IP.
* IP revision number added to HDL module, library, and include file names, to support designs with both locked and upgraded IP instances
2015.2.1:
* Version 5.1 (Rev. 1)
* No changes
2015.2:
* Version 5.1 (Rev. 1)
* Updated IP XDC constraints to fix Partial False path scenario in ILA when operated in multi clock domain
* Updated IP XDC constraints to fix critical warnings in High Speed Design Debugging mode
2015.1:
* Version 5.1
* Fixed example design placer issue with pin location constraints for SVD packages
2014.4.1:
* Version 5.0 (Rev. 2)
* Updated example XDC pin location constraints for new devices
2014.4:
* Version 5.0 (Rev. 1)
* Encrypted source files are concatenated together to reduce the number of files and to reduce simulator compile time
2014.3:
* Version 5.0
* Added AXI4 Stream monitor support. New parameter option AXI4S added to C_SLOT_0_AXI_PROTOCOL
* Four new user parameters added to support AXI4 Stream. These are C_SLOT_0_AXIS_TDATA_WIDTH, C_SLOT_0_AXIS_TID_WIDTH, C_SLOT_0_AXIS_TUSER_WIDTH, C_SLOT_0_AXIS_TDEST_WIDTH
* Updated ILA IP to use new helper libraries (ltlib_v1_0 & xsdbs_v1_0)
* Changed C_NUM_MONITOR_SLOTS field to read only as ILA supports only one interface in AXI mode
2014.2:
* Version 4.0 (Rev. 1)
* Fixed TIMING DRC violations, added ASYNC_REG property on the register which has double synchronizer for CDC paths
* Fixed re-execution of First state when ila is used in advanced trigger mode
* Reduced number of unused ports visible to users for AXI mode when AXI4LITE protocol is selected
2014.1:
* Version 4.0
* Updated the IP to support new DBG_HUB stitcher algorithm
* Updated ILA AXI monitor feature to the IP
* Internal device family name change, no functional changes
2013.4:
* Version 3.0 (Rev. 1)
* Kintex UltraScale Pre-Production support
2013.3:
* Version 3.0
* All ports changed to lower case
* Added ILA Advanced Trigger Features
2013.2:
* Version 2.1
* Improved support for multiple instances
* Added C_TRIGOUT_EN parameter to support cross triggering
* Added C_TRIGIN_EN parameter to support cross triggering
2013.1:
* Version 2.0
* Native Vivado Release
(c) Copyright 2000 - 2018 Xilinx, Inc. All rights reserved.
This file contains confidential and proprietary information
of Xilinx, Inc. and is protected under U.S. and
international copyright and other intellectual property
laws.
DISCLAIMER
This disclaimer is not a license and does not grant any
rights to the materials distributed herewith. Except as
otherwise provided in a valid license issued to you by
Xilinx, and to the maximum extent permitted by applicable
law: (1) THESE MATERIALS ARE MADE AVAILABLE "AS IS" AND
WITH ALL FAULTS, AND XILINX HEREBY DISCLAIMS ALL WARRANTIES
AND CONDITIONS, EXPRESS, IMPLIED, OR STATUTORY, INCLUDING
BUT NOT LIMITED TO WARRANTIES OF MERCHANTABILITY, NON-
INFRINGEMENT, OR FITNESS FOR ANY PARTICULAR PURPOSE; and
(2) Xilinx shall not be liable (whether in contract or tort,
including negligence, or under any other theory of
liability) for any loss or damage of any kind or nature
related to, arising under or in connection with these
materials, including for any direct, or any indirect,
special, incidental, or consequential loss or damage
(including loss of data, profits, goodwill, or any type of
loss or damage suffered as a result of any action brought
by a third party) even if such damage or loss was
reasonably foreseeable or Xilinx had been advised of the
possibility of the same.
CRITICAL APPLICATIONS
Xilinx products are not designed or intended to be fail-
safe, or for use in any application requiring fail-safe
performance, such as life-support or safety devices or
systems, Class III medical devices, nuclear facilities,
applications related to the deployment of airbags, or any
other applications that could lead to death, personal
injury, or severe property or environmental damage
(individually and collectively, "Critical
Applications"). Customer assumes the sole risk and
liability of any use of Xilinx products in Critical
Applications, subject only to applicable laws and
regulations governing limitations on product liability.
THIS COPYRIGHT NOTICE AND DISCLAIMER MUST BE RETAINED AS
PART OF THIS FILE AT ALL TIMES.
|
{
"pile_set_name": "Github"
}
|
package client
import (
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"strings"
"testing"
"context"
"github.com/hyperhq/hyper-api/types"
)
// TestContainerInspectError checks that a transport-level HTTP 500 from the
// daemon is surfaced by ContainerInspect as "Error response from daemon: ...".
func TestContainerInspectError(t *testing.T) {
	client := &Client{
		transport: newMockClient(nil, errorMock(http.StatusInternalServerError, "Server error")),
	}
	_, err := client.ContainerInspect(context.Background(), "nothing")
	if err == nil || err.Error() != "Error response from daemon: Server error" {
		t.Fatalf("expected a Server Error, got %v", err)
	}
}
// TestContainerInspectContainerNotFound checks that an HTTP 404 is translated
// into the dedicated container-not-found error recognized by IsErrContainerNotFound.
func TestContainerInspectContainerNotFound(t *testing.T) {
	client := &Client{
		transport: newMockClient(nil, errorMock(http.StatusNotFound, "Server error")),
	}
	_, err := client.ContainerInspect(context.Background(), "unknown")
	if err == nil || !IsErrContainerNotFound(err) {
		t.Fatalf("expected a containerNotFound error, got %v", err)
	}
}
// TestContainerInspect verifies that ContainerInspect requests the expected
// /containers/<id>/json URL and decodes the basic fields (ID, Image, Name)
// from the JSON body returned by the mocked transport.
func TestContainerInspect(t *testing.T) {
	expectedURL := "/containers/container_id/json"
	client := &Client{
		transport: newMockClient(nil, func(req *http.Request) (*http.Response, error) {
			if !strings.HasPrefix(req.URL.Path, expectedURL) {
				return nil, fmt.Errorf("Expected URL '%s', got '%s'", expectedURL, req.URL)
			}
			content, err := json.Marshal(types.ContainerJSON{
				ContainerJSONBase: &types.ContainerJSONBase{
					ID:    "container_id",
					Image: "image",
					Name:  "name",
				},
			})
			if err != nil {
				return nil, err
			}
			return &http.Response{
				StatusCode: http.StatusOK,
				Body:       ioutil.NopCloser(bytes.NewReader(content)),
			}, nil
		}),
	}
	r, err := client.ContainerInspect(context.Background(), "container_id")
	if err != nil {
		t.Fatal(err)
	}
	if r.ID != "container_id" {
		t.Fatalf("expected `container_id`, got %s", r.ID)
	}
	if r.Image != "image" {
		// Fixed: the failure message previously printed r.ID instead of the
		// field actually being compared.
		t.Fatalf("expected `image`, got %s", r.Image)
	}
	if r.Name != "name" {
		// Fixed: same copy-paste bug — report r.Name, not r.ID.
		t.Fatalf("expected `name`, got %s", r.Name)
	}
}
// TestContainerInspectNode verifies that ContainerInspect also decodes the
// optional swarm Node sub-object (ID, Addr, Labels) when present in the
// daemon's JSON response.
func TestContainerInspectNode(t *testing.T) {
	client := &Client{
		transport: newMockClient(nil, func(req *http.Request) (*http.Response, error) {
			content, err := json.Marshal(types.ContainerJSON{
				ContainerJSONBase: &types.ContainerJSONBase{
					ID:    "container_id",
					Image: "image",
					Name:  "name",
					Node: &types.ContainerNode{
						ID:     "container_node_id",
						Addr:   "container_node",
						Labels: map[string]string{"foo": "bar"},
					},
				},
			})
			if err != nil {
				return nil, err
			}
			return &http.Response{
				StatusCode: http.StatusOK,
				Body:       ioutil.NopCloser(bytes.NewReader(content)),
			}, nil
		}),
	}
	r, err := client.ContainerInspect(context.Background(), "container_id")
	if err != nil {
		t.Fatal(err)
	}
	if r.ID != "container_id" {
		t.Fatalf("expected `container_id`, got %s", r.ID)
	}
	if r.Image != "image" {
		// Fixed: the failure message previously printed r.ID instead of the
		// field actually being compared.
		t.Fatalf("expected `image`, got %s", r.Image)
	}
	if r.Name != "name" {
		// Fixed: same copy-paste bug — report r.Name, not r.ID.
		t.Fatalf("expected `name`, got %s", r.Name)
	}
	if r.Node.ID != "container_node_id" {
		t.Fatalf("expected `container_node_id`, got %s", r.Node.ID)
	}
	if r.Node.Addr != "container_node" {
		t.Fatalf("expected `container_node`, got %s", r.Node.Addr)
	}
	foo, ok := r.Node.Labels["foo"]
	if foo != "bar" || !ok {
		t.Fatalf("expected `bar` for label `foo`")
	}
}
|
{
"pile_set_name": "Github"
}
|
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//
#import <MediaPlayer/MPQueueFeeder.h>
@class NSArray;
// MPArrayQueueFeeder: an MPQueueFeeder subclass backed by a fixed NSArray of
// queue items (class-dump'd declaration; method bodies are not visible here).
@interface MPArrayQueueFeeder : MPQueueFeeder
{
NSArray *_queueItems;   // backing storage for the playback queue
}
- (void).cxx_destruct;
// All items currently in the queue (read-only view of _queueItems).
@property(readonly, nonatomic) NSArray *items;
- (void)reloadWithPlaybackContext:(id)arg1 completionHandler:(CDUnknownBlockType)arg2;
- (id)identifierAtIndex:(unsigned long long)arg1;
- (unsigned long long)indexOfItemWithIdentifier:(id)arg1;
- (long long)playbackMode;
- (id)playbackInfoForIdentifier:(id)arg1;
- (id)pathAtIndex:(unsigned long long)arg1;
- (unsigned long long)itemCount;
- (id)copyRawItemAtIndex:(unsigned long long)arg1;
- (void)dealloc;
// Designated-style initializer taking the array of queue items.
- (id)initWithItems:(id)arg1;
@end
|
{
"pile_set_name": "Github"
}
|
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: tflite_fb
import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()
class EmbeddingLookupSparseOptions(object):
    """FlatBuffers table accessor for EmbeddingLookupSparseOptions.

    Auto-generated by the FlatBuffers compiler: wraps a byte buffer instead of
    storing fields, and reads values lazily through vtable offsets.
    """
    __slots__ = ['_tab']
    @classmethod
    def GetRootAsEmbeddingLookupSparseOptions(cls, buf, offset):
        # Read the root table's uoffset at `offset` and wrap the table it points to.
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = EmbeddingLookupSparseOptions()
        x.Init(buf, n + offset)
        return x
    @classmethod
    def EmbeddingLookupSparseOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        # True if the buffer carries the "TFL3" file identifier bytes.
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
    # EmbeddingLookupSparseOptions
    def Init(self, buf, pos):
        # Bind this accessor to `buf` at table position `pos`.
        self._tab = flatbuffers.table.Table(buf, pos)
    # EmbeddingLookupSparseOptions
    def Combiner(self):
        # Field in vtable slot 4: combiner enum stored as int8; 0 when absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
        return 0
def EmbeddingLookupSparseOptionsStart(builder):
    """Begin a new EmbeddingLookupSparseOptions table with one field slot."""
    builder.StartObject(1)

def EmbeddingLookupSparseOptionsAddCombiner(builder, combiner):
    """Write the combiner enum into slot 0 as an int8 (default 0)."""
    builder.PrependInt8Slot(0, combiner, 0)

def EmbeddingLookupSparseOptionsEnd(builder):
    """Finish the table under construction and return its offset."""
    return builder.EndObject()
|
{
"pile_set_name": "Github"
}
|
module github.com/hashicorp/golang-lru
go 1.12
|
{
"pile_set_name": "Github"
}
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-18 19:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the optional free-text ``category`` field to the Party model.

    Auto-generated by Django 1.9; the field is nullable and blankable so
    existing rows need no data migration.
    """
    dependencies = [
        ('guests', '0003_guest_is_child'),
    ]
    operations = [
        migrations.AddField(
            model_name='party',
            name='category',
            # max 20 chars; NULL/blank allowed so the column is optional
            field=models.CharField(blank=True, max_length=20, null=True),
        ),
    ]
|
{
"pile_set_name": "Github"
}
|
<?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
/**
 * The "hybrid" collection of methods.
 *
 * This resource is currently an empty container: it exposes no methods of its
 * own and exists so that generated sub-resources can hang off
 * $apigeeService->hybrid. Typical usage is:
 * <code>
 *   $apigeeService = new Google_Service_Apigee(...);
 *   $hybrid = $apigeeService->hybrid;
 * </code>
 */
class Google_Service_Apigee_Resource_Hybrid extends Google_Service_Resource
{
}
|
{
"pile_set_name": "Github"
}
|
package com.alibaba.smartfox.eclipse.ui.pmd
/**
* This class contains information for syntax coloring and styling for an
* extension
*/
/**
 * Syntax coloring and styling configuration for a single file extension.
 */
class SyntaxData(val extension: String) {
    var varnameReference: String? = null
    var stringStart: String? = null
    var stringEnd: String? = null
    private var keywords: Collection<String>? = null
    private var punctuation: String? = null
    var comment: String? = null
    var multiLineCommentStart: String? = null
    var multiLineCommentEnd: String? = null

    /** True when this entry applies to [otherExtension]. */
    fun matches(otherExtension: String): Boolean = extension == otherExtension

    /** True when [word] is one of the configured keywords. */
    fun isKeyword(word: String): Boolean = keywords?.contains(word) == true

    /** True when [ch] appears among the configured punctuation characters. */
    fun isPunctuation(ch: Char): Boolean = (punctuation?.indexOf(ch) ?: -1) >= 0

    /** Replace the keyword set used by [isKeyword]. */
    fun setKeywords(keywords: Collection<String>) {
        this.keywords = keywords
    }

    /** Replace the punctuation characters used by [isPunctuation]. */
    fun setPunctuation(thePunctuationChars: String) {
        punctuation = thePunctuationChars
    }
}
|
{
"pile_set_name": "Github"
}
|
<?php
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
class CreateDistrictsTable extends Migration
{
    /**
     * Run the migrations: create the `<prefix>districts` table.
     *
     * Each district belongs to a city via `city_id`, enforced by a foreign
     * key that cascades id updates and restricts deletes.
     *
     * @return void
     */
    public function up()
    {
        $districts = config('laravolt.indonesia.table_prefix').'districts';
        $cities = config('laravolt.indonesia.table_prefix').'cities';

        Schema::create($districts, function (Blueprint $table) use ($cities) {
            $table->char('id', 7);
            $table->char('city_id', 4);
            $table->string('name', 255);
            $table->text('meta')->nullable();
            $table->primary('id');
            $table->timestamps();

            $table->foreign('city_id')
                ->references('id')
                ->on($cities)
                ->onUpdate('cascade')
                ->onDelete('restrict');
        });
    }

    /**
     * Reverse the migrations: drop the districts table.
     *
     * @return void
     */
    public function down()
    {
        Schema::drop(config('laravolt.indonesia.table_prefix').'districts');
    }
}
|
{
"pile_set_name": "Github"
}
|
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to [email protected] so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Filter
* @subpackage UnitTests
* @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id$
*/
/**
* @see Zend_Filter_Boolean
*/
require_once 'Zend/Filter/Boolean.php';
/**
* @category Zend
* @package Zend_Filter
* @subpackage UnitTests
* @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @group Zend_Filter
*/
class Zend_Filter_BooleanTest extends PHPUnit_Framework_TestCase
{
/**
* Zend_Filter_Boolean object
*
* @var Zend_Filter_Boolean
*/
protected $_filter;
/**
 * Creates a fresh Zend_Filter_Boolean (with its default type) before each
 * test method so the tests cannot influence one another.
 *
 * @return void
 */
public function setUp()
{
    $this->_filter = new Zend_Filter_Boolean();
}
/**
 * Default configuration behaves like native PHP boolean casting:
 * false, 0, 0.0, '', '0', array() and null filter to FALSE, while any
 * other value — including the literal strings 'false' and 'no' — filters
 * to TRUE (same outcomes as testOnlyPHP below).
 *
 * @return void
 */
public function testBasic()
{
    $this->assertFalse($this->_filter->filter(false));
    $this->assertTrue($this->_filter->filter(true));
    $this->assertFalse($this->_filter->filter(0));
    $this->assertTrue($this->_filter->filter(1));
    $this->assertFalse($this->_filter->filter(0.0));
    $this->assertTrue($this->_filter->filter(1.0));
    $this->assertFalse($this->_filter->filter(''));
    $this->assertTrue($this->_filter->filter('abc'));
    $this->assertFalse($this->_filter->filter('0'));
    $this->assertTrue($this->_filter->filter('1'));
    $this->assertFalse($this->_filter->filter(array()));
    $this->assertTrue($this->_filter->filter(array('xxx')));
    $this->assertFalse($this->_filter->filter(null));
    $this->assertTrue($this->_filter->filter('false'));
    $this->assertTrue($this->_filter->filter('true'));
    $this->assertTrue($this->_filter->filter('no'));
    $this->assertTrue($this->_filter->filter('yes'));
}
/**
 * With type BOOLEAN only the boolean false itself maps to FALSE; every
 * other input (including 0, '', array() and null) passes through as TRUE.
 *
 * @return void
 */
public function testOnlyBoolean()
{
    $this->_filter->setType(Zend_Filter_Boolean::BOOLEAN);
    $this->assertFalse($this->_filter->filter(false));
    $this->assertTrue($this->_filter->filter(true));
    $this->assertTrue($this->_filter->filter(0));
    $this->assertTrue($this->_filter->filter(1));
    $this->assertTrue($this->_filter->filter(0.0));
    $this->assertTrue($this->_filter->filter(1.0));
    $this->assertTrue($this->_filter->filter(''));
    $this->assertTrue($this->_filter->filter('abc'));
    $this->assertTrue($this->_filter->filter('0'));
    $this->assertTrue($this->_filter->filter('1'));
    $this->assertTrue($this->_filter->filter(array()));
    $this->assertTrue($this->_filter->filter(array('xxx')));
    $this->assertTrue($this->_filter->filter(null));
    $this->assertTrue($this->_filter->filter('false'));
    $this->assertTrue($this->_filter->filter('true'));
    $this->assertTrue($this->_filter->filter('no'));
    $this->assertTrue($this->_filter->filter('yes'));
}
/**
 * With type INTEGER only the integer 0 maps to FALSE; note that the float
 * 0.0 and the string '0' are NOT treated as false here.
 *
 * @return void
 */
public function testOnlyInteger()
{
    $this->_filter->setType(Zend_Filter_Boolean::INTEGER);
    $this->assertTrue($this->_filter->filter(false));
    $this->assertTrue($this->_filter->filter(true));
    $this->assertFalse($this->_filter->filter(0));
    $this->assertTrue($this->_filter->filter(1));
    $this->assertTrue($this->_filter->filter(0.0));
    $this->assertTrue($this->_filter->filter(1.0));
    $this->assertTrue($this->_filter->filter(''));
    $this->assertTrue($this->_filter->filter('abc'));
    $this->assertTrue($this->_filter->filter('0'));
    $this->assertTrue($this->_filter->filter('1'));
    $this->assertTrue($this->_filter->filter(array()));
    $this->assertTrue($this->_filter->filter(array('xxx')));
    $this->assertTrue($this->_filter->filter(null));
    $this->assertTrue($this->_filter->filter('false'));
    $this->assertTrue($this->_filter->filter('true'));
    $this->assertTrue($this->_filter->filter('no'));
    $this->assertTrue($this->_filter->filter('yes'));
}
/**
 * With type FLOAT only the float 0.0 maps to FALSE; the integer 0 and the
 * string '0' are NOT treated as false here.
 *
 * @return void
 */
public function testOnlyFloat()
{
    $this->_filter->setType(Zend_Filter_Boolean::FLOAT);
    $this->assertTrue($this->_filter->filter(false));
    $this->assertTrue($this->_filter->filter(true));
    $this->assertTrue($this->_filter->filter(0));
    $this->assertTrue($this->_filter->filter(1));
    $this->assertFalse($this->_filter->filter(0.0));
    $this->assertTrue($this->_filter->filter(1.0));
    $this->assertTrue($this->_filter->filter(''));
    $this->assertTrue($this->_filter->filter('abc'));
    $this->assertTrue($this->_filter->filter('0'));
    $this->assertTrue($this->_filter->filter('1'));
    $this->assertTrue($this->_filter->filter(array()));
    $this->assertTrue($this->_filter->filter(array('xxx')));
    $this->assertTrue($this->_filter->filter(null));
    $this->assertTrue($this->_filter->filter('false'));
    $this->assertTrue($this->_filter->filter('true'));
    $this->assertTrue($this->_filter->filter('no'));
    $this->assertTrue($this->_filter->filter('yes'));
}
/**
 * With type STRING only the empty string '' maps to FALSE; '0' and other
 * non-empty strings stay TRUE.
 *
 * @return void
 */
public function testOnlyString()
{
    $this->_filter->setType(Zend_Filter_Boolean::STRING);
    $this->assertTrue($this->_filter->filter(false));
    $this->assertTrue($this->_filter->filter(true));
    $this->assertTrue($this->_filter->filter(0));
    $this->assertTrue($this->_filter->filter(1));
    $this->assertTrue($this->_filter->filter(0.0));
    $this->assertTrue($this->_filter->filter(1.0));
    $this->assertFalse($this->_filter->filter(''));
    $this->assertTrue($this->_filter->filter('abc'));
    $this->assertTrue($this->_filter->filter('0'));
    $this->assertTrue($this->_filter->filter('1'));
    $this->assertTrue($this->_filter->filter(array()));
    $this->assertTrue($this->_filter->filter(array('xxx')));
    $this->assertTrue($this->_filter->filter(null));
    $this->assertTrue($this->_filter->filter('false'));
    $this->assertTrue($this->_filter->filter('true'));
    $this->assertTrue($this->_filter->filter('no'));
    $this->assertTrue($this->_filter->filter('yes'));
}
/**
 * With type ZERO only the string '0' maps to FALSE; the integer 0 and the
 * float 0.0 are NOT treated as false here.
 *
 * @return void
 */
public function testOnlyZero()
{
    $this->_filter->setType(Zend_Filter_Boolean::ZERO);
    $this->assertTrue($this->_filter->filter(false));
    $this->assertTrue($this->_filter->filter(true));
    $this->assertTrue($this->_filter->filter(0));
    $this->assertTrue($this->_filter->filter(1));
    $this->assertTrue($this->_filter->filter(0.0));
    $this->assertTrue($this->_filter->filter(1.0));
    $this->assertTrue($this->_filter->filter(''));
    $this->assertTrue($this->_filter->filter('abc'));
    $this->assertFalse($this->_filter->filter('0'));
    $this->assertTrue($this->_filter->filter('1'));
    $this->assertTrue($this->_filter->filter(array()));
    $this->assertTrue($this->_filter->filter(array('xxx')));
    $this->assertTrue($this->_filter->filter(null));
    $this->assertTrue($this->_filter->filter('false'));
    $this->assertTrue($this->_filter->filter('true'));
    $this->assertTrue($this->_filter->filter('no'));
    $this->assertTrue($this->_filter->filter('yes'));
}
/**
 * With type EMPTY_ARRAY only an empty array() maps to FALSE; non-empty
 * arrays and all scalars stay TRUE.
 *
 * @return void
 */
public function testOnlyArray()
{
    $this->_filter->setType(Zend_Filter_Boolean::EMPTY_ARRAY);
    $this->assertTrue($this->_filter->filter(false));
    $this->assertTrue($this->_filter->filter(true));
    $this->assertTrue($this->_filter->filter(0));
    $this->assertTrue($this->_filter->filter(1));
    $this->assertTrue($this->_filter->filter(0.0));
    $this->assertTrue($this->_filter->filter(1.0));
    $this->assertTrue($this->_filter->filter(''));
    $this->assertTrue($this->_filter->filter('abc'));
    $this->assertTrue($this->_filter->filter('0'));
    $this->assertTrue($this->_filter->filter('1'));
    $this->assertFalse($this->_filter->filter(array()));
    $this->assertTrue($this->_filter->filter(array('xxx')));
    $this->assertTrue($this->_filter->filter(null));
    $this->assertTrue($this->_filter->filter('false'));
    $this->assertTrue($this->_filter->filter('true'));
    $this->assertTrue($this->_filter->filter('no'));
    $this->assertTrue($this->_filter->filter('yes'));
}
/**
 * With type NULL only a literal null maps to FALSE; falsy scalars such as
 * 0 and '' stay TRUE.
 *
 * @return void
 */
public function testOnlyNull()
{
    $this->_filter->setType(Zend_Filter_Boolean::NULL);
    $this->assertTrue($this->_filter->filter(false));
    $this->assertTrue($this->_filter->filter(true));
    $this->assertTrue($this->_filter->filter(0));
    $this->assertTrue($this->_filter->filter(1));
    $this->assertTrue($this->_filter->filter(0.0));
    $this->assertTrue($this->_filter->filter(1.0));
    $this->assertTrue($this->_filter->filter(''));
    $this->assertTrue($this->_filter->filter('abc'));
    $this->assertTrue($this->_filter->filter('0'));
    $this->assertTrue($this->_filter->filter('1'));
    $this->assertTrue($this->_filter->filter(array()));
    $this->assertTrue($this->_filter->filter(array('xxx')));
    $this->assertFalse($this->_filter->filter(null));
    $this->assertTrue($this->_filter->filter('false'));
    $this->assertTrue($this->_filter->filter('true'));
    $this->assertTrue($this->_filter->filter('no'));
    $this->assertTrue($this->_filter->filter('yes'));
}
/**
 * With type PHP the filter mirrors PHP's native boolean casting: false, 0,
 * 0.0, '', '0', array() and null map to FALSE, everything else to TRUE —
 * including the strings 'false' and 'no', which are non-empty strings.
 *
 * @return void
 */
public function testOnlyPHP()
{
    $this->_filter->setType(Zend_Filter_Boolean::PHP);
    $this->assertFalse($this->_filter->filter(false));
    $this->assertTrue($this->_filter->filter(true));
    $this->assertFalse($this->_filter->filter(0));
    $this->assertTrue($this->_filter->filter(1));
    $this->assertFalse($this->_filter->filter(0.0));
    $this->assertTrue($this->_filter->filter(1.0));
    $this->assertFalse($this->_filter->filter(''));
    $this->assertTrue($this->_filter->filter('abc'));
    $this->assertFalse($this->_filter->filter('0'));
    $this->assertTrue($this->_filter->filter('1'));
    $this->assertFalse($this->_filter->filter(array()));
    $this->assertTrue($this->_filter->filter(array('xxx')));
    $this->assertFalse($this->_filter->filter(null));
    $this->assertTrue($this->_filter->filter('false'));
    $this->assertTrue($this->_filter->filter('true'));
    $this->assertTrue($this->_filter->filter('no'));
    $this->assertTrue($this->_filter->filter('yes'));
}
/**
 * With type FALSE_STRING only the literal string 'false' maps to FALSE;
 * the boolean false and other falsy values stay TRUE.
 *
 * @return void
 */
public function testOnlyFalseString()
{
    $this->_filter->setType(Zend_Filter_Boolean::FALSE_STRING);
    $this->assertTrue($this->_filter->filter(false));
    $this->assertTrue($this->_filter->filter(true));
    $this->assertTrue($this->_filter->filter(0));
    $this->assertTrue($this->_filter->filter(1));
    $this->assertTrue($this->_filter->filter(0.0));
    $this->assertTrue($this->_filter->filter(1.0));
    $this->assertTrue($this->_filter->filter(''));
    $this->assertTrue($this->_filter->filter('abc'));
    $this->assertTrue($this->_filter->filter('0'));
    $this->assertTrue($this->_filter->filter('1'));
    $this->assertTrue($this->_filter->filter(array()));
    $this->assertTrue($this->_filter->filter(array('xxx')));
    $this->assertTrue($this->_filter->filter(null));
    $this->assertFalse($this->_filter->filter('false'));
    $this->assertTrue($this->_filter->filter('true'));
    $this->assertTrue($this->_filter->filter('no'));
    $this->assertTrue($this->_filter->filter('yes'));
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testOnlyYes()
{
    $this->_filter->setType(Zend_Filter_Boolean::YES);
    $this->_filter->setLocale('en');

    // YES type with an English locale: only the localized "no" maps to false.
    $this->assertFalse($this->_filter->filter('no'));

    // Every other input maps to true.
    $inputs = array(
        false, true, 0, 1, 0.0, 1.0, '', 'abc', '0', '1',
        array(), array('xxx'), null, 'false', 'true', 'yes'
    );
    foreach ($inputs as $value) {
        $this->assertTrue($this->_filter->filter($value));
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testOnlyAll()
{
    $this->_filter->setType(Zend_Filter_Boolean::ALL);
    $this->_filter->setLocale('en');

    // ALL type: PHP-falsy values plus the recognized words 'false' and 'no'.
    foreach (array(false, 0, 0.0, '', '0', array(), null, 'false', 'no') as $value) {
        $this->assertFalse($this->_filter->filter($value));
    }

    foreach (array(true, 1, 1.0, 'abc', '1', array('xxx'), 'true', 'yes') as $value) {
        $this->assertTrue($this->_filter->filter($value));
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testArrayConstantNotation()
{
    // Type given as an array of class constants (ZERO | STRING | BOOLEAN).
    $filter = new Zend_Filter_Boolean(
        array(
            'type' => array(
                Zend_Filter_Boolean::ZERO,
                Zend_Filter_Boolean::STRING,
                Zend_Filter_Boolean::BOOLEAN
            )
        )
    );

    // Only real booleans, the empty string and the string '0' map to false.
    foreach (array(false, '', '0') as $value) {
        $this->assertFalse($filter->filter($value));
    }

    $truthy = array(
        true, 0, 1, 0.0, 1.0, 'abc', '1', array(), array('xxx'),
        null, 'false', 'true', 'no', 'yes'
    );
    foreach ($truthy as $value) {
        $this->assertTrue($filter->filter($value));
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testArrayConfigNotation()
{
    // Same as the constant-array notation, with an extra unknown option
    // ('test') that the constructor must silently ignore.
    $filter = new Zend_Filter_Boolean(
        array(
            'type' => array(
                Zend_Filter_Boolean::ZERO,
                Zend_Filter_Boolean::STRING,
                Zend_Filter_Boolean::BOOLEAN),
            'test' => false
        )
    );

    foreach (array(false, '', '0') as $value) {
        $this->assertFalse($filter->filter($value));
    }

    $truthy = array(
        true, 0, 1, 0.0, 1.0, 'abc', '1', array(), array('xxx'),
        null, 'false', 'true', 'no', 'yes'
    );
    foreach ($truthy as $value) {
        $this->assertTrue($filter->filter($value));
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testMultiConstantNotation()
{
    // Type given as an arithmetic sum of the bit-flag constants.
    $filter = new Zend_Filter_Boolean(
        Zend_Filter_Boolean::ZERO + Zend_Filter_Boolean::STRING + Zend_Filter_Boolean::BOOLEAN
    );

    foreach (array(false, '', '0') as $value) {
        $this->assertFalse($filter->filter($value));
    }

    $truthy = array(
        true, 0, 1, 0.0, 1.0, 'abc', '1', array(), array('xxx'),
        null, 'false', 'true', 'no', 'yes'
    );
    foreach ($truthy as $value) {
        $this->assertTrue($filter->filter($value));
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testStringNotation()
{
    // Type given as an array of lowercase string aliases.
    $filter = new Zend_Filter_Boolean(
        array(
            'type' => array('zero', 'string', 'boolean')
        )
    );

    foreach (array(false, '', '0') as $value) {
        $this->assertFalse($filter->filter($value));
    }

    $truthy = array(
        true, 0, 1, 0.0, 1.0, 'abc', '1', array(), array('xxx'),
        null, 'false', 'true', 'no', 'yes'
    );
    foreach ($truthy as $value) {
        $this->assertTrue($filter->filter($value));
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testSingleStringNotation()
{
    // Type given as a single string alias: only real booleans are filtered.
    $filter = new Zend_Filter_Boolean(
        'boolean'
    );

    $this->assertFalse($filter->filter(false));

    // Every non-boolean input maps to true.
    $inputs = array(
        true, 0, 1, 0.0, 1.0, '', 'abc', '0', '1',
        array(), array('xxx'), null, 'false', 'true', 'no', 'yes'
    );
    foreach ($inputs as $value) {
        $this->assertTrue($filter->filter($value));
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testSettingLocale()
{
    $this->_filter->setType(Zend_Filter_Boolean::ALL);
    $this->_filter->setLocale('de');

    // With a German locale, 'nein' is recognized as false but the English
    // 'no' is not; 'ja' is recognized as true.
    foreach (array(false, 0, 0.0, '', '0', array(), null, 'false', 'nein') as $value) {
        $this->assertFalse($this->_filter->filter($value));
    }

    foreach (array(true, 1, 1.0, 'abc', '1', array('xxx'), 'true', 'no', 'yes', 'ja') as $value) {
        $this->assertTrue($this->_filter->filter($value));
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testSettingLocalePerConstructorString()
{
    // Locale passed as the third positional constructor argument.
    $filter = new Zend_Filter_Boolean(
        'all', true, 'de'
    );

    foreach (array(false, 0, 0.0, '', '0', array(), null, 'false', 'nein') as $value) {
        $this->assertFalse($filter->filter($value));
    }

    foreach (array(true, 1, 1.0, 'abc', '1', array('xxx'), 'true', 'no', 'yes', 'ja') as $value) {
        $this->assertTrue($filter->filter($value));
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testConfigObject()
{
    // Options supplied through a Zend_Config instance.
    require_once 'Zend/Config.php';
    $options = array('type' => 'all', 'locale' => 'de');
    $config  = new Zend_Config($options);
    $filter  = new Zend_Filter_Boolean(
        $config
    );

    foreach (array(false, 0, 0.0, '', '0', array(), null, 'false', 'nein') as $value) {
        $this->assertFalse($filter->filter($value));
    }

    foreach (array(true, 1, 1.0, 'abc', '1', array('xxx'), 'true', 'no', 'yes', 'ja') as $value) {
        $this->assertTrue($filter->filter($value));
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testSettingLocalePerConstructorArray()
{
    // Locale passed inside the options array.
    $filter = new Zend_Filter_Boolean(
        array('type' => 'all', 'locale' => 'de')
    );

    foreach (array(false, 0, 0.0, '', '0', array(), null, 'false', 'nein') as $value) {
        $this->assertFalse($filter->filter($value));
    }

    foreach (array(true, 1, 1.0, 'abc', '1', array('xxx'), 'true', 'no', 'yes', 'ja') as $value) {
        $this->assertTrue($filter->filter($value));
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testSettingLocaleInstance()
{
    // Locale passed as a Zend_Locale object rather than a string.
    $locale = new Zend_Locale('de');
    $filter = new Zend_Filter_Boolean(
        array('type' => 'all', 'locale' => $locale)
    );

    foreach (array(false, 0, 0.0, '', '0', array(), null, 'false', 'nein') as $value) {
        $this->assertFalse($filter->filter($value));
    }

    foreach (array(true, 1, 1.0, 'abc', '1', array('xxx'), 'true', 'no', 'yes', 'ja') as $value) {
        $this->assertTrue($filter->filter($value));
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testWithoutCasting()
{
    $locale = new Zend_Locale('de');
    $filter = new Zend_Filter_Boolean(
        array('type' => 'all', 'casting' => false, 'locale' => $locale)
    );

    // Recognized false inputs are still converted to boolean false.
    foreach (array(false, 0, 0.0, '', '0', array(), 'false', 'nein') as $value) {
        $this->assertFalse($filter->filter($value));
    }

    // Recognized true inputs are still converted to boolean true.
    foreach (array(true, 1, 1.0, '1', 'true', 'ja') as $value) {
        $this->assertTrue($filter->filter($value));
    }

    // With casting disabled, unrecognized inputs pass through unchanged.
    foreach (array(2, 0.5, 'abc', '2', array('xxx'), null, 'no', 'yes') as $value) {
        $this->assertEquals($value, $filter->filter($value));
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testSettingFalseType()
{
    // setType() accepts only known type constants/aliases; a bare boolean
    // must be rejected with an exception mentioning "Unknown".
    try {
        $this->_filter->setType(true);
        $this->fail();
    } catch (Zend_Exception $e) {
        $this->assertContains('Unknown', $e->getMessage());
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testGetType()
{
    // Default type of a fresh filter — 127 is presumably the ALL bitmask
    // (all individual type bits set); confirm against Zend_Filter_Boolean::ALL.
    $this->assertEquals(127, $this->_filter->getType());
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testSettingFalseLocaleType()
{
    // setLocale() rejects non-string/non-Zend_Locale arguments.
    try {
        $this->_filter->setLocale(true);
        $this->fail();
    } catch (Zend_Exception $e) {
        $this->assertContains('Locale has to be', $e->getMessage());
    }
}
/**
* Ensures that the filter follows expected behavior
*
* @return void
*/
public function testSettingUnknownLocale()
{
    // 'yy' is not a valid locale identifier and must be rejected.
    try {
        $this->_filter->setLocale('yy');
        $this->fail();
    } catch (Zend_Exception $e) {
        $this->assertContains('Unknown locale', $e->getMessage());
    }
}
}
|
{
"pile_set_name": "Github"
}
|
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Oct 15 2018 10:31:50).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//
#import <AppKit/NSButtonCell.h>
// Reverse-engineered (class-dump) declaration of a private SiriUI button
// cell. It extends NSButtonCell with configurable edge insets for the image
// and title, and overrides the drawing methods — presumably to apply those
// insets; behavior not visible here, confirm against the implementation.
__attribute__((visibility("hidden")))
@interface SiriUIButtonCell : NSButtonCell
{
    struct NSEdgeInsets _imageEdgeInsets;  // insets applied around the image
    struct NSEdgeInsets _titleEdgeInsets;  // insets applied around the title
}
@property struct NSEdgeInsets titleEdgeInsets; // @synthesize titleEdgeInsets=_titleEdgeInsets;
@property struct NSEdgeInsets imageEdgeInsets; // @synthesize imageEdgeInsets=_imageEdgeInsets;
- (struct CGRect)drawTitle:(id)arg1 withFrame:(struct CGRect)arg2 inView:(id)arg3;
- (void)drawImage:(id)arg1 withFrame:(struct CGRect)arg2 inView:(id)arg3;
@end
|
{
"pile_set_name": "Github"
}
|
# For generality across all datasets (and simplicity), YOLO data gets no special
# handling here: images and labels are assumed to live in separate folders.
# For YOLO data, first use the op_on_dataset tool to selectively copy files into
# separate folders, then split the subsets.
import os
import sys
import random
import ipdb
import shutil
# random.seed(666)
def clear_folder(path):
    """Delete every file directly inside *path*.

    Prints a notice when the folder is already empty. Non-recursive: only
    regular files at the top level are removed.
    """
    # List the directory once; the original listed it twice, which is
    # redundant and racy if the folder changes between the two calls.
    entries = os.listdir(path)
    if not entries:
        print('{} is already clean'.format(path))
    else:
        for name in entries:
            os.remove(os.path.join(path, name))
def division_and_copy(src_path, dst_path, indexes):
    """Copy the files at the given *indexes* (positions in sorted filename
    order) from *src_path* into *dst_path*."""
    # Sorting is essential: image and label folders must be indexed in the
    # same order or the pairs get mismatched.
    names = sorted(os.listdir(src_path))
    for idx in indexes:
        shutil.copyfile(os.path.join(src_path, names[idx]),
                        os.path.join(dst_path, names[idx]))
if __name__ == "__main__":
    # Settings: fraction of the dataset to sample and the source/target folders.
    sub_ratio = 0.1
    src_imgs = '/py/datasets/ICDAR2015/yolo/13+15/val_img'
    src_labels = '/py/datasets/ICDAR2015/yolo/13+15/val_label'
    dst_sub_imgs = '/py/datasets/ICDAR2015/yolo/subdata/val'
    dst_sub_labels = '/py/datasets/ICDAR2015/yolo/subdata/val'
    total_size = len(os.listdir(src_imgs)) # size of the original dataset
    # Draw a random subset of indexes (same indexes used for imgs and labels).
    index = set([i for i in range(total_size)])
    sub_index = set(random.sample(index,int(total_size*sub_ratio)))
    # Empty the destination folders before copying.
    clear_folder(dst_sub_imgs)
    clear_folder(dst_sub_labels)
    division_and_copy(src_imgs,dst_sub_imgs,sub_index)
    division_and_copy(src_labels,dst_sub_labels,sub_index)
|
{
"pile_set_name": "Github"
}
|
/*
*
* Copyright 2014 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// Package transport defines and implements message oriented communication
// channel to complete various transactions (e.g., an RPC). It is meant for
// grpc-internal usage and is not intended to be imported directly by users.
package transport
import (
"bytes"
"context"
"errors"
"fmt"
"io"
"net"
"sync"
"sync/atomic"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/credentials"
"google.golang.org/grpc/keepalive"
"google.golang.org/grpc/metadata"
"google.golang.org/grpc/stats"
"google.golang.org/grpc/status"
"google.golang.org/grpc/tap"
)
// bufferPool recycles bytes.Buffers to reduce allocations on the hot
// receive path.
type bufferPool struct {
	pool sync.Pool
}

// newBufferPool returns a pool whose Get always yields a usable
// *bytes.Buffer, allocating a fresh one when the pool is empty.
func newBufferPool() *bufferPool {
	bp := &bufferPool{}
	bp.pool.New = func() interface{} {
		return new(bytes.Buffer)
	}
	return bp
}

// get takes a buffer out of the pool.
func (p *bufferPool) get() *bytes.Buffer {
	return p.pool.Get().(*bytes.Buffer)
}

// put returns a buffer to the pool for reuse.
func (p *bufferPool) put(b *bytes.Buffer) {
	p.pool.Put(b)
}
// recvMsg represents the received msg from the transport. All transport
// protocol specific info has been removed.
type recvMsg struct {
	buffer *bytes.Buffer
	// nil: received some data
	// io.EOF: stream is completed. data is nil.
	// other non-nil error: transport failure. data is nil.
	err error
}

// recvBuffer is an unbounded channel of recvMsg structs.
//
// Note: recvBuffer differs from buffer.Unbounded only in the fact that it
// holds a channel of recvMsg structs instead of objects implementing "item"
// interface. recvBuffer is written to much more often and using strict recvMsg
// structs helps avoid allocation in "recvBuffer.put"
type recvBuffer struct {
	c       chan recvMsg
	mu      sync.Mutex
	backlog []recvMsg
	err     error
}

// newRecvBuffer returns an empty recvBuffer ready for use.
func newRecvBuffer() *recvBuffer {
	b := &recvBuffer{
		c: make(chan recvMsg, 1),
	}
	return b
}

// put appends r to the buffer. The first error ever put is sticky: once an
// error has been recorded, all later messages are dropped.
func (b *recvBuffer) put(r recvMsg) {
	b.mu.Lock()
	if b.err != nil {
		b.mu.Unlock()
		// An error had occurred earlier, don't accept more
		// data or errors.
		return
	}
	b.err = r.err
	if len(b.backlog) == 0 {
		select {
		case b.c <- r:
			b.mu.Unlock()
			return
		default:
		}
	}
	b.backlog = append(b.backlog, r)
	b.mu.Unlock()
}

// load moves the head of the backlog (if any) onto the channel without
// blocking; callers invoke it after consuming a message from get().
func (b *recvBuffer) load() {
	b.mu.Lock()
	if len(b.backlog) > 0 {
		select {
		case b.c <- b.backlog[0]:
			// Zero the slot before slicing it off so the backing array
			// does not retain the buffer.
			b.backlog[0] = recvMsg{}
			b.backlog = b.backlog[1:]
		default:
		}
	}
	b.mu.Unlock()
}

// get returns the channel that receives a recvMsg in the buffer.
//
// Upon receipt of a recvMsg, the caller should call load to send another
// recvMsg onto the channel if there is any.
func (b *recvBuffer) get() <-chan recvMsg {
	return b.c
}
// recvBufferReader implements io.Reader interface to read the data from
// recvBuffer.
type recvBufferReader struct {
	closeStream func(error) // Closes the client transport stream with the given error and nil trailer metadata.
	ctx         context.Context
	ctxDone     <-chan struct{} // cache of ctx.Done() (for performance).
	recv        *recvBuffer
	last        *bytes.Buffer // Stores the remaining data in the previous calls.
	err         error
	freeBuffer  func(*bytes.Buffer)
}

// Read reads the next len(p) bytes from last. If last is drained, it tries to
// read additional data from recv. It blocks if there is no additional data
// available in recv. If Read returns any non-nil error, it will continue to
// return that error.
func (r *recvBufferReader) Read(p []byte) (n int, err error) {
	if r.err != nil {
		return 0, r.err
	}
	if r.last != nil {
		// Read remaining data left in last call.
		copied, _ := r.last.Read(p)
		if r.last.Len() == 0 {
			r.freeBuffer(r.last)
			r.last = nil
		}
		return copied, nil
	}
	// closeStream is only set on the client side; it selects the variant
	// that also handles ctx-cancel-vs-trailer ordering (see readClient).
	if r.closeStream != nil {
		n, r.err = r.readClient(p)
	} else {
		n, r.err = r.read(p)
	}
	return n, r.err
}

// read blocks until either the context is done or a message arrives.
func (r *recvBufferReader) read(p []byte) (n int, err error) {
	select {
	case <-r.ctxDone:
		return 0, ContextErr(r.ctx.Err())
	case m := <-r.recv.get():
		return r.readAdditional(m, p)
	}
}

func (r *recvBufferReader) readClient(p []byte) (n int, err error) {
	// If the context is canceled, then closes the stream with nil metadata.
	// closeStream writes its error parameter to r.recv as a recvMsg.
	// r.readAdditional acts on that message and returns the necessary error.
	select {
	case <-r.ctxDone:
		// Note that this adds the ctx error to the end of recv buffer, and
		// reads from the head. This will delay the error until recv buffer is
		// empty, thus will delay ctx cancellation in Recv().
		//
		// It's done this way to fix a race between ctx cancel and trailer. The
		// race was, stream.Recv() may return ctx error if ctxDone wins the
		// race, but stream.Trailer() may return a non-nil md because the stream
		// was not marked as done when trailer is received. This closeStream
		// call will mark stream as done, thus fix the race.
		//
		// TODO: delaying ctx error seems like a unnecessary side effect. What
		// we really want is to mark the stream as done, and return ctx error
		// faster.
		r.closeStream(ContextErr(r.ctx.Err()))
		m := <-r.recv.get()
		return r.readAdditional(m, p)
	case m := <-r.recv.get():
		return r.readAdditional(m, p)
	}
}

// readAdditional consumes message m: it reloads the recv channel, surfaces
// any error, and stashes leftover bytes in r.last for the next Read call.
func (r *recvBufferReader) readAdditional(m recvMsg, p []byte) (n int, err error) {
	r.recv.load()
	if m.err != nil {
		return 0, m.err
	}
	copied, _ := m.buffer.Read(p)
	if m.buffer.Len() == 0 {
		r.freeBuffer(m.buffer)
		r.last = nil
	} else {
		r.last = m.buffer
	}
	return copied, nil
}
// streamState tracks the lifecycle of a Stream; stored as uint32 so it can
// be manipulated atomically.
type streamState uint32

const (
	streamActive    streamState = iota
	streamWriteDone // EndStream sent
	streamReadDone  // EndStream received
	streamDone      // the entire stream is finished.
)

// Stream represents an RPC in the transport layer.
type Stream struct {
	id           uint32
	st           ServerTransport    // nil for client side Stream
	ct           *http2Client       // nil for server side Stream
	ctx          context.Context    // the associated context of the stream
	cancel       context.CancelFunc // always nil for client side Stream
	done         chan struct{}      // closed at the end of stream to unblock writers. On the client side.
	ctxDone      <-chan struct{}    // same as done chan but for server side. Cache of ctx.Done() (for performance)
	method       string             // the associated RPC method of the stream
	recvCompress string
	sendCompress string
	buf          *recvBuffer
	trReader     io.Reader
	fc           *inFlow
	wq           *writeQuota
	// Callback to state application's intentions to read data. This
	// is used to adjust flow control, if needed.
	requestRead func(int)
	headerChan       chan struct{} // closed to indicate the end of header metadata.
	headerChanClosed uint32        // set when headerChan is closed. Used to avoid closing headerChan multiple times.
	// headerValid indicates whether a valid header was received. Only
	// meaningful after headerChan is closed (always call waitOnHeader() before
	// reading its value). Not valid on server side.
	headerValid bool
	// hdrMu protects header and trailer metadata on the server-side.
	hdrMu sync.Mutex
	// On client side, header keeps the received header metadata.
	//
	// On server side, header keeps the header set by SetHeader(). The complete
	// header will merged into this after t.WriteHeader() is called.
	header  metadata.MD
	trailer metadata.MD // the key-value map of trailer metadata.
	noHeaders bool // set if the client never received headers (set only after the stream is done).
	// On the server-side, headerSent is atomically set to 1 when the headers are sent out.
	headerSent uint32
	state streamState
	// On client-side it is the status error received from the server.
	// On server-side it is unused.
	status *status.Status
	bytesReceived uint32 // indicates whether any bytes have been received on this stream
	unprocessed   uint32 // set if the server sends a refused stream or GOAWAY including this stream
	// contentSubtype is the content-subtype for requests.
	// this must be lowercase or the behavior is undefined.
	contentSubtype string
}

// isHeaderSent is only valid on the server-side.
func (s *Stream) isHeaderSent() bool {
	return atomic.LoadUint32(&s.headerSent) == 1
}

// updateHeaderSent updates headerSent and returns true
// if it was already set. It is valid only on server-side.
func (s *Stream) updateHeaderSent() bool {
	return atomic.SwapUint32(&s.headerSent, 1) == 1
}

// swapState atomically sets the stream state and returns the previous state.
func (s *Stream) swapState(st streamState) streamState {
	return streamState(atomic.SwapUint32((*uint32)(&s.state), uint32(st)))
}

// compareAndSwapState atomically transitions state from oldState to
// newState, reporting whether the swap happened.
func (s *Stream) compareAndSwapState(oldState, newState streamState) bool {
	return atomic.CompareAndSwapUint32((*uint32)(&s.state), uint32(oldState), uint32(newState))
}

// getState atomically reads the current stream state.
func (s *Stream) getState() streamState {
	return streamState(atomic.LoadUint32((*uint32)(&s.state)))
}

// waitOnHeader blocks (client-side) until header metadata has arrived or the
// stream's context is done; it is a no-op on the server side.
func (s *Stream) waitOnHeader() {
	if s.headerChan == nil {
		// On the server headerChan is always nil since a stream originates
		// only after having received headers.
		return
	}
	select {
	case <-s.ctx.Done():
		// Close the stream to prevent headers/trailers from changing after
		// this function returns.
		s.ct.CloseStream(s, ContextErr(s.ctx.Err()))
		// headerChan could possibly not be closed yet if closeStream raced
		// with operateHeaders; wait until it is closed explicitly here.
		<-s.headerChan
	case <-s.headerChan:
	}
}
// RecvCompress returns the compression algorithm applied to the inbound
// message. It is empty string if there is no compression applied.
func (s *Stream) RecvCompress() string {
	s.waitOnHeader()
	return s.recvCompress
}

// SetSendCompress sets the compression algorithm to the stream.
func (s *Stream) SetSendCompress(str string) {
	s.sendCompress = str
}

// Done returns a channel which is closed when it receives the final status
// from the server.
func (s *Stream) Done() <-chan struct{} {
	return s.done
}

// Header returns the header metadata of the stream.
//
// On client side, it acquires the key-value pairs of header metadata once it is
// available. It blocks until i) the metadata is ready or ii) there is no header
// metadata or iii) the stream is canceled/expired.
//
// On server side, it returns the out header after t.WriteHeader is called. It
// does not block and must not be called until after WriteHeader.
func (s *Stream) Header() (metadata.MD, error) {
	if s.headerChan == nil {
		// On server side, return the header in stream. It will be the out
		// header after t.WriteHeader is called.
		return s.header.Copy(), nil
	}
	s.waitOnHeader()
	if !s.headerValid {
		return nil, s.status.Err()
	}
	return s.header.Copy(), nil
}

// TrailersOnly blocks until a header or trailers-only frame is received and
// then returns true if the stream was trailers-only. If the stream ends
// before headers are received, returns true, nil. Client-side only.
func (s *Stream) TrailersOnly() bool {
	s.waitOnHeader()
	return s.noHeaders
}

// Trailer returns the cached trailer metadata. Note that if it is not called
// after the entire stream is done, it could return an empty MD. Client
// side only.
// It can be safely read only after stream has ended that is either read
// or write have returned io.EOF.
func (s *Stream) Trailer() metadata.MD {
	c := s.trailer.Copy()
	return c
}

// ContentSubtype returns the content-subtype for a request. For example, a
// content-subtype of "proto" will result in a content-type of
// "application/grpc+proto". This will always be lowercase. See
// https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md#requests for
// more details.
func (s *Stream) ContentSubtype() string {
	return s.contentSubtype
}

// Context returns the context of the stream.
func (s *Stream) Context() context.Context {
	return s.ctx
}

// Method returns the method for the stream.
func (s *Stream) Method() string {
	return s.method
}

// Status returns the status received from the server.
// Status can be read safely only after the stream has ended,
// that is, after Done() is closed.
func (s *Stream) Status() *status.Status {
	return s.status
}

// SetHeader sets the header metadata. This can be called multiple times.
// Server side only.
// This should not be called in parallel to other data writes.
func (s *Stream) SetHeader(md metadata.MD) error {
	if md.Len() == 0 {
		return nil
	}
	// Headers cannot be amended once they have been sent or the stream ended.
	if s.isHeaderSent() || s.getState() == streamDone {
		return ErrIllegalHeaderWrite
	}
	s.hdrMu.Lock()
	s.header = metadata.Join(s.header, md)
	s.hdrMu.Unlock()
	return nil
}

// SendHeader sends the given header metadata. The given metadata is
// combined with any metadata set by previous calls to SetHeader and
// then written to the transport stream.
func (s *Stream) SendHeader(md metadata.MD) error {
	return s.st.WriteHeader(s, md)
}

// SetTrailer sets the trailer metadata which will be sent with the RPC status
// by the server. This can be called multiple times. Server side only.
// This should not be called parallel to other data writes.
func (s *Stream) SetTrailer(md metadata.MD) error {
	if md.Len() == 0 {
		return nil
	}
	if s.getState() == streamDone {
		return ErrIllegalHeaderWrite
	}
	s.hdrMu.Lock()
	s.trailer = metadata.Join(s.trailer, md)
	s.hdrMu.Unlock()
	return nil
}

// write enqueues a received message (or error) for this stream's reader.
func (s *Stream) write(m recvMsg) {
	s.buf.put(m)
}

// Read reads all p bytes from the wire for this stream.
func (s *Stream) Read(p []byte) (n int, err error) {
	// Don't request a read if there was an error earlier
	if er := s.trReader.(*transportReader).er; er != nil {
		return 0, er
	}
	s.requestRead(len(p))
	return io.ReadFull(s.trReader, p)
}
// transportReader reads all the data available for this Stream from the transport and
// passes them into the decoder, which converts them into a gRPC message stream.
// The error is io.EOF when the stream is done or another non-nil error if
// the stream broke.
type transportReader struct {
	reader io.Reader
	// The handler to control the window update procedure for both this
	// particular stream and the associated transport.
	windowHandler func(int)
	er            error
}

// Read forwards to the underlying reader, records the first error so later
// calls can short-circuit, and reports consumed bytes to the flow-control
// window handler.
func (t *transportReader) Read(p []byte) (n int, err error) {
	n, err = t.reader.Read(p)
	if err != nil {
		t.er = err
		return
	}
	t.windowHandler(n)
	return
}

// BytesReceived indicates whether any bytes have been received on this stream.
func (s *Stream) BytesReceived() bool {
	return atomic.LoadUint32(&s.bytesReceived) == 1
}

// Unprocessed indicates whether the server did not process this stream --
// i.e. it sent a refused stream or GOAWAY including this stream ID.
func (s *Stream) Unprocessed() bool {
	return atomic.LoadUint32(&s.unprocessed) == 1
}

// GoString is implemented by Stream so context.String() won't
// race when printing %#v.
func (s *Stream) GoString() string {
	return fmt.Sprintf("<stream: %p, %v>", s, s.method)
}

// state of transport
type transportState int

const (
	reachable transportState = iota
	closing
	draining
)
// ServerConfig consists of all the configurations to establish a server transport.
type ServerConfig struct {
MaxStreams uint32
AuthInfo credentials.AuthInfo
InTapHandle tap.ServerInHandle
StatsHandler stats.Handler
KeepaliveParams keepalive.ServerParameters
KeepalivePolicy keepalive.EnforcementPolicy
InitialWindowSize int32
InitialConnWindowSize int32
WriteBufferSize int
ReadBufferSize int
ChannelzParentID int64
MaxHeaderListSize *uint32
HeaderTableSize *uint32
}
// NewServerTransport creates a ServerTransport with conn or non-nil error
// if it fails.
func NewServerTransport(protocol string, conn net.Conn, config *ServerConfig) (ServerTransport, error) {
return newHTTP2Server(conn, config)
}
// ConnectOptions covers all relevant options for communicating with the server.
type ConnectOptions struct {
	// UserAgent is the application user agent.
	UserAgent string
	// Dialer specifies how to dial a network address.
	Dialer func(context.Context, string) (net.Conn, error)
	// FailOnNonTempDialError specifies if gRPC fails on non-temporary dial errors.
	FailOnNonTempDialError bool
	// PerRPCCredentials stores the PerRPCCredentials required to issue RPCs.
	PerRPCCredentials []credentials.PerRPCCredentials
	// TransportCredentials stores the Authenticator required to set up a client
	// connection. Only one of TransportCredentials and CredsBundle is non-nil.
	TransportCredentials credentials.TransportCredentials
	// CredsBundle is the credentials bundle to be used. Only one of
	// TransportCredentials and CredsBundle is non-nil.
	CredsBundle credentials.Bundle
	// KeepaliveParams stores the keepalive parameters.
	KeepaliveParams keepalive.ClientParameters
	// StatsHandler stores the handler for stats.
	StatsHandler stats.Handler
	// InitialWindowSize sets the initial window size for a stream.
	InitialWindowSize int32
	// InitialConnWindowSize sets the initial window size for a connection.
	InitialConnWindowSize int32
	// WriteBufferSize sets the size of write buffer which in turn determines how
	// much data can be batched before it's written on the wire.
	WriteBufferSize int
	// ReadBufferSize sets the size of read buffer, which in turn determines how
	// much data can be read at most for one read syscall.
	ReadBufferSize int
	// ChannelzParentID sets the addrConn id which initiated the creation of this
	// client transport.
	ChannelzParentID int64
	// MaxHeaderListSize sets the max (uncompressed) size of header list that is
	// prepared to be received.
	MaxHeaderListSize *uint32
}
// TargetInfo contains the information of the target such as network address and metadata.
type TargetInfo struct {
	// Addr is the network address of the target.
	Addr string
	// Metadata is opaque, resolver-provided data associated with Addr —
	// TODO confirm producer.
	Metadata interface{}
	// Authority is presumably the value for the :authority pseudo-header —
	// TODO confirm in newHTTP2Client.
	Authority string
}
// NewClientTransport establishes the transport with the required ConnectOptions
// and returns it to the caller.
//
// connectCtx bounds the connection attempt while ctx is the longer-lived
// context — TODO confirm exact lifetimes in newHTTP2Client. The three
// callbacks are forwarded unchanged to the HTTP/2 client implementation.
func NewClientTransport(connectCtx, ctx context.Context, target TargetInfo, opts ConnectOptions, onPrefaceReceipt func(), onGoAway func(GoAwayReason), onClose func()) (ClientTransport, error) {
	return newHTTP2Client(connectCtx, ctx, target, opts, onPrefaceReceipt, onGoAway, onClose)
}
// Options provides additional hints and information for message
// transmission.
type Options struct {
	// Last indicates whether this write is the last piece for
	// this stream. A transport may use it to flush or half-close the
	// stream — TODO confirm per implementation.
	Last bool
}
// CallHdr carries the information of a particular RPC.
type CallHdr struct {
	// Host specifies the peer's host.
	Host string
	// Method specifies the operation to perform.
	Method string
	// SendCompress specifies the compression algorithm applied on
	// outbound message.
	SendCompress string
	// Creds specifies credentials.PerRPCCredentials for a call.
	Creds credentials.PerRPCCredentials
	// ContentSubtype specifies the content-subtype for a request. For example, a
	// content-subtype of "proto" will result in a content-type of
	// "application/grpc+proto". The value of ContentSubtype must be all
	// lowercase, otherwise the behavior is undefined. See
	// https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md#requests
	// for more details.
	ContentSubtype string
	// PreviousAttempts is the value of the grpc-previous-rpc-attempts header to set.
	PreviousAttempts int
}
// ClientTransport is the common interface for all gRPC client-side transport
// implementations.
type ClientTransport interface {
	// Close tears down this transport. Once it returns, the transport
	// should not be accessed any more. The caller must make sure this
	// is called only once.
	Close() error
	// GracefulClose starts to tear down the transport: the transport will stop
	// accepting new RPCs and NewStream will return error. Once all streams are
	// finished, the transport will close.
	//
	// It does not block.
	GracefulClose()
	// Write sends the data for the given stream. A nil stream indicates
	// the write is to be performed on the transport as a whole.
	Write(s *Stream, hdr []byte, data []byte, opts *Options) error
	// NewStream creates a Stream for an RPC.
	NewStream(ctx context.Context, callHdr *CallHdr) (*Stream, error)
	// CloseStream clears the footprint of a stream when the stream is
	// not needed any more. The err indicates the error incurred when
	// CloseStream is called. Must be called when a stream is finished
	// unless the associated transport is closing.
	CloseStream(stream *Stream, err error)
	// Error returns a channel that is closed when some I/O error
	// happens. Typically the caller should have a goroutine to monitor
	// this in order to take action (e.g., close the current transport
	// and create a new one) in error case. It should not return nil
	// once the transport is initiated.
	Error() <-chan struct{}
	// GoAway returns a channel that is closed when ClientTransport
	// receives the draining signal from the server (e.g., GOAWAY frame in
	// HTTP/2).
	GoAway() <-chan struct{}
	// GetGoAwayReason returns the reason why GoAway frame was received.
	GetGoAwayReason() GoAwayReason
	// RemoteAddr returns the remote network address.
	RemoteAddr() net.Addr
	// IncrMsgSent increments the number of messages sent through this transport.
	IncrMsgSent()
	// IncrMsgRecv increments the number of messages received through this transport.
	IncrMsgRecv()
}
// ServerTransport is the common interface for all gRPC server-side transport
// implementations.
//
// Methods may be called concurrently from multiple goroutines, but
// Write methods for a given Stream will be called serially.
type ServerTransport interface {
	// HandleStreams receives incoming streams using the given handler.
	// The second argument transforms a stream's context — presumably for
	// tracing/tagging; TODO confirm at call sites.
	HandleStreams(func(*Stream), func(context.Context, string) context.Context)
	// WriteHeader sends the header metadata for the given stream.
	// WriteHeader may not be called on all streams.
	WriteHeader(s *Stream, md metadata.MD) error
	// Write sends the data for the given stream.
	// Write may not be called on all streams.
	Write(s *Stream, hdr []byte, data []byte, opts *Options) error
	// WriteStatus sends the status of a stream to the client. WriteStatus is
	// the final call made on a stream and always occurs.
	WriteStatus(s *Stream, st *status.Status) error
	// Close tears down the transport. Once it is called, the transport
	// should not be accessed any more. All the pending streams and their
	// handlers will be terminated asynchronously.
	Close() error
	// RemoteAddr returns the remote network address.
	RemoteAddr() net.Addr
	// Drain notifies the client this ServerTransport stops accepting new RPCs.
	Drain()
	// IncrMsgSent increments the number of messages sent through this transport.
	IncrMsgSent()
	// IncrMsgRecv increments the number of messages received through this transport.
	IncrMsgRecv()
}
// connectionErrorf creates a ConnectionError with the specified error
// description, formatted printf-style from format and a.
func connectionErrorf(temp bool, e error, format string, a ...interface{}) ConnectionError {
	desc := fmt.Sprintf(format, a...)
	return ConnectionError{Desc: desc, temp: temp, err: e}
}
// ConnectionError is an error that results in the termination of the
// entire connection and the retry of all the active streams.
type ConnectionError struct {
	// Desc is the human-readable description reported by Error.
	Desc string
	// temp reports whether the error is temporary (see Temporary).
	temp bool
	// err is the underlying cause, if any (see Origin).
	err error
}
// Error returns the description of the connection error, implementing the
// error interface.
func (e ConnectionError) Error() string {
	const format = "connection error: desc = %q"
	return fmt.Sprintf(format, e.Desc)
}
// Temporary indicates if this connection error is temporary or fatal.
// The value is fixed at construction time by connectionErrorf.
func (e ConnectionError) Temporary() bool {
	return e.temp
}
// Origin returns the original error of this connection error. It never
// returns nil: when no underlying error was recorded, the ConnectionError
// itself is returned.
func (e ConnectionError) Origin() error {
	if underlying := e.err; underlying != nil {
		return underlying
	}
	return e
}
var (
	// ErrConnClosing indicates that the transport is closing.
	ErrConnClosing = connectionErrorf(true, nil, "transport is closing")
	// errStreamDrain indicates that the stream is rejected because the
	// connection is draining. This could be caused by goaway or balancer
	// removing the address.
	errStreamDrain = status.Error(codes.Unavailable, "the connection is draining")
	// errStreamDone is returned from write at the client side to indicate
	// to the application layer that the stream is done.
	errStreamDone = errors.New("the stream is done")
	// statusGoAway indicates that the server sent a GOAWAY that included this
	// stream's ID in unprocessed RPCs.
	statusGoAway = status.New(codes.Unavailable, "the stream is rejected because server is draining the connection")
)
// GoAwayReason contains the reason for the GoAway frame received
// (see ClientTransport.GetGoAwayReason).
type GoAwayReason uint8

const (
	// GoAwayInvalid indicates that no GoAway frame is received.
	GoAwayInvalid GoAwayReason = 0
	// GoAwayNoReason is the default value when GoAway frame is received.
	GoAwayNoReason GoAwayReason = 1
	// GoAwayTooManyPings indicates that a GoAway frame with
	// ErrCodeEnhanceYourCalm was received and that the debug data said
	// "too_many_pings".
	GoAwayTooManyPings GoAwayReason = 2
)
// channelzData is used to store channelz related data for http2Client and http2Server.
// These fields cannot be embedded in the original structs (e.g. http2Client), since to do atomic
// operation on int64 variable on 32-bit machine, user is responsible to enforce memory alignment.
// Here, by grouping those int64 fields inside a struct, we are enforcing the alignment.
// All fields are intended to be updated atomically.
type channelzData struct {
	// kpCount counts keepalives — presumably keepalive pings sent; TODO
	// confirm against http2Client/http2Server call sites.
	kpCount int64
	// The number of streams that have started, including already finished ones.
	streamsStarted int64
	// Client side: The number of streams that have ended successfully by receiving
	// EoS bit set frame from server.
	// Server side: The number of streams that have ended successfully by sending
	// frame with EoS bit set.
	streamsSucceeded int64
	// streamsFailed counts streams that ended unsuccessfully.
	streamsFailed int64
	// lastStreamCreatedTime stores the timestamp that the last stream gets created. It is of int64 type
	// instead of time.Time since it's more costly to atomically update time.Time variable than int64
	// variable. The same goes for lastMsgSentTime and lastMsgRecvTime.
	lastStreamCreatedTime int64
	// msgSent and msgRecv count messages sent and received on the transport.
	msgSent int64
	msgRecv int64
	lastMsgSentTime int64
	lastMsgRecvTime int64
}
// ContextErr converts the error from context package into a status error.
//
// context.DeadlineExceeded maps to codes.DeadlineExceeded and context.Canceled
// to codes.Canceled; anything else is unexpected here and is surfaced as an
// internal error.
func ContextErr(err error) error {
	switch err {
	case context.DeadlineExceeded:
		return status.Error(codes.DeadlineExceeded, err.Error())
	case context.Canceled:
		return status.Error(codes.Canceled, err.Error())
	}
	// Fixed typo in the message: "context packet" -> "context package".
	return status.Errorf(codes.Internal, "Unexpected error from context package: %v", err)
}
|
{
"pile_set_name": "Github"
}
|
sha256:a0b38a28ba00d7ce8ca04c89613713fcf672628498ee7851210f604b9f1622c7
|
{
"pile_set_name": "Github"
}
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
/**
*
*/
@Deprecated
public class AndQueryParser implements QueryParser {

    public static final String NAME = "and";

    private final DeprecationLogger deprecationLogger;

    @Inject
    public AndQueryParser() {
        ESLogger logger = Loggers.getLogger(getClass());
        deprecationLogger = new DeprecationLogger(logger);
    }

    @Override
    public String[] names() {
        return new String[]{NAME};
    }

    /**
     * Parses the deprecated {@code and} query, combining all inner filters into a
     * {@link BooleanQuery} of {@code MUST} clauses.
     *
     * @return the combined query, or {@code null} when no inner filter produced a query
     * @throws QueryParsingException if no filters were specified or an unknown field is seen
     */
    @Override
    public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
        deprecationLogger.deprecated("The [and] query is deprecated, please use a [bool] query instead with [must] clauses.");

        XContentParser parser = parseContext.parser();
        ArrayList<Query> queries = new ArrayList<>();
        boolean queriesFound = false;
        String queryName = null;
        String currentFieldName = null;

        XContentParser.Token token = parser.currentToken();
        if (token == XContentParser.Token.START_ARRAY) {
            // Shorthand form: "and": [ {...}, ... ]. The flag is set per element, so an
            // empty array is still reported below as "no filters" (original behavior).
            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                queriesFound = true;
                Query filter = parseContext.parseInnerFilter();
                if (filter != null) {
                    queries.add(filter);
                }
            }
        } else {
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (parseContext.isDeprecatedSetting(currentFieldName)) {
                    // skip
                } else if (token == XContentParser.Token.START_ARRAY) {
                    // The original code had two byte-identical branches here, one for
                    // "filters" and one for any other array field; they are merged.
                    queriesFound = true;
                    parseInnerFilterArray(parseContext, parser, queries);
                } else if (token.isValue()) {
                    if ("_name".equals(currentFieldName)) {
                        queryName = parser.text();
                    } else {
                        throw new QueryParsingException(parseContext, "[and] query does not support [" + currentFieldName + "]");
                    }
                }
            }
        }

        if (!queriesFound) {
            throw new QueryParsingException(parseContext, "[and] query requires 'filters' to be set on it'");
        }

        if (queries.isEmpty()) {
            // no filters provided, this should be ignored upstream
            return null;
        }

        BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
        for (Query f : queries) {
            queryBuilder.add(f, Occur.MUST);
        }
        BooleanQuery query = queryBuilder.build();
        if (queryName != null) {
            parseContext.addNamedQuery(queryName, query);
        }
        return query;
    }

    /** Consumes one array of inner filters, adding each non-null parsed query to {@code queries}. */
    private static void parseInnerFilterArray(QueryParseContext parseContext, XContentParser parser,
                                              ArrayList<Query> queries) throws IOException {
        while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
            Query filter = parseContext.parseInnerFilter();
            if (filter != null) {
                queries.add(filter);
            }
        }
    }
}
|
{
"pile_set_name": "Github"
}
|
For a rundown of changes per QuantLibAddin release please visit
http://www.quantlibaddin.org/history.html
|
{
"pile_set_name": "Github"
}
|
[HEADER]
[CHROME]
[NAV_PRE_POST]
[$~MESSAGE~$]<br>
<table border=0 cellpadding=2 cellspacing=0 width='100%'>
[CONTENTS]
</table><br>
[NAV_PRE_POST]
[FOOTER]
|
{
"pile_set_name": "Github"
}
|
package am2.items;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.ItemStack;
/**
 * Fixed-size (3-slot) inventory backing a keystone item. Stack limit per slot is 1.
 */
public class InventoryKeyStone implements IInventory{

    /** Number of slots a keystone exposes. */
    public static int inventorySize = 3;

    // Backing storage for the keystone's slots.
    private ItemStack[] slots;

    public InventoryKeyStone(){
        slots = new ItemStack[inventorySize];
    }

    /** Copies up to {@code inventorySize} stacks from the given array into this inventory. */
    public void SetInventoryContents(ItemStack[] inventoryContents){
        int count = Math.min(inventorySize, inventoryContents.length);
        for (int slot = 0; slot < count; ++slot){
            slots[slot] = inventoryContents[slot];
        }
    }

    @Override
    public int getSizeInventory(){
        return inventorySize;
    }

    @Override
    public ItemStack getStackInSlot(int i){
        // Out-of-range indices yield null rather than throwing.
        boolean outOfRange = i < 0 || i >= slots.length;
        return outOfRange ? null : slots[i];
    }

    @Override
    public ItemStack decrStackSize(int i, int j){
        ItemStack current = slots[i];
        if (current == null){
            return null;
        }
        if (current.stackSize <= j){
            // Removing at least the whole stack: hand the stack over and clear the slot.
            slots[i] = null;
            return current;
        }
        // Split off the requested amount; clear the slot if it was drained exactly.
        ItemStack removed = current.splitStack(j);
        if (current.stackSize == 0){
            slots[i] = null;
        }
        return removed;
    }

    @Override
    public void setInventorySlotContents(int i, ItemStack itemstack){
        slots[i] = itemstack;
    }

    @Override
    public String getInventoryName(){
        return "Keystone";
    }

    @Override
    public int getInventoryStackLimit(){
        return 1;
    }

    @Override
    public boolean isUseableByPlayer(EntityPlayer entityplayer){
        return true;
    }

    @Override
    public void openInventory(){
    }

    @Override
    public void closeInventory(){
    }

    /** Exposes the raw slot array (no defensive copy — callers share state). */
    public ItemStack[] GetInventoryContents(){
        return slots;
    }

    @Override
    public ItemStack getStackInSlotOnClosing(int i){
        ItemStack held = slots[i];
        if (held == null){
            return null;
        }
        slots[i] = null;
        return held;
    }

    @Override
    public boolean hasCustomInventoryName(){
        return false;
    }

    @Override
    public boolean isItemValidForSlot(int i, ItemStack itemstack){
        return false;
    }

    @Override
    public void markDirty(){
    }
}
|
{
"pile_set_name": "Github"
}
|
<?php
/**
 * This file has been @generated by a phing task by {@link GeneratePhonePrefixData}.
 * See [README.md](README.md#generating-data) for more information.
 *
 * Pull requests changing data in these files will not be accepted. See the
 * [FAQ in the README](README.md#problems-with-invalid-numbers) on how to make
 * metadata changes.
 *
 * Do not modify this file directly!
 */
return array (
    52231 => 'Teteles/Teziutlán, PUE',
    52232 => 'La Vigueta/Martínez de la Torre, VER',
    52238 => 'Santiago Miahuatlán/Tehuacán, PUE',
    52284 => 'Ángel Rosario Cabada/Lerdo de Tejada, VER',
    52314 => 'Manzanillo/Peña Colorada, COL',
    52317 => 'Autlán/El Chante, JAL',
    52328 => 'Michoacán',
    52341 => 'Ciudad Guzmán, JAL',
    52342 => 'Gómez Farías/Sayula, JAL',
    52351 => 'Ario de Rayón/Zamora, MICH',
    52353 => 'Michoacán',
    52354 => 'Michoacán',
    52355 => 'Michoacán',
    52356 => 'Tanhuato/Yurécuaro, MICH',
    52358 => 'Tamazula/Zapotiltic, JAL',
    52383 => 'Michoacán',
    52392 => 'Jamay/Ocotlán, JAL',
    52418 => 'Dolores Hidalgo/San Diego de la Unión, GTO',
    52422 => 'Michoacán',
    52423 => 'Michoacán',
    52424 => 'Michoacán',
    52425 => 'Michoacán',
    52426 => 'Michoacán',
    52427 => 'México/Quintana Roo',
    52431 => 'Jalostotitlán/Villa Obregón, JAL',
    52434 => 'Michoacán',
    52438 => 'Michoacán',
    52441 => 'Querétaro',
    52442 => 'Querétaro',
    52443 => 'Morelia/Tarímbaro, MICH',
    52444 => 'San Luis Potosí, SLP',
    52447 => 'Contepec/Maravatío, MICH',
    52448 => 'Querétaro',
    52449 => 'Aguascalientes/Jesús María, AGS',
    52451 => 'Michoacán',
    52453 => 'Apatzingán, MICH',
    52454 => 'Michoacán',
    52455 => 'Michoacán',
    52459 => 'Michoacán',
    52469 => 'Buenavista de Cortés/Pénjamo, GTO',
    52475 => 'Bajío de San José/Encarnación de Diaz, JAL',
    52476 => 'San Francisco del Rincón, GTO',
    52477 => 'León, GTO',
    5248 => 'San Luis Potosí',
    // NOTE(review): this entry maps to an empty description — generator output,
    // presumably intentional; verify against the upstream metadata source.
    52481 => '',
    52494 => 'Jerez de García Salinas, ZAC',
    5255 => 'Ciudad de México, CDMX',
    52632 => 'Ímuris/Magdalena, SON',
    52641 => 'Benjamín Hill/Santa Ana, SON',
    52653 => 'Luis B. Sánchez/San Luis Río Colorado, SON',
    52711 => 'México/Michoacán',
    52715 => 'Michoacán',
    52719 => 'San Francisco Xonacatlán/Temoaya, MEX',
    52725 => 'Almoloya de Juárez/Santa María del Monte, MEX',
    52728 => 'Lerma/Santa María Atarasquillo, MEX',
    52753 => 'Michoacán',
    52756 => 'Chilapa/Olinalá, GRO',
    52765 => 'Álamo Temapache/Alazán/Potrero del Llano, VER',
    52766 => 'Gutiérrez Zamora/Tecolutla, VER',
    52771 => 'Pachuca/Real del Monte, HGO',
    52781 => 'Coyuca de Benítez/San Jerónimo de Juárez, GRO',
    52791 => 'Ciudad Sahagún, HGO',
    52823 => 'Nuevo León',
    52825 => 'Nuevo León',
    52826 => 'Nuevo León',
    52829 => 'Nuevo León',
    52866 => 'Castaños/Monclova, COAH',
    52867 => 'Nuevo León/Tamaulipas',
    52869 => 'Cuatro Ciénegas/San Buenaventura, COAH',
    52873 => 'Nuevo León',
    52877 => 'Ciudad Acuña, COAH',
    52892 => 'Nuevo León',
    52921 => 'Coatzacoalcos/Ixhuatlán del Sureste, VER',
    52937 => 'Cárdenas, TAB',
    52938 => 'Ciudad del Carmen, CAMP',
    52966 => 'Arriaga/Tonalá, CHIS',
    52967 => 'San Cristóbal de las Casas, CHIS',
    52985 => 'Yucatán',
    52986 => 'Yucatán',
    52988 => 'Yucatán',
    52991 => 'Yucatán',
    52995 => 'Magdalena Tequisistlán/Santa Maria Jalapa del Marqués, OAX',
    52997 => 'Yucatán',
    52999 => 'Conkal/Mérida, YUC',
);
|
{
"pile_set_name": "Github"
}
|
package com.vladsch.flexmark.ext.xwiki.macros;
import com.vladsch.flexmark.util.ast.VisitHandler;
public class MacroVisitorExt {
    /**
     * Builds the visit handlers that wire the macro node types
     * ({@code Macro}, {@code MacroClose}, {@code MacroBlock}) to the given visitor.
     */
    public static <V extends MacroVisitor> VisitHandler<?>[] VISIT_HANDLERS(V visitor) {
        VisitHandler<?>[] handlers = {
                new VisitHandler<>(Macro.class, visitor::visit),
                new VisitHandler<>(MacroClose.class, visitor::visit),
                new VisitHandler<>(MacroBlock.class, visitor::visit),
        };
        return handlers;
    }
}
|
{
"pile_set_name": "Github"
}
|
// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "xrtl/port/common/gfx/es3/egl_strings.h"
#include <glad/glad_egl.h>
namespace xrtl {
namespace gfx {
namespace es3 {
// Maps an EGL error code to the name of its symbolic constant.
// Unrecognized codes map to "UNKNOWN".
const char* GetEglErrorName(int error) {
  struct ErrorName {
    int code;
    const char* name;
  };
  // The codes are distinct (they were case labels of a switch), so a linear
  // scan over this table is equivalent to the original switch.
  static const ErrorName kErrorNames[] = {
      {EGL_SUCCESS, "EGL_SUCCESS"},
      {EGL_NOT_INITIALIZED, "EGL_NOT_INITIALIZED"},
      {EGL_BAD_ACCESS, "EGL_BAD_ACCESS"},
      {EGL_BAD_ALLOC, "EGL_BAD_ALLOC"},
      {EGL_BAD_ATTRIBUTE, "EGL_BAD_ATTRIBUTE"},
      {EGL_BAD_CONTEXT, "EGL_BAD_CONTEXT"},
      {EGL_BAD_CONFIG, "EGL_BAD_CONFIG"},
      {EGL_BAD_CURRENT_SURFACE, "EGL_BAD_CURRENT_SURFACE"},
      {EGL_BAD_DISPLAY, "EGL_BAD_DISPLAY"},
      {EGL_BAD_SURFACE, "EGL_BAD_SURFACE"},
      {EGL_BAD_MATCH, "EGL_BAD_MATCH"},
      {EGL_BAD_PARAMETER, "EGL_BAD_PARAMETER"},
      {EGL_BAD_NATIVE_PIXMAP, "EGL_BAD_NATIVE_PIXMAP"},
      {EGL_BAD_NATIVE_WINDOW, "EGL_BAD_NATIVE_WINDOW"},
      {EGL_CONTEXT_LOST, "EGL_CONTEXT_LOST"},
  };
  for (const ErrorName& entry : kErrorNames) {
    if (entry.code == error) {
      return entry.name;
    }
  }
  return "UNKNOWN";
}
// Maps an EGL error code to a human-readable description suitable for logs.
// Unrecognized codes produce a generic message.
const char* GetEglErrorDescription(int error) {
  switch (error) {
    case EGL_SUCCESS:
      return "The last function succeeded without error.";
    case EGL_NOT_INITIALIZED:
      return "EGL is not initialized, or could not be initialized, for the "
             "specified EGL display connection.";
    case EGL_BAD_ACCESS:
      return "EGL cannot access a requested resource (for example a context is "
             "bound in another thread).";
    case EGL_BAD_ALLOC:
      return "EGL failed to allocate resources for the requested operation.";
    case EGL_BAD_ATTRIBUTE:
      return "An unrecognized attribute or attribute value was passed in the "
             "attribute list.";
    case EGL_BAD_CONTEXT:
      return "An EGLContext argument does not name a valid EGL rendering "
             "context.";
    case EGL_BAD_CONFIG:
      return "An EGLConfig argument does not name a valid EGL frame buffer "
             "configuration.";
    case EGL_BAD_CURRENT_SURFACE:
      return "The current surface of the calling thread is a window, pixel "
             "buffer or pixmap that is no longer valid.";
    case EGL_BAD_DISPLAY:
      return "An EGLDisplay argument does not name a valid EGL display "
             "connection.";
    case EGL_BAD_SURFACE:
      return "An EGLSurface argument does not name a valid surface (window, "
             "pixel buffer or pixmap) configured for GL rendering.";
    case EGL_BAD_MATCH:
      return "Arguments are inconsistent (for example, a valid context "
             "requires buffers not supplied by a valid surface).";
    case EGL_BAD_PARAMETER:
      return "One or more argument values are invalid.";
    case EGL_BAD_NATIVE_PIXMAP:
      return "A NativePixmapType argument does not refer to a valid native "
             "pixmap.";
    case EGL_BAD_NATIVE_WINDOW:
      return "A NativeWindowType argument does not refer to a valid native "
             "window.";
    case EGL_CONTEXT_LOST:
      return "A power management event has occurred. The application must "
             "destroy all contexts and reinitialise OpenGL ES state and "
             "objects to continue rendering.";
    default:
      return "An unknown error occurred.";
  }
}
} // namespace es3
} // namespace gfx
} // namespace xrtl
|
{
"pile_set_name": "Github"
}
|
#import "headers/headers.h"

// Declarations only — no implementations in this header. Most of these mirror
// private iOS classes so a tweak can message them; member lists are partial.

// Backdrop settings used by notification-center "look" views.
@interface NCLookViewBackdropViewSettings : _UIBackdropViewSettings
@property (getter=_isBlurred,nonatomic,readonly) BOOL blurred;
@property (getter=_isDarkened,nonatomic,readonly) BOOL darkened;
+(id)lookViewBackdropViewSettingsWithBlur:(BOOL)arg1 darken:(BOOL)arg2 ;
+(id)lookViewBackdropViewSettingsWithBlur:(BOOL)arg1 ;
-(void)setDefaultValues;
-(BOOL)_isDarkened;
-(BOOL)_isBlurred;
@end

// Empty forward declarations of private notification / control-center views,
// declared only so their classes can be referenced and hooked.
@interface NCShortLookView : UIView
@end

@interface NCNotificationShortLookView : UIView
@end

@interface NCLookHeaderContentView : UIView
@end

@interface NCNotificationContentView : UIView
@end

@interface MPUControlCenterTransportButton : UIButton
- (void)_updateEffectForStateChange:(NSUInteger)state;
@end

@interface CCUIControlCenterSlider : UIView
@end

@interface CCUIControlCenterPagePlatterView : UIView
@end

@interface _UIBackdropViewSettingsATVAdaptiveLighten : _UIBackdropViewSettings
@end

@interface OYGBackdropViewSettingsBlurred : _UIBackdropViewSettingsATVAdaptiveLighten
@end

@interface OYGBackdropViewSettings : _UIBackdropViewSettingsATVAdaptiveLighten
@end

@interface WGShortLookStyleButton : UIButton
@end

// Dark-mode state tracked on framework classes via categories (associated
// objects, presumably — TODO confirm in the implementation file).
@interface CALayer (OGY)
@property (nonatomic, retain) NSArray *disabledFilters;
@property (nonatomic, assign) BOOL isDarkModeEnabled;
@property (nonatomic, assign) BOOL hasChangeListener;
@property (nonatomic, retain) UIColor *correctContentsMultiplyColor;
@property (nonatomic, retain) UIColor *substitutedContentsMultiplyColor;
@property (nonatomic, assign) BOOL isCheckingDarkMode;
- (void)setDarkModeEnabled:(BOOL)enabled;
- (void)reloadFilters;
@end

@interface UILabel (OYG)
@property (nonatomic, retain) UIColor *correctTextColor;
@property (nonatomic, retain) UIColor *substitutedTextColor;
- (void)setDarkModeEnabled:(BOOL)enabled;
@end

@interface UIView (OYG)
@property (nonatomic, assign) BOOL isDarkModeEnabled;
@property (nonatomic, assign) BOOL hasChangeListener;
@property (nonatomic, assign) BOOL darkModeChangeInProgress;
@property (nonatomic, retain) UIColor *correctBackgroundColor;
@property (nonatomic, retain) UIColor *substitutedBackgroundColor;
- (void)setDarkModeEnabled:(BOOL)enabled;
-(void)_setMaskView:(id)arg1 ;
@end

@interface _UIBackdropView (OYG)
- (void)setDarkModeEnabled:(BOOL)enabled;
- (void)darkModeChanged:(NSNotification *)notification;
@end

@interface UIImageView (OYG)
- (void)behaveAsWhiteLayerView;
@property (nonatomic, assign) BOOL isWhiteLayerView;
@property (nonatomic, retain) UIView *contentsMultiplyView;
@end

@interface NCMaterialView : UIView
@property (nonatomic, retain) _UIBackdropView *substitutedBackdropView;
+(id)materialViewWithStyleOptions:(NSUInteger)arg1;
-(void)_setSubviewsContinuousCornerRadius:(CGFloat)arg1 ;
@end

@interface CAFilter (OYG)
@property (nonatomic, assign) BOOL isDarkModeFilter;
@end

@interface UIActivityIndicatorView (OYG)
@property (nonatomic, retain) UIColor *correctSpinnerColor;
@property (nonatomic, retain) UIColor *substitutedSpinnerColor;
@end

@interface NCNotificationTextInputView : UIView
@end

@interface CAShapeLayer (OYG)
@property (nonatomic, retain) UIColor *correctFillColor;
@property (nonatomic, retain) UIColor *substitutedFillColor;
@end

@interface SBTestDataProvider (FNT)
@property (nonatomic, retain) NSString *sectionIdentifierReplacement;
@end

@interface _UIBackdropViewSettingsATVDark : _UIBackdropViewSettings
@end

@interface _UIBackdropViewSettingsATVAccessoryLight : _UIBackdropViewSettings
-(void)setDefaultValues;
@end

// Constant provided by QuartzCore's private CAFilter machinery.
extern NSString * const kCAFilterVibrantDark;
|
{
"pile_set_name": "Github"
}
|
#!/usr/bin/env bash

##############################################################################
##
##  Gradle start up script for UN*X
##
##############################################################################

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

# Print a warning to stdout without terminating.
warn ( ) {
    echo "$*"
}

# Print an error message and abort with a non-zero exit code.
die ( ) {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
esac

# For Cygwin, ensure paths are in UNIX format before anything is touched.
if $cygwin ; then
    [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
fi

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
# Resolve APP_HOME to a physical path (pwd -P) and restore the original cwd.
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >&-
APP_HOME="`pwd -P`"
cd "$SAVED" >&-

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option

        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
    JVM_OPTS=("$@")
}
# The unquoted expansion here is deliberate: it lets the shell word-split the
# options strings before they are captured into the JVM_OPTS array.
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"

exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
|
{
"pile_set_name": "Github"
}
|
{
"extends": "./tsconfig",
"compilerOptions": {
"module": "commonjs",
"declaration": false,
"declarationMap": false,
"declarationDir": null,
"outDir": "dist/cjs",
"composite": false,
"incremental": true,
"tsBuildInfoFile": "../../.tsc-cache/packages__composite-checkout--cjs"
}
}
|
{
"pile_set_name": "Github"
}
|
---
# Display name
title: 吳恩達
# Username (this should match the folder name)
authors:
- 吳恩達
# Is this the primary user of the site?
superuser: false
# Role/position
role: Professor of Artificial Intelligence
# Organizations/Affiliations
organizations:
- name: Stanford University
url: ""
# Short bio (displayed in user profile at end of posts)
bio: My research interests include distributed robotics, mobile computing and programmable matter.
interests:
- Artificial Intelligence
- Computational Linguistics
- Information Retrieval
education:
courses:
- course: PhD in Artificial Intelligence
institution: Stanford University
year: 2012
- course: MEng in Artificial Intelligence
institution: Massachusetts Institute of Technology
year: 2009
- course: BSc in Artificial Intelligence
institution: Massachusetts Institute of Technology
year: 2008
# Social/Academic Networking
# For available icons, see: https://sourcethemes.com/academic/docs/page-builder/#icons
# For an email link, use "fas" icon pack, "envelope" icon, and a link in the
# form "mailto:[email protected]" or "#contact" for contact widget.
social:
- icon: envelope
icon_pack: fas
link: '#contact' # For a direct email link, use "mailto:[email protected]".
- icon: twitter
icon_pack: fab
link: https://twitter.com/GeorgeCushen
- icon: google-scholar
icon_pack: ai
link: https://scholar.google.co.uk/citations?user=sIwtMXoAAAAJ
- icon: github
icon_pack: fab
link: https://github.com/gcushen
# Link to a PDF of your resume/CV from the About widget.
# To enable, copy your resume/CV to `static/files/cv.pdf` and uncomment the lines below.
# - icon: cv
# icon_pack: ai
# link: files/cv.pdf
# Enter email to display Gravatar (if Gravatar enabled in Config)
email: ""
# Organizational groups that you belong to (for People widget)
# Set this to `[]` or comment out if you are not using People widget.
user_groups:
- Researchers
- Visitors
---
吳恩達 is a professor of artificial intelligence at the Stanford AI Lab. His research interests include distributed robotics, mobile computing and programmable matter. He leads the Robotic Neurobiology group, which develops self-reconfiguring robots, systems of self-organizing robots, and mobile sensor networks.
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed neque elit, tristique placerat feugiat ac, facilisis vitae arcu. Proin eget egestas augue. Praesent ut sem nec arcu pellentesque aliquet. Duis dapibus diam vel metus tempus vulputate.
|
{
"pile_set_name": "Github"
}
|
/Area.php////*///
/Line.php////*///
|
{
"pile_set_name": "Github"
}
|
/* +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
* -------------------------------------------------------------------------- *
* Lepton *
* -------------------------------------------------------------------------- *
* This is part of the Lepton expression parser originating from *
* Simbios, the NIH National Center for Physics-Based Simulation of *
* Biological Structures at Stanford, funded under the NIH Roadmap for *
* Medical Research, grant U54 GM072970. See https://simtk.org. *
* *
* Portions copyright (c) 2013-2016 Stanford University and the Authors. *
* Authors: Peter Eastman *
* Contributors: *
* *
* Permission is hereby granted, free of charge, to any person obtaining a *
* copy of this software and associated documentation files (the "Software"), *
* to deal in the Software without restriction, including without limitation *
* the rights to use, copy, modify, merge, publish, distribute, sublicense, *
* and/or sell copies of the Software, and to permit persons to whom the *
* Software is furnished to do so, subject to the following conditions: *
* *
* The above copyright notice and this permission notice shall be included in *
* all copies or substantial portions of the Software. *
* *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *
* THE AUTHORS, CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, *
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR *
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE *
* USE OR OTHER DEALINGS IN THE SOFTWARE. *
* -------------------------------------------------------------------------- *
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ */
/* -------------------------------------------------------------------------- *
* lepton *
* -------------------------------------------------------------------------- *
* This is part of the lepton expression parser originating from *
* Simbios, the NIH National Center for Physics-Based Simulation of *
* Biological Structures at Stanford, funded under the NIH Roadmap for *
* Medical Research, grant U54 GM072970. See https://simtk.org. *
* *
* Portions copyright (c) 2009-2015 Stanford University and the Authors. *
* Authors: Peter Eastman *
* Contributors: *
* *
* Permission is hereby granted, free of charge, to any person obtaining a *
* copy of this software and associated documentation files (the "Software"), *
* to deal in the Software without restriction, including without limitation *
* the rights to use, copy, modify, merge, publish, distribute, sublicense, *
* and/or sell copies of the Software, and to permit persons to whom the *
* Software is furnished to do so, subject to the following conditions: *
* *
* The above copyright notice and this permission notice shall be included in *
* all copies or substantial portions of the Software. *
* *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *
* THE AUTHORS, CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, *
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR *
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE *
* USE OR OTHER DEALINGS IN THE SOFTWARE. *
* -------------------------------------------------------------------------- */
#include "ExpressionTreeNode.h"
#include "Exception.h"
#include "Operation.h"
namespace PLMD {
using namespace lepton;
using namespace std;
/**
 * Builds a node from an operation and an explicit list of child subtrees.
 * Takes ownership of the Operation pointer (it is deleted by the destructor).
 * Throws Exception if the child count does not match the operation's arity.
 *
 * NOTE(review): if the arity check throws, the just-stored operation pointer
 * is never freed (the destructor of a partially constructed object does not
 * run) -- this matches the original behavior; callers must not assume
 * ownership was taken on failure.
 */
ExpressionTreeNode::ExpressionTreeNode(Operation* operation, const vector<ExpressionTreeNode>& children) : operation(operation), children(children) {
    if (children.size() != operation->getNumArguments())
        throw Exception("wrong number of arguments to function: "+operation->getName());
}
/**
 * Builds a binary node from an operation and its two child subtrees.
 * Takes ownership of the Operation pointer; throws Exception if the
 * operation's arity is not 2.
 */
ExpressionTreeNode::ExpressionTreeNode(Operation* operation, const ExpressionTreeNode& child1, const ExpressionTreeNode& child2) : operation(operation) {
    children.push_back(child1);
    children.push_back(child2);
    if (children.size() != operation->getNumArguments())
        throw Exception("wrong number of arguments to function: "+operation->getName());
}
/**
 * Builds a unary node from an operation and a single child subtree.
 * Takes ownership of the Operation pointer; throws Exception if the
 * operation's arity is not 1.
 */
ExpressionTreeNode::ExpressionTreeNode(Operation* operation, const ExpressionTreeNode& child) : operation(operation) {
    children.push_back(child);
    if (children.size() != operation->getNumArguments())
        throw Exception("wrong number of arguments to function: "+operation->getName());
}
/**
 * Builds a leaf node (constant or variable) from a zero-argument operation.
 * Takes ownership of the Operation pointer; throws Exception if the
 * operation expects any arguments.
 */
ExpressionTreeNode::ExpressionTreeNode(Operation* operation) : operation(operation) {
    // children is default-constructed and empty, so the check reduces to
    // requiring an arity of zero.
    if (operation->getNumArguments() != 0)
        throw Exception("wrong number of arguments to function: "+operation->getName());
}
/**
 * Copy constructor: deep-copies the operation via clone() and copies the
 * child vector. A default-constructed node (no operation) copies as another
 * empty node.
 */
ExpressionTreeNode::ExpressionTreeNode(const ExpressionTreeNode& node) : operation(NULL), children(node.getChildren()) {
    if (node.operation != NULL)
        operation = node.operation->clone();
}
// Creates an empty node with no operation and no children.
// NOTE(review): getOperation() dereferences the pointer unconditionally, so
// it must not be called on a node left in this state -- confirm callers
// always assign a real node first.
ExpressionTreeNode::ExpressionTreeNode() : operation(NULL) {
}
/**
 * Destructor: releases the owned Operation.
 * The NULL guard in the original was redundant -- applying `delete` to a
 * null pointer is a well-defined no-op in C++.
 */
ExpressionTreeNode::~ExpressionTreeNode() {
    delete operation;
}
// Structural inequality test for expression trees.
//
// Two nodes are equal when their operations compare equal and their children
// are pairwise equal; for a symmetric (commutative) binary operation either
// ordering of the two children is accepted, so a+b compares equal to b+a.
//
// The element-wise loop does not compare child counts first. The constructors
// enforce operation->getNumArguments() == children.size(), so nodes with
// equal operations should have equal child counts -- TODO confirm that
// Operation equality implies equal getNumArguments().
//
// NOTE(review): getOperation() dereferences the operation pointer, which is
// NULL for a default-constructed node; comparing such a node is undefined
// behavior -- confirm callers only compare fully built trees.
bool ExpressionTreeNode::operator!=(const ExpressionTreeNode& node) const {
if (node.getOperation() != getOperation())
return true;
// Symmetric binary operation: accept either ordering of the two children.
if (getOperation().isSymmetric() && getChildren().size() == 2) {
if (getChildren()[0] == node.getChildren()[0] && getChildren()[1] == node.getChildren()[1])
return false;
if (getChildren()[0] == node.getChildren()[1] && getChildren()[1] == node.getChildren()[0])
return false;
return true;
}
// General case: children must match position by position.
for (int i = 0; i < (int) getChildren().size(); i++)
if (getChildren()[i] != node.getChildren()[i])
return true;
return false;
}
/**
 * Equality is defined as the logical negation of operator!=, which compares
 * the operations and (order-insensitively for symmetric binary operations)
 * the children.
 */
bool ExpressionTreeNode::operator==(const ExpressionTreeNode& node) const {
    return !(this->operator!=(node));
}
/**
 * Copy assignment: replaces this node's operation and children with copies
 * of the given node's (the operation is deep-copied via clone()).
 *
 * Fixes two defects in the original:
 *  - no self-assignment guard: `a = a` deleted the operation and then
 *    cloned the freed object (use-after-free);
 *  - node.getOperation() dereferenced a NULL operation when assigning from
 *    a default-constructed node, unlike the copy constructor, which handles
 *    NULL explicitly.
 * The clone is also taken before the old operation is deleted, so a throwing
 * clone() leaves this node unchanged.
 */
ExpressionTreeNode& ExpressionTreeNode::operator=(const ExpressionTreeNode& node) {
    if (this == &node)
        return *this;
    Operation* copy = (node.operation == NULL ? NULL : node.operation->clone());
    delete operation;
    operation = copy;
    children = node.getChildren();
    return *this;
}
// Returns the operation stored at this node.
// NOTE(review): dereferences the operation pointer unconditionally, so this
// is undefined behavior on a default-constructed node (operation == NULL).
const Operation& ExpressionTreeNode::getOperation() const {
return *operation;
}
// Returns the (possibly empty) list of child subtrees of this node.
const vector<ExpressionTreeNode>& ExpressionTreeNode::getChildren() const {
return children;
}
}
|
{
"pile_set_name": "Github"
}
|
// http://goo.gl/XkBrjD
// Polyfill module: registers Object.values(obj), returning an array of the
// object's own enumerable property values.
var $export = require('./$.export')
// Shared object-to-array helper; the `false` argument selects plain values.
// NOTE(review): flag semantics inferred from usage here -- confirm against
// $.object-to-array (true presumably yields [key, value] entries).
, $values = require('./$.object-to-array')(false);
// $export.S registers the method as a static member of the Object built-in.
$export($export.S, 'Object', {
values: function values(it){
return $values(it);
}
});
|
{
"pile_set_name": "Github"
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.