text
stringlengths 2
100k
| meta
dict |
---|---|
.DirectoryWrapper {
  display: grid;
  width: 100%;
  height: 100%;

  .splitPaneWrapper {
    grid-area: 2 / 1 / 101 / 2;
    position: relative;

    .scrollAreaWrapper {
      width: 100%;
      display: grid;
      align-content: start;

      .scrollarea {
        overflow: hidden;
        position: relative;
        height: 100%;

        .scrollarea-content {
          position: relative;
        }

        .scrollbar-container {
          position: absolute;
          background: inherit;
          width: 0.9em;

          // FIX: was `&vertical`, which the parent selector expands to the
          // non-existent element selector `.scrollbar-containervertical`.
          // The class variant `&.vertical` matches the sibling rule below.
          &.vertical .scrollbar {
            width: 100%;
          }
        }
      }

      .scrollbar-container {
        background-color: inherit;

        &.vertical {
          right: 0;
          top: 0;
        }
      }
    }
  }
}
| {
"pile_set_name": "Github"
} |
/*
* pata_oldpiix.c - Intel PATA/SATA controllers
*
* (C) 2005 Red Hat
*
* Some parts based on ata_piix.c by Jeff Garzik and others.
*
* Early PIIX differs significantly from the later PIIX as it lacks
* SITRE and the slave timing registers. This means that you have to
* set timing per channel, or be clever. Libata tells us whenever it
* does drive selection and we use this to reload the timings.
*
* Because of these behaviour differences PIIX gets its own driver module.
*/
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/pci.h>
#include <linux/init.h>
#include <linux/blkdev.h>
#include <linux/delay.h>
#include <linux/device.h>
#include <scsi/scsi_host.h>
#include <linux/libata.h>
#include <linux/ata.h>
#define DRV_NAME "pata_oldpiix"
#define DRV_VERSION "0.5.5"
/**
* oldpiix_pre_reset - probe begin
* @link: ATA link
* @deadline: deadline jiffies for the operation
*
* Set up cable type and use generic probe init
*/
static int oldpiix_pre_reset(struct ata_link *link, unsigned long deadline)
{
	/* Per-port enable bits in PCI config space: register 0x41 gates
	 * port 0 and 0x43 gates port 1; bit 7 set means "enabled". */
	static const struct pci_bits oldpiix_enable_bits[] = {
		{ 0x41U, 1U, 0x80UL, 0x80UL },	/* port 0 */
		{ 0x43U, 1U, 0x80UL, 0x80UL },	/* port 1 */
	};
	struct ata_port *ap = link->ap;
	struct pci_dev *pdev = to_pci_dev(ap->host->dev);
	const struct pci_bits *bits = &oldpiix_enable_bits[ap->port_no];

	/* A disabled port has nothing attached; skip the reset entirely. */
	if (!pci_test_config_bits(pdev, bits))
		return -ENOENT;

	return ata_sff_prereset(link, deadline);
}
/**
* oldpiix_set_piomode - Initialize host controller PATA PIO timings
* @ap: Port whose timings we are configuring
* @adev: Device whose timings we are configuring
*
* Set PIO mode for device, in host controller PCI config space.
*
* LOCKING:
* None (inherited from caller).
*/
static void oldpiix_set_piomode (struct ata_port *ap, struct ata_device *adev)
{
	unsigned int pio = adev->pio_mode - XFER_PIO_0;
	struct pci_dev *dev = to_pci_dev(ap->host->dev);
	/* One shared 16-bit IDETIM register per channel: 0x40 for the
	 * primary channel, 0x42 for the secondary. */
	unsigned int idetm_port= ap->port_no ? 0x42 : 0x40;
	u16 idetm_data;
	int control = 0;

	/*
	 * See Intel Document 298600-004 for the timing programming rules
	 * for PIIX/ICH. Note that the early PIIX does not have the slave
	 * timing port at 0x44.
	 */

	/* ISP (sample point) / RTC (recovery time) pairs indexed by PIO
	 * mode 0..4; loaded into bits 13:12 and 9:8 below. */
	static const	 /* ISP  RTC */
	u8 timings[][2]	= { { 0, 0 },
			    { 0, 0 },
			    { 1, 0 },
			    { 2, 1 },
			    { 2, 3 }, };

	if (pio > 1)
		control |= 1;	/* TIME */
	if (ata_pio_need_iordy(adev))
		control |= 2;	/* IE */

	/* Intel specifies that the prefetch/posting is for disk only */
	if (adev->class == ATA_DEV_ATA)
		control |= 4;	/* PPE */

	pci_read_config_word(dev, idetm_port, &idetm_data);

	/*
	 * Set PPE, IE and TIME as appropriate.
	 * Clear the other drive's timing bits.
	 */
	if (adev->devno == 0) {
		/* Master: control bits occupy bits 0-3. */
		idetm_data &= 0xCCE0;
		idetm_data |= control;
	} else {
		/* Slave: control bits occupy bits 4-7. */
		idetm_data &= 0xCC0E;
		idetm_data |= (control << 4);
	}

	/* ISP goes into bits 13:12, RTC into bits 9:8. */
	idetm_data |= (timings[pio][0] << 12) |
			(timings[pio][1] << 8);
	pci_write_config_word(dev, idetm_port, idetm_data);

	/* Track which port is configured */
	ap->private_data = adev;
}
/**
* oldpiix_set_dmamode - Initialize host controller PATA DMA timings
* @ap: Port whose timings we are configuring
* @adev: Device to program
*
* Set MWDMA mode for device, in host controller PCI config space.
*
* LOCKING:
* None (inherited from caller).
*/
static void oldpiix_set_dmamode (struct ata_port *ap, struct ata_device *adev)
{
	struct pci_dev *dev = to_pci_dev(ap->host->dev);
	/* One shared IDETIM register per channel (0x40 primary, 0x42 secondary). */
	u8 idetm_port = ap->port_no ? 0x42 : 0x40;
	u16 idetm_data;

	/* ISP (sample point) / RTC (recovery time) pairs indexed by PIO
	 * mode; loaded into bits 13:12 and 9:8 below. */
	static const	 /* ISP  RTC */
	u8 timings[][2]	= { { 0, 0 },
			    { 0, 0 },
			    { 1, 0 },
			    { 2, 1 },
			    { 2, 3 }, };

	/*
	 * MWDMA is driven by the PIO timings. We must also enable
	 * IORDY unconditionally along with TIME1. PPE has already
	 * been set when the PIO timing was set.
	 */

	unsigned int mwdma = adev->dma_mode - XFER_MW_DMA_0;
	unsigned int control;
	/* PIO timing to program for each MWDMA mode 0-2. */
	const unsigned int needed_pio[3] = {
		XFER_PIO_0, XFER_PIO_3, XFER_PIO_4
	};
	int pio = needed_pio[mwdma] - XFER_PIO_0;

	pci_read_config_word(dev, idetm_port, &idetm_data);

	control = 3;	/* IORDY|TIME0 */

	/* Intel specifies that the PPE functionality is for disk only */
	if (adev->class == ATA_DEV_ATA)
		control |= 4;	/* PPE enable */

	/* If the drive MWDMA is faster than it can do PIO then
	   we must force PIO into PIO0 */

	if (adev->pio_mode < needed_pio[mwdma])
		/* Enable DMA timing only */
		control |= 8;	/* PIO cycles in PIO0 */

	/* Mask out the relevant control and timing bits we will load. Also
	   clear the other drive TIME register as a precaution */

	if (adev->devno == 0) {
		/* Master: control bits occupy bits 0-3. */
		idetm_data &= 0xCCE0;
		idetm_data |= control;
	} else {
		/* Slave: control bits occupy bits 4-7. */
		idetm_data &= 0xCC0E;
		idetm_data |= (control << 4);
	}

	/* ISP into bits 13:12, RTC into bits 9:8. */
	idetm_data |= (timings[pio][0] << 12) | (timings[pio][1] << 8);
	pci_write_config_word(dev, idetm_port, idetm_data);

	/* Track which port is configured */
	ap->private_data = adev;
}
/**
* oldpiix_qc_issue - command issue
* @qc: command pending
*
* Called when the libata layer is about to issue a command. We wrap
* this interface so that we can load the correct ATA timings if
* necessary. Our logic also clears TIME0/TIME1 for the other device so
* that, even if we get this wrong, cycles to the other device will
* be made PIO0.
*/
static unsigned int oldpiix_qc_issue(struct ata_queued_cmd *qc)
{
	struct ata_port *ap = qc->ap;
	struct ata_device *adev = qc->dev;

	/* Early PIIX shares one timing register per channel, so reload the
	 * timings whenever the target device differs from the one that was
	 * programmed last (tracked in ap->private_data). */
	if (ap->private_data != adev) {
		oldpiix_set_piomode(ap, adev);
		if (ata_dma_enabled(adev))
			oldpiix_set_dmamode(ap, adev);
	}

	return ata_bmdma_qc_issue(qc);
}
/* SCSI host template: stock BMDMA defaults, nothing overridden. */
static struct scsi_host_template oldpiix_sht = {
	ATA_BMDMA_SHT(DRV_NAME),
};

/*
 * Port operations: inherit the generic BMDMA ops; qc_issue is wrapped so
 * the shared per-channel timings can be reloaded on device switch.
 */
static struct ata_port_operations oldpiix_pata_ops = {
	.inherits	= &ata_bmdma_port_ops,
	.qc_issue	= oldpiix_qc_issue,
	.cable_detect	= ata_cable_40wire,
	.set_piomode	= oldpiix_set_piomode,
	.set_dmamode	= oldpiix_set_dmamode,
	.prereset	= oldpiix_pre_reset,
};
/**
* oldpiix_init_one - Register PIIX ATA PCI device with kernel services
* @pdev: PCI device to register
* @ent: Entry in oldpiix_pci_tbl matching with @pdev
*
* Called from kernel PCI layer. We probe for combined mode (sigh),
* and then hand over control to libata, for it to do the rest.
*
* LOCKING:
* Inherited from PCI layer (may sleep).
*
* RETURNS:
* Zero on success, or -ERRNO value.
*/
static int oldpiix_init_one (struct pci_dev *pdev, const struct pci_device_id *ent)
{
	static int printed_version;	/* print the version banner only once */
	static const struct ata_port_info info = {
		.flags		= ATA_FLAG_SLAVE_POSS,
		.pio_mask	= ATA_PIO4,
		.mwdma_mask	= ATA_MWDMA12_ONLY,
		.port_ops	= &oldpiix_pata_ops,
	};
	/* Same port info for every channel; NULL terminates the list. */
	const struct ata_port_info *ppi[] = { &info, NULL };

	if (!printed_version++)
		dev_printk(KERN_DEBUG, &pdev->dev,
			   "version " DRV_VERSION "\n");

	return ata_pci_bmdma_init_one(pdev, ppi, &oldpiix_sht, NULL, 0);
}
/* Only one PCI ID is handled: Intel device 0x1230 (early PIIX IDE). */
static const struct pci_device_id oldpiix_pci_tbl[] = {
	{ PCI_VDEVICE(INTEL, 0x1230), },

	{ }	/* terminate list */
};

static struct pci_driver oldpiix_pci_driver = {
	.name			= DRV_NAME,
	.id_table		= oldpiix_pci_tbl,
	.probe			= oldpiix_init_one,
	.remove			= ata_pci_remove_one,
#ifdef CONFIG_PM
	/* Generic libata PCI suspend/resume helpers are sufficient. */
	.suspend		= ata_pci_device_suspend,
	.resume			= ata_pci_device_resume,
#endif
};
/* Module load: register the PCI driver. */
static int __init oldpiix_init(void)
{
	return pci_register_driver(&oldpiix_pci_driver);
}

/* Module unload: unregister the PCI driver. */
static void __exit oldpiix_exit(void)
{
	pci_unregister_driver(&oldpiix_pci_driver);
}
module_init(oldpiix_init);
module_exit(oldpiix_exit);
MODULE_AUTHOR("Alan Cox");
MODULE_DESCRIPTION("SCSI low-level driver for early PIIX series controllers");
MODULE_LICENSE("GPL");
MODULE_DEVICE_TABLE(pci, oldpiix_pci_tbl);
MODULE_VERSION(DRV_VERSION);
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>
| {
"pile_set_name": "Github"
} |
---
layout: base
title: 'Statistics of case in UD_Polish-LFG'
udver: '2'
---
## Treebank Statistics: UD_Polish-LFG: Relations: `case`
This relation is universal.
11106 nodes (8%) are attached to their parents as `case`.
11077 instances of `case` (100%) are right-to-left (child precedes parent).
Average distance between parent and child is 1.27174500270124.
The following 6 pairs of parts of speech are connected with `case`: <tt><a href="pl_lfg-pos-NOUN.html">NOUN</a></tt>-<tt><a href="pl_lfg-pos-ADP.html">ADP</a></tt> (8536; 77% instances), <tt><a href="pl_lfg-pos-PRON.html">PRON</a></tt>-<tt><a href="pl_lfg-pos-ADP.html">ADP</a></tt> (1260; 11% instances), <tt><a href="pl_lfg-pos-PROPN.html">PROPN</a></tt>-<tt><a href="pl_lfg-pos-ADP.html">ADP</a></tt> (1038; 9% instances), <tt><a href="pl_lfg-pos-ADJ.html">ADJ</a></tt>-<tt><a href="pl_lfg-pos-ADP.html">ADP</a></tt> (200; 2% instances), <tt><a href="pl_lfg-pos-DET.html">DET</a></tt>-<tt><a href="pl_lfg-pos-ADP.html">ADP</a></tt> (60; 1% instances), <tt><a href="pl_lfg-pos-NUM.html">NUM</a></tt>-<tt><a href="pl_lfg-pos-ADP.html">ADP</a></tt> (12; 0% instances).
~~~ conllu
# visual-style 5 bgColor:blue
# visual-style 5 fgColor:white
# visual-style 7 bgColor:blue
# visual-style 7 fgColor:white
# visual-style 7 5 case color:blue
1 A a CCONJ conj _ 4 cc 4:cc _
2 Janio Janio PROPN subst:sg:nom:m1 Case=Nom|Gender=Masc|Number=Sing|SubGender=Masc1 4 nsubj 4:nsubj _
3 tymczasem tymczasem ADV adv _ 4 advmod 4:advmod _
4 pracował pracować VERB praet:sg:m1:imperf Aspect=Imp|Gender=Masc|Mood=Ind|Number=Sing|SubGender=Masc1|Tense=Past|VerbForm=Fin|Voice=Act 0 root 0:root _
5 w w ADP prep:loc:nwok AdpType=Prep|Variant=Short 7 case 7:case Case=Loc
6 swoim swój DET adj:sg:loc:m3:pos Case=Loc|Gender=Masc|Number=Sing|Poss=Yes|PronType=Prs|Reflex=Yes|SubGender=Masc3 7 det 7:det _
7 gabinecie gabinet NOUN subst:sg:loc:m3 Case=Loc|Gender=Masc|Number=Sing|SubGender=Masc3 4 obl 4:obl:w SpaceAfter=No
8 . . PUNCT interp PunctType=Peri 4 punct 4:punct _
~~~
~~~ conllu
# visual-style 4 bgColor:blue
# visual-style 4 fgColor:white
# visual-style 5 bgColor:blue
# visual-style 5 fgColor:white
# visual-style 5 4 case color:blue
1 Ale ale CCONJ conj _ 2 cc 2:cc _
2 było być VERB praet:sg:n:imperf Aspect=Imp|Gender=Neut|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Act 0 root 0:root _
3 mu on PRON ppron3:sg:dat:m1:ter:nakc:npraep Case=Dat|Gender=Masc|Number=Sing|Person=3|PrepCase=Npr|PronType=Prs|SubGender=Masc1|Variant=Short 2 iobj 2:iobj _
4 z z ADP prep:inst:nwok AdpType=Prep|Variant=Short 5 case 5:case Case=Ins
5 tym to PRON subst:sg:inst:n Case=Ins|Gender=Neut|Number=Sing|PronType=Dem 2 obl 2:obl:z _
6 dobrze dobrze ADV adv:pos Degree=Pos 2 advmod 2:advmod SpaceAfter=No
7 . . PUNCT interp PunctType=Peri 2 punct 2:punct _
~~~
~~~ conllu
# visual-style 4 bgColor:blue
# visual-style 4 fgColor:white
# visual-style 5 bgColor:blue
# visual-style 5 fgColor:white
# visual-style 5 4 case color:blue
1 Bandyci bandyta NOUN subst:pl:nom:m1 Case=Nom|Gender=Masc|Number=Plur|SubGender=Masc1 2 nsubj 2:nsubj _
2 wsadzili wsadzić VERB praet:pl:m1:perf Aspect=Perf|Gender=Masc|Mood=Ind|Number=Plur|SubGender=Masc1|Tense=Past|VerbForm=Fin|Voice=Act 0 root 0:root _
3 ją on PRON ppron3:sg:acc:f:ter:akc:npraep Case=Acc|Gender=Fem|Number=Sing|Person=3|PrepCase=Npr|PronType=Prs|Variant=Long 2 obj 2:obj _
4 w w ADP prep:loc:nwok AdpType=Prep|Variant=Short 5 case 5:case Case=Loc
5 Szczecinku Szczecinek PROPN subst:sg:loc:m3 Case=Loc|Gender=Masc|Number=Sing|SubGender=Masc3 2 obl 2:obl:w _
6 do do ADP prep:gen AdpType=Prep 7 case 7:case Case=Gen
7 pociągu pociąg NOUN subst:sg:gen:m3 Case=Gen|Gender=Masc|Number=Sing|SubGender=Masc3 2 obl 2:obl:do SpaceAfter=No
8 . . PUNCT interp PunctType=Peri 2 punct 2:punct _
~~~
| {
"pile_set_name": "Github"
} |
/*
* hdac_i915.c - routines for sync between HD-A core and i915 display driver
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*/
#include <linux/init.h>
#include <linux/module.h>
#include <linux/pci.h>
#include <linux/component.h>
#include <drm/i915_component.h>
#include <sound/core.h>
#include <sound/hdaudio.h>
#include <sound/hda_i915.h>
#include <sound/hda_register.h>
static struct i915_audio_component *hdac_acomp;
/**
* snd_hdac_set_codec_wakeup - Enable / disable HDMI/DP codec wakeup
* @bus: HDA core bus
* @enable: enable or disable the wakeup
*
* This function is supposed to be used only by a HD-audio controller
* driver that needs the interaction with i915 graphics.
*
* This function should be called during the chip reset, also called at
* resume for updating STATESTS register read.
*
* Returns zero for success or a negative error code.
*/
int snd_hdac_set_codec_wakeup(struct hdac_bus *bus, bool enable)
{
	struct i915_audio_component *acomp = bus->audio_component;

	if (!acomp || !acomp->ops)
		return -ENODEV;

	/* The override op may be absent; warn but report success so the
	 * caller's reset sequence can continue. */
	if (!acomp->ops->codec_wake_override) {
		dev_warn(bus->dev, "Invalid codec wake callback\n");
		return 0;
	}

	dev_dbg(bus->dev, "%s codec wakeup\n", enable ? "enable" : "disable");

	acomp->ops->codec_wake_override(acomp->dev, enable);

	return 0;
}
EXPORT_SYMBOL_GPL(snd_hdac_set_codec_wakeup);
/**
* snd_hdac_display_power - Power up / down the power refcount
* @bus: HDA core bus
* @enable: power up or down
*
* This function is supposed to be used only by a HD-audio controller
* driver that needs the interaction with i915 graphics.
*
* This function manages a refcount and calls the i915 get_power() and
* put_power() ops accordingly, toggling the codec wakeup, too.
*
* Returns zero for success or a negative error code.
*/
int snd_hdac_display_power(struct hdac_bus *bus, bool enable)
{
	struct i915_audio_component *acomp = bus->audio_component;

	if (!acomp || !acomp->ops)
		return -ENODEV;

	dev_dbg(bus->dev, "display power %s\n",
		enable ? "enable" : "disable");

	if (enable) {
		/* First user powers the display up and pulses the codec
		 * wakeup (on then off) to refresh STATESTS, as described
		 * in the kernel-doc above. */
		if (!bus->i915_power_refcount++) {
			acomp->ops->get_power(acomp->dev);
			snd_hdac_set_codec_wakeup(bus, true);
			snd_hdac_set_codec_wakeup(bus, false);
		}
	} else {
		/* Unbalanced put would underflow the refcount. */
		WARN_ON(!bus->i915_power_refcount);
		if (!--bus->i915_power_refcount)
			acomp->ops->put_power(acomp->dev);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(snd_hdac_display_power);
/* HDA controllers that live in the GPU itself (HSW/BDW variants); only
 * these need the BCLK reprogramming below. */
#define CONTROLLER_IN_GPU(pci)	(((pci)->device == 0x0a0c) || \
				((pci)->device == 0x0c0c) || \
				((pci)->device == 0x0d0c) || \
				((pci)->device == 0x160c))

/**
 * snd_hdac_i915_set_bclk - Reprogram BCLK for HSW/BDW
 * @bus: HDA core bus
 *
 * Intel HSW/BDW display HDA controller is in GPU. Both its power and link BCLK
 * depends on GPU. Two Extended Mode registers EM4 (M value) and EM5 (N Value)
 * are used to convert CDClk (Core Display Clock) to 24MHz BCLK:
 * BCLK = CDCLK * M / N
 * The values will be lost when the display power well is disabled and need to
 * be restored to avoid abnormal playback speed.
 *
 * Call this function at initializing and changing power well, as well as
 * at ELD notifier for the hotplug.
 */
void snd_hdac_i915_set_bclk(struct hdac_bus *bus)
{
	struct i915_audio_component *acomp = bus->audio_component;
	struct pci_dev *pci = to_pci_dev(bus->dev);
	int cdclk_freq;
	unsigned int bclk_m, bclk_n;

	if (!acomp || !acomp->ops || !acomp->ops->get_cdclk_freq)
		return; /* only for i915 binding */
	if (!CONTROLLER_IN_GPU(pci))
		return; /* only HSW/BDW */

	cdclk_freq = acomp->ops->get_cdclk_freq(acomp->dev);
	/* Choose M/N so that CDCLK (kHz) * M / N yields the 24MHz BCLK. */
	switch (cdclk_freq) {
	case 337500:
		bclk_m = 16;
		bclk_n = 225;
		break;

	case 450000:
	default: /* default CDCLK 450MHz */
		bclk_m = 4;
		bclk_n = 75;
		break;

	case 540000:
		bclk_m = 4;
		bclk_n = 90;
		break;

	case 675000:
		bclk_m = 8;
		bclk_n = 225;
		break;
	}

	snd_hdac_chip_writew(bus, HSW_EM4, bclk_m);
	snd_hdac_chip_writew(bus, HSW_EM5, bclk_n);
}
EXPORT_SYMBOL_GPL(snd_hdac_i915_set_bclk);
/* There is a fixed mapping between audio pin node and display port.
* on SNB, IVY, HSW, BSW, SKL, BXT, KBL:
* Pin Widget 5 - PORT B (port = 1 in i915 driver)
* Pin Widget 6 - PORT C (port = 2 in i915 driver)
* Pin Widget 7 - PORT D (port = 3 in i915 driver)
*
* on VLV, ILK:
* Pin Widget 4 - PORT B (port = 1 in i915 driver)
* Pin Widget 5 - PORT C (port = 2 in i915 driver)
* Pin Widget 6 - PORT D (port = 3 in i915 driver)
*/
static int pin2port(struct hdac_device *codec, hda_nid_t pin_nid)
{
	int base_nid = 4;	/* most codecs: pin widgets start at NID 5 */

	/* ILK and VLV parts start their pin widgets one NID earlier
	 * (see the mapping table above). */
	if (codec->vendor_id == 0x80860054 ||	/* ILK */
	    codec->vendor_id == 0x80862804 ||	/* ILK */
	    codec->vendor_id == 0x80862882)	/* VLV */
		base_nid = 3;

	/* Only three ports (B/C/D) exist past the base NID. */
	if (WARN_ON(pin_nid <= base_nid || pin_nid > base_nid + 3))
		return -1;

	return pin_nid - base_nid;
}
/**
* snd_hdac_sync_audio_rate - Set N/CTS based on the sample rate
* @codec: HDA codec
* @nid: the pin widget NID
* @rate: the sample rate to set
*
* This function is supposed to be used only by a HD-audio controller
* driver that needs the interaction with i915 graphics.
*
* This function sets N/CTS value based on the given sample rate.
* Returns zero for success, or a negative error code.
*/
int snd_hdac_sync_audio_rate(struct hdac_device *codec, hda_nid_t nid, int rate)
{
	struct i915_audio_component *acomp = codec->bus->audio_component;
	int port;

	/* Needs a bound i915 component that implements the rate sync op. */
	if (!acomp || !acomp->ops || !acomp->ops->sync_audio_rate)
		return -ENODEV;

	port = pin2port(codec, nid);
	if (port < 0)
		return -EINVAL;

	return acomp->ops->sync_audio_rate(acomp->dev, port, rate);
}
EXPORT_SYMBOL_GPL(snd_hdac_sync_audio_rate);
/**
* snd_hdac_acomp_get_eld - Get the audio state and ELD via component
* @codec: HDA codec
* @nid: the pin widget NID
* @audio_enabled: the pointer to store the current audio state
* @buffer: the buffer pointer to store ELD bytes
* @max_bytes: the max bytes to be stored on @buffer
*
* This function is supposed to be used only by a HD-audio controller
* driver that needs the interaction with i915 graphics.
*
* This function queries the current state of the audio on the given
* digital port and fetches the ELD bytes onto the given buffer.
* It returns the number of bytes for the total ELD data, zero for
* invalid ELD, or a negative error code.
*
* The return size is the total bytes required for the whole ELD bytes,
* thus it may be over @max_bytes. If it's over @max_bytes, it implies
* that only a part of ELD bytes have been fetched.
*/
int snd_hdac_acomp_get_eld(struct hdac_device *codec, hda_nid_t nid,
			   bool *audio_enabled, char *buffer, int max_bytes)
{
	struct i915_audio_component *acomp = codec->bus->audio_component;
	int port;

	/* Needs a bound i915 component that implements the ELD query. */
	if (!acomp || !acomp->ops || !acomp->ops->get_eld)
		return -ENODEV;

	port = pin2port(codec, nid);
	if (port < 0)
		return -EINVAL;

	/* i915 fills @buffer (up to @max_bytes) and reports total size. */
	return acomp->ops->get_eld(acomp->dev, port, audio_enabled,
				   buffer, max_bytes);
}
EXPORT_SYMBOL_GPL(snd_hdac_acomp_get_eld);
/*
 * Component-framework master bind: runs once the i915 child component is
 * available.  Binds all children, validates the ops this file relies on,
 * and pins the module providing them.
 */
static int hdac_component_master_bind(struct device *dev)
{
	struct i915_audio_component *acomp = hdac_acomp;
	int ret;

	ret = component_bind_all(dev, acomp);
	if (ret < 0)
		return ret;

	/* The child's bind must have filled in every op we call later. */
	if (WARN_ON(!(acomp->dev && acomp->ops && acomp->ops->get_power &&
		      acomp->ops->put_power && acomp->ops->get_cdclk_freq))) {
		ret = -EINVAL;
		goto out_unbind;
	}

	/*
	 * Atm, we don't support dynamic unbinding initiated by the child
	 * component, so pin its containing module until we unbind.
	 */
	if (!try_module_get(acomp->ops->owner)) {
		ret = -ENODEV;
		goto out_unbind;
	}

	return 0;

out_unbind:
	component_unbind_all(dev, acomp);

	return ret;
}
/* Master unbind: drop the module pin taken at bind time, then unbind
 * the child component(s). */
static void hdac_component_master_unbind(struct device *dev)
{
	struct i915_audio_component *acomp = hdac_acomp;

	module_put(acomp->ops->owner);
	component_unbind_all(dev, acomp);
	/* The child's unbind is expected to have cleared these. */
	WARN_ON(acomp->ops || acomp->dev);
}
static const struct component_master_ops hdac_component_master_ops = {
	.bind = hdac_component_master_bind,
	.unbind = hdac_component_master_unbind,
};

/* Component match callback: accept only the i915 driver's devices. */
static int hdac_component_master_match(struct device *dev, void *data)
{
	/* i915 is the only supported component */
	return !strcmp(dev->driver->name, "i915");
}
/**
* snd_hdac_i915_register_notifier - Register i915 audio component ops
* @aops: i915 audio component ops
*
* This function is supposed to be used only by a HD-audio controller
* driver that needs the interaction with i915 graphics.
*
* This function sets the given ops to be called by the i915 graphics driver.
*
* Returns zero for success or a negative error code.
*/
int snd_hdac_i915_register_notifier(const struct i915_audio_component_audio_ops *aops)
{
	/* Requires snd_hdac_i915_init() to have set up the component. */
	if (WARN_ON(!hdac_acomp))
		return -ENODEV;

	hdac_acomp->audio_ops = aops;
	return 0;
}
EXPORT_SYMBOL_GPL(snd_hdac_i915_register_notifier);
/* check whether intel graphics is present */
static bool i915_gfx_present(void)
{
	/* Match any Intel PCI device whose base class is "display". */
	static struct pci_device_id ids[] = {
		{ PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_ANY_ID),
		  .class = PCI_BASE_CLASS_DISPLAY << 16,
		  .class_mask = 0xff << 16 },
		{}
	};
	return pci_dev_present(ids);
}
/**
* snd_hdac_i915_init - Initialize i915 audio component
* @bus: HDA core bus
*
* This function is supposed to be used only by a HD-audio controller
* driver that needs the interaction with i915 graphics.
*
* This function initializes and sets up the audio component to communicate
* with i915 graphics driver.
*
* Returns zero for success or a negative error code.
*/
int snd_hdac_i915_init(struct hdac_bus *bus)
{
	struct component_match *match = NULL;
	struct device *dev = bus->dev;
	struct i915_audio_component *acomp;
	int ret;

	/* Only a single audio component instance is supported. */
	if (WARN_ON(hdac_acomp))
		return -EBUSY;

	/* Don't set up the component when no Intel display device exists. */
	if (!i915_gfx_present())
		return -ENODEV;

	acomp = kzalloc(sizeof(*acomp), GFP_KERNEL);
	if (!acomp)
		return -ENOMEM;
	bus->audio_component = acomp;
	hdac_acomp = acomp;

	component_match_add(dev, &match, hdac_component_master_match, bus);
	ret = component_master_add_with_match(dev, &hdac_component_master_ops,
					      match);
	if (ret < 0)
		goto out_err;

	/*
	 * Atm, we don't support deferring the component binding, so make sure
	 * i915 is loaded and that the binding successfully completes.
	 */
	request_module("i915");

	/* acomp->ops is populated by i915 during the bind triggered above. */
	if (!acomp->ops) {
		ret = -ENODEV;
		goto out_master_del;
	}
	dev_dbg(dev, "bound to i915 component master\n");

	return 0;

out_master_del:
	component_master_del(dev, &hdac_component_master_ops);
out_err:
	/* Undo the global/bus bookkeeping so a later init can retry. */
	kfree(acomp);
	bus->audio_component = NULL;
	hdac_acomp = NULL;
	dev_info(dev, "failed to add i915 component master (%d)\n", ret);

	return ret;
}
EXPORT_SYMBOL_GPL(snd_hdac_i915_init);
/**
* snd_hdac_i915_exit - Finalize i915 audio component
* @bus: HDA core bus
*
* This function is supposed to be used only by a HD-audio controller
* driver that needs the interaction with i915 graphics.
*
* This function releases the i915 audio component that has been used.
*
* Returns zero for success or a negative error code.
*/
int snd_hdac_i915_exit(struct hdac_bus *bus)
{
	struct device *dev = bus->dev;
	struct i915_audio_component *acomp = bus->audio_component;

	/* Nothing to do when init never ran or already failed. */
	if (!acomp)
		return 0;

	/* Callers should have balanced every display_power() get/put. */
	WARN_ON(bus->i915_power_refcount);
	/* NOTE(review): a leaked refcount > 1 is released with a single
	 * put_power() here — presumably a best-effort cleanup; confirm. */
	if (bus->i915_power_refcount > 0 && acomp->ops)
		acomp->ops->put_power(acomp->dev);

	component_master_del(dev, &hdac_component_master_ops);

	kfree(acomp);
	bus->audio_component = NULL;
	hdac_acomp = NULL;

	return 0;
}
EXPORT_SYMBOL_GPL(snd_hdac_i915_exit);
| {
"pile_set_name": "Github"
} |
per-file i18n.*[email protected]
per-file i18n.*[email protected]
per-file typing-asm.*[email protected]
per-file typing-asm.*[email protected]
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2020, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "HugeCTR/include/data_reader.hpp"
#include <fstream>
#include <thread>
#include "HugeCTR/include/data_generator.hpp"
#include "gtest/gtest.h"
#include "utest/test_utils.h"
using namespace HugeCTR;
// Number of distinct category ids in each of the 26 categorical slots.
const std::vector<long long> slot_size = {
    39884406, 39043,    17289,    7420,     20263,  3,     7120,   1543,  63,
    38532951, 2953546,  403346,   10,       2208,   11938, 155,    4,     976,
    14,       39979771, 25641295, 39664984, 585935, 12972, 108,    36};
// Prefix sums of slot_size: the global index offset at which each slot's
// category ids begin (passed to the readers below).
const std::vector<long long> slot_offset = {
    0,        39884406, 39923449, 39940738, 39948158, 39968421, 39968424, 39975544, 39977087,
    39977150, 78510101, 81463647, 81866993, 81867003, 81869211, 81881149, 81881304, 81881308,
    81882284, 81882298, 121862069, 147503364, 187168348, 187754283, 187767255, 187767363};
// Synthetic dataset dimensions shared by the tests below.
const long long num_samples = 131072 * 12;
const int max_nnz = 1;    // at most one key per slot
const int slot_num = 26;  // categorical slots per sample
const int label_dim = 1;  // label columns per sample
const int dense_dim = 13; // dense feature columns per sample
typedef unsigned int T;   // key type used by the readers under test
const Check_t CHK = Check_t::None;  // no checksum verification of the file
const std::string file_name = "./train_data.bin";  // generated raw data file
// Exercise DataReaderWorkerRaw in isolation: generate a raw data file,
// build a CSR heap, then read a single batch.  float_label_dense selects
// whether label/dense fields are generated as float or as int.
void data_reader_worker_raw_test_impl(bool float_label_dense) {
  test::mpi_init();

  // data generation
  data_generation_for_raw(file_name, num_samples, label_dim, dense_dim, slot_num,
                          float_label_dense);

  // setup a CSR heap
  const int num_devices = 1;
  const int batchsize = 2048;
  const DataReaderSparseParam param = {DataReaderSparse_t::Distributed, max_nnz * slot_num, 1,
                                       slot_num};
  std::vector<DataReaderSparseParam> params;
  params.push_back(param);
  std::shared_ptr<HeapEx<CSRChunk<T>>> csr_heap(
      new HeapEx<CSRChunk<T>>(1, num_devices, batchsize, label_dim + dense_dim, params));

  // setup a data reader
  // NOTE(review): the sample stride assumes every field is 4 bytes wide
  // (int or float32) — confirm against data_generation_for_raw.
  auto file_offset_list = std::make_shared<MmapOffsetList>(
      file_name, num_samples, (label_dim + dense_dim + slot_num) * sizeof(int), batchsize, false,
      1);
  DataReaderWorkerRaw<T> data_reader(0, 1, file_offset_list, csr_heap, file_name, params,
                                     slot_offset, label_dim, float_label_dense);

  // call read a batch
  data_reader.read_a_batch();
}
// End-to-end DataReader test: read three consecutive batches from the raw
// file onto two GPUs and print the batch sizes.  Relies on the data file
// produced by a prior data_reader_worker_raw_test_impl() run (generation
// here is intentionally commented out).
void data_reader_raw_test_impl(bool float_label_dense) {
  // data generation
  // data_generation_for_raw(file_name, num_samples, label_dim, dense_dim, slot_num);
  const int batchsize = 131072;
  test::mpi_init();
  int numprocs = 1;
  std::vector<std::vector<int>> vvgpu;
  std::vector<int> device_list = {0, 1};
#ifdef ENABLE_MPI
  MPI_Comm_size(MPI_COMM_WORLD, &numprocs);
#endif
  // Every rank gets the same GPU list.
  for (int i = 0; i < numprocs; i++) {
    vvgpu.push_back(device_list);
  }
  auto gpu_resource_group = ResourceManager::create(vvgpu, 0);

  const DataReaderSparseParam param = {DataReaderSparse_t::Localized, max_nnz * slot_num, 1,
                                       slot_num};
  std::vector<DataReaderSparseParam> params;
  params.push_back(param);

  DataReader<T> data_reader(batchsize, label_dim, dense_dim, params, gpu_resource_group, 1, true,
                            false);
  data_reader.create_drwg_raw(file_name, num_samples, slot_offset, float_label_dense, true, true);

  long long current_batchsize = data_reader.read_a_batch_to_device();
  std::cout << "current_batchsize: " << current_batchsize << std::endl;
  /* print_tensor(data_reader.get_label_tensors()[1], 0, 30);
  print_tensor(data_reader.get_value_tensors()[1], 0, 30);
  print_tensor(data_reader.get_row_offsets_tensors()[1], 0, 30);
  print_tensor(data_reader.get_label_tensors()[0], 0, 30);
  print_tensor(Tensor2<__half>::stretch_from(data_reader.get_dense_tensors()[0]), 0, 30);
  print_tensor(data_reader.get_value_tensors()[0], 0, 30);
  print_tensor(data_reader.get_row_offsets_tensors()[0], 0, 30); */

  current_batchsize = data_reader.read_a_batch_to_device();
  std::cout << "current_batchsize: " << current_batchsize << std::endl;
  /* print_tensor(data_reader.get_label_tensors()[1], -10, -1);
  print_tensor(data_reader.get_value_tensors()[1], 0, 10);
  print_tensor(data_reader.get_row_offsets_tensors()[1], 0, 10); */

  current_batchsize = data_reader.read_a_batch_to_device();
  /* print_tensor(data_reader.get_value_tensors()[0], -30, -1);
  print_tensor(data_reader.get_row_offsets_tensors()[0], -30, -1);
  print_tensor(data_reader.get_value_tensors()[1], -30, -1);
  print_tensor(data_reader.get_row_offsets_tensors()[1], -30, -1); */
  std::cout << "current_batchsize: " << current_batchsize << std::endl;
  /* print_tensor(data_reader.get_label_tensors()[1], -10, -1);
  print_tensor(data_reader.get_value_tensors()[1], 0, 10);
  print_tensor(data_reader.get_row_offsets_tensors()[1], 0, 10); */
}
// Register the four gtest cases: worker-level and full-reader paths, each
// with float (true) and int (false) label/dense encodings.
TEST(data_reader_raw, data_reader_worker_raw_float_test) { data_reader_worker_raw_test_impl(true); }
TEST(data_reader_raw, data_reader_raw_float_test) { data_reader_raw_test_impl(true); }
TEST(data_reader_raw, data_reader_worker_raw_int_test) { data_reader_worker_raw_test_impl(false); }
TEST(data_reader_raw, data_reader_raw_int_test) { data_reader_raw_test_impl(false); }
| {
"pile_set_name": "Github"
} |
/*
* drivers/sh/superhyway/superhyway-sysfs.c
*
* SuperHyway Bus sysfs interface
*
* Copyright (C) 2004, 2005 Paul Mundt <[email protected]>
*
* This file is subject to the terms and conditions of the GNU General Public
* License. See the file "COPYING" in the main directory of this archive
* for more details.
*/
#include <linux/kernel.h>
#include <linux/device.h>
#include <linux/types.h>
#include <linux/superhyway.h>
/*
 * Generate a read-only sysfs attribute "name" whose show() formats the
 * given superhyway_device member with "fmt".
 */
#define superhyway_ro_attr(name, fmt, field)					\
static ssize_t name##_show(struct device *dev, struct device_attribute *attr, char *buf) \
{										\
	struct superhyway_device *s = to_superhyway_device(dev);		\
	return sprintf(buf, fmt, s->field);					\
}										\
static DEVICE_ATTR_RO(name);

/* VCR flags */
superhyway_ro_attr(perr_flags, "0x%02x\n", vcr.perr_flags);
superhyway_ro_attr(merr_flags, "0x%02x\n", vcr.merr_flags);
superhyway_ro_attr(mod_vers, "0x%04x\n", vcr.mod_vers);
superhyway_ro_attr(mod_id, "0x%04x\n", vcr.mod_id);
superhyway_ro_attr(bot_mb, "0x%02x\n", vcr.bot_mb);
superhyway_ro_attr(top_mb, "0x%02x\n", vcr.top_mb);

/* Misc */
superhyway_ro_attr(resource, "0x%08lx\n", resource[0].start);
/* All attributes generated above, exposed on every superhyway device. */
static struct attribute *superhyway_dev_attrs[] = {
	&dev_attr_perr_flags.attr,
	&dev_attr_merr_flags.attr,
	&dev_attr_mod_vers.attr,
	&dev_attr_mod_id.attr,
	&dev_attr_bot_mb.attr,
	&dev_attr_top_mb.attr,
	&dev_attr_resource.attr,
	NULL,
};

static const struct attribute_group superhyway_dev_group = {
	.attrs = superhyway_dev_attrs,
};

/* Non-static: consumed outside this file by the bus code. */
const struct attribute_group *superhyway_dev_groups[] = {
	&superhyway_dev_group,
	NULL,
};
| {
"pile_set_name": "Github"
} |
/// @ref gtx_vector_angle
/// @file glm/gtx/vector_angle.hpp
///
/// @see core (dependence)
/// @see gtx_quaternion (dependence)
/// @see gtx_epsilon (dependence)
///
/// @defgroup gtx_vector_angle GLM_GTX_vector_angle
/// @ingroup gtx
///
/// @brief Compute angle between vectors
///
/// <glm/gtx/vector_angle.hpp> needs to be included to use these functionalities.
#pragma once
// Dependency:
#include "../glm.hpp"
#include "../gtc/epsilon.hpp"
#include "../gtx/quaternion.hpp"
#include "../gtx/rotate_vector.hpp"
#if GLM_MESSAGES == GLM_MESSAGES_ENABLED && !defined(GLM_EXT_INCLUDED)
# pragma message("GLM: GLM_GTX_vector_angle extension included")
#endif
namespace glm
{
	/// @addtogroup gtx_vector_angle
	/// @{

	// Declarations only; the definitions live in "vector_angle.inl",
	// which this header includes at the bottom.

	//! Returns the absolute angle between two vectors.
	//! Parameters need to be normalized.
	/// @see gtx_vector_angle extension.
	template <typename vecType>
	GLM_FUNC_DECL typename vecType::value_type angle(
		vecType const & x,
		vecType const & y);

	//! Returns the oriented angle between two 2d vectors.
	//! Parameters need to be normalized.
	/// @see gtx_vector_angle extension.
	template <typename T, precision P>
	GLM_FUNC_DECL T orientedAngle(
		tvec2<T, P> const & x,
		tvec2<T, P> const & y);

	//! Returns the oriented angle between two 3d vectors based from a reference axis.
	//! Parameters need to be normalized.
	/// @see gtx_vector_angle extension.
	template <typename T, precision P>
	GLM_FUNC_DECL T orientedAngle(
		tvec3<T, P> const & x,
		tvec3<T, P> const & y,
		tvec3<T, P> const & ref);

	/// @}
}// namespace glm
#include "vector_angle.inl"
| {
"pile_set_name": "Github"
} |
{
"citation": "@misc{\n author={Karpathy, Andrej},\n title={char-rnn},\n year={2015},\n howpublished={\\url{https://github.com/karpathy/char-rnn}}\n}",
"description": "40,000 lines of Shakespeare from a variety of Shakespeare's plays. Featured in Andrej Karpathy's blog post 'The Unreasonable Effectiveness of Recurrent Neural Networks': http://karpathy.github.io/2015/05/21/rnn-effectiveness/.\n\nTo use for e.g. character modelling:\n```\nd = tfds.load(name='tiny_shakespeare')['train']\nd = d.map(lambda x: tf.strings.unicode_split(x['text'], 'UTF-8'))\n# train split includes vocabulary for other splits\nvocabulary = sorted(set(next(iter(d)).numpy()))\nd = d.map(lambda x: {'cur_char': x[:-1], 'next_char': x[1:]})\nd = d.unbatch()\nseq_len = 100\nbatch_size = 2\nd = d.batch(seq_len)\nd = d.batch(batch_size)\n```",
"location": {
"urls": [
"https://github.com/karpathy/char-rnn/blob/master/data/tinyshakespeare/input.txt"
]
},
"name": "tiny_shakespeare",
"schema": {
"feature": [
{
"name": "text",
"type": "BYTES"
}
]
},
"splits": [
{
"name": "test",
"numBytes": "55796",
"numShards": "1",
"shardLengths": [
"1"
],
"statistics": {
"features": [
{
"bytesStats": {
"commonStats": {
"numNonMissing": "1"
}
},
"name": "text",
"type": "BYTES"
}
],
"numExamples": "1"
}
},
{
"name": "train",
"numBytes": "1003880",
"numShards": "1",
"shardLengths": [
"1"
],
"statistics": {
"features": [
{
"bytesStats": {
"commonStats": {
"numNonMissing": "1"
}
},
"name": "text",
"type": "BYTES"
}
],
"numExamples": "1"
}
},
{
"name": "validation",
"numBytes": "55796",
"numShards": "1",
"shardLengths": [
"1"
],
"statistics": {
"features": [
{
"bytesStats": {
"commonStats": {
"numNonMissing": "1"
}
},
"name": "text",
"type": "BYTES"
}
],
"numExamples": "1"
}
}
],
"version": "1.0.0"
} | {
"pile_set_name": "Github"
} |
/*******************************************************************************
* Copyright (c) 2016 QNX Software Systems and others.
*
* This program and the accompanying materials
* are made available under the terms of the Eclipse Public License 2.0
* which accompanies this distribution, and is available at
* https://www.eclipse.org/legal/epl-2.0/
*
* SPDX-License-Identifier: EPL-2.0
*******************************************************************************/
package org.eclipse.cdt.make.internal.ui;
import java.util.HashSet;
import java.util.Set;
import org.eclipse.cdt.core.model.ICContainer;
import org.eclipse.cdt.make.core.IMakeTarget;
import org.eclipse.cdt.make.core.IMakeTargetListener;
import org.eclipse.cdt.make.core.MakeCorePlugin;
import org.eclipse.cdt.make.core.MakeTargetEvent;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.jface.viewers.ITreeContentProvider;
import org.eclipse.jface.viewers.StructuredViewer;
import org.eclipse.jface.viewers.Viewer;
/**
 * Tree content provider that contributes a "make targets" container node
 * (with its {@link IMakeTarget} children) underneath any workspace container
 * that has make targets defined. It also listens for target add/remove
 * events and refreshes the affected containers in the viewer.
 */
public class MakeNavContentProvider implements ITreeContentProvider, IMakeTargetListener {

	private StructuredViewer viewer;

	public MakeNavContentProvider() {
		// Register for make-target change notifications for the lifetime of
		// this provider; see dispose() for the matching removal.
		MakeCorePlugin.getDefault().getTargetManager().addListener(this);
	}

	@Override
	public void dispose() {
		// NOTE(review): the original never unregistered, leaking this
		// provider (and its viewer reference) through the target manager's
		// listener list for the lifetime of the plug-in.
		MakeCorePlugin.getDefault().getTargetManager().removeListener(this);
	}

	@Override
	public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
		this.viewer = (StructuredViewer) viewer;
	}

	@Override
	public Object[] getElements(Object inputElement) {
		// We're not a root provider so this won't get called.
		return null;
	}

	@Override
	public Object[] getChildren(Object parentElement) {
		if (parentElement instanceof IContainer) {
			return getContainer((IContainer) parentElement);
		} else if (parentElement instanceof ICContainer) {
			IContainer container = ((ICContainer) parentElement).getResource();
			return getContainer(container);
		} else if (parentElement instanceof MakeTargetsContainer) {
			return ((MakeTargetsContainer) parentElement).getTargets();
		}
		return new Object[0];
	}

	/**
	 * Returns a single-element array holding the targets container for
	 * {@code container}, or an empty array when it has no make targets.
	 */
	private MakeTargetsContainer[] getContainer(IContainer container) {
		try {
			IMakeTarget[] targets = MakeCorePlugin.getDefault().getTargetManager().getTargets(container);
			if (targets != null && targets.length > 0) {
				return new MakeTargetsContainer[] { new MakeTargetsContainer(container, targets) };
			}
		} catch (CoreException e) {
			MakeUIPlugin.log(e.getStatus());
		}
		return new MakeTargetsContainer[0];
	}

	@Override
	public Object getParent(Object element) {
		// Parent resolution is left to the common navigator framework.
		return null;
	}

	@Override
	public boolean hasChildren(Object element) {
		// Containers are reported optimistically; getChildren() does the
		// real (potentially expensive) target lookup on expansion.
		return element instanceof IContainer || element instanceof ICContainer
				|| element instanceof MakeTargetsContainer;
	}

	@Override
	public void targetChanged(MakeTargetEvent event) {
		if (viewer == null || viewer.getControl().isDisposed()) {
			return;
		}
		switch (event.getType()) {
		case MakeTargetEvent.TARGET_ADD:
		case MakeTargetEvent.TARGET_REMOVED:
			// Collect the distinct parent containers and refresh them on the
			// UI thread so the targets node appears/disappears.
			Set<Object> elements = new HashSet<>();
			for (IMakeTarget target : event.getTargets()) {
				IContainer container = target.getContainer();
				elements.add(container);
			}
			if (!elements.isEmpty()) {
				viewer.getControl().getDisplay().asyncExec(() -> {
					for (Object element : elements) {
						viewer.refresh(element);
					}
				});
			}
			break;
		default:
			// Other event types (e.g. rename/modify) do not affect the tree shape.
			break;
		}
	}
}
| {
"pile_set_name": "Github"
} |
<?php
/**
 * Edit Site Themes Administration Screen
 *
 * @package WordPress
 * @subpackage Multisite
 * @since 3.1.0
 */

/** Load WordPress Administration Bootstrap */
require_once( dirname( __FILE__ ) . '/admin.php' );

// This screen only exists on multisite installs and requires the
// network-level 'manage_sites' capability.
if ( ! is_multisite() )
	wp_die( __( 'Multisite support is not enabled.' ) );

if ( ! current_user_can( 'manage_sites' ) )
	wp_die( __( 'You do not have sufficient permissions to manage themes for this site.' ) );

// Contextual help shown in the screen's "Help" drop-down.
get_current_screen()->add_help_tab( array(
	'id'      => 'overview',
	'title'   => __('Overview'),
	'content' =>
		'<p>' . __('The menu is for editing information specific to individual sites, particularly if the admin area of a site is unavailable.') . '</p>' .
		'<p>' . __('<strong>Info</strong> - The domain and path are rarely edited as this can cause the site to not work properly. The Registered date and Last Updated date are displayed. Network admins can mark a site as archived, spam, deleted and mature, to remove from public listings or disable.') . '</p>' .
		'<p>' . __('<strong>Users</strong> - This displays the users associated with this site. You can also change their role, reset their password, or remove them from the site. Removing the user from the site does not remove the user from the network.') . '</p>' .
		'<p>' . sprintf( __('<strong>Themes</strong> - This area shows themes that are not already enabled across the network. Enabling a theme in this menu makes it accessible to this site. It does not activate the theme, but allows it to show in the site&#8217;s Appearance menu. To enable a theme for the entire network, see the <a href="%s">Network Themes</a> screen.' ), network_admin_url( 'themes.php' ) ) . '</p>' .
		'<p>' . __('<strong>Settings</strong> - This page shows a list of all settings associated with this site. Some are created by WordPress and others are created by plugins you activate. Note that some fields are grayed out and say Serialized Data. You cannot modify these values due to the way the setting is stored in the database.') . '</p>'
) );

get_current_screen()->set_help_sidebar(
	'<p><strong>' . __('For more information:') . '</strong></p>' .
	'<p>' . __('<a href="http://codex.wordpress.org/Network_Admin_Sites_Screen" target="_blank">Documentation on Site Management</a>') . '</p>' .
	'<p>' . __('<a href="http://wordpress.org/support/forum/multisite/" target="_blank">Support Forums</a>') . '</p>'
);
$wp_list_table = _get_list_table('WP_MS_Themes_List_Table');
$action = $wp_list_table->current_action();

$s = isset($_REQUEST['s']) ? $_REQUEST['s'] : '';

// Clean up request URI from temporary args for screen options/paging uri's to work as expected.
$temp_args = array( 'enabled', 'disabled', 'error' );
$_SERVER['REQUEST_URI'] = remove_query_arg( $temp_args, $_SERVER['REQUEST_URI'] );
$referer = remove_query_arg( $temp_args, wp_get_referer() );

// The blog being edited; all actions below operate on this site.
$id = isset( $_REQUEST['id'] ) ? intval( $_REQUEST['id'] ) : 0;
if ( ! $id )
	wp_die( __('Invalid site ID.') );

$wp_list_table->prepare_items();

$details = get_blog_details( $id );
if ( !can_edit_network( $details->site_id ) )
	wp_die( __( 'You do not have permission to access this page.' ) );

$is_main_site = is_main_site( $id );

if ( $action ) {
	// All enable/disable actions mutate the per-site 'allowedthemes'
	// option, so run them in the context of the target blog.
	switch_to_blog( $id );
	$allowed_themes = get_option( 'allowedthemes' );

	switch ( $action ) {
		case 'enable':
			check_admin_referer( 'enable-theme_' . $_GET['theme'] );
			$theme = $_GET['theme'];
			$action = 'enabled';
			$n = 1;
			if ( !$allowed_themes )
				$allowed_themes = array( $theme => true );
			else
				$allowed_themes[$theme] = true;
			break;
		case 'disable':
			check_admin_referer( 'disable-theme_' . $_GET['theme'] );
			$theme = $_GET['theme'];
			$action = 'disabled';
			$n = 1;
			if ( !$allowed_themes )
				$allowed_themes = array();
			else
				unset( $allowed_themes[$theme] );
			break;
		case 'enable-selected':
			check_admin_referer( 'bulk-themes' );
			if ( isset( $_POST['checked'] ) ) {
				$themes = (array) $_POST['checked'];
				$action = 'enabled';
				$n = count( $themes );
				foreach( (array) $themes as $theme )
					$allowed_themes[ $theme ] = true;
			} else {
				// No checkboxes selected: surface an error instead of a count.
				$action = 'error';
				$n = 'none';
			}
			break;
		case 'disable-selected':
			check_admin_referer( 'bulk-themes' );
			if ( isset( $_POST['checked'] ) ) {
				$themes = (array) $_POST['checked'];
				$action = 'disabled';
				$n = count( $themes );
				foreach( (array) $themes as $theme )
					unset( $allowed_themes[ $theme ] );
			} else {
				$action = 'error';
				$n = 'none';
			}
			break;
	}

	update_option( 'allowedthemes', $allowed_themes );
	restore_current_blog();

	// Redirect back with the result count so the notices below can render.
	wp_safe_redirect( add_query_arg( array( 'id' => $id, $action => $n ), $referer ) );
	exit;
}

if ( isset( $_GET['action'] ) && 'update-site' == $_GET['action'] ) {
	wp_safe_redirect( $referer );
	exit();
}
add_thickbox();

add_screen_option( 'per_page', array( 'label' => _x( 'Themes', 'themes per page (screen options)' ) ) );

// Page chrome: title, tab navigation shared by all site-* screens.
$site_url_no_http = preg_replace( '#^http(s)?://#', '', get_blogaddress_by_id( $id ) );
$title_site_url_linked = sprintf( __('Edit Site: <a href="%1$s">%2$s</a>'), get_blogaddress_by_id( $id ), $site_url_no_http );
$title = sprintf( __('Edit Site: %s'), $site_url_no_http );

$parent_file = 'sites.php';
$submenu_file = 'sites.php';

require( ABSPATH . 'wp-admin/admin-header.php' ); ?>

<div class="wrap">
<h2 id="edit-site"><?php echo $title_site_url_linked ?></h2>
<h3 class="nav-tab-wrapper">
<?php
$tabs = array(
	'site-info'     => array( 'label' => __( 'Info' ),     'url' => 'site-info.php'     ),
	'site-users'    => array( 'label' => __( 'Users' ),    'url' => 'site-users.php'    ),
	'site-themes'   => array( 'label' => __( 'Themes' ),   'url' => 'site-themes.php'   ),
	'site-settings' => array( 'label' => __( 'Settings' ), 'url' => 'site-settings.php' ),
);
foreach ( $tabs as $tab_id => $tab ) {
	$class = ( $tab['url'] == $pagenow ) ? ' nav-tab-active' : '';
	echo '<a href="' . $tab['url'] . '?id=' . $id .'" class="nav-tab' . $class . '">' . esc_html( $tab['label'] ) . '</a>';
}
?>
</h3><?php

// Result notices populated by the redirect from the action handler above.
if ( isset( $_GET['enabled'] ) ) {
	$_GET['enabled'] = absint( $_GET['enabled'] );
	echo '<div id="message" class="updated"><p>' . sprintf( _n( 'Theme enabled.', '%s themes enabled.', $_GET['enabled'] ), number_format_i18n( $_GET['enabled'] ) ) . '</p></div>';
} elseif ( isset( $_GET['disabled'] ) ) {
	$_GET['disabled'] = absint( $_GET['disabled'] );
	echo '<div id="message" class="updated"><p>' . sprintf( _n( 'Theme disabled.', '%s themes disabled.', $_GET['disabled'] ), number_format_i18n( $_GET['disabled'] ) ) . '</p></div>';
} elseif ( isset( $_GET['error'] ) && 'none' == $_GET['error'] ) {
	echo '<div id="message" class="error"><p>' . __( 'No theme selected.' ) . '</p></div>';
} ?>

<p><?php _e( 'Network enabled themes are not shown on this screen.' ) ?></p>

<form method="get" action="">
<?php $wp_list_table->search_box( __( 'Search Installed Themes' ), 'theme' ); ?>
<input type="hidden" name="id" value="<?php echo esc_attr( $id ) ?>" />
</form>

<?php $wp_list_table->views(); ?>

<form method="post" action="site-themes.php?action=update-site">
	<input type="hidden" name="id" value="<?php echo esc_attr( $id ) ?>" />

<?php $wp_list_table->display(); ?>

</form>

</div>
<?php include(ABSPATH . 'wp-admin/admin-footer.php'); ?>
| {
"pile_set_name": "Github"
} |
#include "ncmcrypt.h"
#include "aes.h"
#include "base64.h"
#include "cJSON.h"
#include <libtag/mpeg/mpegfile.h>
#include <flacfile.h>
#include <attachedpictureframe.h>
#include <id3v2tag.h>
#include <tag.h>
#include <stdexcept>
#include <string>
#include "io_bunnyblue_droidncm_dump_NcmDumper.h"
// AES-128 key used to decrypt the per-file stream-cipher key block ("core key").
const unsigned char NeteaseCrypt::sCoreKey[17] = {0x68, 0x7A, 0x48, 0x52, 0x41, 0x6D, 0x73, 0x6F, 0x35, 0x6B, 0x49, 0x6E, 0x62, 0x61, 0x78, 0x57, 0};
// AES-128 key used to decrypt the metadata ("modify") block.
const unsigned char NeteaseCrypt::sModifyKey[17] = {0x23, 0x31, 0x34, 0x6C, 0x6A, 0x6B, 0x5F, 0x21, 0x5C, 0x5D, 0x26, 0x30, 0x55, 0x3C, 0x27, 0x28, 0};
// 8-byte PNG file signature; used by mimeType() to sniff embedded cover art.
const unsigned char NeteaseCrypt::mPng[8] = {0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A};
// AES-128-ECB decrypt `src` into `dst`, stripping the PKCS#7-style padding
// carried by the last 16-byte block.
//
// Fixes over the original:
//  * an input shorter than one AES block no longer reads past `src`;
//  * the pad byte is handled as unsigned -- a signed `char` pad > 127 made
//    `16 - pad` exceed the 16-byte `out` buffer (out-of-bounds read).
static void aesEcbDecrypt(const unsigned char *key, std::string &src, std::string &dst)
{
	dst.clear();

	// Number of complete 16-byte AES blocks in the input.
	size_t n = src.length() >> 4;
	if (n == 0)
	{
		return;
	}

	AES aes(key);
	unsigned char out[16];

	size_t i;
	for (i = 0; i + 1 < n; i++)
	{
		aes.decrypt((unsigned char *)src.c_str() + (i << 4), out);
		dst += std::string((char *)out, 16);
	}

	// Last block: final byte is the pad length.
	aes.decrypt((unsigned char *)src.c_str() + (i << 4), out);
	unsigned char pad = out[15];
	if (pad > 16)
	{
		// Corrupt padding: keep the whole block rather than over-trimming.
		pad = 0;
	}
	dst += std::string((char *)out, 16 - pad);
}
// Replace every occurrence of `from` in `str` with `to`, in place.
// The scan cursor is advanced past each inserted `to`, so a replacement
// that contains `from` (e.g. "x" -> "yx") cannot loop forever.
static void replace(std::string &str, const std::string &from, const std::string &to)
{
	if (from.empty())
		return;

	size_t pos = str.find(from);
	while (pos != std::string::npos)
	{
		str.replace(pos, from.length(), to);
		pos = str.find(from, pos + to.length());
	}
}
// Return the file-name component of `str` (after the last '/' or '\\')
// with its final extension removed.  A path with no separator is treated
// as a bare file name; a name with no '.' is returned unchanged.
static std::string fileNameWithoutExt(const std::string &str)
{
	const size_t sep = str.find_last_of("/\\");
	// npos + 1 wraps to 0, so this also handles "no separator" uniformly.
	const std::string name = str.substr(sep + 1);
	return name.substr(0, name.find_last_of('.'));
}
// Release the owned cJSON tree.  cJSON_Delete() is NULL-safe, so this is
// fine for a null tree -- provided mRaw was actually initialised
// (NOTE(review): see the constructor's early-return path; confirm the
// header gives mRaw an in-class initialiser).
NeteaseMusicMetadata::~NeteaseMusicMetadata()
{
	cJSON_Delete(mRaw);
}
// Parse the decrypted metadata JSON.  Takes ownership of `raw` (freed in
// the destructor).
//
// Fixes over the original:
//  * mRaw is assigned before the early return, so the destructor never
//    sees an uninitialised pointer when raw == NULL;
//  * string values are checked for NULL before constructing std::string
//    (an empty "artist" array previously dereferenced a NULL result).
NeteaseMusicMetadata::NeteaseMusicMetadata(cJSON *raw)
{
	mRaw = raw;
	if (!raw)
	{
		return;
	}

	cJSON *swap;

	swap = cJSON_GetObjectItem(raw, "musicName");
	if (swap)
	{
		const char *s = cJSON_GetStringValue(swap);
		if (s)
			mName = std::string(s);
	}

	swap = cJSON_GetObjectItem(raw, "album");
	if (swap)
	{
		const char *s = cJSON_GetStringValue(swap);
		if (s)
			mAlbum = std::string(s);
	}

	swap = cJSON_GetObjectItem(raw, "artist");
	if (swap)
	{
		// "artist" is an array of [name, id] pairs; join the names with ','.
		int artistLen = cJSON_GetArraySize(swap);
		for (int i = 0; i < artistLen; i++)
		{
			const char *s = cJSON_GetStringValue(cJSON_GetArrayItem(cJSON_GetArrayItem(swap, i), 0));
			if (!s)
				continue;
			if (!mArtist.empty())
				mArtist += ",";
			mArtist += s;
		}
	}

	swap = cJSON_GetObjectItem(raw, "bitrate");
	if (swap)
	{
		mBitrate = swap->valueint;
	}

	swap = cJSON_GetObjectItem(raw, "duration");
	if (swap)
	{
		mDuration = swap->valueint;
	}

	swap = cJSON_GetObjectItem(raw, "format");
	if (swap)
	{
		const char *s = cJSON_GetStringValue(swap);
		if (s)
			mFormat = std::string(s);
	}
}
// Open `path` for binary reading; returns true only when the stream is
// actually open.
//
// Fix: std::ifstream::open() does not throw on failure (no exception mask
// is set on mFile), so the original try/catch always returned true even
// when the file could not be opened.  Check is_open() instead.
bool NeteaseCrypt::openFile(std::string const &path)
{
	mFile.open(path, std::ios::in | std::ios::binary);
	return mFile.is_open();
}
// Check the 8-byte NCM container magic "CTENFDAM" (bytes 43 54 45 4E
// 46 44 41 4D on disk), read as two little-endian 32-bit words.
bool NeteaseCrypt::isNcmFile()
{
	unsigned int magic = 0;

	// First word: "CTEN".
	mFile.read(reinterpret_cast<char *>(&magic), sizeof(magic));
	if (magic != (unsigned int)0x4e455443)
		return false;

	// Second word: "FDAM".
	mFile.read(reinterpret_cast<char *>(&magic), sizeof(magic));
	return magic == (unsigned int)0x4d414446;
}
// Read up to `n` bytes from the backing stream into `s` and return the
// number of bytes actually read.  Throws when nothing could be read
// (error or end of file).
//
// NOTE(review): because a 0-byte read is treated as an error, callers
// that loop on !mFile.eof() (see Dump()) would throw if the payload size
// is an exact multiple of the buffer size -- confirm whether that is the
// intended error path.
int NeteaseCrypt::read(char *s, std::streamsize n)
{
	mFile.read(s, n);

	int gcount = mFile.gcount();
	if (gcount <= 0)
	{
		throw std::invalid_argument("can't read file");
	}

	return gcount;
}
// RC4-style key scheduling (KSA): initialise mKeyBox with the identity
// permutation, then swap entries driven by the key bytes.  The extra
// `prev` (last swapped index) term is Netease's variant of the classic
// algorithm and must be preserved exactly for decryption to work.
void NeteaseCrypt::buildKeyBox(unsigned char *key, int keyLen)
{
	for (int idx = 0; idx < 256; ++idx)
	{
		mKeyBox[idx] = (unsigned char)idx;
	}

	unsigned char prev = 0;
	unsigned char keyPos = 0;

	for (int idx = 0; idx < 256; ++idx)
	{
		unsigned char tmp = mKeyBox[idx];
		unsigned char target = (unsigned char)((tmp + prev + key[keyPos++]) & 0xff);
		if (keyPos >= keyLen)
			keyPos = 0;
		mKeyBox[idx] = mKeyBox[target];
		mKeyBox[target] = tmp;
		prev = target;
	}
}
// Best-effort MIME sniffing of the embedded cover image: "image/png"
// when the data starts with the 8-byte PNG signature, "image/jpeg"
// otherwise.
//
// Fix: guard the length before memcmp -- comparing 8 bytes against a
// shorter buffer read past the end of the string's storage.
std::string NeteaseCrypt::mimeType(std::string &data)
{
	if (data.length() >= 8 && memcmp(data.c_str(), mPng, 8) == 0)
	{
		return std::string("image/png");
	}

	return std::string("image/jpeg");
}
void NeteaseCrypt::FixMetadata()
{
if (mDumpFilepath.length() <= 0)
{
throw std::invalid_argument("must dump before");
}
TagLib::File *audioFile;
TagLib::Tag *tag;
TagLib::ByteVector vector(mImageData.c_str(), mImageData.length());
if (mFormat == NeteaseCrypt::MP3)
{
audioFile = new TagLib::MPEG::File(mDumpFilepath.c_str());
tag = dynamic_cast<TagLib::MPEG::File *>(audioFile)->ID3v2Tag(true);
if (mImageData.length() > 0)
{
TagLib::ID3v2::AttachedPictureFrame *frame = new TagLib::ID3v2::AttachedPictureFrame;
frame->setMimeType(mimeType(mImageData));
frame->setPicture(vector);
dynamic_cast<TagLib::ID3v2::Tag *>(tag)->addFrame(frame);
}
}
else if (mFormat == NeteaseCrypt::FLAC)
{
audioFile = new TagLib::FLAC::File(mDumpFilepath.c_str());
tag = audioFile->tag();
if (mImageData.length() > 0)
{
TagLib::FLAC::Picture *cover = new TagLib::FLAC::Picture;
cover->setMimeType(mimeType(mImageData));
cover->setType(TagLib::FLAC::Picture::FrontCover);
cover->setData(vector);
dynamic_cast<TagLib::FLAC::File *>(audioFile)->addPicture(cover);
}
}
if (mMetaData != NULL)
{
tag->setTitle(TagLib::String(mMetaData->name(), TagLib::String::UTF8));
tag->setArtist(TagLib::String(mMetaData->artist(), TagLib::String::UTF8));
tag->setAlbum(TagLib::String(mMetaData->album(), TagLib::String::UTF8));
}
//tag->setComment(TagLib::String("Create by netease copyright protected dump tool. author 5L", TagLib::String::UTF8));
tag->setComment(TagLib::String(m163key, TagLib::String::UTF8));
bool status=audioFile->save();
if (!status)
{
LOGE("save file failure");
}
}
// Sanitise a string for use as a file name by mapping characters that are
// illegal in file names to their full-width Unicode equivalents.
//
// Fix: the string literals in the dumped source were mojibake-damaged
// (the `"\\" -> "\"` line was even syntactically invalid); the full-width
// replacement characters are restored here, matching upstream ncmdump.
// NOTE(review): confirm against the original repository's literals.
void filterAllCase(std::string &str)
{
	replace(str, "\\", "\uFF3C"); // ＼
	replace(str, "/", "\uFF0F");  // ／
	replace(str, "?", "\uFF1F");  // ？
	replace(str, ":", "\uFF1A");  // ：
	replace(str, "*", "\uFF0A");  // ＊
	replace(str, "\"", "\uFF02"); // ＂
	replace(str, "<", "\uFF1C");  // ＜
	replace(str, ">", "\uFF1E");  // ＞
	replace(str, "|", "\uFF5C");  // ｜
}
// fix https://github.com/bunnyblueair/DroidNCM/issues/23
// Same sanitising as filterAllCase(), except '/' between artists becomes a
// plain ',' separator instead of a full-width slash.
//
// Fix: restores the mojibake-damaged full-width literals (see
// filterAllCase()).  NOTE(review): confirm against upstream.
void filterAllCaseForArtist(std::string &str)
{
	replace(str, "\\", "\uFF3C"); // ＼
	replace(str, "/", ",");
	replace(str, "?", "\uFF1F");  // ？
	replace(str, ":", "\uFF1A");  // ：
	replace(str, "*", "\uFF0A");  // ＊
	replace(str, "\"", "\uFF02"); // ＂
	replace(str, "<", "\uFF1C");  // ＜
	replace(str, ">", "\uFF1E");  // ＞
	replace(str, "|", "\uFF5C");  // ｜
}
void NeteaseCrypt::Dump()
{
int n, i;
char targetPath[1024] = {'\0'};
std::string metaName = std::string(mMetaData->name().c_str());
std::string albumName = std::string(mMetaData->album().c_str());
std::string artist = std::string(mMetaData->artist());
//LOGE("albumName.() %s",artist.c_str());
filterAllCase(metaName);
filterAllCase(albumName);
filterAllCaseForArtist(artist);
sprintf(targetPath, "/sdcard/Music/%s - %s", artist.c_str(), metaName.c_str());
mDumpFilepath = std::string(targetPath);
n = 0x8000;
i = 0;
unsigned char buffer[n];
std::ofstream output;
while (!mFile.eof())
{
n = read((char *)buffer, n);
for (i = 0; i < n; i++)
{
int j = (i + 1) & 0xff;
buffer[i] ^= mKeyBox[(mKeyBox[j] + mKeyBox[(mKeyBox[j] + j) & 0xff]) & 0xff];
}
if (!output.is_open())
{
// identify format
// ID3 format mp3
if (buffer[0] == 0x49 && buffer[1] == 0x44 && buffer[2] == 0x33)
{
mDumpFilepath += ".mp3";
mFormat = NeteaseCrypt::MP3;
}
else
{
mDumpFilepath += ".flac";
mFormat = NeteaseCrypt::FLAC;
}
output.open(mDumpFilepath, output.out | output.binary);
}
output.write((char *)buffer, n);
}
output.flush();
output.close();
}
// Release the parsed metadata (delete on NULL would also be safe, so the
// guard is belt-and-braces) and close the input stream.
NeteaseCrypt::~NeteaseCrypt()
{
	if (mMetaData != NULL)
	{
		delete mMetaData;
	}

	mFile.close();
}
// Parse an NCM container: magic, encrypted key block, optional metadata
// block, CRC/skip bytes, optional cover image.  Throws
// std::invalid_argument on any structural error.
//
// Fixes over the original:
//  * the malloc'd `imageData` buffer was never freed (leak) -- now a
//    std::string owns the bytes;
//  * the stack VLAs sized by untrusted lengths read from the file are
//    replaced with heap-backed std::string buffers;
//  * the metadata block length is validated before the fixed 22-byte and
//    6-byte prefix skips (previously unsigned wrap / out-of-range UB on a
//    corrupt file);
//  * "infomation" typos in the warning messages corrected.
NeteaseCrypt::NeteaseCrypt(std::string const &path)
{
	if (!openFile(path))
	{
		throw std::invalid_argument("can't open file");
	}

	if (!isNcmFile())
	{
		throw std::invalid_argument("not netease protected file");
	}

	// Skip the 2 unused bytes that follow the 8-byte magic.
	if (!mFile.seekg(2, mFile.cur))
	{
		throw std::invalid_argument("can't seek file");
	}

	mFilepath = path;

	unsigned int n;

	// ---- Encrypted key block -------------------------------------------
	read(reinterpret_cast<char *>(&n), sizeof(n));
	if (n == 0)
	{
		throw std::invalid_argument("broken ncm file");
	}

	std::string keydata(n, '\0');
	read(&keydata[0], n);

	// The key block is XOR-obfuscated with 0x64 before the AES layer.
	for (unsigned int i = 0; i < n; i++)
	{
		keydata[i] ^= 0x64;
	}

	std::string mKeyData;
	aesEcbDecrypt(sCoreKey, keydata, mKeyData);

	// Skip the fixed 17-byte prefix of the decrypted key material.
	buildKeyBox((unsigned char *)mKeyData.c_str() + 17, mKeyData.length() - 17);

	// ---- Metadata block (optional) -------------------------------------
	read(reinterpret_cast<char *>(&n), sizeof(n));
	if (n == 0)
	{
		printf("[Warn] `%s` missing metadata information can't fix some information!\n", path.c_str());
		mMetaData = NULL;
	}
	else
	{
		std::string modifyData(n, '\0');
		read(&modifyData[0], n);

		// Metadata is XOR-obfuscated with 0x63.
		for (unsigned int i = 0; i < n; i++)
		{
			modifyData[i] ^= 0x63;
		}

		// Keep the full "163 key(Don't modify):..." blob so FixMetadata()
		// can embed it as a comment tag.
		// (patch from https://github.com/anonymous5l/ncmdump/pull/44/files)
		m163key = modifyData;

		if (modifyData.length() <= 22)
		{
			throw std::invalid_argument("broken ncm file");
		}

		// Skip the `163 key(Don't modify):` prefix, base64-decode, then
		// AES-decrypt the remainder.
		std::string swapModifyData = modifyData.substr(22);
		std::string modifyOutData;
		std::string modifyDecryptData;
		Base64::Decode(swapModifyData, modifyOutData);
		aesEcbDecrypt(sModifyKey, modifyOutData, modifyDecryptData);

		if (modifyDecryptData.length() < 6)
		{
			throw std::invalid_argument("broken ncm file");
		}

		// Skip the `music:` prefix before parsing the JSON payload.
		modifyDecryptData = std::string(modifyDecryptData.begin() + 6, modifyDecryptData.end());
		mMetaData = new NeteaseMusicMetadata(cJSON_Parse(modifyDecryptData.c_str()));
	}

	// Skip crc32 & unused charset bytes (9 bytes total).
	if (!mFile.seekg(9, mFile.cur))
	{
		throw std::invalid_argument("can't seek file");
	}

	// ---- Cover image (optional) ----------------------------------------
	read(reinterpret_cast<char *>(&n), sizeof(n));
	if (n > 0)
	{
		std::string imageData(n, '\0');
		read(&imageData[0], n);
		mImageData = imageData;
	}
	else
	{
		printf("[Warn] `%s` missing album can't fix album image!\n", path.c_str());
	}
}
| {
"pile_set_name": "Github"
} |
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.rxjava3.internal.operators.maybe;
import static org.junit.Assert.*;
import java.util.*;
import java.util.concurrent.TimeUnit;
import org.junit.Test;
import io.reactivex.rxjava3.core.*;
import io.reactivex.rxjava3.core.Observer;
import io.reactivex.rxjava3.disposables.Disposable;
import io.reactivex.rxjava3.exceptions.TestException;
import io.reactivex.rxjava3.functions.Function;
import io.reactivex.rxjava3.internal.fuseable.*;
import io.reactivex.rxjava3.internal.util.CrashingIterable;
import io.reactivex.rxjava3.schedulers.Schedulers;
import io.reactivex.rxjava3.testsupport.*;
/**
 * Tests for {@code Maybe.flattenAsObservable(Function)}: mapping the success
 * value of a Maybe to an Iterable whose items are then streamed as an
 * Observable, including fusion, cancellation, crash and backpressure-free
 * async scenarios.
 */
public class MaybeFlatMapIterableObservableTest extends RxJavaTest {

    @Test
    public void normal() {
        Maybe.just(1).flattenAsObservable(new Function<Integer, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Integer v) throws Exception {
                return Arrays.asList(v, v + 1);
            }
        })
        .test()
        .assertResult(1, 2);
    }

    @Test
    public void emptyIterable() {
        Maybe.just(1).flattenAsObservable(new Function<Integer, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Integer v) throws Exception {
                return Collections.<Integer>emptyList();
            }
        })
        .test()
        .assertResult();
    }

    @Test
    public void error() {
        Maybe.<Integer>error(new TestException()).flattenAsObservable(new Function<Integer, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Integer v) throws Exception {
                return Arrays.asList(v, v + 1);
            }
        })
        .test()
        .assertFailure(TestException.class);
    }

    @Test
    public void empty() {
        Maybe.<Integer>empty().flattenAsObservable(new Function<Integer, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Integer v) throws Exception {
                return Arrays.asList(v, v + 1);
            }
        })
        .test()
        .assertResult();
    }

    @Test
    public void take() {
        Maybe.just(1).flattenAsObservable(new Function<Integer, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Integer v) throws Exception {
                return Arrays.asList(v, v + 1);
            }
        })
        .take(1)
        .test()
        .assertResult(1);
    }

    @Test
    public void fused() {
        TestObserverEx<Integer> to = new TestObserverEx<>(QueueFuseable.ANY);

        Maybe.just(1).flattenAsObservable(new Function<Integer, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Integer v) throws Exception {
                return Arrays.asList(v, v + 1);
            }
        })
        .subscribe(to);

        to.assertFuseable()
        .assertFusionMode(QueueFuseable.ASYNC)
        .assertResult(1, 2);
    }

    @Test
    public void fusedNoSync() {
        // The operator only supports ASYNC fusion; requesting SYNC must
        // degrade to NONE while still delivering all items.
        TestObserverEx<Integer> to = new TestObserverEx<>(QueueFuseable.SYNC);

        Maybe.just(1).flattenAsObservable(new Function<Integer, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Integer v) throws Exception {
                return Arrays.asList(v, v + 1);
            }
        })
        .subscribe(to);

        to.assertFuseable()
        .assertFusionMode(QueueFuseable.NONE)
        .assertResult(1, 2);
    }

    @Test
    public void iteratorCrash() {
        Maybe.just(1).flattenAsObservable(new Function<Integer, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Integer v) throws Exception {
                return new CrashingIterable(1, 100, 100);
            }
        })
        .to(TestHelper.<Integer>testConsumer())
        .assertFailureAndMessage(TestException.class, "iterator()");
    }

    @Test
    public void hasNextCrash() {
        Maybe.just(1).flattenAsObservable(new Function<Integer, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Integer v) throws Exception {
                return new CrashingIterable(100, 1, 100);
            }
        })
        .to(TestHelper.<Integer>testConsumer())
        .assertFailureAndMessage(TestException.class, "hasNext()");
    }

    @Test
    public void nextCrash() {
        Maybe.just(1).flattenAsObservable(new Function<Integer, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Integer v) throws Exception {
                return new CrashingIterable(100, 100, 1);
            }
        })
        .to(TestHelper.<Integer>testConsumer())
        .assertFailureAndMessage(TestException.class, "next()");
    }

    @Test
    public void hasNextCrash2() {
        Maybe.just(1).flattenAsObservable(new Function<Integer, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Integer v) throws Exception {
                return new CrashingIterable(100, 2, 100);
            }
        })
        .to(TestHelper.<Integer>testConsumer())
        .assertFailureAndMessage(TestException.class, "hasNext()", 0);
    }

    @Test
    public void doubleOnSubscribe() {
        TestHelper.checkDoubleOnSubscribeMaybeToObservable(new Function<Maybe<Object>, ObservableSource<Integer>>() {
            @Override
            public ObservableSource<Integer> apply(Maybe<Object> o) throws Exception {
                return o.flattenAsObservable(new Function<Object, Iterable<Integer>>() {
                    @Override
                    public Iterable<Integer> apply(Object v) throws Exception {
                        return Collections.singleton(1);
                    }
                });
            }
        });
    }

    @Test
    public void dispose() {
        TestHelper.checkDisposed(Maybe.just(1).flattenAsObservable(new Function<Object, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Object v) throws Exception {
                return Collections.singleton(1);
            }
        }));
    }

    @Test
    public void async1() {
        Maybe.just(1)
        .flattenAsObservable(new Function<Object, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Object v) throws Exception {
                Integer[] array = new Integer[1000 * 1000];
                Arrays.fill(array, 1);
                return Arrays.asList(array);
            }
        })
        .hide()
        .observeOn(Schedulers.single())
        .to(TestHelper.<Integer>testConsumer())
        .awaitDone(5, TimeUnit.SECONDS)
        .assertSubscribed()
        .assertValueCount(1000 * 1000)
        .assertNoErrors()
        .assertComplete();
    }

    @Test
    public void async2() {
        Maybe.just(1)
        .flattenAsObservable(new Function<Object, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Object v) throws Exception {
                Integer[] array = new Integer[1000 * 1000];
                Arrays.fill(array, 1);
                return Arrays.asList(array);
            }
        })
        .observeOn(Schedulers.single())
        .to(TestHelper.<Integer>testConsumer())
        .awaitDone(5, TimeUnit.SECONDS)
        .assertSubscribed()
        .assertValueCount(1000 * 1000)
        .assertNoErrors()
        .assertComplete();
    }

    @Test
    public void async3() {
        Maybe.just(1)
        .flattenAsObservable(new Function<Object, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Object v) throws Exception {
                Integer[] array = new Integer[1000 * 1000];
                Arrays.fill(array, 1);
                return Arrays.asList(array);
            }
        })
        .take(500 * 1000)
        .observeOn(Schedulers.single())
        .to(TestHelper.<Integer>testConsumer())
        .awaitDone(5, TimeUnit.SECONDS)
        .assertSubscribed()
        .assertValueCount(500 * 1000)
        .assertNoErrors()
        .assertComplete();
    }

    @Test
    public void async4() {
        Maybe.just(1)
        .flattenAsObservable(new Function<Object, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Object v) throws Exception {
                Integer[] array = new Integer[1000 * 1000];
                Arrays.fill(array, 1);
                return Arrays.asList(array);
            }
        })
        .observeOn(Schedulers.single())
        .take(500 * 1000)
        .to(TestHelper.<Integer>testConsumer())
        .awaitDone(5, TimeUnit.SECONDS)
        .assertSubscribed()
        .assertValueCount(500 * 1000)
        .assertNoErrors()
        .assertComplete();
    }

    @Test
    public void fusedEmptyCheck() {
        Maybe.just(1)
        .flattenAsObservable(new Function<Object, Iterable<Integer>>() {
            @Override
            public Iterable<Integer> apply(Object v) throws Exception {
                return Arrays.asList(1, 2, 3);
            }
        }).subscribe(new Observer<Integer>() {
            QueueDisposable<Integer> qd;

            @SuppressWarnings("unchecked")
            @Override
            public void onSubscribe(Disposable d) {
                qd = (QueueDisposable<Integer>)d;

                assertEquals(QueueFuseable.ASYNC, qd.requestFusion(QueueFuseable.ANY));
            }

            @Override
            public void onNext(Integer value) {
                assertFalse(qd.isEmpty());

                qd.clear();

                assertTrue(qd.isEmpty());

                qd.dispose();
            }

            @Override
            public void onError(Throwable e) {
            }

            @Override
            public void onComplete() {
            }
        });
    }
}
| {
"pile_set_name": "Github"
} |
; RUN: opt < %s -pgo-icall-prom -S -icp-total-percent-threshold=50 | FileCheck %s --check-prefix=ICALL-PROM
; RUN: opt < %s -pgo-icall-prom -S -icp-samplepgo -icp-total-percent-threshold=50 | FileCheck %s --check-prefix=ICALL-PROM
; RUN: opt < %s -passes=pgo-icall-prom -S -icp-total-percent-threshold=50 | FileCheck %s --check-prefix=ICALL-PROM
; RUN: opt < %s -pgo-icall-prom -S -pass-remarks=pgo-icall-prom -icp-remaining-percent-threshold=0 -icp-total-percent-threshold=0 -icp-max-prom=4 2>&1 | FileCheck %s --check-prefix=PASS-REMARK
; RUN: opt < %s -passes=pgo-icall-prom -S -pass-remarks=pgo-icall-prom -icp-remaining-percent-threshold=0 -icp-total-percent-threshold=0 -icp-max-prom=4 2>&1 | FileCheck %s --check-prefix=PASS-REMARK
; RUN: opt < %s -passes=pgo-icall-prom -S -pass-remarks=pgo-icall-prom -icp-remaining-percent-threshold=0 -icp-total-percent-threshold=20 -icp-max-prom=4 2>&1 | FileCheck %s --check-prefix=PASS2-REMARK

; PASS-REMARK: remark: <unknown>:0:0: Promote indirect call to func4 with count 1030 out of 1600
; PASS-REMARK: remark: <unknown>:0:0: Promote indirect call to func2 with count 410 out of 570
; PASS-REMARK: remark: <unknown>:0:0: Promote indirect call to func3 with count 150 out of 160
; PASS-REMARK: remark: <unknown>:0:0: Promote indirect call to func1 with count 10 out of 10

; PASS2-REMARK: remark: <unknown>:0:0: Promote indirect call to func4 with count 1030 out of 1600
; PASS2-REMARK: remark: <unknown>:0:0: Promote indirect call to func2 with count 410 out of 570
; PASS2-REMARK-NOT: remark: <unknown>:0:0: Promote indirect call to func3
; PASS2-REMARK-NOT: remark: <unknown>:0:0: Promote indirect call to func1

target datalayout = "e-m:e-i64:64-f80:128-n8:16:32:64-S128"
target triple = "x86_64-unknown-linux-gnu"

@foo = common global i32 ()* null, align 8

define i32 @func1() {
entry:
  ret i32 0
}

define i32 @func2() {
entry:
  ret i32 1
}

define i32 @func3() {
entry:
  ret i32 2
}

define i32 @func4() {
entry:
  ret i32 3
}

; @bar loads its callee indirectly from @foo; the value-profile metadata !1
; attached to the call drives the promotion decisions verified above.
define i32 @bar() {
entry:
  %tmp = load i32 ()*, i32 ()** @foo, align 8
; ICALL-PROM:   [[CMP:%[0-9]+]] = icmp eq i32 ()* %tmp, @func4
; ICALL-PROM:   br i1 [[CMP]], label %if.true.direct_targ, label %if.false.orig_indirect, !prof [[BRANCH_WEIGHT:![0-9]+]]
; ICALL-PROM: if.true.direct_targ:
; ICALL-PROM:   [[DIRCALL_RET:%[0-9]+]] = call i32 @func4()
; ICALL-PROM-SAMPLEPGO:   call i32 @func4(), !prof [[CALL_METADATA:![0-9]+]]
; ICALL-PROM:   br label %if.end.icp
  %call = call i32 %tmp(), !prof !1
; ICALL-PROM: if.false.orig_indirect:
; ICALL-PROM:   %call = call i32 %tmp(), !prof [[NEW_VP_METADATA:![0-9]+]]
  ret i32 %call
; ICALL-PROM: if.end.icp:
; ICALL-PROM:   [[PHI_RET:%[0-9]+]] = phi i32 [ %call, %if.false.orig_indirect ], [ [[DIRCALL_RET]], %if.true.direct_targ ]
; ICALL-PROM:   ret i32 [[PHI_RET]]
}

!1 = !{!"VP", i32 0, i64 1600, i64 7651369219802541373, i64 1030, i64 -4377547752858689819, i64 410, i64 -6929281286627296573, i64 150, i64 -2545542355363006406, i64 10}
; ICALL-PROM: [[BRANCH_WEIGHT]] = !{!"branch_weights", i32 1030, i32 570}
; ICALL-PROM: [[NEW_VP_METADATA]] = !{!"VP", i32 0, i64 570, i64 -4377547752858689819, i64 410}
; ICALL-PROM-SAMPLEPGO: [[CALL_METADATA]] = !{!"branch_weights", i32 1030}
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: 5b1c87a3b91d3414ca85dca100de04ed
folderAsset: yes
timeCreated: 1426647464
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Locate the page's header element and overwrite its contents to signal that
// this external script file was loaded and executed.
// NOTE(review): throws if no element with id "header" exists -- intentional
// for a test-injection script, presumably; confirm against the host page.
var d = document.getElementById("header")
d.innerHTML = "Script file successfully injected";
| {
"pile_set_name": "Github"
} |
(in-package #:cl-user)
(use-package '#:iterate)
(defun load-log (path)
  "Parse the log file at PATH into a tree of nodes and return the root.
Each log line is: ACTION-CHAR MODE-CHAR ADDRESS, where ACTION is
  #\\> enter a new object (push a child node),
  #\\~ record the current object's new address,
  #\\< leave the current object.
Each node has the shape (ADDRESS NEW-ADDRESS CHILDREN); the root is
named FIRST.  Children are accumulated by PUSH and un-reversed on exit."
  (iter
    ;; Sentinel root node; TREE is a stack of currently-open nodes.
    (with head = (list 'first nil '()))
    (with tree = (list head))
    (for line in-file path using 'read-line)
    (let ((action (char line 0))
          (mode (char line 1))
          ;; Addresses are kept as raw strings; the hex parse is disabled.
          (address (subseq line 2)#+nil(parse-integer line :start 2 :radix 16)))
      ;; NOTE(review): MODE is normalized to "dynamic"/"static" here but never
      ;; used afterwards -- looks like dead code; confirm before removing.
      (case mode
        (#\d (setf mode "dynamic"))
        (#\s (setf mode "static")))
      (case action
        (#\> ;; Enter object.
         (let ((new (list address nil '())))
           (push new (third (first tree)))
           (push new tree)))
        (#\~ ;; New address.
         (setf (second (first tree)) address))
        (#\< ;; Leave object: restore its children to log order.
         (let ((self (pop tree)))
           (setf (third self) (nreverse (third self)))))))
    (finally (dolist (x tree)
               ;; Un-reverse children of nodes still open at end of file.
               (setf (third x) (nreverse (third x))))
             (return head))))
(defun dottify-log (log file)
  "Write LOG (a tree produced by LOAD-LOG) to FILE in Graphviz DOT syntax,
emitting one edge from every node's address to each of its children's
addresses.  FILE is overwritten if it exists."
  (with-open-file (s file :direction :output :if-exists :supersede)
    (labels ((crunch (node)
               ;; Emit edges to direct children, then recurse depth-first.
               (dolist (x (third node))
                 (format s " ~S -> ~S;~%" (first node) (first x)))
               (mapc #'crunch (third node))))
      (format s "digraph arse {~%")
      (crunch log)
      (format s "}~%"))))
| {
"pile_set_name": "Github"
} |
import React from "react";
import { TimerTypes } from "store";
import { StyledSessions, StyledSessionReset } from "styles";
import { SVG } from "components";
// Props for the Sessions counter rendered under the timer.
type Props = {
  // Active timer type; drives the reset button's styling.
  timerType: TimerTypes["timerType"];
  // Round currently in progress.
  round: number;
  // Total rounds that make up a full session.
  sessionRounds: number;
  // Optional click handler for the session-reset button.
  onClick?:
    | ((event: React.MouseEvent<HTMLButtonElement, MouseEvent>) => void)
    | undefined;
};
// Shows "round / total" session progress plus a reset button whose styling
// follows the active timer type.  Memoized to avoid re-rendering every tick.
const Sessions: React.FC<Props> = (props) => {
  const { round, sessionRounds, timerType, onClick } = props;

  return (
    <StyledSessions>
      <span>
        {round} / {sessionRounds}
      </span>
      <span>Sessions</span>
      <StyledSessionReset timerType={timerType} onClick={onClick}>
        <SVG name="refresh" />
      </StyledSessionReset>
    </StyledSessions>
  );
};

export default React.memo(Sessions);
| {
"pile_set_name": "Github"
} |
<?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE index
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp Index Version 1.0//EN"
"http://java.sun.com/products/javahelp/index_2_0.dtd">
<index version="2.0">
<!-- index entries are merged (sorted) into core index -->
<indexitem text="import urls " target="importUrls" />
</index>
| {
"pile_set_name": "Github"
} |
class X:
def stuff(self):
pass
x = X()
f = getattr(x, "stuff")
print f
fu = getattr(X, "stuff")
print fu
| {
"pile_set_name": "Github"
} |
package com.android.mms.transaction;
import java.util.ArrayList;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.provider.Telephony.Mms;
import android.provider.Telephony.Sms;
import android.telephony.PhoneNumberUtils;
import android.telephony.SmsManager;
import android.util.Log;
import com.android.mms.LogTag;
import com.android.mms.MmsConfig;
import com.android.mms.data.Conversation;
import com.android.mms.ui.MessageUtils;
import com.google.android.mms.MmsException;
/**
 * Sends an SMS to exactly one recipient.  Long bodies are split into parts via
 * {@code SmsManager.divideMessage}; every part gets a "sent" PendingIntent and,
 * when requested, the final part also gets a delivery-report PendingIntent.
 */
public class SmsSingleRecipientSender extends SmsMessageSender {

    // Whether the user asked for a delivery report for this message.
    private final boolean mRequestDeliveryReport;
    // Destination address (phone number, email address or alias).
    private String mDest;
    // Uri of this message's row in the SMS content provider.
    private Uri mUri;
    private static final String TAG = LogTag.TAG;

    public SmsSingleRecipientSender(Context context, String dest, String msgText, long threadId,
            boolean requestDeliveryReport, Uri uri) {
        super(context, null, msgText, threadId);
        mRequestDeliveryReport = requestDeliveryReport;
        mDest = dest;
        mUri = uri;
    }

    /**
     * Splits the body, moves the message to the outbox and hands the parts to
     * the platform {@code SmsManager}.
     *
     * @param token opaque token, only logged here
     * @return always {@code false}; send/delivery results arrive
     *         asynchronously through the registered PendingIntents
     * @throws MmsException if the body is null, splitting yields no parts,
     *         the outbox move fails, or {@code SmsManager} throws
     */
    public boolean sendMessage(long token) throws MmsException {
        if (LogTag.DEBUG_SEND) {
            Log.v(TAG, "sendMessage token: " + token);
        }
        // NOTE(review): only the null-body half of this comment is enforced
        // here; the single-destination claim is not checked in this method.
        if (mMessageText == null) {
            // Don't try to send an empty message, and destination should be just
            // one.
            throw new MmsException("Null message body or have multiple destinations.");
        }

        SmsManager smsManager = SmsManager.getDefault();
        ArrayList<String> messages = null;
        if ((MmsConfig.getEmailGateway() != null) &&
                (Mms.isEmailAddress(mDest) || MessageUtils.isAlias(mDest))) {
            // Email/alias destinations are routed through the carrier's email
            // gateway: the real address is prepended to the message body.
            String msgText;
            msgText = mDest + " " + mMessageText;
            mDest = MmsConfig.getEmailGateway();
            messages = smsManager.divideMessage(msgText);
        } else {
            messages = smsManager.divideMessage(mMessageText);
            // remove spaces and dashes from destination number
            // (e.g. "801 555 1212" -> "8015551212")
            // (e.g. "+8211-123-4567" -> "+82111234567")
            mDest = PhoneNumberUtils.stripSeparators(mDest);
            mDest = Conversation.verifySingleRecipient(mContext, mThreadId, mDest);
        }
        int messageCount = messages.size();

        if (messageCount == 0) {
            // Don't try to send an empty message.
            throw new MmsException("SmsMessageSender.sendMessage: divideMessage returned " +
                    "empty messages. Original message is \"" + mMessageText + "\"");
        }

        boolean moved = Sms.moveMessageToFolder(mContext, mUri, Sms.MESSAGE_TYPE_OUTBOX, 0);
        if (!moved) {
            throw new MmsException("SmsMessageSender.sendMessage: couldn't move message " +
                    "to outbox: " + mUri);
        }
        if (LogTag.DEBUG_SEND) {
            Log.v(TAG, "sendMessage mDest: " + mDest + " mRequestDeliveryReport: " +
                    mRequestDeliveryReport);
        }

        ArrayList<PendingIntent> deliveryIntents = new ArrayList<PendingIntent>(messageCount);
        ArrayList<PendingIntent> sentIntents = new ArrayList<PendingIntent>(messageCount);
        for (int i = 0; i < messageCount; i++) {
            // Only the last part carries a delivery-report intent.
            if (mRequestDeliveryReport && (i == (messageCount - 1))) {
                // TODO: Fix: It should not be necessary to
                // specify the class in this intent. Doing that
                // unnecessarily limits customizability.
                deliveryIntents.add(PendingIntent.getBroadcast(
                        mContext, 0,
                        new Intent(
                                MessageStatusReceiver.MESSAGE_STATUS_RECEIVED_ACTION,
                                mUri,
                                mContext,
                                MessageStatusReceiver.class),
                        0));
            } else {
                deliveryIntents.add(null);
            }
            Intent intent = new Intent(SmsReceiverService.MESSAGE_SENT_ACTION,
                    mUri,
                    mContext,
                    SmsReceiver.class);

            int requestCode = 0;
            if (i == messageCount -1) {
                // Changing the requestCode so that a different pending intent
                // is created for the last fragment with
                // EXTRA_MESSAGE_SENT_SEND_NEXT set to true.
                requestCode = 1;
                intent.putExtra(SmsReceiverService.EXTRA_MESSAGE_SENT_SEND_NEXT, true);
            }
            if (LogTag.DEBUG_SEND) {
                Log.v(TAG, "sendMessage sendIntent: " + intent);
            }
            sentIntents.add(PendingIntent.getBroadcast(mContext, requestCode, intent, 0));
        }
        try {
            smsManager.sendMultipartTextMessage(mDest, mServiceCenter, messages, sentIntents, deliveryIntents);
        } catch (Exception ex) {
            Log.e(TAG, "SmsMessageSender.sendMessage: caught", ex);
            throw new MmsException("SmsMessageSender.sendMessage: caught " + ex +
                    " from SmsManager.sendTextMessage()");
        }
        if (Log.isLoggable(LogTag.TRANSACTION, Log.VERBOSE) || LogTag.DEBUG_SEND) {
            log("sendMessage: address=" + mDest + ", threadId=" + mThreadId +
                    ", uri=" + mUri + ", msgs.count=" + messageCount);
        }
        return false;
    }

    // Tagged debug-logging helper.
    private void log(String msg) {
        Log.d(LogTag.TAG, "[SmsSingleRecipientSender] " + msg);
    }
}
| {
"pile_set_name": "Github"
} |
/**
* Copyright (C) 2016 MongoDB Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License, version 3,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* As a special exception, the copyright holders give permission to link the
* code of portions of this program with the OpenSSL library under certain
* conditions as described in each individual source file and distribute
* linked combinations including the program with the OpenSSL library. You
* must comply with the GNU Affero General Public License in all respects
* for all of the code used other than as permitted herein. If you modify
* file(s) with this exception, you may extend this exception to your
* version of the file(s), but you are not obligated to do so. If you do not
* wish to do so, delete this exception statement from your version. If you
* delete this exception statement from all source files in the program,
* then also delete it in the license file.
*/
#pragma once
#include "mongo/base/disallow_copying.h"
#include "mongo/db/s/balancer/balancer_chunk_selection_policy.h"
#include "mongo/db/s/balancer/migration_manager.h"
#include "mongo/stdx/condition_variable.h"
#include "mongo/stdx/mutex.h"
#include "mongo/stdx/thread.h"
namespace mongo {
class ChunkType;
class ClusterStatistics;
class MigrationSecondaryThrottleOptions;
class OperationContext;
class ServiceContext;
class Status;
/**
 * The balancer is a background task that tries to keep the number of chunks across all
 * servers of the cluster even.
 *
 * The balancer does act continuously but in "rounds". At a given round, it would decide if
 * there is an imbalance by checking the difference in chunks between the most and least
 * loaded shards. It would issue a request for a chunk migration per round, if it found so.
 */
// ChunkManager owns chunk bookkeeping; the Balancer drives chunk load balancing.
// For an overview of the balancing process see
// https://zhuanlan.zhihu.com/p/25938776 and https://cloud.tencent.com/developer/article/1609526
class Balancer {
    MONGO_DISALLOW_COPYING(Balancer);

public:
    Balancer(ServiceContext* serviceContext);
    ~Balancer();

    /**
     * Instantiates an instance of the balancer and installs it on the specified service context.
     * This method is not thread-safe and must be called only once when the service is starting.
     */
    static void create(ServiceContext* serviceContext);

    /**
     * Retrieves the per-service instance of the Balancer.
     */
    static Balancer* get(ServiceContext* serviceContext);
    static Balancer* get(OperationContext* operationContext);

    /**
     * Invoked when the config server primary enters the 'PRIMARY' state and is invoked while the
     * caller is holding the global X lock. Kicks off the main balancer thread and returns
     * immediately. Auto-balancing (if enabled) should commence shortly, and manual migrations will
     * be processed and run.
     *
     * Must only be called if the balancer is in the stopped state (i.e., just constructed or
     * waitForBalancerToStop has been called before). Any code in this call must not try to acquire
     * any locks or to wait on operations, which acquire locks.
     */
    void initiateBalancer(OperationContext* opCtx);

    /**
     * Invoked when this node which is currently serving as a 'PRIMARY' steps down and is invoked
     * while the global X lock is held. Requests the main balancer thread to stop and returns
     * immediately without waiting for it to terminate. Once the balancer has stopped, manual
     * migrations will be rejected.
     *
     * This method might be called multiple times in succession, which is what happens as a result
     * of incomplete transition to primary so it is resilient to that.
     *
     * The waitForBalancerToStop method must be called afterwards in order to wait for the main
     * balancer thread to terminate and to allow initiateBalancer to be called again.
     */
    void interruptBalancer();

    /**
     * Invoked when a node on its way to becoming a primary finishes draining and is about to
     * acquire the global X lock in order to allow writes. Waits for the balancer thread to
     * terminate and primes the balancer so that initiateBalancer can be called.
     *
     * This must not be called while holding any locks!
     */
    void waitForBalancerToStop();

    /**
     * Potentially blocking method, which will return immediately if the balancer is not running a
     * balancer round and will block until the current round completes otherwise. If the operation
     * context's deadline is exceeded, it will throw an ExceededTimeLimit exception.
     */
    void joinCurrentRound(OperationContext* opCtx);

    /**
     * Blocking call, which requests the balancer to move a single chunk to a more appropriate
     * shard, in accordance with the active balancer policy. It is not guaranteed that the chunk
     * will actually move because it may already be at the best shard. An error will be returned if
     * the attempt to find a better shard or the actual migration fail for any reason.
     */
    Status rebalanceSingleChunk(OperationContext* opCtx, const ChunkType& chunk);

    /**
     * Blocking call, which requests the balancer to move a single chunk to the specified location
     * in accordance with the active balancer policy. An error will be returned if the attempt to
     * move fails for any reason.
     *
     * NOTE: This call disregards the balancer enabled/disabled status and will proceed with the
     * move regardless. If should be used only for user-initiated moves.
     */
    Status moveSingleChunk(OperationContext* opCtx,
                           const ChunkType& chunk,
                           const ShardId& newShardId,
                           uint64_t maxChunkSizeBytes,
                           const MigrationSecondaryThrottleOptions& secondaryThrottle,
                           bool waitForDelete);

    /**
     * Appends the runtime state of the balancer instance to the specified builder.
     */
    void report(OperationContext* opCtx, BSONObjBuilder* builder);

private:
    /**
     * Possible runtime states of the balancer. The comments indicate the allowed next state.
     */
    enum State {
        kStopped,   // kRunning
        kRunning,   // kStopping
        kStopping,  // kStopped
    };

    /**
     * The main balancer loop, which runs in a separate thread.
     */
    void _mainThread();

    /**
     * Checks whether the balancer main thread has been requested to stop.
     */
    bool _stopRequested();

    /**
     * Signals the beginning and end of a balancing round.
     */
    void _beginRound(OperationContext* opCtx);
    void _endRound(OperationContext* opCtx, Seconds waitTimeout);

    /**
     * Blocks the caller for the specified timeout or until the balancer condition variable is
     * signaled, whichever comes first.
     */
    void _sleepFor(OperationContext* opCtx, Seconds waitTimeout);

    /**
     * Returns true if all the servers listed in configdb as being shards are reachable and are
     * distinct processes (no hostname mixup).
     */
    bool _checkOIDs(OperationContext* opCtx);

    /**
     * Iterates through all chunks in all collections and ensures that no chunks straddle tag
     * boundary. If any do, they will be split.
     */
    Status _enforceTagRanges(OperationContext* opCtx);

    /**
     * Schedules migrations for the specified set of chunks and returns how many chunks were
     * successfully processed.
     */
    int _moveChunks(OperationContext* opCtx,
                    const BalancerChunkSelectionPolicy::MigrateInfoVector& candidateChunks);

    /**
     * Performs a split on the chunk with min value "minKey". If the split fails, it is marked as
     * jumbo.
     */
    void _splitOrMarkJumbo(OperationContext* opCtx,
                           const NamespaceString& nss,
                           const BSONObj& minKey);

    // Protects the state below
    stdx::mutex _mutex;

    // Indicates the current state of the balancer
    // (current run state of the balancer)
    State _state{kStopped};

    // The main balancer thread
    // (the thread running _mainThread)
    stdx::thread _thread;

    // The operation context of the main balancer thread. This value may only be available in the
    // kRunning state and is used to force interrupt of any blocking calls made by the balancer
    // thread.
    OperationContext* _threadOperationContext{nullptr};

    // This thread is only available in the kStopping state and is necessary for the migration
    // manager shutdown to not deadlock with replica set step down. In particular, the migration
    // manager's order of lock acquisition is mutex, then collection lock, whereas stepdown first
    // acquires the global S lock and then acquires the migration manager's mutex.
    //
    // The interrupt thread is scheduled when the balancer enters the kStopping state (which is at
    // step down) and is joined outside of lock, when the replica set leaves draining mode, outside
    // of the global X lock.
    stdx::thread _migrationManagerInterruptThread;

    // Indicates whether the balancer is currently executing a balancer round
    bool _inBalancerRound{false};

    // Counts the number of balancing rounds performed since the balancer thread was first activated
    int64_t _numBalancerRounds{0};

    // Condition variable, which is signalled every time the above runtime state of the balancer
    // changes (in particular, state/balancer round and number of balancer rounds).
    stdx::condition_variable _condVar;

    // Number of moved chunks in last round
    // (non-zero when the previous balancer round migrated any chunks)
    int _balancedLastTime;

    // Source for cluster statistics
    std::unique_ptr<ClusterStatistics> _clusterStats;

    // Balancer policy. Depends on the cluster statistics instance above so it should be created
    // after it and destroyed before it.
    // (strategy object that picks which chunks to move)
    std::unique_ptr<BalancerChunkSelectionPolicy> _chunkSelectionPolicy;

    // Migration manager used to schedule and manage migrations
    MigrationManager _migrationManager;
};
} // namespace mongo
| {
"pile_set_name": "Github"
} |
// Emits a `box-shadow` declaration only when $enable-shadows is on, filtering
// out any "none" entries so invalid lists like `none, 1px 1px black` never
// reach the output CSS.
@mixin box-shadow($shadow...) {
  @if $enable-shadows {
    $result: ();

    @if (length($shadow) == 1) {
      // We can pass `@include box-shadow(none);`
      $result: $shadow;
    } @else {
      // Filter to avoid invalid properties for example `box-shadow: none, 1px 1px black;`
      @for $i from 1 through length($shadow) {
        @if nth($shadow, $i) != "none" {
          $result: append($result, nth($shadow, $i), "comma");
        }
      }
    }

    // Skip the declaration entirely when every entry was filtered out.
    @if (length($result) > 0) {
      box-shadow: $result;
    }
  }
}
| {
"pile_set_name": "Github"
} |
import { arrayProp, prop, Ref, Typegoose } from '../../src/typegoose';
// Self-referential user schema: a user may point at a master account and may
// own a list of sub-accounts, all stored as references to other UserRef docs.
export class UserRef extends Typegoose {
  // Reference to the owning (master) account; null when this is a top-level account.
  @prop({ ref: UserRef, default: null })
  public master?: Ref<UserRef>;

  // References to the accounts owned by this user; empty by default.
  @arrayProp({ itemsRef: UserRef, default: [] })
  public subAccounts!: Ref<UserRef>[];

  // Display name; required at save time.
  @prop({ required: true })
  public name!: string;
}

export const UserRefModel = new UserRef().getModelForClass(UserRef);
| {
"pile_set_name": "Github"
} |
# EOS整体介绍
</br>
## 一、EOS的插件式设计
</br>
EOS中,虽然编程的复杂度和设计较比特币大幅提高,但其核心的思想其实并没有多大改变,目前来看,仍然以BOOST的signal,boost::asio的信号消息机制来完成模块间的解耦。相比比特币来言,做得更优雅,封装也更良好。
</br>
先看一下插件设计的整体类图:
</br>

</br>
从上面的类图可以清楚的看到,整个插件的依赖和传导机制。然后在下面的流程分析中会详细说明一下它的具体的应用。
## 二、EOS的整体流程
</br>
EOS的版本做了一次比较大的更迭,至少从形式上看是,它的生成路径下,完成了以下几个目标:
</br>
cleos:客户端,用来处理和区块链通信。帐户钱包等的管理。
</br>
eosio-abigen:二进制ABI的生成程序。
</br>
eosio-launcher:简化了eosd节点跨局域网或者跨更宽泛的网络的分布。
</br>
keosd:钱包和帐户的实现控制程序
</br>
nodeos:核心的节点程序,这个和老版本不一样了,至少名字不一样了。
</br>
一般情况下会启动cleos调用keosd来创建帐户和钱包。然后通过启动nodeos来产生节点,进行通信并根据配置生成区块和验证。进入重点,直接看一下 nodeos的创建代码:
</br>
``` c++
int main(int argc, char** argv)
{
try {
app().set_version(eosio::nodeos::config::version);
auto root = fc::app_path();
app().set_default_data_dir(root / "eosio/nodeos/data" );
app().set_default_config_dir(root / "eosio/nodeos/config" );
//这里直接初始化了四个插件
if(!app().initialize<chain_plugin, http_plugin, net_plugin, producer_plugin>(argc, argv))
return -1;
initialize_logging();
ilog("nodeos version ${ver}", ("ver", eosio::nodeos::config::itoh(static_cast<uint32_t>(app().version()))));
ilog("eosio root is ${root}", ("root", root.string()));
app().startup();
app().exec();
} catch (const fc::exception& e) {
elog("${e}", ("e",e.to_detail_string()));
} catch (const boost::exception& e) {
elog("${e}", ("e",boost::diagnostic_information(e)));
} catch (const std::exception& e) {
elog("${e}", ("e",e.what()));
} catch (...) {
elog("unknown exception");
}
return 0;
}
```
</br>代码看上去并不多,当然,比之比特币最新中的几行代码来看,还是要稍有复杂的感觉,但是还可以承受,不过,随后可能c++技能的消耗水平会极剧增加。忽略开前几行的相关文件配置直接进行初始化代码看看去。
</br>
``` c++
template<typename... Plugin>
bool initialize(int argc, char** argv) {
return initialize_impl(argc, argv, {find_plugin<Plugin>()...});
}
```
</br>没啥,一个向量的初始化。不过有一个变参模板,如果想深入学习的得去看看相关资料。
</br>
``` c++
bool application::initialize_impl(int argc, char** argv, vector<abstract_plugin*> autostart_plugins) {
set_program_options();//设置命令选项
bpo::variables_map options;//声明保存结果变量
bpo::store(bpo::parse_command_line(argc, argv, my->_app_options), options);//分析参数并保存
if( options.count( "help" ) ) {
cout << my->_app_options << std::endl;
return false;
}
......
//分析配置文件
bpo::store(bpo::parse_config_file<char>(config_file_name.make_preferred().string().c_str(),
my->_cfg_options, true), options);
if(options.count("plugin") > 0)
{
auto plugins = options.at("plugin").as<std::vector<std::string>>();
for(auto& arg : plugins)
{
vector<string> names;
boost::split(names, arg, boost::is_any_of(" \t,"));
for(const std::string& name : names)
get_plugin(name).initialize(options);//分步初始化第一步,获取指定名插件并初始化,其它类同
}
}
//下面是注册插件,并查寻依赖的相关插件,然后调用,并初始化
for (auto plugin : autostart_plugins)
if (plugin != nullptr && plugin->get_state() == abstract_plugin::registered)
plugin->initialize(options);//分步初始化第一步,获取指定名插件并初始化,其它类同
bpo::notify(options);//更新最新参数至options
return true;
}
```
</br>里面反复的参数控制代码略过了。里面主要是使用了BOOST的参数解析和更新机制
</br>
这里的调用很简单,其实就是从map里查找相关的插件,用类名和字符串,这里面用到了BOOST中的一些库boost::core::demangle(typeid(Plugin).name()),用来返回类型的名字。然后再用名字的字符串查找出插件。这里面有一个问题,为什么从plugins这个map中可以查找出对象,仔细看一下有些插件的CPP文件中会有类似的代码:
</br>
``` c++
static appbase::abstract_plugin& _net_plugin = app().register_plugin<net_plugin>();
```
</br>
静态注册了啊。但是有一些插件里没有啊,怎么回事儿?其实接着看代码就发现了问题所在。如下:
</br>
``` c++
virtual void initialize(const variables_map& options) override {
    if(_state == registered) {
         _state = initialized;
         //分步初始化,第二步
         static_cast<Impl*>(this)->plugin_requires([&](auto& plug){ plug.initialize(options); });//初始化此插件依赖的插件,并递归调用依赖插件
         static_cast<Impl*>(this)->plugin_initialize(options); //初始化插件
         //ilog( "initializing plugin ${name}", ("name",name()) );
         app().plugin_initialized(*this);//保存启动的插件
      }
      assert(_state == initialized); /// if initial state was not registered, final state cannot be initiaized
}
```
</br>
plugin_requires,这个函数的定义就通过宏来产生了。
</br>
``` C++
//先看一个调用实现
class chain_plugin : public plugin<chain_plugin> {
public:
APPBASE_PLUGIN_REQUIRES()
......
};
#define APPBASE_PLUGIN_REQUIRES_VISIT( r, visitor, elem ) \
visitor( appbase::app().register_plugin<elem>() );
#define APPBASE_PLUGIN_REQUIRES( PLUGINS ) \
template<typename Lambda> \
void plugin_requires( Lambda&& l ) { \
BOOST_PP_SEQ_FOR_EACH( APPBASE_PLUGIN_REQUIRES_VISIT, l, PLUGINS ) \
}
//再看另外一个调用实现
class producer_plugin : public appbase::plugin<producer_plugin> {
public:
APPBASE_PLUGIN_REQUIRES((chain_plugin))
......
};
```
</br>
就这样,基础的插件和基础插件依赖的插件,就这么被一一加载初始化。
</br>
## 三、EOS的程序技术特点
</br>
### 1、使用了较多的宏,并配合BOOST库。
</br>
在EOS的代码中,可以隐约看到类似MFC的代码实现机制,举一个例子:
</br>
``` C++
#define FC_CAPTURE_AND_RETHROW( ... ) \
catch( fc::exception& er ) { \
FC_RETHROW_EXCEPTION( er, warn, "", FC_FORMAT_ARG_PARAMS(__VA_ARGS__) ); \
} catch( const std::exception& e ) { \
fc::exception fce( \
FC_LOG_MESSAGE( warn, "${what}: ",FC_FORMAT_ARG_PARAMS(__VA_ARGS__)("what",e.what())), \
fc::std_exception_code,\
BOOST_CORE_TYPEID(decltype(e)).name(), \
e.what() ) ; throw fce;\
} catch( ... ) { \
throw fc::unhandled_exception( \
FC_LOG_MESSAGE( warn, "",FC_FORMAT_ARG_PARAMS(__VA_ARGS__)), \
std::current_exception() ); \
}
FC_CAPTURE_AND_RETHROW( (t) )
```
</br>包括前面提到的递归调用插件化的宏定义,再通过上面的调用实现对比,基本上是以动态生成代码为主,在比特币也有类似的实现,但规模和应用要小得多。
</br>
### 2、模板的使用普及化
</br>
在工程代码上广泛使用了模板,看一下插件的例子:
</br>
``` c++
template<typename Impl>
class plugin : public abstract_plugin {
public:
plugin():_name(boost::core::demangle(typeid(Impl).name())){}
virtual ~plugin(){}
virtual state get_state()const override { ... }
virtual const std::string& name()const override { ... }
virtual void register_dependencies() {
.......
}
virtual void initialize(const variables_map& options) override {
......
}
virtual void startup() override {
......
}
virtual void shutdown() override {
......
}
......
};
```
</br>
### 3、更深入的绑定使用了c++1X和BOOST
</br>
这个就非常明显了,试举一个简单的例子:
</br>
``` c++
//c++11语法糖
for (const auto& at: trx_trace.action_traces) {
for (const auto& auth: at.act.authorization) {
result.emplace_back(auth.actor);
}
result.emplace_back(at.receiver);
}
//BOOST的网络通信
using boost::asio::ip::tcp;
unique_ptr<tcp::acceptor> acceptor;
std::unique_ptr<class net_plugin_impl> my;
void net_plugin::plugin_startup() {
if( my->acceptor ) {
my->acceptor->open(my->listen_endpoint.protocol());
my->acceptor->set_option(tcp::acceptor::reuse_address(true));
my->acceptor->bind(my->listen_endpoint);
my->acceptor->listen();
ilog("starting listener, max clients is ${mc}",("mc",my->max_client_count));
my->start_listen_loop();
}
my->chain_plug->chain().on_pending_transaction.connect( &net_plugin_impl::transaction_ready);
my->start_monitors();
for( auto seed_node : my->supplied_peers ) {
connect( seed_node );
}
}
```
</br>目前初步看来,EOS对BOOST和c++14的依赖更深。
| {
"pile_set_name": "Github"
} |
package com.timeyang;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.config.server.EnableConfigServer;
/**
 * Configuration center: boots a Spring Cloud Config Server that serves
 * externalized configuration to the other services.
 *
 * @author chaokunyang
 */
@SpringBootApplication
@EnableConfigServer
public class ConfigServiceApplication {

    public static void main(String[] args) {
        SpringApplication.run(ConfigServiceApplication.class, args);
    }
}
| {
"pile_set_name": "Github"
} |
<?php
// esoTalk front controller: defines the path constants used throughout the
// application and hands control to the core bootstrapper.
// Copyright 2013 Toby Zerner, Simon Zerner
// This file is part of esoTalk. Please see the included license file for usage information.

// Guard constant checked by included files to block direct access.
define("IN_ESOTALK", 1);

// Timestamp used to measure total page generation time.
define("PAGE_START_TIME", microtime(true));

// Filesystem layout: everything is resolved relative to this file's directory.
define("PATH_ROOT", dirname(__FILE__));
define("PATH_CORE", PATH_ROOT."/core");
define("PATH_CACHE", PATH_ROOT."/cache");
define("PATH_CONFIG", PATH_ROOT."/config");
define("PATH_LANGUAGES", PATH_ROOT."/addons/languages");
define("PATH_PLUGINS", PATH_ROOT."/addons/plugins");
define("PATH_SKINS", PATH_ROOT."/addons/skins");
define("PATH_UPLOADS", PATH_ROOT."/uploads");

require PATH_CORE."/bootstrap.php";
"pile_set_name": "Github"
} |
/**
* Copyright (c) 2017-present, Facebook, Inc. and its affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
#include "logdevice/server/message_handlers/DELETE_onReceived.h"
#include <memory>
#include "logdevice/common/Sender.h"
#include "logdevice/common/configuration/ServerConfig.h"
#include "logdevice/common/debug.h"
#include "logdevice/server/ServerProcessor.h"
#include "logdevice/server/ServerWorker.h"
#include "logdevice/server/locallogstore/WriteOps.h"
#include "logdevice/server/storage/DeleteStorageTask.h"
#include "logdevice/server/storage_tasks/PerWorkerStorageTaskQueue.h"
namespace facebook { namespace logdevice {
/**
 * Handles an incoming DELETE message: validates the target shard index, the
 * connection direction and this node's storage role, then enqueues a
 * DeleteStorageTask for the addressed record on the target shard's queue.
 * Invalid-but-survivable conditions are dropped (NORMAL); protocol violations
 * return ERROR with err set to E::PROTO.
 */
Message::Disposition DELETE_onReceived(DELETE_Message* msg,
                                       const Address& from) {
  const DELETE_Header& header = msg->getHeader();

  ServerWorker* worker = ServerWorker::onThisThread();

  shard_index_t shard_idx = header.shard;
  ld_check(shard_idx != -1);

  // Drop (rate-limited log) if the shard index is out of range for this node.
  const shard_size_t n_shards = worker->getNodesConfiguration()->getNumShards();
  if (shard_idx >= n_shards) {
    RATELIMIT_ERROR(std::chrono::seconds(10),
                    10,
                    "Got DELETE message from %s with invalid shard %u, "
                    "this node only has %u shards",
                    Sender::describeConnection(from).c_str(),
                    shard_idx,
                    n_shards);
    return Message::Disposition::NORMAL;
  }

  // DELETEs may only arrive on incoming (client) connections.
  if (!from.isClientAddress()) {
    RATELIMIT_ERROR(std::chrono::seconds(1),
                    10,
                    "PROTOCOL ERROR: got a DELETE %s from an outgoing (server) "
                    "connection to %s. DELETE messages can only arrive from "
                    "incoming (client) connections",
                    header.rid.toString().c_str(),
                    Sender::describeConnection(from).c_str());
    err = E::PROTO;
    return Message::Disposition::ERROR;
  }

  // Silently drop while shutting down / not accepting work.
  if (!worker->isAcceptingWork()) {
    ld_debug("Ignoring DELETE message: not accepting more work");
    return Message::Disposition::NORMAL;
  }

  // Check that we should even be processing this
  if (!worker->processor_->runningOnStorageNode()) {
    RATELIMIT_ERROR(std::chrono::seconds(1),
                    10,
                    "Got DELETE %s from %s but not configured as a "
                    "storage node",
                    Sender::describeConnection(from).c_str());
    err = E::PROTO;
    return Message::Disposition::ERROR;
  }

  // Build the write op addressing (logid, lsn) and queue the storage task.
  DeleteWriteOp op = {// TODO #3041039
                      header.rid.logid,
                      compose_lsn(header.rid.epoch, header.rid.esn)};

  worker->getStorageTaskQueueForShard(shard_idx)->putTask(
      std::make_unique<DeleteStorageTask>(op));

  return Message::Disposition::NORMAL;
}
}} // namespace facebook::logdevice
| {
"pile_set_name": "Github"
} |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
using System.Threading;
using System.Threading.Tasks;
namespace Microsoft.Azure.WebJobs.Host.Blobs.Listeners
{
/// <summary>
/// Writes blob-trigger notification messages onto the queue that drives
/// blob-triggered function invocations.
/// </summary>
internal interface IBlobTriggerQueueWriter
{
    /// <summary>
    /// Enqueue the message into the queue.
    /// </summary>
    /// <param name="message">The message.</param>
    /// <param name="cancellationToken">The cancellation token.</param>
    /// <returns>The name of the queue and the Id of the enqueued message.</returns>
    Task<(string QueueName, string MessageId)> EnqueueAsync(BlobTriggerMessage message, CancellationToken cancellationToken);
}
}
| {
"pile_set_name": "Github"
} |
/*
* The internal libclocale header
*
* Copyright (C) 2008-2016, Joachim Metz <[email protected]>
*
* Refer to AUTHORS for acknowledgements.
*
* This software is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this software. If not, see <http://www.gnu.org/licenses/>.
*/
#if !defined( _LIBCPATH_LIBCLOCALE_H )
#define _LIBCPATH_LIBCLOCALE_H

#include <common.h>

/* Selects between the in-tree (local) libclocale headers and the installed
 * library's single public header.
 */
/* Define HAVE_LOCAL_LIBCLOCALE for local use of libclocale
 */
#if defined( HAVE_LOCAL_LIBCLOCALE )

#include <libclocale_codepage.h>
#include <libclocale_definitions.h>
#include <libclocale_locale.h>
#include <libclocale_support.h>

#else

/* If libtool DLL support is enabled set LIBCLOCALE_DLL_IMPORT
 * before including libclocale.h
 */
#if defined( _WIN32 ) && defined( DLL_IMPORT )
#define LIBCLOCALE_DLL_IMPORT
#endif

#include <libclocale.h>

#endif /* defined( HAVE_LOCAL_LIBCLOCALE ) */

#endif /* !defined( _LIBCPATH_LIBCLOCALE_H ) */
| {
"pile_set_name": "Github"
} |
# Generated by Django 2.2.14 on 2020-07-22 22:45
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional ``country`` attribute mapping to ``SAMLProviderConfig``."""

    dependencies = [
        ('third_party_auth', '0001_squashed_0026_auto_20200401_1932'),
    ]

    operations = [
        migrations.AddField(
            model_name='samlproviderconfig',
            name='country',
            # Bug fix: help_text was accidentally wrapped in a one-element tuple
            # (stray trailing comma), so admin/help UIs rendered the tuple repr
            # "('URN of ...',)" instead of the text. help_text is display-only,
            # so correcting it does not affect the database schema.
            field=models.CharField(
                blank=True,
                help_text="URN of SAML attribute containing the user's country.",
                max_length=128,
            ),
        ),
    ]
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "https://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.15"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>Luwra: luwra::internal::MethodWrapperImpl< MethodPointer, Klass >::ImplementationNonVoid< Args >::SeqReceiver< Indices > Struct Template Reference</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td id="projectalign" style="padding-left: 0.5em;">
<div id="projectname">Luwra
</div>
<div id="projectbrief">Minimal-overhead Lua wrapper for C++</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.15 -->
<script type="text/javascript" src="menudata.js"></script>
<script type="text/javascript" src="menu.js"></script>
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:cf05388f2679ee054f2beb29a391d25f4e673ac3&dn=gpl-2.0.txt GPL-v2 */
$(function() {
initMenu('',false,false,'search.php','Search');
});
/* @license-end */</script>
<div id="main-nav"></div>
<div id="nav-path" class="navpath">
<ul>
<li class="navelem"><a class="el" href="namespaceluwra.html">luwra</a></li><li class="navelem"><a class="el" href="namespaceluwra_1_1internal.html">internal</a></li><li class="navelem"><a class="el" href="structluwra_1_1internal_1_1MethodWrapperImpl.html">MethodWrapperImpl</a></li><li class="navelem"><a class="el" href="structluwra_1_1internal_1_1MethodWrapperImpl_1_1ImplementationNonVoid.html">ImplementationNonVoid</a></li><li class="navelem"><a class="el" href="structluwra_1_1internal_1_1MethodWrapperImpl_1_1ImplementationNonVoid_1_1SeqReceiver.html">SeqReceiver</a></li> </ul>
</div>
</div><!-- top -->
<div class="header">
<div class="summary">
<a href="#pub-static-methods">Static Public Member Functions</a> |
<a href="structluwra_1_1internal_1_1MethodWrapperImpl_1_1ImplementationNonVoid_1_1SeqReceiver-members.html">List of all members</a> </div>
<div class="headertitle">
<div class="title">luwra::internal::MethodWrapperImpl< MethodPointer, Klass >::ImplementationNonVoid< Args >::SeqReceiver< Indices > Struct Template Reference</div> </div>
</div><!--header-->
<div class="contents">
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pub-static-methods"></a>
Static Public Member Functions</h2></td></tr>
<tr class="memitem:a16d3aeba2906c8c42fbc1095a49d8ca6"><td class="memTemplParams" colspan="2">template<MethodPointer meth> </td></tr>
<tr class="memitem:a16d3aeba2906c8c42fbc1095a49d8ca6"><td class="memTemplItemLeft" align="right" valign="top">static int </td><td class="memTemplItemRight" valign="bottom"><a class="el" href="structluwra_1_1internal_1_1MethodWrapperImpl_1_1ImplementationNonVoid_1_1SeqReceiver.html#a16d3aeba2906c8c42fbc1095a49d8ca6">invoke</a> (<a class="el" href="namespaceluwra.html#a2c037b44385367826eb4e931b5b8197d">State</a> *state)</td></tr>
<tr class="separator:a16d3aeba2906c8c42fbc1095a49d8ca6"><td class="memSeparator" colspan="2"> </td></tr>
</table>
<h2 class="groupheader">Member Function Documentation</h2>
<a id="a16d3aeba2906c8c42fbc1095a49d8ca6"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a16d3aeba2906c8c42fbc1095a49d8ca6">◆ </a></span>invoke()</h2>
<div class="memitem">
<div class="memproto">
<div class="memtemplate">
template<typename MethodPointer, typename Klass = typename MemberInfo<MethodPointer>::MemberOf> </div>
<div class="memtemplate">
template<typename... Args> </div>
<div class="memtemplate">
template<size_t... Indices> </div>
<div class="memtemplate">
template<MethodPointer meth> </div>
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">static int <a class="el" href="structluwra_1_1internal_1_1MethodWrapperImpl.html">luwra::internal::MethodWrapperImpl</a>< MethodPointer, Klass >::<a class="el" href="structluwra_1_1internal_1_1MethodWrapperImpl_1_1ImplementationNonVoid.html">ImplementationNonVoid</a>< Args >::<a class="el" href="structluwra_1_1internal_1_1MethodWrapperImpl_1_1ImplementationNonVoid_1_1SeqReceiver.html">SeqReceiver</a>< Indices >::invoke </td>
<td>(</td>
<td class="paramtype"><a class="el" href="namespaceluwra.html#a2c037b44385367826eb4e931b5b8197d">State</a> * </td>
<td class="paramname"><em>state</em></td><td>)</td>
<td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span><span class="mlabel">static</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
</div>
</div>
</div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated by  <a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/>
</a> 1.8.15
</small></address>
</body>
</html>
| {
"pile_set_name": "Github"
} |
[
{
"Unit": "CPU-M-CF",
"EventCode": "64",
"EventName": "PRNG_FUNCTIONS",
"BriefDescription": "PRNG Functions",
"PublicDescription": "Total number of the PRNG functions issued by the CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "65",
"EventName": "PRNG_CYCLES",
"BriefDescription": "PRNG Cycles",
"PublicDescription": "Total number of CPU cycles when the DEA/AES coprocessor is busy performing PRNG functions issued by the CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "66",
"EventName": "PRNG_BLOCKED_FUNCTIONS",
"BriefDescription": "PRNG Blocked Functions",
"PublicDescription": "Total number of the PRNG functions that are issued by the CPU and are blocked because the DEA/AES coprocessor is busy performing a function issued by another CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "67",
"EventName": "PRNG_BLOCKED_CYCLES",
"BriefDescription": "PRNG Blocked Cycles",
"PublicDescription": "Total number of CPU cycles blocked for the PRNG functions issued by the CPU because the DEA/AES coprocessor is busy performing a function issued by another CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "68",
"EventName": "SHA_FUNCTIONS",
"BriefDescription": "SHA Functions",
"PublicDescription": "Total number of SHA functions issued by the CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "69",
"EventName": "SHA_CYCLES",
"BriefDescription": "SHA Cycles",
"PublicDescription": "Total number of CPU cycles when the SHA coprocessor is busy performing the SHA functions issued by the CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "70",
"EventName": "SHA_BLOCKED_FUNCTIONS",
"BriefDescription": "SHA Blocked Functions",
"PublicDescription": "Total number of the SHA functions that are issued by the CPU and are blocked because the SHA coprocessor is busy performing a function issued by another CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "71",
"EventName": "SHA_BLOCKED_CYCLES",
    "BriefDescription": "SHA Blocked Cycles",
"PublicDescription": "Total number of CPU cycles blocked for the SHA functions issued by the CPU because the SHA coprocessor is busy performing a function issued by another CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "72",
"EventName": "DEA_FUNCTIONS",
"BriefDescription": "DEA Functions",
"PublicDescription": "Total number of the DEA functions issued by the CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "73",
"EventName": "DEA_CYCLES",
"BriefDescription": "DEA Cycles",
"PublicDescription": "Total number of CPU cycles when the DEA/AES coprocessor is busy performing the DEA functions issued by the CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "74",
"EventName": "DEA_BLOCKED_FUNCTIONS",
"BriefDescription": "DEA Blocked Functions",
"PublicDescription": "Total number of the DEA functions that are issued by the CPU and are blocked because the DEA/AES coprocessor is busy performing a function issued by another CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "75",
"EventName": "DEA_BLOCKED_CYCLES",
"BriefDescription": "DEA Blocked Cycles",
"PublicDescription": "Total number of CPU cycles blocked for the DEA functions issued by the CPU because the DEA/AES coprocessor is busy performing a function issued by another CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "76",
"EventName": "AES_FUNCTIONS",
"BriefDescription": "AES Functions",
"PublicDescription": "Total number of AES functions issued by the CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "77",
"EventName": "AES_CYCLES",
"BriefDescription": "AES Cycles",
"PublicDescription": "Total number of CPU cycles when the DEA/AES coprocessor is busy performing the AES functions issued by the CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "78",
"EventName": "AES_BLOCKED_FUNCTIONS",
"BriefDescription": "AES Blocked Functions",
"PublicDescription": "Total number of AES functions that are issued by the CPU and are blocked because the DEA/AES coprocessor is busy performing a function issued by another CPU"
},
{
"Unit": "CPU-M-CF",
"EventCode": "79",
"EventName": "AES_BLOCKED_CYCLES",
"BriefDescription": "AES Blocked Cycles",
"PublicDescription": "Total number of CPU cycles blocked for the AES functions issued by the CPU because the DEA/AES coprocessor is busy performing a function issued by another CPU"
  }
]
| {
"pile_set_name": "Github"
} |
/*jshint esnext:true*/
// Object rest in destructuring (later standardized in ES2018): `z` collects
// the remaining own enumerable properties of the right-hand object, and the
// spread below merges them back into a new object literal.
let { x, y, ...z } = { x: 1, y: 2, a: 3, b: 4 };
let n = { x, y, ...z };
// Array comprehensions
// NOTE(review): array/generator comprehensions below were an ES7 *proposal*
// that was dropped from the language; this syntax only parses in engines or
// linters with experimental support (hence the esnext flag above). Kept as-is
// because this file is demo input, not shipping code.
var results = [
  for(c of customers)
    if (c.city == "Seattle")
      { name: c.name, age: c.age }
]
// Generator comprehensions
var results = (
  for(c of customers)
    if (c.city == "Seattle")
      { name: c.name, age: c.age }
)
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2015-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.rds.model.transform;
import javax.xml.stream.events.XMLEvent;
import javax.annotation.Generated;
import com.amazonaws.services.rds.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* DBInstanceRole StAX Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DBInstanceRoleStaxUnmarshaller implements Unmarshaller<DBInstanceRole, StaxUnmarshallerContext> {

    /**
     * Unmarshalls a single DBInstanceRole element (RoleArn, FeatureName, Status)
     * from the StAX event stream, returning when the enclosing element closes
     * or the document is exhausted.
     */
    public DBInstanceRole unmarshall(StaxUnmarshallerContext context) throws Exception {
        DBInstanceRole dBInstanceRole = new DBInstanceRole();
        int originalDepth = context.getCurrentDepth();
        int targetDepth = originalDepth + 1;
        // At the document root the interesting child elements sit one
        // additional level deeper than the current position.
        if (context.isStartOfDocument())
            targetDepth += 1;
        while (true) {
            XMLEvent xmlEvent = context.nextEvent();
            if (xmlEvent.isEndDocument())
                return dBInstanceRole;
            if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
                if (context.testExpression("RoleArn", targetDepth)) {
                    dBInstanceRole.setRoleArn(StringStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
                if (context.testExpression("FeatureName", targetDepth)) {
                    dBInstanceRole.setFeatureName(StringStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
                if (context.testExpression("Status", targetDepth)) {
                    dBInstanceRole.setStatus(StringStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
            } else if (xmlEvent.isEndElement()) {
                // Rising above the starting depth means the enclosing element
                // has closed: the DBInstanceRole is complete.
                if (context.getCurrentDepth() < originalDepth) {
                    return dBInstanceRole;
                }
            }
        }
    }

    private static DBInstanceRoleStaxUnmarshaller instance;

    /**
     * Returns the shared unmarshaller instance.
     * NOTE(review): the lazy initialization is not thread-safe, but this is the
     * exact pattern emitted by the AWS SDK code generator for all StAX
     * unmarshallers; the type is stateless, so a rare duplicate instance is
     * harmless.
     */
    public static DBInstanceRoleStaxUnmarshaller getInstance() {
        if (instance == null)
            instance = new DBInstanceRoleStaxUnmarshaller();
        return instance;
    }
}
| {
"pile_set_name": "Github"
} |
{
"title": "pluginTitle",
"descr": "",
"key": "pluginKey", // must be the name of the folder of your plugin, sample: app/views/plugins/<my_plugin> ==> 'my_plugin'
"helpers": [
"Plugins::PluginClass::MainHelper"
],
"hooks": {
"on_active": ["pluginKey_on_active"],
"on_inactive": ["pluginKey_on_inactive"],
"plugin_options": ["pluginKey_on_plugin_options"]
//here you can add all your hooks (read documentation)
}
} | {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2015 The WebM project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "./vp8_rtcd.h"
#include "vp8/common/mips/msa/vp8_macros_msa.h"
/* Transpose a 4x4 tile of 16-bit lanes held in four v8i16 registers
 * (interleave low/high halfwords, then pack even/odd doublewords). */
#define TRANSPOSE4x4_H(in0, in1, in2, in3, out0, out1, out2, out3) \
  {                                                                \
    v8i16 s0_m, s1_m, tp0_m, tp1_m, tp2_m, tp3_m;                  \
                                                                   \
    ILVR_H2_SH(in2, in0, in3, in1, s0_m, s1_m);                    \
    ILVRL_H2_SH(s1_m, s0_m, tp0_m, tp1_m);                         \
    ILVL_H2_SH(in2, in0, in3, in1, s0_m, s1_m);                    \
    ILVRL_H2_SH(s1_m, s0_m, tp2_m, tp3_m);                         \
    PCKEV_D2_SH(tp2_m, tp0_m, tp3_m, tp1_m, out0, out2);           \
    PCKOD_D2_SH(tp2_m, tp0_m, tp3_m, tp1_m, out1, out3);           \
  }
/* Broadcast 'coeff' lanes val0..val2 and interleave them into the two
 * packed dot-product constant registers const1/const2. */
#define SET_DOTP_VALUES(coeff, val0, val1, val2, const1, const2) \
  {                                                              \
    v8i16 tmp0_m;                                                \
                                                                 \
    SPLATI_H3_SH(coeff, val0, val1, val2, tmp0_m, const1, const2); \
    ILVEV_H2_SH(tmp0_m, const1, const2, tmp0_m, const1, const2); \
  }
/* Per-lane mask (GNU statement-expression): 1 in each halfword lane whose
 * input is non-zero, 0 elsewhere. NOTE(review): used below as the rounding
 * correction applied when the second-pass odd coefficients are non-zero —
 * confirm against the scalar vp8 reference transform. */
#define RET_1_IF_NZERO_H(in0)           \
  ({                                    \
    v8i16 tmp0_m;                       \
    v8i16 one_m = __msa_ldi_h(1);       \
                                        \
    tmp0_m = __msa_ceqi_h(in0, 0);      \
    tmp0_m = tmp0_m ^ 255;              \
    tmp0_m = one_m & tmp0_m;            \
                                        \
    tmp0_m;                             \
  })
/* Per-lane mask: 1 in each word lane whose input is non-zero, 0 elsewhere. */
#define RET_1_IF_NZERO_W(in0)           \
  ({                                    \
    v4i32 tmp0_m;                       \
    v4i32 one_m = __msa_ldi_w(1);       \
                                        \
    tmp0_m = __msa_ceqi_w(in0, 0);      \
    tmp0_m = tmp0_m ^ 255;              \
    tmp0_m = one_m & tmp0_m;            \
                                        \
    tmp0_m;                             \
  })
/* Per-lane mask: 1 in each word lane whose input is negative, 0 elsewhere. */
#define RET_1_IF_NEG_W(in0)             \
  ({                                    \
    v4i32 tmp0_m;                       \
                                        \
    v4i32 one_m = __msa_ldi_w(1);       \
    tmp0_m = __msa_clti_s_w(in0, 0);    \
    tmp0_m = one_m & tmp0_m;            \
                                        \
    tmp0_m;                             \
  })
/* MSA forward 4x4 DCT for one block of residuals.
 *
 * input  - 4x4 block of int16 residuals
 * pitch  - input stride in bytes (rows are pitch/2 int16 elements apart)
 * output - receives the 16 transform coefficients
 *
 * NOTE(review): the constant vector (2217/5352 with 14500/7500 rounders,
 * >>12 first pass, >>16 second pass) mirrors the scalar vp8 fdct — confirm
 * against vp8/encoder/dct.c before changing any constant.
 */
void vp8_short_fdct4x4_msa(int16_t *input, int16_t *output, int32_t pitch) {
  v8i16 in0, in1, in2, in3;
  v8i16 temp0, temp1;
  v8i16 const0, const1;
  v8i16 coeff = { 2217, 5352, -5352, 14500, 7500, 12000, 25000, 26000 };
  v4i32 out0, out1, out2, out3;
  v8i16 zero = { 0 };
  /* First pass: transpose so rows become columns, butterfly, scale by 8. */
  LD_SH4(input, pitch / 2, in0, in1, in2, in3);
  TRANSPOSE4x4_SH_SH(in0, in1, in2, in3, in0, in1, in2, in3);
  BUTTERFLY_4(in0, in1, in2, in3, temp0, temp1, in1, in3);
  SLLI_4V(temp0, temp1, in1, in3, 3);
  in0 = temp0 + temp1;
  in2 = temp0 - temp1;
  SET_DOTP_VALUES(coeff, 0, 1, 2, const0, const1);
  temp0 = __msa_ilvr_h(in3, in1);
  in1 = __msa_splati_h(coeff, 3);
  out0 = (v4i32)__msa_ilvev_h(zero, in1);
  coeff = __msa_ilvl_h(zero, coeff);
  out1 = __msa_splati_w((v4i32)coeff, 0);
  DPADD_SH2_SW(temp0, temp0, const0, const1, out0, out1);
  out0 >>= 12;
  out1 >>= 12;
  PCKEV_H2_SH(out0, out0, out1, out1, in1, in3);
  /* Second pass: transpose back, butterfly, round (+7, >>4) and apply the
   * non-zero correction term before storing. */
  TRANSPOSE4x4_SH_SH(in0, in1, in2, in3, in0, in1, in2, in3);
  BUTTERFLY_4(in0, in1, in2, in3, temp0, temp1, in1, in3);
  in0 = temp0 + temp1 + 7;
  in2 = temp0 - temp1 + 7;
  in0 >>= 4;
  in2 >>= 4;
  ILVR_H2_SW(zero, in0, zero, in2, out0, out2);
  temp1 = RET_1_IF_NZERO_H(in3);
  ILVR_H2_SH(zero, temp1, in3, in1, temp1, temp0);
  SPLATI_W2_SW(coeff, 2, out3, out1);
  out3 += out1;
  out1 = __msa_splati_w((v4i32)coeff, 1);
  DPADD_SH2_SW(temp0, temp0, const0, const1, out1, out3);
  out1 >>= 16;
  out3 >>= 16;
  out1 += (v4i32)temp1;
  PCKEV_H2_SH(out1, out0, out3, out2, in0, in2);
  ST_SH2(in0, in2, output, 8);
}
/* MSA forward DCT for an 8x4 region (two 4x4 blocks processed side by side
 * in the 8-lane registers).
 *
 * input  - 8x4 block of int16 residuals
 * pitch  - input stride in bytes (rows are pitch/2 int16 elements apart)
 * output - receives 2 * 16 transform coefficients (second block at +16)
 */
void vp8_short_fdct8x4_msa(int16_t *input, int16_t *output, int32_t pitch) {
  v8i16 in0, in1, in2, in3;
  v8i16 temp0, temp1, tmp0, tmp1;
  v8i16 const0, const1, const2;
  v8i16 coeff = { 2217, 5352, -5352, 14500, 7500, 12000, 25000, 26000 };
  v8i16 zero = { 0 };
  v4i32 vec0_w, vec1_w, vec2_w, vec3_w;
  /* First pass (same structure as the 4x4 version, done on both halves). */
  LD_SH4(input, pitch / 2, in0, in1, in2, in3);
  TRANSPOSE4x4_H(in0, in1, in2, in3, in0, in1, in2, in3);
  BUTTERFLY_4(in0, in1, in2, in3, temp0, temp1, in1, in3);
  SLLI_4V(temp0, temp1, in1, in3, 3);
  in0 = temp0 + temp1;
  in2 = temp0 - temp1;
  SET_DOTP_VALUES(coeff, 0, 1, 2, const1, const2);
  temp0 = __msa_splati_h(coeff, 3);
  vec1_w = (v4i32)__msa_ilvev_h(zero, temp0);
  coeff = __msa_ilvl_h(zero, coeff);
  vec3_w = __msa_splati_w((v4i32)coeff, 0);
  ILVRL_H2_SH(in3, in1, tmp1, tmp0);
  vec0_w = vec1_w;
  vec2_w = vec3_w;
  DPADD_SH4_SW(tmp1, tmp0, tmp1, tmp0, const1, const1, const2, const2, vec0_w,
               vec1_w, vec2_w, vec3_w);
  SRA_4V(vec1_w, vec0_w, vec3_w, vec2_w, 12);
  PCKEV_H2_SH(vec1_w, vec0_w, vec3_w, vec2_w, in1, in3);
  /* Second pass: transpose back, butterfly, round and correct. */
  TRANSPOSE4x4_H(in0, in1, in2, in3, in0, in1, in2, in3);
  BUTTERFLY_4(in0, in1, in2, in3, temp0, temp1, in1, in3);
  in0 = temp0 + temp1 + 7;
  in2 = temp0 - temp1 + 7;
  in0 >>= 4;
  in2 >>= 4;
  SPLATI_W2_SW(coeff, 2, vec3_w, vec1_w);
  vec3_w += vec1_w;
  vec1_w = __msa_splati_w((v4i32)coeff, 1);
  const0 = RET_1_IF_NZERO_H(in3);
  ILVRL_H2_SH(in3, in1, tmp1, tmp0);
  vec0_w = vec1_w;
  vec2_w = vec3_w;
  DPADD_SH4_SW(tmp1, tmp0, tmp1, tmp0, const1, const1, const2, const2, vec0_w,
               vec1_w, vec2_w, vec3_w);
  SRA_4V(vec1_w, vec0_w, vec3_w, vec2_w, 16);
  PCKEV_H2_SH(vec1_w, vec0_w, vec3_w, vec2_w, in1, in3);
  in1 += const0;
  /* De-interleave the two 4x4 blocks into their separate output slots. */
  PCKEV_D2_SH(in1, in0, in3, in2, temp0, temp1);
  ST_SH2(temp0, temp1, output, 8);
  PCKOD_D2_SH(in1, in0, in3, in2, in0, in2);
  ST_SH2(in0, in2, output + 16, 8);
}
/* MSA 4x4 Walsh-Hadamard transform (applied to the luma DC coefficients).
 *
 * input  - 4x4 block of int16 values
 * pitch  - input stride in bytes (rows are pitch/2 int16 elements apart)
 * output - receives the 16 transformed values
 *
 * Widening to 32-bit words between the two passes avoids intermediate
 * overflow; the final result is rounded (+3, >>3) and packed back to int16.
 */
void vp8_short_walsh4x4_msa(int16_t *input, int16_t *output, int32_t pitch) {
  v8i16 in0_h, in1_h, in2_h, in3_h;
  v4i32 in0_w, in1_w, in2_w, in3_w, temp0, temp1, temp2, temp3;
  LD_SH4(input, pitch / 2, in0_h, in1_h, in2_h, in3_h);
  TRANSPOSE4x4_SH_SH(in0_h, in1_h, in2_h, in3_h, in0_h, in1_h, in2_h, in3_h);
  /* Widen each row to 32-bit lanes for the overflow-safe first pass. */
  UNPCK_R_SH_SW(in0_h, in0_w);
  UNPCK_R_SH_SW(in1_h, in1_w);
  UNPCK_R_SH_SW(in2_h, in2_w);
  UNPCK_R_SH_SW(in3_h, in3_w);
  BUTTERFLY_4(in0_w, in1_w, in3_w, in2_w, temp0, temp3, temp2, temp1);
  SLLI_4V(temp0, temp1, temp2, temp3, 2);
  BUTTERFLY_4(temp0, temp1, temp2, temp3, in0_w, in1_w, in2_w, in3_w);
  /* +1 correction on the DC term when the first butterfly output is non-zero. */
  temp0 = RET_1_IF_NZERO_W(temp0);
  in0_w += temp0;
  /* Second pass on the transposed data. */
  TRANSPOSE4x4_SW_SW(in0_w, in1_w, in2_w, in3_w, in0_w, in1_w, in2_w, in3_w);
  BUTTERFLY_4(in0_w, in1_w, in3_w, in2_w, temp0, temp3, temp2, temp1);
  BUTTERFLY_4(temp0, temp1, temp2, temp3, in0_w, in1_w, in2_w, in3_w);
  /* Bias negative lanes by +1 so the arithmetic shift below rounds toward zero. */
  in0_w += RET_1_IF_NEG_W(in0_w);
  in1_w += RET_1_IF_NEG_W(in1_w);
  in2_w += RET_1_IF_NEG_W(in2_w);
  in3_w += RET_1_IF_NEG_W(in3_w);
  ADD4(in0_w, 3, in1_w, 3, in2_w, 3, in3_w, 3, in0_w, in1_w, in2_w, in3_w);
  SRA_4V(in0_w, in1_w, in2_w, in3_w, 3);
  PCKEV_H2_SH(in1_w, in0_w, in3_w, in2_w, in0_h, in1_h);
  ST_SH2(in0_h, in1_h, output, 8);
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2009 - 2012 Stephen F. Booth <[email protected]>
* All Rights Reserved
*/
#pragma once
#import <Cocoa/Cocoa.h>
// ========================================
// Generates multipart/form-data from the given dictionary using the specified boundary
//
// formValues - field-name -> value dictionary to encode as the form parts
// boundary   - MIME boundary string separating the parts; must match the
//              boundary declared in the request's Content-Type header
// Returns the encoded request body.
// NOTE(review): the per-value encoding rules (NSString vs NSData etc.) live in
// the implementation file - confirm there before relying on them.
// ========================================
NSData * GenerateFormData(NSDictionary *formValues, NSString *boundary);
| {
"pile_set_name": "Github"
} |
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/
"use strict";
class ModuleDependencyTemplateAsId {
	/**
	 * Replaces the dependency's source range with the referenced module id,
	 * optionally prefixed with a pathinfo comment. Dependencies without a
	 * range are left untouched; unresolved modules render a missing-module
	 * error expression instead.
	 */
	apply(dep, source, outputOptions, requestShortener) {
		const range = dep.range;
		if(!range) return;
		let prefix = "";
		if(outputOptions.pathinfo) {
			prefix = `/*! ${requestShortener.shorten(dep.request)} */ `;
		}
		const replacement = dep.module ?
			prefix + JSON.stringify(dep.module.id) :
			require("./WebpackMissingModule").module(dep.request);
		source.replace(range[0], range[1] - 1, replacement);
	}
}
module.exports = ModuleDependencyTemplateAsId;
| {
"pile_set_name": "Github"
} |
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.core.io.http.internal;
import org.openhab.core.io.http.HttpContextFactoryService;
import org.openhab.core.io.http.WrappingHttpContext;
import org.osgi.framework.Bundle;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferencePolicy;
import org.osgi.service.http.HttpContext;
import org.osgi.service.http.HttpService;
/**
 * Create {@link HttpContext} instances when registering servlets, resources or filters using the
 * {@link HttpService#registerServlet} and corresponding methods.
 * The resulting {@link HttpContext} complies with the OSGi specification when it comes to resource resolving.
 *
 * @author Henning Treu - Initial contribution
 */
@Component(service = HttpContextFactoryService.class)
public class HttpContextFactoryServiceImpl implements HttpContextFactoryService {

    /** Shared wrapping context, injected dynamically by Declarative Services. */
    private WrappingHttpContext httpContext;

    @Override
    public HttpContext createDefaultHttpContext(Bundle bundle) {
        return httpContext.wrap(bundle);
    }

    @Reference(policy = ReferencePolicy.DYNAMIC)
    public void setHttpContext(WrappingHttpContext httpContext) {
        this.httpContext = httpContext;
    }

    public void unsetHttpContext(WrappingHttpContext httpContext) {
        // Bug fix: with a DYNAMIC reference policy the replacement service may
        // be bound *before* the old one is unbound. Unconditionally nulling the
        // field here would wipe out the freshly bound context, so only clear it
        // if it still refers to the context being unbound (OSGi Compendium,
        // Declarative Services dynamic reference semantics).
        if (this.httpContext == httpContext) {
            this.httpContext = null;
        }
    }
}
| {
"pile_set_name": "Github"
} |
package com.hjq.base;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.Application;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Bundle;
import android.util.SparseArray;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import androidx.annotation.ColorInt;
import androidx.annotation.DrawableRes;
import androidx.annotation.FloatRange;
import androidx.annotation.IdRes;
import androidx.annotation.LayoutRes;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.StringRes;
import androidx.annotation.StyleRes;
import androidx.appcompat.app.AppCompatDialog;
import androidx.core.content.ContextCompat;
import androidx.lifecycle.Lifecycle;
import androidx.lifecycle.LifecycleOwner;
import androidx.lifecycle.LifecycleRegistry;
import com.hjq.base.action.ActivityAction;
import com.hjq.base.action.AnimAction;
import com.hjq.base.action.ClickAction;
import com.hjq.base.action.HandlerAction;
import com.hjq.base.action.ResourcesAction;
import java.lang.ref.SoftReference;
import java.util.ArrayList;
import java.util.List;
/**
* author : Android 轮子哥
* github : https://github.com/getActivity/AndroidProject
* time : 2018/11/24
* desc : Dialog 基类
*/
public class BaseDialog extends AppCompatDialog implements LifecycleOwner,
ActivityAction, ResourcesAction, HandlerAction, ClickAction, AnimAction,
DialogInterface.OnShowListener, DialogInterface.OnCancelListener, DialogInterface.OnDismissListener {
    /** Dispatches the framework {@link DialogInterface} callbacks back to this dialog. */
    private final ListenersWrapper<BaseDialog> mListeners = new ListenersWrapper<>(this);
    /** Lifecycle registry backing the {@link LifecycleOwner} implementation. */
    private final LifecycleRegistry mLifecycle = new LifecycleRegistry(this);
    /** Show listeners (created lazily). */
    private List<BaseDialog.OnShowListener> mShowListeners;
    /** Cancel listeners (created lazily). */
    private List<BaseDialog.OnCancelListener> mCancelListeners;
    /** Dismiss listeners (created lazily). */
    private List<BaseDialog.OnDismissListener> mDismissListeners;
    public BaseDialog(Context context) {
        this(context, R.style.BaseDialogStyle);
    }
    public BaseDialog(Context context, @StyleRes int themeResId) {
        super(context, themeResId);
    }
    /**
     * Get the root layout of the dialog.
     */
    public View getContentView() {
        return findViewById(Window.ID_ANDROID_CONTENT);
    }
    /**
     * Get the currently configured window gravity.
     */
    public int getGravity() {
        Window window = getWindow();
        if (window != null) {
            WindowManager.LayoutParams params = window.getAttributes();
            return params.gravity;
        }
        return Gravity.NO_GRAVITY;
    }
    /**
     * Set the dialog window width.
     */
    public void setWidth(int width) {
        Window window = getWindow();
        if (window != null) {
            WindowManager.LayoutParams params = window.getAttributes();
            params.width = width;
            window.setAttributes(params);
        }
    }
    /**
     * Set the dialog window height.
     */
    public void setHeight(int height) {
        Window window = getWindow();
        if (window != null) {
            WindowManager.LayoutParams params = window.getAttributes();
            params.height = height;
            window.setAttributes(params);
        }
    }
    /**
     * Set the dialog window gravity.
     */
    public void setGravity(int gravity) {
        Window window = getWindow();
        if (window != null) {
            window.setGravity(gravity);
        }
    }
    /**
     * Set the dialog window animation style.
     */
    public void setWindowAnimations(@StyleRes int id) {
        Window window = getWindow();
        if (window != null) {
            window.setWindowAnimations(id);
        }
    }
    /**
     * Get the dialog window animation style.
     */
    public int getWindowAnimations() {
        Window window = getWindow();
        if (window != null) {
            return window.getAttributes().windowAnimations;
        }
        return BaseDialog.ANIM_DEFAULT;
    }
    /**
     * Enable or disable the background dim layer behind the dialog.
     */
    public void setBackgroundDimEnabled(boolean enabled) {
        Window window = getWindow();
        if (window != null) {
            if (enabled) {
                window.addFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND);
            } else {
                window.clearFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND);
            }
        }
    }
    /**
     * Set the dim amount of the background layer (only takes effect while the
     * dim layer is enabled, see {@link #setBackgroundDimEnabled}).
     */
    public void setBackgroundDimAmount(@FloatRange(from = 0.0, to = 1.0) float dimAmount) {
        Window window = getWindow();
        if (window != null) {
            window.setDimAmount(dimAmount);
        }
    }
    @Override
    public void dismiss() {
        // Cancel Runnables still queued through HandlerAction so they cannot
        // run against a dismissed dialog.
        removeCallbacks();
        View focusView = getCurrentFocus();
        if (focusView != null) {
            // Hide the soft keyboard before the dialog window disappears.
            getSystemService(InputMethodManager.class).hideSoftInputFromWindow(focusView.getWindowToken(), 0);
        }
        super.dismiss();
    }
    /** Lifecycle of this dialog: see onCreate/onStart/onStop and onShow/onDismiss below. */
    @NonNull
    @Override
    public Lifecycle getLifecycle() {
        return mLifecycle;
    }
    /**
     * Set a show listener.
     *
     * @param listener the show listener
     * @deprecated use {@link #addOnShowListener(BaseDialog.OnShowListener)} instead
     */
    @Deprecated
    @Override
    public void setOnShowListener(@Nullable DialogInterface.OnShowListener listener) {
        if (listener == null) {
            return;
        }
        addOnShowListener(new ShowListenerWrapper(listener));
    }
    /**
     * Set a cancel listener.
     *
     * @param listener the cancel listener
     * @deprecated use {@link #addOnCancelListener(BaseDialog.OnCancelListener)} instead
     */
    @Deprecated
    @Override
    public void setOnCancelListener(@Nullable DialogInterface.OnCancelListener listener) {
        if (listener == null) {
            return;
        }
        addOnCancelListener(new CancelListenerWrapper(listener));
    }
    /**
     * Set a dismiss listener.
     *
     * @param listener the dismiss listener
     * @deprecated use {@link #addOnDismissListener(BaseDialog.OnDismissListener)} instead
     */
    @Deprecated
    @Override
    public void setOnDismissListener(@Nullable DialogInterface.OnDismissListener listener) {
        if (listener == null) {
            return;
        }
        addOnDismissListener(new DismissListenerWrapper(listener));
    }
    /**
     * Set a key listener.
     *
     * @param listener the key listener
     * @deprecated use {@link #setOnKeyListener(BaseDialog.OnKeyListener)} instead
     */
    @Deprecated
    @Override
    public void setOnKeyListener(@Nullable DialogInterface.OnKeyListener listener) {
        super.setOnKeyListener(listener);
    }
    /** Set a key listener using the BaseDialog callback type. */
    public void setOnKeyListener(@Nullable BaseDialog.OnKeyListener listener) {
        super.setOnKeyListener(new KeyListenerWrapper(listener));
    }
    /**
     * Add a show listener.
     *
     * @param listener the listener to add
     */
    public void addOnShowListener(@Nullable BaseDialog.OnShowListener listener) {
        if (mShowListeners == null) {
            mShowListeners = new ArrayList<>();
            // Register the dispatching wrapper on first use.
            super.setOnShowListener(mListeners);
        }
        mShowListeners.add(listener);
    }
    /**
     * Add a cancel listener.
     *
     * @param listener the listener to add
     */
    public void addOnCancelListener(@Nullable BaseDialog.OnCancelListener listener) {
        if (mCancelListeners == null) {
            mCancelListeners = new ArrayList<>();
            super.setOnCancelListener(mListeners);
        }
        mCancelListeners.add(listener);
    }
    /**
     * Add a dismiss listener.
     *
     * @param listener the listener to add
     */
    public void addOnDismissListener(@Nullable BaseDialog.OnDismissListener listener) {
        if (mDismissListeners == null) {
            mDismissListeners = new ArrayList<>();
            super.setOnDismissListener(mListeners);
        }
        mDismissListeners.add(listener);
    }
    /**
     * Remove a show listener.
     *
     * @param listener the listener to remove
     */
    public void removeOnShowListener(@Nullable BaseDialog.OnShowListener listener) {
        if (mShowListeners != null) {
            mShowListeners.remove(listener);
        }
    }
    /**
     * Remove a cancel listener.
     *
     * @param listener the listener to remove
     */
    public void removeOnCancelListener(@Nullable BaseDialog.OnCancelListener listener) {
        if (mCancelListeners != null) {
            mCancelListeners.remove(listener);
        }
    }
    /**
     * Remove a dismiss listener.
     *
     * @param listener the listener to remove
     */
    public void removeOnDismissListener(@Nullable BaseDialog.OnDismissListener listener) {
        if (mDismissListeners != null) {
            mDismissListeners.remove(listener);
        }
    }
    /**
     * Replace the whole list of show listeners (used by the Builder).
     */
    private void setOnShowListeners(@Nullable List<BaseDialog.OnShowListener> listeners) {
        super.setOnShowListener(mListeners);
        mShowListeners = listeners;
    }
    /**
     * Replace the whole list of cancel listeners (used by the Builder).
     */
    private void setOnCancelListeners(@Nullable List<BaseDialog.OnCancelListener> listeners) {
        super.setOnCancelListener(mListeners);
        mCancelListeners = listeners;
    }
    /**
     * Replace the whole list of dismiss listeners (used by the Builder).
     */
    private void setOnDismissListeners(@Nullable List<BaseDialog.OnDismissListener> listeners) {
        super.setOnDismissListener(mListeners);
        mDismissListeners = listeners;
    }
    /**
     * {@link DialogInterface.OnShowListener}
     */
    @Override
    public void onShow(DialogInterface dialog) {
        // The dialog is now visible: advance the lifecycle to RESUMED before
        // notifying the registered show listeners.
        mLifecycle.handleLifecycleEvent(Lifecycle.Event.ON_RESUME);
        if (mShowListeners != null) {
            for (int i = 0; i < mShowListeners.size(); i++) {
                mShowListeners.get(i).onShow(this);
            }
        }
    }
    /**
     * {@link DialogInterface.OnCancelListener}
     */
    @Override
    public void onCancel(DialogInterface dialog) {
        if (mCancelListeners != null) {
            for (int i = 0; i < mCancelListeners.size(); i++) {
                mCancelListeners.get(i).onCancel(this);
            }
        }
    }
    /**
     * {@link DialogInterface.OnDismissListener}
     */
    @Override
    public void onDismiss(DialogInterface dialog) {
        // The dialog is gone: advance the lifecycle to DESTROYED before
        // notifying the registered dismiss listeners.
        mLifecycle.handleLifecycleEvent(Lifecycle.Event.ON_DESTROY);
        if (mDismissListeners != null) {
            for (int i = 0; i < mDismissListeners.size(); i++) {
                mDismissListeners.get(i).onDismiss(this);
            }
        }
    }
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mLifecycle.handleLifecycleEvent(Lifecycle.Event.ON_CREATE);
    }
    @Override
    protected void onStart() {
        super.onStart();
        mLifecycle.handleLifecycleEvent(Lifecycle.Event.ON_START);
    }
    @Override
    protected void onStop() {
        super.onStop();
        mLifecycle.handleLifecycleEvent(Lifecycle.Event.ON_STOP);
    }
@SuppressWarnings("unchecked")
public static class Builder<B extends Builder> implements LifecycleOwner, ActivityAction, ResourcesAction, ClickAction {
        /** Context used to inflate views and resolve resources */
        private final Context mContext;
        /** The dialog instance (created lazily) */
        private BaseDialog mDialog;
        /** The dialog content view */
        private View mContentView;
        /** Theme style resource */
        private int mThemeId = R.style.BaseDialogStyle;
        /** Window animation style */
        private int mAnimStyle = BaseDialog.ANIM_DEFAULT;
        /** Window gravity */
        private int mGravity = Gravity.NO_GRAVITY;
        /** Horizontal offset */
        private int mXOffset;
        /** Vertical offset */
        private int mYOffset;
        /** Window width and height */
        private int mWidth = ViewGroup.LayoutParams.WRAP_CONTENT;
        private int mHeight = ViewGroup.LayoutParams.WRAP_CONTENT;
        /** Background dim switch */
        private boolean mBackgroundDimEnabled = true;
        /** Background dim amount */
        private float mBackgroundDimAmount = 0.5f;
        /** Whether the dialog can be canceled */
        private boolean mCancelable = true;
        /** Whether touching outside cancels the dialog (only when it is cancelable at all) */
        private boolean mCanceledOnTouchOutside = true;
        /** Dialog show listeners */
        private List<BaseDialog.OnShowListener> mOnShowListeners;
        /** Dialog cancel listeners */
        private List<BaseDialog.OnCancelListener> mOnCancelListeners;
        /** Dialog dismiss listeners */
        private List<BaseDialog.OnDismissListener> mOnDismissListeners;
        /** Dialog key listener */
        private BaseDialog.OnKeyListener mOnKeyListener;
        /** Per-view-id click listeners */
        private SparseArray<BaseDialog.OnClickListener> mClickArray;
public Builder(Activity activity) {
this((Context) activity);
}
public Builder(Context context) {
mContext = context;
}
/**
* 设置主题 id
*/
public B setThemeStyle(@StyleRes int id) {
if (isCreated()) {
// Dialog 创建之后不能再设置主题 id
throw new IllegalStateException("are you ok?");
}
mThemeId = id;
return (B) this;
}
/**
* 设置布局
*/
public B setContentView(@LayoutRes int id) {
// 这里解释一下,为什么要传 new FrameLayout,因为如果不传的话,XML 的根布局获取到的 LayoutParams 对象会为空,也就会导致宽高参数解析不出来
return setContentView(LayoutInflater.from(mContext).inflate(id, new FrameLayout(mContext), false));
}
public B setContentView(View view) {
mContentView = view;
if (isCreated()) {
mDialog.setContentView(view);
} else {
if (mContentView != null) {
ViewGroup.LayoutParams layoutParams = mContentView.getLayoutParams();
if (layoutParams != null && mWidth == ViewGroup.LayoutParams.WRAP_CONTENT && mHeight == ViewGroup.LayoutParams.WRAP_CONTENT) {
// 如果当前 Dialog 的宽高设置了自适应,就以布局中设置的宽高为主
setWidth(layoutParams.width);
setHeight(layoutParams.height);
}
// 如果当前没有设置重心,就自动获取布局重心
if (mGravity == Gravity.NO_GRAVITY) {
if (layoutParams instanceof FrameLayout.LayoutParams) {
setGravity(((FrameLayout.LayoutParams) layoutParams).gravity);
} else if (layoutParams instanceof LinearLayout.LayoutParams) {
setGravity(((LinearLayout.LayoutParams) layoutParams).gravity);
} else {
// 默认重心是居中
setGravity(Gravity.CENTER);
}
}
}
}
return (B) this;
}
/**
* 设置重心位置
*/
public B setGravity(int gravity) {
// 适配 Android 4.2 新特性,布局反方向(开发者选项 - 强制使用从右到左的布局方向)
mGravity = gravity;
if (isCreated()) {
mDialog.setGravity(gravity);
}
return (B) this;
}
/**
* 设置水平偏移
*/
public B setXOffset(int offset) {
mXOffset = offset;
return (B) this;
}
/**
* 设置垂直偏移
*/
public B setYOffset(int offset) {
mYOffset = offset;
return (B) this;
}
/**
* 设置宽度
*/
public B setWidth(int width) {
mWidth = width;
if (isCreated()) {
mDialog.setWidth(width);
} else {
ViewGroup.LayoutParams params = mContentView != null ? mContentView.getLayoutParams() : null;
if (params != null) {
params.width = width;
mContentView.setLayoutParams(params);
}
}
return (B) this;
}
/**
* 设置高度
*/
public B setHeight(int height) {
mHeight = height;
if (isCreated()) {
mDialog.setHeight(height);
} else {
// 这里解释一下为什么要重新设置 LayoutParams
// 因为如果不这样设置的话,第一次显示的时候会按照 Dialog 宽高显示
// 但是 Layout 内容变更之后就不会按照之前的设置宽高来显示
// 所以这里我们需要对 View 的 LayoutParams 也进行设置
ViewGroup.LayoutParams params = mContentView != null ? mContentView.getLayoutParams() : null;
if (params != null) {
params.height = height;
mContentView.setLayoutParams(params);
}
}
return (B) this;
}
/**
* 是否可以取消
*/
public B setCancelable(boolean cancelable) {
mCancelable = cancelable;
if (isCreated()) {
mDialog.setCancelable(cancelable);
}
return (B) this;
}
/**
* 是否可以通过点击空白区域取消
*/
public B setCanceledOnTouchOutside(boolean cancel) {
mCanceledOnTouchOutside = cancel;
if (isCreated() && mCancelable) {
mDialog.setCanceledOnTouchOutside(cancel);
}
return (B) this;
}
/**
* 设置动画,已经封装好几种样式,具体可见{@link AnimAction}类
*/
public B setAnimStyle(@StyleRes int id) {
mAnimStyle = id;
if (isCreated()) {
mDialog.setWindowAnimations(id);
}
return (B) this;
}
/**
* 设置背景遮盖层开关
*/
public B setBackgroundDimEnabled(boolean enabled) {
mBackgroundDimEnabled = enabled;
if (isCreated()) {
mDialog.setBackgroundDimEnabled(enabled);
}
return (B) this;
}
/**
* 设置背景遮盖层的透明度(前提条件是背景遮盖层开关必须是为开启状态)
*/
public B setBackgroundDimAmount(@FloatRange(from = 0.0, to = 1.0) float dimAmount) {
mBackgroundDimAmount = dimAmount;
if (isCreated()) {
mDialog.setBackgroundDimAmount(dimAmount);
}
return (B) this;
}
/**
* 添加显示监听
*/
public B addOnShowListener(@NonNull BaseDialog.OnShowListener listener) {
if (isCreated()) {
mDialog.addOnShowListener(listener);
} else {
if (mOnShowListeners == null) {
mOnShowListeners = new ArrayList<>();
}
mOnShowListeners.add(listener);
}
return (B) this;
}
/**
* 添加取消监听
*/
public B addOnCancelListener(@NonNull BaseDialog.OnCancelListener listener) {
if (isCreated()) {
mDialog.addOnCancelListener(listener);
} else {
if (mOnCancelListeners == null) {
mOnCancelListeners = new ArrayList<>();
}
mOnCancelListeners.add(listener);
}
return (B) this;
}
/**
* 添加销毁监听
*/
public B addOnDismissListener(@NonNull BaseDialog.OnDismissListener listener) {
if (isCreated()) {
mDialog.addOnDismissListener(listener);
} else {
if (mOnDismissListeners == null) {
mOnDismissListeners = new ArrayList<>();
}
mOnDismissListeners.add(listener);
}
return (B) this;
}
/**
* 设置按键监听
*/
public B setOnKeyListener(@NonNull BaseDialog.OnKeyListener listener) {
if (isCreated()) {
mDialog.setOnKeyListener(listener);
} else {
mOnKeyListener = listener;
}
return (B) this;
}
/**
* 设置文本
*/
public B setText(@IdRes int viewId, @StringRes int stringId) {
return setText(viewId, getString(stringId));
}
public B setText(@IdRes int id, CharSequence text) {
((TextView) findViewById(id)).setText(text);
return (B) this;
}
/**
* 设置文本颜色
*/
public B setTextColor(@IdRes int id, @ColorInt int color) {
((TextView) findViewById(id)).setTextColor(color);
return (B) this;
}
/**
* 设置提示
*/
public B setHint(@IdRes int viewId, @StringRes int stringId) {
return setHint(viewId, getString(stringId));
}
public B setHint(@IdRes int id, CharSequence text) {
((TextView) findViewById(id)).setHint(text);
return (B) this;
}
/**
* 设置可见状态
*/
public B setVisibility(@IdRes int id, int visibility) {
findViewById(id).setVisibility(visibility);
return (B) this;
}
/**
* 设置背景
*/
public B setBackground(@IdRes int viewId, @DrawableRes int drawableId) {
return setBackground(viewId, ContextCompat.getDrawable(mContext, drawableId));
}
public B setBackground(@IdRes int id, Drawable drawable) {
findViewById(id).setBackground(drawable);
return (B) this;
}
/**
* 设置图片
*/
public B setImageDrawable(@IdRes int viewId, @DrawableRes int drawableId) {
return setBackground(viewId, ContextCompat.getDrawable(mContext, drawableId));
}
public B setImageDrawable(@IdRes int id, Drawable drawable) {
((ImageView) findViewById(id)).setImageDrawable(drawable);
return (B) this;
}
/**
* 设置点击事件
*/
public B setOnClickListener(@IdRes int id, @NonNull BaseDialog.OnClickListener listener) {
if (isCreated()) {
View view = mDialog.findViewById(id);
if (view != null) {
view.setOnClickListener(new ViewClickWrapper(mDialog, listener));
}
} else {
if (mClickArray == null) {
mClickArray = new SparseArray<>();
}
mClickArray.put(id, listener);
}
return (B) this;
}
/**
* 创建
*/
@SuppressLint("RtlHardcoded")
public BaseDialog create() {
// 判断布局是否为空
if (mContentView == null) {
throw new IllegalArgumentException("are you ok?");
}
// 如果当前没有设置重心,就设置一个默认的重心
if (mGravity == Gravity.NO_GRAVITY) {
mGravity = Gravity.CENTER;
}
// 如果当前没有设置动画效果,就设置一个默认的动画效果
if (mAnimStyle == BaseDialog.ANIM_DEFAULT) {
switch (mGravity) {
case Gravity.TOP:
mAnimStyle = BaseDialog.ANIM_TOP;
break;
case Gravity.BOTTOM:
mAnimStyle = BaseDialog.ANIM_BOTTOM;
break;
case Gravity.LEFT:
mAnimStyle = BaseDialog.ANIM_LEFT;
break;
case Gravity.RIGHT:
mAnimStyle = BaseDialog.ANIM_RIGHT;
break;
default:
mAnimStyle = BaseDialog.ANIM_DEFAULT;
break;
}
}
mDialog = createDialog(mContext, mThemeId);
mDialog.setContentView(mContentView);
mDialog.setCancelable(mCancelable);
if (mCancelable) {
mDialog.setCanceledOnTouchOutside(mCanceledOnTouchOutside);
}
// 设置参数
Window window = mDialog.getWindow();
if (window != null) {
WindowManager.LayoutParams params = window.getAttributes();
params.width = mWidth;
params.height = mHeight;
params.gravity = mGravity;
params.x = mXOffset;
params.y = mYOffset;
params.windowAnimations = mAnimStyle;
window.setAttributes(params);
if (mBackgroundDimEnabled) {
window.addFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND);
window.setDimAmount(mBackgroundDimAmount);
} else {
window.clearFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND);
}
}
if (mOnShowListeners != null) {
mDialog.setOnShowListeners(mOnShowListeners);
}
if (mOnCancelListeners != null) {
mDialog.setOnCancelListeners(mOnCancelListeners);
}
if (mOnDismissListeners != null) {
mDialog.setOnDismissListeners(mOnDismissListeners);
}
if (mOnKeyListener != null) {
mDialog.setOnKeyListener(mOnKeyListener);
}
for (int i = 0; mClickArray != null && i < mClickArray.size(); i++) {
mContentView.findViewById(mClickArray.keyAt(i)).setOnClickListener(new ViewClickWrapper(mDialog, mClickArray.valueAt(i)));
}
Activity activity = getActivity();
if (activity != null) {
DialogLifecycle.with(activity, mDialog);
}
return mDialog;
}
/**
* 显示
*/
public BaseDialog show() {
if (!isCreated()) {
create();
}
mDialog.show();
return mDialog;
}
/**
* 销毁当前 Dialog
*/
public void dismiss() {
if (mDialog != null) {
mDialog.dismiss();
}
}
@Override
public Context getContext() {
return mContext;
}
/**
* 当前 Dialog 是否创建了
*/
public boolean isCreated() {
return mDialog != null;
}
/**
* 当前 Dialog 是否显示了
*/
public boolean isShowing() {
return mDialog != null && mDialog.isShowing();
}
/**
* 创建 Dialog 对象(子类可以重写此方法来改变 Dialog 类型)
*/
protected BaseDialog createDialog(Context context, @StyleRes int themeId) {
return new BaseDialog(context, themeId);
}
/**
* 延迟执行
*/
public final void post(Runnable r) {
if (isShowing()) {
mDialog.post(r);
} else {
addOnShowListener(new ShowPostWrapper(r));
}
}
/**
* 延迟一段时间执行
*/
public final void postDelayed(Runnable r, long delayMillis) {
if (isShowing()) {
mDialog.postDelayed(r, delayMillis);
} else {
addOnShowListener(new ShowPostDelayedWrapper(r, delayMillis));
}
}
/**
* 在指定的时间执行
*/
public final void postAtTime(Runnable r, long uptimeMillis) {
if (isShowing()) {
mDialog.postAtTime(r, uptimeMillis);
} else {
addOnShowListener(new ShowPostAtTimeWrapper(r, uptimeMillis));
}
}
/**
* 获取 Dialog 的根布局
*/
public View getContentView() {
return mContentView;
}
/**
* 根据 id 查找 View
*/
@Override
public <V extends View> V findViewById(@IdRes int id) {
if (mContentView == null) {
// 没有 setContentView 就想 findViewById ?
throw new IllegalStateException("are you ok?");
}
return mContentView.findViewById(id);
}
/**
* 获取当前 Dialog 对象
*/
@Nullable
public BaseDialog getDialog() {
return mDialog;
}
@Nullable
@Override
public Lifecycle getLifecycle() {
if (mDialog != null) {
return mDialog.getLifecycle();
}
return null;
}
}
/**
 * Dialog lifecycle manager: ties a dialog to its host Activity so that the
 * dialog is dismissed when the Activity is destroyed, and window animations
 * are suppressed while the Activity is in the background.
 */
private static final class DialogLifecycle implements
        Application.ActivityLifecycleCallbacks,
        BaseDialog.OnShowListener,
        BaseDialog.OnDismissListener {

    private static void with(Activity activity, BaseDialog dialog) {
        new DialogLifecycle(activity, dialog);
    }

    private BaseDialog mDialog;
    private Activity mActivity;

    /** Saved dialog animation style (avoids replaying the enter animation when the dialog returns from the background) */
    private int mDialogAnim;

    private DialogLifecycle(Activity activity, BaseDialog dialog) {
        // mDialog is only assigned in onShow(); activity callbacks are
        // registered while the dialog is visible.
        mActivity = activity;
        dialog.addOnShowListener(this);
        dialog.addOnDismissListener(this);
    }

    @Override
    public void onActivityCreated(@NonNull Activity activity, @Nullable Bundle savedInstanceState) {}

    @Override
    public void onActivityStarted(@NonNull Activity activity) {}

    @Override
    public void onActivityResumed(@NonNull Activity activity) {
        if (mActivity != activity) {
            return;
        }
        if (mDialog != null && mDialog.isShowing()) {
            // Restore the dialog's animation style. The delay is required:
            // restoring immediately can still replay the enter animation.
            mDialog.postDelayed(() -> {
                if (mDialog != null && mDialog.isShowing()) {
                    mDialog.setWindowAnimations(mDialogAnim);
                }
            }, 100);
        }
    }

    @Override
    public void onActivityPaused(@NonNull Activity activity) {
        if (mActivity != activity) {
            return;
        }
        if (mDialog != null && mDialog.isShowing()) {
            // Remember the current animation style...
            mDialogAnim = mDialog.getWindowAnimations();
            // ...then disable animations while the Activity is backgrounded
            mDialog.setWindowAnimations(BaseDialog.ANIM_EMPTY);
        }
    }

    @Override
    public void onActivityStopped(@NonNull Activity activity) {}

    @Override
    public void onActivitySaveInstanceState(@NonNull Activity activity, @NonNull Bundle outState) {}

    @Override
    public void onActivityDestroyed(@NonNull Activity activity) {
        if (mActivity != activity) {
            return;
        }
        if (mDialog != null) {
            mDialog.removeOnShowListener(this);
            mDialog.removeOnDismissListener(this);
            if (mDialog.isShowing()) {
                mDialog.dismiss();
            }
            mDialog = null;
        }
        unregisterActivityLifecycleCallbacks();
        // Release the Activity reference to avoid leaking it
        mActivity = null;
    }

    @Override
    public void onShow(BaseDialog dialog) {
        mDialog = dialog;
        registerActivityLifecycleCallbacks();
    }

    @Override
    public void onDismiss(BaseDialog dialog) {
        mDialog = null;
        unregisterActivityLifecycleCallbacks();
    }

    /**
     * Register the Activity lifecycle callbacks.
     */
    private void registerActivityLifecycleCallbacks() {
        if (mActivity == null) {
            return;
        }
        // API 29+ supports per-Activity registration; otherwise register on
        // the Application and filter by mActivity in each callback.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
            mActivity.registerActivityLifecycleCallbacks(this);
        } else {
            mActivity.getApplication().registerActivityLifecycleCallbacks(this);
        }
    }

    /**
     * Unregister the Activity lifecycle callbacks.
     */
    private void unregisterActivityLifecycleCallbacks() {
        if (mActivity == null) {
            return;
        }
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
            mActivity.unregisterActivityLifecycleCallbacks(this);
        } else {
            mActivity.getApplication().unregisterActivityLifecycleCallbacks(this);
        }
    }
}
/**
 * Dialog listener wrapper (fixes the memory leak caused by the platform
 * Dialog holding strong references to its listener objects). The referent is
 * held softly so the GC may reclaim it.
 */
private static final class ListenersWrapper<T extends DialogInterface.OnShowListener & DialogInterface.OnCancelListener & DialogInterface.OnDismissListener>
        extends SoftReference<T> implements DialogInterface.OnShowListener, DialogInterface.OnCancelListener, DialogInterface.OnDismissListener {

    private ListenersWrapper(T referent) {
        super(referent);
    }

    @Override
    public void onShow(DialogInterface dialog) {
        // Read the soft reference exactly once: the previous code called
        // get() twice, and the referent could be cleared between the null
        // check and the invocation, causing an NPE.
        T listener = get();
        if (listener != null) {
            listener.onShow(dialog);
        }
    }

    @Override
    public void onCancel(DialogInterface dialog) {
        T listener = get();
        if (listener != null) {
            listener.onCancel(dialog);
        }
    }

    @Override
    public void onDismiss(DialogInterface dialog) {
        T listener = get();
        if (listener != null) {
            listener.onDismiss(dialog);
        }
    }
}
/**
 * Adapts a {@link BaseDialog.OnClickListener} to the framework
 * {@link View.OnClickListener}, supplying the owning dialog on each click.
 */
private static final class ViewClickWrapper
        implements View.OnClickListener {

    private final BaseDialog mDialog;
    private final BaseDialog.OnClickListener mListener;

    private ViewClickWrapper(BaseDialog dialog, BaseDialog.OnClickListener listener) {
        mListener = listener;
        mDialog = dialog;
    }

    @SuppressWarnings("unchecked")
    @Override
    public final void onClick(View view) {
        mListener.onClick(mDialog, view);
    }
}
/**
 * Show listener wrapper: softly references the framework listener so it can
 * be reclaimed (e.g. after a configuration change).
 */
private static final class ShowListenerWrapper
        extends SoftReference<DialogInterface.OnShowListener>
        implements BaseDialog.OnShowListener {

    private ShowListenerWrapper(DialogInterface.OnShowListener referent) {
        super(referent);
    }

    @Override
    public void onShow(BaseDialog dialog) {
        // Read the soft reference once: it can be cleared (e.g. after an
        // orientation change) between a null check and a second get().
        DialogInterface.OnShowListener listener = get();
        if (listener != null) {
            listener.onShow(dialog);
        }
    }
}
/**
 * Cancel listener wrapper: softly references the framework listener so it can
 * be reclaimed (e.g. after a configuration change).
 */
private static final class CancelListenerWrapper
        extends SoftReference<DialogInterface.OnCancelListener>
        implements BaseDialog.OnCancelListener {

    private CancelListenerWrapper(DialogInterface.OnCancelListener referent) {
        super(referent);
    }

    @Override
    public void onCancel(BaseDialog dialog) {
        // Read the soft reference once: it can be cleared (e.g. after an
        // orientation change) between a null check and a second get().
        DialogInterface.OnCancelListener listener = get();
        if (listener != null) {
            listener.onCancel(dialog);
        }
    }
}
/**
 * Dismiss listener wrapper: softly references the framework listener so it
 * can be reclaimed (e.g. after a configuration change).
 */
private static final class DismissListenerWrapper
        extends SoftReference<DialogInterface.OnDismissListener>
        implements BaseDialog.OnDismissListener {

    private DismissListenerWrapper(DialogInterface.OnDismissListener referent) {
        super(referent);
    }

    @Override
    public void onDismiss(BaseDialog dialog) {
        // Read the soft reference once: it can be cleared (e.g. after an
        // orientation change) between a null check and a second get().
        DialogInterface.OnDismissListener listener = get();
        if (listener != null) {
            listener.onDismiss(dialog);
        }
    }
}
/**
 * Key listener wrapper: adapts {@link DialogInterface.OnKeyListener} to
 * {@link BaseDialog.OnKeyListener}.
 */
private static final class KeyListenerWrapper
        implements DialogInterface.OnKeyListener {

    private final BaseDialog.OnKeyListener mListener;

    private KeyListenerWrapper(BaseDialog.OnKeyListener listener) {
        mListener = listener;
    }

    @Override
    public boolean onKey(DialogInterface dialog, int keyCode, KeyEvent event) {
        // The wrapped listener can be null after an orientation change
        if (mListener != null && dialog instanceof BaseDialog) {
            // Bug fix: propagate the listener's verdict. The previous code
            // discarded the return value and always returned false, so a
            // BaseDialog.OnKeyListener could never consume a key event.
            return mListener.onKey((BaseDialog) dialog, event);
        }
        return false;
    }
}
/**
 * post-task wrapper: defers a Runnable until the dialog is first shown,
 * then runs it once and removes itself.
 */
private static final class ShowPostWrapper implements OnShowListener {

    private final Runnable mRunnable;

    private ShowPostWrapper(Runnable r) {
        mRunnable = r;
    }

    @Override
    public void onShow(BaseDialog dialog) {
        if (mRunnable != null) {
            // One-shot: unregister before posting
            dialog.removeOnShowListener(this);
            dialog.post(mRunnable);
        }
    }
}
/**
 * postDelayed-task wrapper: defers a delayed Runnable until the dialog is
 * first shown, then schedules it once and removes itself.
 */
private static final class ShowPostDelayedWrapper implements OnShowListener {

    private final Runnable mRunnable;
    private final long mDelayMillis;

    private ShowPostDelayedWrapper(Runnable r, long delayMillis) {
        mRunnable = r;
        mDelayMillis = delayMillis;
    }

    @Override
    public void onShow(BaseDialog dialog) {
        if (mRunnable != null) {
            // One-shot: unregister before scheduling
            dialog.removeOnShowListener(this);
            dialog.postDelayed(mRunnable, mDelayMillis);
        }
    }
}
/**
 * postAtTime-task wrapper: defers a time-anchored Runnable until the dialog
 * is first shown, then schedules it once and removes itself.
 */
private static final class ShowPostAtTimeWrapper implements OnShowListener {

    private final Runnable mRunnable;
    private final long mUptimeMillis;

    private ShowPostAtTimeWrapper(Runnable r, long uptimeMillis) {
        mRunnable = r;
        mUptimeMillis = uptimeMillis;
    }

    @Override
    public void onShow(BaseDialog dialog) {
        if (mRunnable != null) {
            // One-shot: unregister before scheduling
            dialog.removeOnShowListener(this);
            dialog.postAtTime(mRunnable, mUptimeMillis);
        }
    }
}
/**
 * Click listener for views inside the dialog.
 */
public interface OnClickListener<V extends View> {

    /**
     * A view inside the dialog was clicked.
     */
    void onClick(BaseDialog dialog, V view);
}
/**
 * Show listener.
 */
public interface OnShowListener {

    /**
     * The dialog was shown.
     */
    void onShow(BaseDialog dialog);
}
/**
 * Cancel listener.
 */
public interface OnCancelListener {

    /**
     * The dialog was cancelled.
     */
    void onCancel(BaseDialog dialog);
}
/**
 * Dismiss listener.
 */
public interface OnDismissListener {

    /**
     * The dialog was dismissed.
     */
    void onDismiss(BaseDialog dialog);
}
/**
 * Key listener.
 */
public interface OnKeyListener {

    /**
     * A key event occurred; return true to consume it.
     */
    boolean onKey(BaseDialog dialog, KeyEvent event);
}
} | {
"pile_set_name": "Github"
} |
quota=INF
quota=1
quota=INF
quota22222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222=1
quota=1
quota=uotta=1
quota=uota
| {
"pile_set_name": "Github"
} |
package org.jeecgframework.minidao.aop;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import ognl.Ognl;
import ognl.OgnlException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jeecgframework.minidao.annotation.Arguments;
import org.jeecgframework.minidao.annotation.IdAutoGenerator;
import org.jeecgframework.minidao.annotation.ResultType;
import org.jeecgframework.minidao.annotation.Sql;
import org.jeecgframework.minidao.aspect.EmptyInterceptor;
import org.jeecgframework.minidao.def.MiniDaoConstants;
import org.jeecgframework.minidao.pojo.MiniDaoPage;
import org.jeecgframework.minidao.spring.rowMapper.MiniColumnMapRowMapper;
import org.jeecgframework.minidao.spring.rowMapper.MiniColumnOriginalMapRowMapper;
import org.jeecgframework.minidao.util.FreemarkerParseFactory;
import org.jeecgframework.minidao.util.MiniDaoUtil;
import org.jeecgframework.minidao.util.ParameterNameUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.ColumnMapRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.support.GeneratedKeyHolder;
import org.springframework.jdbc.support.KeyHolder;
/**
*
* @Title:MiniDaoHandler
* @description:MiniDAO 拦截器
* @author 张代浩
* @mail [email protected]
* @category www.jeecg.org
* @date 20130817
* @version V1.0
*/
@SuppressWarnings("rawtypes")
public class MiniDaoHandler implements InvocationHandler {
private static final Log logger = LogFactory.getLog(MiniDaoHandler.class);

@Autowired
private JdbcTemplate jdbcTemplate;

@Autowired
private NamedParameterJdbcTemplate namedParameterJdbcTemplate;

// Recognized values for keyType (see getColumnMapRowMapper)
private String UPPER_KEY = "upper";
private String LOWER_KEY = "lower";

/**
 * Map key style for returned rows: "upper", "lower" or "origin".
 */
private String keyType = "origin";

private boolean formatSql = false;
// When true, every executed SQL statement is logged at INFO level
private boolean showSql = false;

// Database dialect used for pagination SQL generation
private String dbType;

/**
 * MiniDao interceptor invoked on insert/update to pre-process entity fields.
 */
private EmptyInterceptor emptyInterceptor;
/**
 * Dynamic-proxy entry point for all MiniDao interface methods.
 * Loads the SQL template and its parameters from the method's annotations,
 * renders the template, binds placeholder parameters, executes the SQL and
 * converts the result to the method's declared return type.
 *
 * @param proxy  the proxy instance (unused)
 * @param method the DAO interface method being invoked
 * @param args   the caller-supplied arguments
 * @return the converted query/update result, or null when the query matched no rows
 * @throws Throwable any non-empty-result execution failure is rethrown
 */
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
    // Template parameters extracted from the method's annotations/arguments
    Map<String, Object> sqlParamsMap = new HashMap<String, Object>();
    // Pagination settings (page/rows), populated from the arguments if present
    MiniDaoPage pageSetting = new MiniDaoPage();

    // Step.1: load the SQL template and collect its parameters
    String templateSql = installDaoMetaData(pageSetting, method, sqlParamsMap, args);
    // Step.2: render the template into executable SQL
    String executeSql = parseSqlTemplate(method, templateSql, sqlParamsMap);
    // Step.3: build the named-placeholder parameter map
    Map<String, Object> sqlMap = installPlaceholderSqlParam(executeSql, sqlParamsMap);

    // Step.4: execute and convert the result
    Object returnObj;
    try {
        returnObj = getReturnMinidaoResult(dbType, pageSetting, method, executeSql, sqlMap);
    } catch (EmptyResultDataAccessException e) {
        // An empty query result is not an error: swallow the Spring
        // exception and return null to the caller.
        returnObj = null;
    } catch (Exception e) {
        // Log through the class logger instead of printStackTrace(),
        // then propagate to the caller.
        logger.error("MiniDao SQL execution failed: " + e.getMessage(), e);
        throw e;
    }

    if (showSql) {
        logger.info("MiniDao-SQL:\n\n" + executeSql);
    }
    return returnObj;
}
/**
 * Checks whether the method name denotes a write operation
 * (insert/update/delete, per {@code MiniDaoConstants.INF_METHOD_ACTIVE}).
 *
 * @param methodName the DAO method name
 * @return true when the name starts with one of the active-method prefixes
 */
private static boolean checkActiveKey(String methodName) {
    for (String prefix : MiniDaoConstants.INF_METHOD_ACTIVE.split(",")) {
        if (methodName.startsWith(prefix)) {
            return true;
        }
    }
    return false;
}
/**
 * Checks whether the SQL statement itself is a write operation
 * (insert/update/delete, per {@code MiniDaoConstants.INF_METHOD_ACTIVE}).
 *
 * @param sql the rendered SQL statement
 * @return true when the trimmed, lower-cased SQL starts with an active keyword
 */
private static boolean checkActiveSql(String sql) {
    String normalized = sql.trim().toLowerCase();
    for (String prefix : MiniDaoConstants.INF_METHOD_ACTIVE.split(",")) {
        if (normalized.startsWith(prefix)) {
            return true;
        }
    }
    return false;
}
/**
 * Checks whether the method name denotes a batch operation
 * (per {@code MiniDaoConstants.INF_METHOD_BATCH}).
 *
 * @param methodName the DAO method name
 * @return true when the name starts with one of the batch-method prefixes
 */
private static boolean checkBatchKey(String methodName) {
    for (String prefix : MiniDaoConstants.INF_METHOD_BATCH.split(",")) {
        if (methodName.startsWith(prefix)) {
            return true;
        }
    }
    return false;
}
/**
 * Splices a batch-update result chunk into the aggregate result array.
 * The chunk occupies positions {@code [index - arr.length, index)} of
 * {@code result}.
 *
 * @param result the aggregate result array being filled
 * @param index  the exclusive end position of this chunk within result
 * @param arr    the per-batch update counts to copy in
 */
private void addResulArray(int[] result, int index, int[] arr) {
    // System.arraycopy replaces the original hand-rolled copy loop:
    // identical semantics, clearer intent, and bounds-checked natively.
    System.arraycopy(arr, 0, result, index - arr.length, arr.length);
}
/**
 * Executes a semicolon-separated SQL script as JDBC batches of (at most)
 * 100 statements, returning the per-statement update counts.
 *
 * @param executeSql semicolon-separated SQL statements
 * @return update counts, one entry per statement, in statement order
 */
private int[] batchUpdate(String executeSql) {
    String[] sqls = executeSql.split(";");
    // Small scripts go out as a single batch
    if (sqls.length < 100) {
        return jdbcTemplate.batchUpdate(sqls);
    }
    int[] result = new int[sqls.length];
    List<String> sqlList = new ArrayList<String>();
    for (int i = 0; i < sqls.length; i++) {
        sqlList.add(sqls[i]);
        // NOTE(review): flushes whenever i is a multiple of 100, so the very
        // first flush (i == 0) sends a batch of a single statement and later
        // batches hold 100 — confirm this uneven first batch is intended.
        if (i % 100 == 0) {
            addResulArray(result, i + 1, jdbcTemplate.batchUpdate(sqlList.toArray(new String[0])));
            sqlList.clear();
        }
    }
    // Flush whatever remains after the last full batch
    addResulArray(result, sqls.length, jdbcTemplate.batchUpdate(sqlList.toArray(new String[0])));
    return result;
}
/**
 * Selects the row mapper that controls the key casing of returned Maps,
 * based on the configured key type ("lower", "upper" or original casing).
 */
private RowMapper<Map<String, Object>> getColumnMapRowMapper() {
    String type = getKeyType();
    if (type.equalsIgnoreCase(LOWER_KEY)) {
        return new MiniColumnMapRowMapper();
    }
    if (type.equalsIgnoreCase(UPPER_KEY)) {
        return new ColumnMapRowMapper();
    }
    // Anything else keeps the database's original column casing
    return new MiniColumnOriginalMapRowMapper();
}
/**
 * Wraps a query in a COUNT subselect for pagination totals. To support
 * another database dialect, adjust only this method.
 *
 * @param sql the original query
 * @return a "select count(0)" statement over the query
 */
private String getCountSql(String sql) {
    // Strip ORDER BY before counting: it is meaningless inside a count
    // subquery, improves performance, and is required by SQL Server.
    sql = removeOrderBy(sql);
    return "select count(0) from (" + sql + ") tmp_count";
}
/**
 * Removes a trailing ORDER BY clause so the query can be used as a count
 * subquery (notably required by SQL Server).
 * Everything from the first "order by" to the end of the string is removed,
 * case-insensitively, across newlines.
 *
 * @param sql the query, may be null
 * @return the query without its ORDER BY tail, or null when sql is null
 */
public String removeOrderBy(String sql) {
    if (sql == null) {
        return null;
    }
    // (?i): case-insensitive; [\s\S] matches any character including
    // newlines. (The original class [\s|\S] listed '|' redundantly —
    // inside a character class it is a literal, already covered by \S.)
    return sql.replaceAll("(?i)order by [\\s\\S]+$", "");
}
// Database dialect used for pagination SQL generation
public String getDbType() {
    return dbType;
}

public JdbcTemplate getJdbcTemplate() {
    return jdbcTemplate;
}

// Map key style: "upper", "lower" or "origin"
public String getKeyType() {
    return keyType;
}

public NamedParameterJdbcTemplate getNamedParameterJdbcTemplate() {
    return namedParameterJdbcTemplate;
}
/**
 * Executes the SQL through Spring JDBC and converts the result to the DAO
 * method's declared return type
 * [Map / bean / List&lt;bean&gt; / List&lt;Map&gt; / MiniDaoPage / primitive / wrapper].
 *
 * @param dbType      database dialect, used for pagination SQL
 * @param pageSetting pagination settings; also receives total/results for MiniDaoPage returns
 * @param method      the DAO method (drives return-type conversion)
 * @param executeSql  the rendered SQL
 * @param paramMap    named parameters, or null to use the plain JdbcTemplate
 * @return the converted result, or null when no branch matched
 */
@SuppressWarnings("unchecked")
private Object getReturnMinidaoResult(String dbType, MiniDaoPage pageSetting, Method method, String executeSql, Map<String, Object> paramMap) {
    String methodName = method.getName();
    // Write operations are detected by method-name prefix OR by the SQL verb itself
    if (checkActiveKey(methodName) || checkActiveSql(executeSql)) {
        // @IdAutoGenerator: execute the insert and return the generated key
        boolean idGenerators_flag = method.isAnnotationPresent(IdAutoGenerator.class);
        if (idGenerators_flag) {
            KeyHolder keyHolder = new GeneratedKeyHolder();
            // NOTE(review): keyHolder.getKey() can be null if the driver
            // returns no generated key — would NPE here; confirm drivers used.
            if (paramMap != null) {
                MapSqlParameterSource paramSource = new MapSqlParameterSource(paramMap);
                namedParameterJdbcTemplate.update(executeSql, paramSource,keyHolder,new String[]{"id"});
                return keyHolder.getKey().intValue();
            } else {
                jdbcTemplate.update(executeSql,keyHolder);
                return keyHolder.getKey().intValue();
            }
        }else{
            // Plain write: return the affected-row count
            if (paramMap != null) {
                return namedParameterJdbcTemplate.update(executeSql, paramMap);
            } else {
                return jdbcTemplate.update(executeSql);
            }
        }
    } else if (checkBatchKey(methodName)) {
        // Batch method: execute the semicolon-separated script in batches
        return batchUpdate(executeSql);
    } else {
        // Query operation: convert by declared return type
        Class<?> returnType = method.getReturnType();
        if (returnType.isPrimitive()) {
            // Query as BigDecimal (works through named parameters), then
            // narrow to the declared primitive
            Number number = namedParameterJdbcTemplate.queryForObject(executeSql, paramMap, BigDecimal.class);
            if ("int".equals(returnType.getCanonicalName())) {
                return number.intValue();
            } else if ("long".equals(returnType.getCanonicalName())) {
                return number.longValue();
            } else if ("double".equals(returnType.getCanonicalName())) {
                return number.doubleValue();
            }
        } else if (returnType.isAssignableFrom(List.class) || returnType.isAssignableFrom(MiniDaoPage.class)) {
            int page = pageSetting.getPage();
            int rows = pageSetting.getRows();
            if (page != 0 && rows != 0) {
                // MiniDaoPage additionally needs the total row count
                if (returnType.isAssignableFrom(MiniDaoPage.class)) {
                    if (paramMap != null) {
                        pageSetting.setTotal(namedParameterJdbcTemplate.queryForObject(getCountSql(executeSql), paramMap, Integer.class));
                    } else {
                        pageSetting.setTotal(jdbcTemplate.queryForObject(getCountSql(executeSql), Integer.class));
                    }
                }
                // Rewrite the SQL with dialect-specific pagination
                executeSql = MiniDaoUtil.createPageSql(dbType, executeSql, page, rows);
            }
            // Element mapper derived from @ResultType or the generic signature
            RowMapper resultType = getListRealType(method);
            List list;
            if (paramMap != null) {
                list = namedParameterJdbcTemplate.query(executeSql, paramMap, resultType);
            } else {
                list = jdbcTemplate.query(executeSql, resultType);
            }
            if (returnType.isAssignableFrom(MiniDaoPage.class)) {
                pageSetting.setResults(list);
                return pageSetting;
            } else {
                return list;
            }
        } else if (returnType.isAssignableFrom(Map.class)) {
            // Single-row Map result, key casing per configured keyType
            if (paramMap != null) {
                return namedParameterJdbcTemplate.queryForObject(executeSql, paramMap, getColumnMapRowMapper());
            } else {
                return jdbcTemplate.queryForObject(executeSql, getColumnMapRowMapper());
            }
        } else if (returnType.isAssignableFrom(String.class)) {
            if (paramMap != null) {
                return namedParameterJdbcTemplate.queryForObject(executeSql, paramMap, String.class);
            } else {
                return jdbcTemplate.queryForObject(executeSql, String.class);
            }
        } else if (MiniDaoUtil.isWrapClass(returnType)) {
            // Wrapper types (Integer, Long, ...) map directly
            if (paramMap != null) {
                return namedParameterJdbcTemplate.queryForObject(executeSql, paramMap, returnType);
            } else {
                return jdbcTemplate.queryForObject(executeSql, returnType);
            }
        } else {
            // Bean result: map columns to properties (Spring 4+ API)
            RowMapper<?> rm = BeanPropertyRowMapper.newInstance(returnType);
            if (paramMap != null) {
                return namedParameterJdbcTemplate.queryForObject(executeSql, paramMap, rm);
            } else {
                return jdbcTemplate.queryForObject(executeSql, rm);
            }
        }
    }
    // Reached only for an unhandled primitive return type
    return null;
}
/**
 * Resolves the element RowMapper for a List-returning DAO method:
 * prefers @ResultType, then the method's generic List&lt;T&gt; signature,
 * and finally falls back to the configured Map row mapper.
 *
 * @param method the DAO method
 * @return a RowMapper for the list element type
 */
private RowMapper<?> getListRealType(Method method) {
    // 1) Explicit @ResultType wins
    ResultType resultType = method.getAnnotation(ResultType.class);
    if (resultType != null) {
        if (resultType.value().equals(Map.class)) {
            return getColumnMapRowMapper();
        }
        return BeanPropertyRowMapper.newInstance(resultType.value());
    }
    // 2) Otherwise parse the generic return type, e.g. java.util.List<com.Foo>
    String genericReturnType = method.getGenericReturnType().toString();
    String realType = genericReturnType.replace("java.util.List", "").replace("<", "").replace(">", "");
    if (realType.contains("java.util.Map")) {
        return getColumnMapRowMapper();
    } else if (realType.length() > 0) {
        try {
            return BeanPropertyRowMapper.newInstance(Class.forName(realType));
        } catch (ClassNotFoundException e) {
            logger.error(e.getMessage(), e.fillInStackTrace());
            throw new RuntimeException("minidao get class error ,class name is:" + realType);
        }
    }
    // 3) Raw List: default to Map rows
    return getColumnMapRowMapper();
}
/**
 * Loads the SQL template and collects template parameters for a DAO method.
 * Also runs the EmptyInterceptor on single-entity insert/update calls and
 * extracts page/rows pagination arguments.
 *
 * @param pageSetting receives page/rows when present among the arguments
 * @param method      the DAO method
 * @param sqlParamsMap receives the template parameters (name -&gt; value)
 * @param args        the caller-supplied arguments
 * @return the SQL template from @Sql, or null when the method has none
 * @throws Exception when argument annotations are missing or inconsistent
 */
private String installDaoMetaData(MiniDaoPage pageSetting, Method method, Map<String, Object> sqlParamsMap, Object[] args) throws Exception {
    // Interceptor hook: single-entity insert*/update* calls get their
    // fields pre-processed before SQL generation
    if(emptyInterceptor!=null && args!= null && args.length==1){
        String methodName = method.getName();
        Object obj = args[0];
        Field[] fields = obj.getClass().getDeclaredFields();
        if(methodName.startsWith("insert")){
            if(emptyInterceptor!=null){
                emptyInterceptor.onInsert(fields, obj);
            }
        }
        if(methodName.startsWith("update")){
            if(emptyInterceptor!=null){
                emptyInterceptor.onUpdate(fields, obj);
            }
        }
    }
    String templateSql = null;
    // Methods with more than one parameter must carry @Arguments or @param
    boolean arguments_flag = method.isAnnotationPresent(Arguments.class);
    if (arguments_flag) {
        // [1] Parameter names come from the @Arguments annotation
        Arguments arguments = method.getAnnotation(Arguments.class);
        logger.debug("@Arguments------------------------------------------" + Arrays.toString(arguments.value()));
        if (arguments.value().length != args.length) {
            // Annotation/parameter count mismatch
            throw new Exception("注释标签@Arguments参数数目,与方法参数数目不相等~");
        }
        // Map each annotated name to its argument value
        int args_num = 0;
        for (String v : arguments.value()) {
            // "page"/"rows" additionally feed the pagination settings
            if (v.equalsIgnoreCase("page")) {
                pageSetting.setPage(Integer.parseInt(args[args_num].toString()));
            }
            if (v.equalsIgnoreCase("rows")) {
                pageSetting.setRows(Integer.parseInt(args[args_num].toString()));
            }
            sqlParamsMap.put(v, args[args_num]);
            args_num++;
        }
    } else {
        // No @Arguments: fall back to per-parameter @param annotations
        if (args != null && args.length >= 1) {
            String[] params = ParameterNameUtils.getMethodParameterNamesByAnnotation(method);
            if(params==null || params.length==0){
                throw new Exception("方法参数数目>=2,必须使用:方法标签@Arguments 或 参数标签@param");
            }
            if (params.length != args.length) {
                throw new Exception("方法参数数目>=2,参数必须使用:标签@param");
            }
            int args_num = 0;
            for (String v : params) {
                if(v==null){
                    throw new Exception("Dao接口定义,所有参数必须使用@param标签~");
                }
                if (v.equalsIgnoreCase("page")) {
                    pageSetting.setPage(Integer.parseInt(args[args_num].toString()));
                }
                if (v.equalsIgnoreCase("rows")) {
                    pageSetting.setRows(Integer.parseInt(args[args_num].toString()));
                }
                sqlParamsMap.put(v, args[args_num]);
                args_num++;
            }
        // NOTE(review): this branch is unreachable — the preceding
        // "args.length >= 1" condition subsumes "args.length == 1", so a
        // single un-annotated argument is never wrapped as SQL_FTL_DTO.
        // The exception texts above suggest ">= 1" was meant to be "> 1";
        // confirm intended behavior before changing it.
        } else if (args != null && args.length == 1) {
            sqlParamsMap.put(MiniDaoConstants.SQL_FTL_DTO, args[0]);
        }
    }
    // [2] The SQL template comes from the @Sql annotation, when non-empty
    if (method.isAnnotationPresent(Sql.class)) {
        Sql sql = method.getAnnotation(Sql.class);
        // An inline @Sql value overrides any external SQL file
        if (StringUtils.isNotEmpty(sql.value())) {
            templateSql = sql.value();
        }
        logger.debug("@Sql------------------------------------------" + sql.value());
    }
    return templateSql;
}
/**
* 组装占位符参数 -> Map
*
* @param executeSql
* @return
* @throws OgnlException
*/
private Map<String, Object> installPlaceholderSqlParam(String executeSql, Map sqlParamsMap) throws OgnlException {
Map<String, Object> map = new HashMap<String, Object>();
//update-begin---author:scott----date:20160906------for:参数不支持下划线解决--------
String regEx = ":[ tnx0Bfr]*[0-9a-z.A-Z_]+"; // 表示以:开头,[0-9或者.或者A-Z大小都写]的任意字符,超过一个
//update-begin---author:scott----date:20160906------for:参数不支持下划线解决--------
Pattern pat = Pattern.compile(regEx);
Matcher m = pat.matcher(executeSql);
while (m.find()) {
logger.debug(" Match [" + m.group() + "] at positions " + m.start() + "-" + (m.end() - 1));
String ognl_key = m.group().replace(":", "").trim();
logger.debug(" --- minidao --- 解析参数 --- " + ognl_key);
map.put(ognl_key, Ognl.getValue(ognl_key, sqlParamsMap));
}
return map;
}
public boolean isFormatSql() {
return formatSql;
}
/**
* 解析SQL模板
*
* @param method
* @param templateSql
* @param sqlParamsMap
* @return 可执行SQL
* @throws Exception
*/
private String parseSqlTemplate(Method method, String templateSql, Map<String, Object> sqlParamsMap) throws Exception {
// step.1.根据命名规范[接口名_方法名.sql],获取SQL模板文件的路径
String executeSql = null;
// step.2.获取SQL模板内容
// step.3.通过模板引擎给SQL模板装载参数,解析生成可执行SQL
if (StringUtils.isNotEmpty(templateSql)) {
executeSql = FreemarkerParseFactory.parseTemplateContent(templateSql, sqlParamsMap);
} else {
String sqlTempletPath = method.getDeclaringClass().getName().replace(".", "/").replace("/dao/", "/sql/") + "_" + method.getName() + ".sql";
if (!FreemarkerParseFactory.isExistTemplate(sqlTempletPath)) {
sqlTempletPath = method.getDeclaringClass().getName().replace(".", "/") + "_" + method.getName() + ".sql";
}
logger.debug("MiniDao-SQL-Path:" + sqlTempletPath);
executeSql = FreemarkerParseFactory.parseTemplate(sqlTempletPath, sqlParamsMap);
}
return executeSql;
}
//update-begin--Author:luobaoli Date:20150710 for:增加存储过程入参解析方法
/**
* 将解析参数的代码单独抽取出来
* @param method
* @param args
* @return
* @throws Exception
*/
public List<Object> procedureParamsList(Method method,Object[] args) throws Exception{
List<Object> procedureParamsList = new ArrayList<Object>();
//如果方法参数大于1个的话,方法必须使用注释标签Arguments
boolean arguments_flag = method.isAnnotationPresent(Arguments.class);
if(arguments_flag){
//[1].获取方法的参数标签
Arguments arguments = method.getAnnotation(Arguments.class);
logger.debug("@Arguments------------------------------------------"+Arrays.toString(arguments.value()));
if(arguments.value().length > args.length){
//校验机制-如果注释标签参数数目大于方法的参数,则抛出异常
throw new Exception("[注释标签]参数数目,不能大于[方法参数]参数数目");
}
// step.2.将args转换成键值对,封装成Map对象
for(int i=0;i<arguments.value().length;i++){
procedureParamsList.add(args[i]);
}
}else{
//System.out.println(StringUtils.join(args));
procedureParamsList = Arrays.asList(args);
}
return procedureParamsList;
}
//update-begin--Author:luobaoli Date:20150710 for:增加存储过程入参解析方法
public void setDbType(String dbType) {
this.dbType = dbType;
}
public void setFormatSql(boolean formatSql) {
this.formatSql = formatSql;
}
public void setJdbcTemplate(JdbcTemplate jdbcTemplate) {
this.jdbcTemplate = jdbcTemplate;
}
public void setKeyType(String keyType) {
this.keyType = keyType;
}
public void setShowSql(boolean showSql) {
this.showSql = showSql;
}
public EmptyInterceptor getEmptyInterceptor() {
return emptyInterceptor;
}
public void setEmptyInterceptor(EmptyInterceptor emptyInterceptor) {
this.emptyInterceptor = emptyInterceptor;
}
}
| {
"pile_set_name": "Github"
} |
DIP_DISTRIBUTED true
DIP_QUERY_NAME hyracks
DIP_TOPOLOGY_NAME_PREFIX avitorovic
DIP_TOPOLOGY_NAME 5G_hyracks_parallel_t2
# the following two are optional, by default they use topology.workers and topology.ackers from storm.yaml
#DIP_NUM_WORKERS 176
#DIP_NUM_ACKERS 0
DIP_DATA_PATH /export/home/avitorovic/queries/tpch/5G/
DIP_EXTENSION .tbl
CUSTOMER_PAR 32
ORDERS_PAR 32
CUSTOMER_ORDERS_PAR 32
# below are unlikely to change
DIP_READ_SPLIT_DELIMITER \|
DIP_GLOBAL_ADD_DELIMITER |
DIP_GLOBAL_SPLIT_DELIMITER \|
DIP_KILL_AT_THE_END true | {
"pile_set_name": "Github"
} |
/*******************************************************************************
* Copyright (c) 2004 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.birt.report.model.api;
import org.eclipse.birt.report.model.api.activity.SemanticException;
import org.eclipse.birt.report.model.core.DesignElement;
import org.eclipse.birt.report.model.core.Module;
import org.eclipse.birt.report.model.elements.interfaces.IDataSourceModel;
/**
* This abstract class represents a data source element: a connection to an
* external data provider such as an SQL database.
* <p>
* The application can implement methods to execute code on the two primary data
* source events: open and close.
*
* @see org.eclipse.birt.report.model.elements.DataSource
*
*/
public abstract class DataSourceHandle extends ReportElementHandle
implements
IDataSourceModel
{
/**
* Constructs a handle of DataSource with the given design and element. The
* application generally does not create handles directly. Instead, it uses
* one of the navigation methods available on other element handles.
*
* @param module
* the module
* @param element
* the model representation of the element
*/
public DataSourceHandle( Module module, DesignElement element )
{
super( module, element );
}
/**
* Returns the code of the beforeOpen method. This is the script called just
* before opening the data source.
*
* @return the code of the method
*/
public String getBeforeOpen( )
{
return getStringProperty( IDataSourceModel.BEFORE_OPEN_METHOD );
}
/**
* Sets the code for the beforeOpen method. This is the script called just
* before opening the data source.
*
* @param code
* the code for the method
* @throws SemanticException
* if the method is locked.
*/
public void setBeforeOpen( String code ) throws SemanticException
{
setProperty( IDataSourceModel.BEFORE_OPEN_METHOD, code );
}
/**
* Returns the code of the beforeClose method. This is the script called
* just before closing the data source.
*
* @return the code of the method
*/
public String getBeforeClose( )
{
return getStringProperty( IDataSourceModel.BEFORE_CLOSE_METHOD );
}
/**
* Sets the code for the beforeClose method. This is the script called just
* before closing the data source.
*
* @param code
* the code for the method
* @throws SemanticException
* if the method is locked.
*/
public void setBeforeClose( String code ) throws SemanticException
{
setProperty( IDataSourceModel.BEFORE_CLOSE_METHOD, code );
}
/**
* Returns the code of the afterOpen method. This is the script called just
* after opening the data source.
*
* @return the code of the method
*/
public String getAfterOpen( )
{
return getStringProperty( IDataSourceModel.AFTER_OPEN_METHOD );
}
/**
* Sets the code for the afterOpen method. This is the script called just
* after opening the data source.
*
* @param code
* the code for the method
* @throws SemanticException
* if the method is locked.
*/
public void setAfterOpen( String code ) throws SemanticException
{
setProperty( IDataSourceModel.AFTER_OPEN_METHOD, code );
}
/**
* Returns the code of the afterClose method. This is the script called just
* after closing the data source.
*
* @return the code of the method
*/
public String getAfterClose( )
{
return getStringProperty( IDataSourceModel.AFTER_CLOSE_METHOD );
}
/**
* Sets the code for the afterClose method. This is the script called just
* after closing the data source.
*
* @param code
* the code for the method
* @throws SemanticException
* if the method is locked.
*/
public void setAfterClose( String code ) throws SemanticException
{
setProperty( IDataSourceModel.AFTER_CLOSE_METHOD, code );
}
} | {
"pile_set_name": "Github"
} |
Self1_2D
O> crt 10
count turtles with [self = turtle who] => 10
count patches with [self = patch pxcor pycor] = count patches => true
Self1_3D
O> crt 10
count turtles with [self = turtle who] => 10
count patches with [self = patch pxcor pycor pzcor ] = count patches => true
Myself1_2D
patch 0 0 = [[myself] of patch 3 5] of patch 0 0 => true
O> ask patch 0 0 [ ask patches in-radius 3 [ set plabel myself ] ]
count patches with [plabel = patch 0 0] => 29
Myself1_3D
patch 0 0 0 = [[myself] of patch 3 5 0] of patch 0 0 0 => true
O> ask patch 0 0 0 [ ask patches in-radius 3 [ set plabel myself ] ]
count patches with [plabel = patch 0 0 0] => 123
SelfResetsAfterException
to-report selfish-self let my-who who carefully [ ask other turtles [ error "Derp" ] ] [ ] report who end
O> crt 2
[selfish-self] of turtle 0 => 0
| {
"pile_set_name": "Github"
} |
using UnityEngine;
using System.Collections.Generic;
// 角色的AI
public abstract class ICharacterAI
{
protected ICharacter m_Character = null;
protected float m_AttackRange = 2;
protected IAIState m_AIState = null;
protected const float ATTACK_COOLD_DOWN = 1f; // 攻擊的CoolDown
protected float m_CoolDown = ATTACK_COOLD_DOWN;
public ICharacterAI( ICharacter Character)
{
m_Character = Character;
m_AttackRange = Character.GetAttackRange() ;
}
// 更換AI狀態
public virtual void ChangeAIState( IAIState NewAIState)
{
m_AIState = NewAIState;
m_AIState.SetCharacterAI( this );
}
// 攻擊目標
public virtual void Attack( ICharacter Target )
{
// 時間到了才攻擊
m_CoolDown -= Time.deltaTime;
if( m_CoolDown >0)
return ;
m_CoolDown = ATTACK_COOLD_DOWN;
//Debug.Log ("攻擊目標:"+Target.GetGameObject().gameObject.name);
m_Character.Attack( Target );
}
// 是否在攻擊距離內
public bool TargetInAttackRange( ICharacter Target )
{
float dist = Vector3.Distance( m_Character.GetPosition() ,
Target.GetPosition() );
return ( dist <= m_AttackRange );
}
// 目前的位置
public Vector3 GetPosition()
{
return m_Character.GetGameObject().transform.position;
}
// 移動
public void MoveTo( Vector3 Position )
{
m_Character.MoveTo( Position );
}
// 停止移動
public void StopMove()
{
m_Character.StopMove();
}
// 設定陣亡
public void Killed()
{
m_Character.Killed();
}
// 是否陣亡
public bool IsKilled()
{
return m_Character.IsKilled();
}
// 目標移除
public void RemoveAITarget( ICharacter Target )
{
m_AIState.RemoveTarget( Target);
}
// 更新AI
public void Update(List<ICharacter> Targets)
{
m_AIState.Update( Targets );
}
// 是否可以攻擊Heart
public abstract bool CanAttackHeart();
}
| {
"pile_set_name": "Github"
} |
load("@io_bazel_rules_go//go:def.bzl", "go_library")
go_library(
name = "go_default_library",
srcs = ["validation.go"],
importpath = "k8s.io/kubernetes/pkg/apis/policy/validation",
visibility = ["//visibility:public"],
deps = [
"//vendor/k8s.io/apimachinery/pkg/apis/meta/v1/validation:go_default_library",
"//vendor/k8s.io/apimachinery/pkg/util/validation/field:go_default_library",
"//vendor/k8s.io/kubernetes/pkg/apis/core/validation:go_default_library",
"//vendor/k8s.io/kubernetes/pkg/apis/extensions/validation:go_default_library",
"//vendor/k8s.io/kubernetes/pkg/apis/policy:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.matchers
import org.scalatest._
import org.scalactic.Prettifier
import scala.reflect.ClassTag
/**
* Trait extended by matcher objects that can match a value of the specified type.
* <code>AMatcher</code> represents a noun that appears after the word <code>a</code>, thus a nounName is required.
*
* <p>
* The value to match is passed to the <code>AMatcher</code>'s <code>apply</code> method. The result is a <code>MatchResult</code>.
* An <code>AMatcher</code> is, therefore, a function from the specified type, <code>T</code>, to a <code>MatchResult</code>.
* </p>
*
* <p>
* Although <code>AMatcher</code>
* and <code>Matcher</code> represent very similar concepts, they have no inheritance relationship
* because <code>Matcher</code> is intended for use right after <code>should</code> or <code>must</code>
* whereas <code>AMatcher</code> is intended for use right after <code>a</code>.
* </p>
*
* <p>
* As an example, you could create <code>AMatcher[Int]</code>
* called <code>positiveNumber</code> that would match any positive <code>Int</code>, and one called <code>negativeNumber</code> that would match
* any negative <code>Int</code>.
* Given this pair of <code>AMatcher</code>s, you could check whether an <code>Int</code> was positive or negative with expressions like:
* </p>
*
* <pre class="stHighlight">
* num should be a positiveNumber
* num should not be a negativeNumber
* </pre>
*
* <p>
* Here's is how you might define the positiveNumber and negativeNumber <code>AMatchers</code>:
* </p>
*
* <pre class="stHighlight">
* // Using AMatcher.apply method
* val positiveNumber = AMatcher[Int]("positive number"){ _ > 0 }
*
* // Or by extending AMatcher trait
* val negativeNumber = new AMatcher[Int] {
* val nounName = "negative number"
* def apply(left: Int): MatchResult =
* MatchResult(
* left < 0,
* left + " was not a " + nounName,
* left + " was a " + nounName
* )
* }
* </pre>
*
* <p>
* Here's an rather contrived example of how you might use <code>positiveNumber</code> and <code>negativeNumber</code>:
* </p>
*
* <pre class="stHighlight">
*
* val num1 = 1
* num1 should be a positiveNumber
*
* val num2 = num1 * -1
* num2 should be a negativeNumber
*
* num1 should be a negativeNumber
* </pre>
*
* <p>
* The last assertion in the above test will fail with this failure message:
* </p>
*
* <pre class="stHighlight">
* 1 was not a negative number
* </pre>
*
* <p>
* For more information on <code>MatchResult</code> and the meaning of its fields, please
* see the documentation for <a href="MatchResult.html"><code>MatchResult</code></a>. To understand why <code>AMatcher</code>
* is contravariant in its type parameter, see the section entitled "Matcher's variance" in the
* documentation for <a href="Matcher.html"><code>Matcher</code></a>.
* </p>
*
* @tparam T The type used by this AMatcher's apply method.
* @author Bill Venners
* @author Chee Seng
*/
private[scalatest] trait AMatcher[-T] extends Function1[T, MatchResult] {
/**
* The name of the noun that this <code>AMatcher</code> represents.
*/
val nounName: String
/**
* Check to see if the specified object, <code>left</code>, matches, and report the result in
* the returned <code>MatchResult</code>. The parameter is named <code>left</code>, because it is
* usually the value to the left of a <code>should</code> or <code>must</code> invocation. For example,
* in:
*
* <pre class="stHighlight">
* num should be a positiveNumber
* </pre>
*
* The <code>num should be</code> expression results in a regular <a href="../Matchers$ResultOfBeWordForAny.html"><code>ResultOfBeWordForAny</code></a> that hold
* a reference to <code>num</code> and has a method named <code>a</code> that takes a <code>AMatcher</code>. The <code>a</code> method
* calls <code>AMatcher</code>'s apply method by passing in the <code>num</code>, and check if <code>num</code> matches.
*
* @param left the value against which to match
* @return the <code>MatchResult</code> that represents the result of the match
*/
def apply(left: T): MatchResult
}
/**
* Companion object for trait <code>AMatcher</code> that provides a
* factory method that creates a <code>AMatcher[T]</code> from a
* passed noun name and function of type <code>(T => MatchResult)</code>.
*
* @author Bill Venners
* @author Chee Seng
*/
private[scalatest] object AMatcher {
/**
* Factory method that creates a <code>AMatcher[T]</code> from a
* passed noun name and function of type <code>(T => MatchResult)</code>.
*
* @param name the noun name
* @param fun the function of type <code>(T => MatchResult)</code>
* @return <code>AMatcher</code> instance that has the passed noun name and matches using the passed function
* @author Bill Venners
* @author Chee Seng
*/
def apply[T](name: String)(fun: T => Boolean)(implicit ev: ClassTag[T]) =
new AMatcher[T] {
val nounName = name
def apply(left: T): MatchResult =
MatchResult(
fun(left),
Resources.rawWasNotA,
Resources.rawWasA,
Vector(left, UnquotedString(nounName))
)
override def toString: String = "AMatcher[" + ev.runtimeClass.getName + "](" + Prettifier.default(name) + ", " + ev.runtimeClass.getName + " => Boolean)"
}
}
| {
"pile_set_name": "Github"
} |
---
blog: https://www.kik.com/news
colors:
- '#5DCD11'
facebook: https://www.facebook.com/kikplaynice
github: kikinteractive
images:
- kik-icon.svg
- kik-tile.svg
- kik-official.svg
- kik-ar21.svg
logohandle: kik
posted: true
sort: kik
title: kik
twitter: kik
website: https://www.kik.com/
wikipedia: https://en.wikipedia.org/wiki/Kik_Messenger
---
| {
"pile_set_name": "Github"
} |
finished:
6:
taxonomy:
- core
- taxonomy
7:
taxonomy:
- core
- taxonomy
| {
"pile_set_name": "Github"
} |
package oauth2
import (
"net/http"
"net/http/httptest"
"testing"
"time"
)
type tokenSource struct{ token *Token }
func (t *tokenSource) Token() (*Token, error) {
return t.token, nil
}
func TestTransportNilTokenSource(t *testing.T) {
tr := &Transport{}
server := newMockServer(func(w http.ResponseWriter, r *http.Request) {})
defer server.Close()
client := &http.Client{Transport: tr}
resp, err := client.Get(server.URL)
if err == nil {
t.Errorf("got no errors, want an error with nil token source")
}
if resp != nil {
t.Errorf("Response = %v; want nil", resp)
}
}
func TestTransportTokenSource(t *testing.T) {
ts := &tokenSource{
token: &Token{
AccessToken: "abc",
},
}
tr := &Transport{
Source: ts,
}
server := newMockServer(func(w http.ResponseWriter, r *http.Request) {
if got, want := r.Header.Get("Authorization"), "Bearer abc"; got != want {
t.Errorf("Authorization header = %q; want %q", got, want)
}
})
defer server.Close()
client := &http.Client{Transport: tr}
res, err := client.Get(server.URL)
if err != nil {
t.Fatal(err)
}
res.Body.Close()
}
// Test for case-sensitive token types, per https://github.com/golang/oauth2/issues/113
func TestTransportTokenSourceTypes(t *testing.T) {
const val = "abc"
tests := []struct {
key string
val string
want string
}{
{key: "bearer", val: val, want: "Bearer abc"},
{key: "mac", val: val, want: "MAC abc"},
{key: "basic", val: val, want: "Basic abc"},
}
for _, tc := range tests {
ts := &tokenSource{
token: &Token{
AccessToken: tc.val,
TokenType: tc.key,
},
}
tr := &Transport{
Source: ts,
}
server := newMockServer(func(w http.ResponseWriter, r *http.Request) {
if got, want := r.Header.Get("Authorization"), tc.want; got != want {
t.Errorf("Authorization header (%q) = %q; want %q", val, got, want)
}
})
defer server.Close()
client := &http.Client{Transport: tr}
res, err := client.Get(server.URL)
if err != nil {
t.Fatal(err)
}
res.Body.Close()
}
}
func TestTokenValidNoAccessToken(t *testing.T) {
token := &Token{}
if token.Valid() {
t.Errorf("got valid with no access token; want invalid")
}
}
func TestExpiredWithExpiry(t *testing.T) {
token := &Token{
Expiry: time.Now().Add(-5 * time.Hour),
}
if token.Valid() {
t.Errorf("got valid with expired token; want invalid")
}
}
func newMockServer(handler func(w http.ResponseWriter, r *http.Request)) *httptest.Server {
return httptest.NewServer(http.HandlerFunc(handler))
}
| {
"pile_set_name": "Github"
} |
<Type Name="RelationType" FullName="Atk.RelationType">
<TypeSignature Language="C#" Maintainer="auto" Value="public enum RelationType" />
<TypeSignature Language="ILAsm" Value=".class public auto ansi sealed RelationType extends System.Enum" />
<AssemblyInfo>
<AssemblyName>atk-sharp</AssemblyName>
<AssemblyPublicKey>
</AssemblyPublicKey>
</AssemblyInfo>
<ThreadSafetyStatement>Gtk# is thread aware, but not thread safe; See the <link location="node:gtk-sharp/programming/threads">Gtk# Thread Programming</link> for details.</ThreadSafetyStatement>
<Base>
<BaseTypeName>System.Enum</BaseTypeName>
</Base>
<Attributes>
<Attribute>
<AttributeName>GLib.GType(typeof(Atk.RelationTypeGType))</AttributeName>
</Attribute>
</Attributes>
<Docs>
<summary>Describes the type of the relation</summary>
<remarks />
</Docs>
<Members>
<Member MemberName="ControlledBy">
<MemberSignature Language="C#" Value="ControlledBy" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType ControlledBy = int32(1)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters />
<Docs>
        <summary> Indicates an object is controlled by one or more target objects.</summary>
<remarks>
</remarks>
</Docs>
</Member>
<Member MemberName="ControllerFor">
<MemberSignature Language="C#" Value="ControllerFor" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType ControllerFor = int32(2)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters />
<Docs>
        <summary> Indicates an object is a controller for one or more target objects.</summary>
<remarks>
</remarks>
</Docs>
</Member>
<Member MemberName="DescribedBy">
<MemberSignature Language="C#" Value="DescribedBy" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType DescribedBy = int32(14)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Docs>
<summary>To be added.</summary>
<since version="Gtk# 2.12" />
</Docs>
</Member>
<Member MemberName="DescriptionFor">
<MemberSignature Language="C#" Value="DescriptionFor" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType DescriptionFor = int32(15)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Docs>
<summary>To be added.</summary>
<since version="Gtk# 2.12" />
</Docs>
</Member>
<Member MemberName="EmbeddedBy">
<MemberSignature Language="C#" Value="EmbeddedBy" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType EmbeddedBy = int32(11)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters>
</Parameters>
<Docs>
        <summary> Inverse of <see cref="F:Atk.Relation.Embeds" />, indicates that this object's content is visually embedded in another object.</summary>
<remarks>
</remarks>
</Docs>
</Member>
<Member MemberName="Embeds">
<MemberSignature Language="C#" Value="Embeds" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType Embeds = int32(10)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters>
</Parameters>
<Docs>
<summary> Indicates that the object visually embeds another object's content, i.e. this object's content flows around another's content.</summary>
<remarks>
</remarks>
</Docs>
</Member>
<Member MemberName="FlowsFrom">
<MemberSignature Language="C#" Value="FlowsFrom" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType FlowsFrom = int32(8)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters>
</Parameters>
<Docs>
<summary> Indicates that the object has content that flows logically from another AtkObject in a sequential way, (for instance text-flow).</summary>
<remarks>
</remarks>
</Docs>
</Member>
<Member MemberName="FlowsTo">
<MemberSignature Language="C#" Value="FlowsTo" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType FlowsTo = int32(7)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters>
</Parameters>
<Docs>
<summary>Indicates that the object has content that flows logically to another AtkObject in a sequential way, (for instance text-flow).</summary>
<remarks>
</remarks>
</Docs>
</Member>
<Member MemberName="LabelFor">
<MemberSignature Language="C#" Value="LabelFor" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType LabelFor = int32(3)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters />
<Docs>
<summary> Indicates an object is a label for one or more target objects.</summary>
<remarks>
</remarks>
</Docs>
</Member>
<Member MemberName="LabelledBy">
<MemberSignature Language="C#" Value="LabelledBy" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType LabelledBy = int32(4)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters />
<Docs>
<summary> Indicates an object is labelled by one or more target objects.</summary>
<remarks>
</remarks>
</Docs>
</Member>
<Member MemberName="LastDefined">
<MemberSignature Language="C#" Value="LastDefined" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType LastDefined = int32(17)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters />
<Docs>
<summary>To be added</summary>
<remarks>
</remarks>
</Docs>
</Member>
<Member MemberName="MemberOf">
<MemberSignature Language="C#" Value="MemberOf" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType MemberOf = int32(5)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters />
<Docs>
<summary> Indicates an object is a member of a group of one or more target objects.</summary>
<remarks>
</remarks>
</Docs>
</Member>
<Member MemberName="NodeChildOf">
<MemberSignature Language="C#" Value="NodeChildOf" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType NodeChildOf = int32(6)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters />
<Docs>
<summary>Indicates an object is a cell in a treetable which is displayed because a cell in the same column is expanded and identifies that cell.</summary>
<remarks>
</remarks>
</Docs>
</Member>
<Member MemberName="NodeParentOf">
<MemberSignature Language="C#" Value="NodeParentOf" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType NodeParentOf = int32(16)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Docs>
<summary>To be added.</summary>
<since version="Gtk# 3.0" />
</Docs>
</Member>
<Member MemberName="Null">
<MemberSignature Language="C#" Value="Null" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType Null = int32(0)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters />
<Docs>
<summary>No Relation</summary>
<remarks>
</remarks>
</Docs>
</Member>
<Member MemberName="ParentWindowOf">
<MemberSignature Language="C#" Value="ParentWindowOf" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType ParentWindowOf = int32(13)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters>
</Parameters>
<Docs>
<summary>To be added</summary>
<remarks>To be added</remarks>
<since version="Gtk# 2.6" />
</Docs>
</Member>
<Member MemberName="PopupFor">
<MemberSignature Language="C#" Value="PopupFor" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType PopupFor = int32(12)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters>
</Parameters>
<Docs>
<summary>To be added</summary>
<remarks>To be added</remarks>
<since version="Gtk# 2.4" />
</Docs>
</Member>
<Member MemberName="SubwindowOf">
<MemberSignature Language="C#" Value="SubwindowOf" />
<MemberSignature Language="ILAsm" Value=".field public static literal valuetype Atk.RelationType SubwindowOf = int32(9)" />
<MemberType>Field</MemberType>
<ReturnValue>
<ReturnType>Atk.RelationType</ReturnType>
</ReturnValue>
<Parameters>
</Parameters>
<Docs>
<summary>To be added</summary>
<remarks>
</remarks>
</Docs>
</Member>
</Members>
</Type>
| {
"pile_set_name": "Github"
} |
if(VISP_INITIAL_PASS)
# generator for JNI/JAR source code and documentation signatures
add_subdirectory(generator)
endif()
if(APPLE_FRAMEWORK OR WINRT OR NOT PYTHON_DEFAULT_AVAILABLE OR NOT (ANT_EXECUTABLE OR ANDROID_PROJECTS_BUILD_TYPE STREQUAL "GRADLE")
OR NOT (JNI_FOUND OR (ANDROID AND (NOT DEFINED ANDROID_NATIVE_API_LEVEL OR ANDROID_NATIVE_API_LEVEL GREATER 7))))
vp_module_disable(java)
endif()
# Java wrapping support for each module is specified in its <mod-name>/CMakeLists.txt file
# Below we add those modules which need to be built(compile their C++ code) for the visp_java module
# Understand that above 2 lines are totally different.
set(the_description "The java bindings")
vp_add_module(java BINDINGS visp_core PRIVATE_REQUIRED visp_java_bindings_generator)
include(${CMAKE_CURRENT_SOURCE_DIR}/common.cmake)
# UTILITY: glob specific sources and append them to list (type is in H, CPP, JAVA, AIDL)
macro(glob_more_specific_sources _type _root _output)
unset(_masks)
if(${_type} STREQUAL "H")
set(_masks "${_root}/cpp/*.h" "${root}/cpp/*.hpp")
elseif(${_type} STREQUAL "CPP")
set(_masks "${_root}/cpp/*.cpp")
elseif(${_type} STREQUAL "JAVA")
set(_masks "${_root}/java/*.java" "${_root}/java/*.java.in")
elseif(${_type} STREQUAL "AIDL")
set(_masks "${_root}/java/*.aidl")
endif()
if (_masks)
file(GLOB _result ${_masks})
list(APPEND ${_output} ${_result})
else()
message(WARNING "Bad argument passed to macro: skipped")
endif()
endmacro()
# UTILITY: copy common java test files and add them to _deps
# copy_common_tests(<source-folder> <destination-folder> <variable-to-store-deps>)
macro(copy_common_tests _src_location _dst_location _deps)
set(_src ${_src_location})
set(_dst ${_dst_location})
file(GLOB_RECURSE _files RELATIVE "${_src}" "${_src}/res/*" "${_src}/src/*")
foreach(f ${_files})
add_custom_command(
OUTPUT "${_dst}/${f}"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${_src}/${f}" "${_dst}/${f}"
MAIN_DEPENDENCY "${_src}/${f}"
COMMENT "Copying ${f}")
list(APPEND ${_deps} "${_src}/${f}" "${_dst}/${f}")
endforeach()
unset(_files)
unset(_src)
unset(_dst)
endmacro()
add_subdirectory(jni) # generates ${the_module} target (${the_module}_jni doesn't work properly with Android samples)
if(ANDROID)
add_subdirectory(android_sdk) # generates ${the_module}_android target
else()
add_subdirectory(jar) # generates ${the_module}_jar target
endif()
#if(BUILD_TESTS)
# if(ANDROID)
# add_subdirectory(test/android_test)
# else()
# add_subdirectory(test/pure_test)
# endif()
#endif()
| {
"pile_set_name": "Github"
} |
# These are supported funding model platforms
github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
| {
"pile_set_name": "Github"
} |
# coding=utf-8
from django.core.paginator import Paginator
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from xadmin.dutils import force_unicode
from .base import SiteView, filter_hook
class ModelPage(SiteView):
    """
    Model-bound page view.
    Once registered, this view is reachable at URLs of the form
    ``/%(app_label)s/%(module_name)s/123/test``.
    """
    opts = None  # ``Model._meta`` of the bound model, set in __init__
    model = None  #: bound Model class; attached to the OptionClass automatically when the Model is registered, see :meth:`AdminSite.register`
    app_label = None  # Django app label; falls back to the model's app_label
    module_name = None  # lower-cased model name (old-style Django ``module_name``)
    model_info = None  # (app_label, module_name) pair used to build permission codenames
    remove_permissions = []  # permission codes explicitly disabled on this page
    exclude = None  # used on the edit page or the detail page
    fields = None  # used on the edit page or the detail page
    def __init__(self, request, *args, **kwargs):
        self.opts = self.model._meta
        self.app_label = self.app_label or self.model._meta.app_label
        self.module_name = self.model._meta.module_name
        self.model_info = (self.model._meta.app_label, self.module_name)
        # ModelAdminView is the module-level alias of this very class
        # (defined below), so this resolves correctly at call time.
        super(ModelAdminView, self).__init__(request, *args, **kwargs)
    @filter_hook
    def get_context(self):
        # Template context shared by every model-bound page.
        new_context = {
            "opts": self.opts,
            "app_label": self.app_label,
            "module_name": self.module_name,
            "verbose_name": force_unicode(self.opts.verbose_name),
            'model_icon': self.get_model_icon(self.model),  # provided elsewhere (mixin/plugin) -- TODO confirm
        }
        context = super(ModelAdminView, self).get_context()
        context.update(new_context)
        return context
    @filter_hook
    def get_breadcrumb(self):
        u'''
        Base part of the navigation breadcrumb.
        '''
        bcs = super(ModelAdminView, self).get_breadcrumb()
        item = {'title': self.opts.verbose_name_plural}
        if self.has_view_permission():
            item['url'] = self.model_admin_url('changelist')
        bcs.append(item)
        return bcs
    @filter_hook
    def get_object(self, object_id):
        u"""
        Return the unique model instance matching ``object_id``, or ``None``
        if it does not exist or the id fails primary-key validation.
        """
        queryset = self.queryset()
        model = queryset.model
        try:
            object_id = model._meta.pk.to_python(object_id)
            return queryset.get(pk=object_id)
        except (model.DoesNotExist, ValidationError):
            return None
    @filter_hook
    def get_object_url(self, obj):
        u'''
        URL for the given object: the edit page if permitted, otherwise the
        detail page if permitted, otherwise ``None``.
        '''
        if self.has_change_permission(obj):
            return self.model_admin_url("change", getattr(obj, self.opts.pk.attname))
        elif self.has_view_permission(obj):
            return self.model_admin_url("detail", getattr(obj, self.opts.pk.attname))
        else:
            return None
    def get_url(self, name, *args, **kwargs):
        u'''
        Model-related URL.
        eg get_url( 'change', id ) get_url( 'detail', id ) get_url( 'chart', id ) get_url( 'patch', id ) get_url( 'revision', id, vid)
        get_url( 'changelist' ) get_url( 'add' ) get_url( 'delete', id )
        '''
        return reverse(
            "%s:%s_%s_%s" % (self.admin_site.app_name, self.opts.app_label,
                             self.module_name, name), args=args, kwargs=kwargs)
    def get_model_url(self, model, name, *args, **kwargs):
        u'''
        Model-related URL for an arbitrary model class.
        '''
        opts = model._meta
        return reverse(
            "%s:%s_%s_%s" % (self.admin_site.app_name, opts.app_label,
                             opts.module_name, name), args=args, kwargs=kwargs)
    def model_admin_url(self, name, *args, **kwargs):
        # Thin alias of get_url, kept for backwards compatibility.
        return self.get_url(name, *args, **kwargs)
    def get_template_list(self, template_name):
        # Template lookup order: model-specific, then app-specific, then global.
        opts = self.opts
        return (
            "xadmin/%s/%s/%s" % (opts.app_label, opts.object_name.lower(), template_name),
            "xadmin/%s/%s" % (opts.app_label, template_name),
            "xadmin/%s" % template_name,
        )
    def get_ordering(self):
        u"""
        Default ordering for the model's queryset.
        """
        # NOTE(review): ``self.ordering`` is expected from a subclass/mixin
        # (e.g. ModelAdmin below) -- TODO confirm.
        return self.ordering or ()
    @filter_hook
    def queryset(self):
        u"""
        Default queryset of the model.
        """
        # Django renamed get_query_set() to get_queryset(); support both.
        _manager = self.model._default_manager
        if hasattr(_manager, 'get_query_set'):
            return _manager.get_query_set()
        else:
            return _manager.get_queryset()
    def has_view_permission(self, obj=None):
        # Either 'view' or 'change' permission grants read access.
        return ('view' not in self.remove_permissions) and (self.user.has_perm('%s.view_%s' % self.model_info) or self.user.has_perm('%s.change_%s' % self.model_info))
    def has_add_permission(self):
        return ('add' not in self.remove_permissions) and self.user.has_perm('%s.add_%s' % self.model_info)
    def has_change_permission(self, obj=None):
        return ('change' not in self.remove_permissions) and self.user.has_perm('%s.change_%s' % self.model_info)
    def has_delete_permission(self, obj=None):
        return ('delete' not in self.remove_permissions) and self.user.has_perm('%s.delete_%s' % self.model_info)
    def has_permission(self, perm_code):
        # Expand a short code ('view'/'add'/'change'/'delete') into the full
        # Django permission codename for this model before checking.
        raw_code = perm_code[:]
        if perm_code in ('view', 'add', 'change', 'delete'):
            perm_code = '%s.%s_%s' %(self.model._meta.app_label, perm_code ,self.module_name)
        return (raw_code not in self.remove_permissions) and self.user.has_perm(perm_code)
    def has_model_permission(self, model, perm_code):
        # Same as has_permission, but against an arbitrary model class.
        opts = model._meta
        raw_code = perm_code[:]
        if perm_code in ('view', 'add', 'change', 'delete'):
            perm_code = '%s.%s_%s' %(opts.app_label, perm_code ,opts.module_name)
        return (raw_code not in self.remove_permissions) and self.user.has_perm(perm_code)
    def get_model_perms(self):
        # Summary dict of the current user's CRUD permissions on this model.
        return {
            'view': self.has_view_permission(),
            'add': self.has_add_permission(),
            'change': self.has_change_permission(),
            'delete': self.has_delete_permission(),
        }
    @property
    def pk_name(self):
        # Attribute name of the model's primary-key field.
        return self.opts.pk.attname
ModelAdminView = ModelPage
ModelView = ModelAdminView
class ModelAdmin(object):
    # Configuration options for the list (changelist) page
    list_display = ('__str__',)  #: columns shown in the list
    list_exclude = ()  #: columns excluded from display
    list_display_links = ()  #: columns rendered as links
    list_display_links_details = False  #: link to the detail page instead of the edit page
    list_select_related = None  #: whether to load related data up front via ``select_related``
    list_per_page = 50  #: rows per page
    list_max_show_all = 200  #: maximum rows per page when "show all" is clicked
    paginator_class = Paginator  #: default paginator class
    search_fields = ()  #: search the data by these columns
    ordering = None  #: default data ordering
    list_template = None  #: template used to render the data, defaults to views/grid.html
    pop = False  # whether this is a popup page
    search_sphinx_ins = None  # search engine instance to use -- TODO confirm type
    relfield_style = 'fk-ajax'
    # Runtime state populated while rendering the list page
    page_num = 0  # current page number
    paginator = None  # paginator instance
    result_count = None  # total record count
    list_tabs = []  # tab configuration of the list page
| {
"pile_set_name": "Github"
} |
# PartiQL Language Specification
This is the LaTeX source for the [PartiQL] specification.
## Building
As a prerequisite, you need the following:
* A LaTeX distribution such as [TeX Live][texlive]. Debian based linux distributions should install the package `texlive-full`.
* GNU Make.
To build a PDF:
```
$ make
```
To clean up the various build files including the PDF:
```
$ make clean
```
## License
This library is licensed under the [PartiQL Specification License][license].
[partiql]: https://partiql.org/
[texlive]: https://www.tug.org/texlive/
[license]: LICENSE
| {
"pile_set_name": "Github"
} |
+098
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2006, Swedish Institute of Computer Science
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*
*/
#include <stdio.h>
#include <ctype.h>
#include <stdlib.h>
#include <stdarg.h>
#include <string.h>
#include <sys/types.h>
#include <unistd.h>
#include <errno.h>
#include <fcntl.h>
#include <signal.h>
#include <termios.h>
#include <sys/ioctl.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <err.h>
#define SLIP_END 0300
#define SLIP_ESC 0333
#define SLIP_ESC_END 0334
#define SLIP_ESC_ESC 0335
#ifndef BAUDRATE
#define BAUDRATE B115200
#endif
speed_t b_rate = BAUDRATE;
/*
 * Configure the serial device referred to by fd for raw 8-bit I/O at the
 * global baud rate b_rate.  Any failure is fatal via err().
 *
 * NOTE(review): the attributes are applied twice on purpose -- first with
 * CLOCAL cleared, then with CLOCAL set -- before asserting DTR; presumably
 * this toggles the modem-control lines into a known state for the attached
 * hardware (TODO confirm against the target device's requirements).
 */
void
stty_telos(int fd)
{
  struct termios tty;
  int i;
  /* Discard anything already queued in either direction. */
  if(tcflush(fd, TCIOFLUSH) == -1) err(1, "tcflush");
  if(tcgetattr(fd, &tty) == -1) err(1, "tcgetattr");
  cfmakeraw(&tty);               /* raw mode: no echo, no line editing */
  /* Blocking read. */
  tty.c_cc[VTIME] = 0;
  tty.c_cc[VMIN] = 1;
  tty.c_cflag &= ~CRTSCTS;       /* no RTS/CTS hardware flow control */
  tty.c_cflag &= ~HUPCL;         /* do not hang up (drop lines) on close */
  tty.c_cflag &= ~CLOCAL;        /* first pass: honor modem-control lines */
  cfsetispeed(&tty, b_rate);
  cfsetospeed(&tty, b_rate);
  if(tcsetattr(fd, TCSAFLUSH, &tty) == -1) err(1, "tcsetattr");
  tty.c_cflag |= CLOCAL;         /* second pass: ignore modem-control lines */
  if(tcsetattr(fd, TCSAFLUSH, &tty) == -1) err(1, "tcsetattr");
  i = TIOCM_DTR;                 /* assert DTR */
  if(ioctl(fd, TIOCMBIS, &i) == -1) err(1, "ioctl");
  usleep(10*1000); /* Wait for hardware 10ms. */
  /* Flush input and output buffers. */
  if(tcflush(fd, TCIOFLUSH) == -1) err(1, "tcflush");
}
/*
 * Entry point: open the given serial device, configure it via stty_telos(),
 * then read a SLIP-framed byte stream forever, un-escaping SLIP sequences
 * and dumping printable/common control characters verbatim; any other byte
 * is printed as two hex digits.
 *
 * Usage: scat [-B baudrate] device-file
 */
int
main(int argc, char **argv)
{
  int c;
  int slipfd;
  FILE *inslip;
  const char *siodev;
  int baudrate = -2;            /* -2 means "use compiled-in default" */

  while ((c = getopt(argc, argv, "B:")) != -1) {
    switch (c) {
    case 'B':
      baudrate = atoi(optarg);
      break;

    case '?':
    case 'h':
    default:
      /* errx, not err: a usage error carries no meaningful errno */
      errx(1, "usage: scat [-B baudrate] device-file");
      break;
    }
  }
  argc -= (optind - 1);
  argv += (optind - 1);

  switch (baudrate) {
  case -2:
    break;                      /* Use default. */
  case 9600:
    b_rate = B9600;
    break;
  case 19200:
    b_rate = B19200;
    break;
  case 38400:
    b_rate = B38400;
    break;
  case 57600:
    b_rate = B57600;
    break;
  case 115200:
    b_rate = B115200;
    break;
  default:
    errx(1, "unknown baudrate %d", baudrate);
    break;
  }

  if (argc != 2)
    errx(1, "usage: scat [-B baudrate] device-file"); /* same text as above */

  siodev = argv[1];

  setvbuf(stdout, NULL, _IOLBF, 0); /* Line buffered output. */

  slipfd = open(siodev, O_RDWR);
  if (slipfd == -1) err(1, "can't open '%s'", siodev);

  stty_telos(slipfd);

  inslip = fdopen(slipfd, "r");
  if(inslip == NULL) err(1, "main: fdopen");

  while (1) {
    /* Reuse the outer 'c'; an inner declaration previously shadowed it. */
    c = getc(inslip);
    while (c == SLIP_END)       /* skip frame delimiters / empty frames */
      c = getc(inslip);
    do {
      if (c == SLIP_ESC) {      /* undo SLIP byte-stuffing */
        c = getc(inslip);
        if (c == SLIP_ESC_ESC)
          c = SLIP_ESC;
        else if (c == SLIP_ESC_END)
          c = SLIP_END;
      }

      switch (c) {
      case EOF:
        /* Distinguish a real read error from plain end-of-stream. */
        if (ferror(inslip))
          err(1, "getc(inslip)");
        errx(1, "EOF on %s", siodev);
        break;

      case '\007':
      case '\b':
      case '\f':
      case '\n':
      case '\r':
      case '\t':
      case '\v':
        putchar(c);
        break;

      default:
        if (isprint(c))
          putchar(c);
        else
          printf("%02x ", c);   /* non-printable byte: dump as hex */
        break;
      }
      c = getc(inslip);
    } while (c != SLIP_END);    /* frame ends at the next SLIP_END */
  }
  return 0;
}
| {
"pile_set_name": "Github"
} |
//
// LicenseGetConfigUtils.h
// AppEngine
//
// Created by Makara Khloth on 11/2/12.
// Copyright 2012 __MyCompanyName__. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "DeliveryListener.h"
@protocol DataDelivery;
@class LicenseManager;

// Fetches license configuration through the data-delivery service and
// listens for the response via DeliveryListener.  Member semantics are
// inferred from names -- TODO confirm against the implementation.
@interface LicenseGetConfigUtils : NSObject <DeliveryListener> {
@private
	id <DataDelivery> mDataDelivery; // Not own
	LicenseManager *mLicenseManager; // Not own
	NSTimer *mXHours;                // periodic timer, presumably fires every X hours -- TODO confirm
	NSInteger mNumberOfRetry;        // presumably counts failed attempts -- TODO confirm
}

// assign (non-owning) references; ownership stays with the callers
@property (nonatomic, assign) id <DataDelivery> mDataDelivery;
@property (nonatomic, assign) LicenseManager *mLicenseManager;

- (id) initWithDataDelivery: (id <DataDelivery>) aDataDelivery;
- (void) start;
- (void) stop;
- (void) prerelease; // teardown hook invoked before release -- TODO confirm caller
@end
| {
"pile_set_name": "Github"
} |
{
"ext": "ext.",
"country": "Paese del numero di telefono",
"phone": "Telefono",
"AB": "Abkhazia",
"AC": "Ascension Island",
"AD": "Andorra",
"AE": "Emirati Arabi Uniti",
"AF": "Afghanistan",
"AG": "Antigua e Barbuda",
"AI": "Anguilla",
"AL": "Albania",
"AM": "Armenia",
"AO": "Angola",
"AQ": "Antartide",
"AR": "Argentina",
"AS": "Samoa Americane",
"AT": "Austria",
"AU": "Australia",
"AW": "Aruba",
"AX": "Isole Åland",
"AZ": "Azerbaigian",
"BA": "Bosnia-Erzegovina",
"BB": "Barbados",
"BD": "Bangladesh",
"BE": "Belgio",
"BF": "Burkina Faso",
"BG": "Bulgaria",
"BH": "Bahrain",
"BI": "Burundi",
"BJ": "Benin",
"BL": "San Bartolomeo",
"BM": "Bermuda",
"BN": "Brunei Darussalam",
"BO": "Bolivia, stato plurinazionale di",
"BQ": "Bonaire, Sint Eustatius e Saba",
"BR": "Brasile",
"BS": "Bahamas",
"BT": "Bhutan",
"BV": "Bouvet Island",
"BW": "Botswana",
"BY": "Bielorussia",
"BZ": "Belize",
"CA": "Canada",
"CC": "Isole Cocos (Keeling)",
"CD": "Congo, Repubblica Democratica del",
"CF": "Repubblica Centrafricana",
"CG": "Congo",
"CH": "Svizzera",
"CI": "Costa d'Avorio",
  "CK": "Isole Cook",
"CL": "Cile",
"CM": "Camerun",
"CN": "Cina",
"CO": "Colombia",
"CR": "Costa Rica",
"CU": "Cuba",
"CV": "Capo Verde",
"CW": "Curaçao",
"CX": "Isola di Natale",
"CY": "Cipro",
"CZ": "Repubblica Ceca",
"DE": "Germania",
"DJ": "Gibuti",
"DK": "Danimarca",
"DM": "Dominica",
"DO": "Repubblica Dominicana",
"DZ": "Algeria",
"EC": "Ecuador",
"EE": "Estonia",
"EG": "Egitto",
"EH": "Sahara Occidentale",
"ER": "Eritrea",
"ES": "Spagna",
"ET": "Etiopia",
"FI": "Finlandia",
"FJ": "Fiji",
"FK": "Isole Falkland (Malvinas)",
"FM": "Micronesia, Stati Federati di",
"FO": "Isole Faroe",
"FR": "Francia",
"GA": "Gabon",
"GB": "Regno Unito",
"GD": "Grenada",
"GE": "Georgia",
"GF": "Guiana Francese",
"GG": "Guernsey",
"GH": "Ghana",
"GI": "Gibilterra",
"GL": "Groenlandia",
"GM": "Gambia",
"GN": "Guinea",
"GP": "Guadalupa",
"GQ": "Guinea Equatoriale",
"GR": "Grecia",
"GS": "Georgia del Sud e le isole Sandwich del Sud",
"GT": "Guatemala",
"GU": "Guam",
"GW": "Guinea-Bissau",
"GY": "Guyana",
"HK": "Hong Kong",
"HM": "Heard Island e McDonald Islands",
"HN": "Honduras",
"HR": "Croazia",
"HT": "Haiti",
"HU": "Ungheria",
"ID": "Indonesia",
"IE": "Irlanda",
"IL": "Israele",
"IM": "Isola di Man",
"IN": "India",
"IO": "Territorio Britannico dell'Oceano Indiano",
"IQ": "Iraq",
"IR": "Iran, Repubblica Islamica dell'Iran",
"IS": "Islanda",
"IT": "Italia",
  "JE": "Jersey",
"JM": "Giamaica",
  "JO": "Giordania",
"JP": "Giappone",
"KE": "Kenya",
"KG": "Kirghizistan",
"KH": "Cambogia",
"KI": "Kiribati",
"KM": "Comore",
"KN": "Saint Kitts e Nevis",
"KP": "Corea, Repubblica Democratica Popolare di",
"KR": "Corea, Repubblica di",
"KW": "Kuwait",
"KY": "Isole Cayman",
"KZ": "Kazakistan",
"LA": "Repubblica Democratica Popolare del Laos",
"LB": "Libano",
"LC": "Santa Lucia",
"LI": "Liechtenstein",
"LK": "Sri Lanka",
"LR": "Liberia",
"LS": "Lesotho",
"LT": "Lituania",
"LU": "Lussemburgo",
"LV": "Lettonia",
"LY": "Giamahiriya araba libica",
"MA": "Marocco",
"MC": "Monaco",
"MD": "Moldavia",
"ME": "Montenegro",
"MF": "Saint Martin (parte francese)",
"MG": "Madagascar",
"MH": "Isole Marshall",
"MK": "Macedonia, l'ex Repubblica Jugoslava di",
"ML": "Mali",
"MM": "Birmania",
"MN": "Mongolia",
"MO": "Macao",
"MP": "Marianne settentrionali",
"MQ": "Martinica",
"MR": "Mauritania",
"MS": "Montserrat",
"MT": "Malta",
"MU": "Mauritius",
"MV": "Maldive",
"MW": "Malawi",
"MX": "Messico",
"MY": "Malesia",
"MZ": "Mozambico",
"NA": "Namibia",
"NC": "Nuova Caledonia",
"NE": "Niger",
"NF": "Norfolk Island",
"NG": "Nigeria",
"NI": "Nicaragua",
"NL": "Paesi Bassi",
"NO": "Norvegia",
"NP": "Nepal",
"NR": "Nauru",
"NU": "Niue",
"NZ": "Nuova Zelanda",
"OM": "Oman",
"OS": "Ossezia del Sud",
"PA": "Panama",
"PE": "Perù",
"PF": "Polinesia Francese",
"PG": "Papua Nuova Guinea",
"PH": "Filippine",
"PK": "Pakistan",
"PL": "Polonia",
"PM": "Saint Pierre e Miquelon",
"PN": "Pitcairn",
"PR": "Puerto Rico",
"PS": "Territorio palestinese occupato",
"PT": "Portogallo",
"PW": "Palau",
"PY": "Paraguay",
"QA": "Qatar",
"RE": "Riunione",
"RO": "Romania",
"RS": "Serbia",
"RU": "Federazione Russa",
"RW": "Rwanda",
"SA": "Arabia Saudita",
"SB": "Isole Salomone",
"SC": "Seychelles",
"SD": "Sudan",
"SE": "Svezia",
"SG": "Singapore",
"SH": "Santa Elena, Ascensione e Tristan Da Cunha",
"SI": "Slovenia",
"SJ": "Svalbard e Jan Mayen",
"SK": "Slovacchia",
"SL": "Sierra Leone",
"SM": "San Marino",
"SN": "Senegal",
"SO": "Somalia",
"SR": "Suriname",
"SS": "Sud Sudan",
"ST": "Sao Tome e Principe",
"SV": "El Salvador",
"SX": "Sint Maarten",
"SY": "Repubblica Araba Siriana",
"SZ": "Swaziland",
"TA": "Tristan da Cunha",
"TC": "Isole Turks e Caicos",
  "TD": "Ciad",
"TF": "Territori francesi del sud",
"TG": "Togo",
"TH": "Thailandia",
"TJ": "Tagikistan",
"TK": "Tokelau",
"TL": "Timor Est",
"TM": "Turkmenistan",
"TN": "Tunisia",
"TO": "Tonga",
"TR": "Turchia",
"TT": "Trinidad e Tobago",
"TV": "Tuvalu",
"TW": "Taiwan, provincia di Cina",
"TZ": "Tanzania, Repubblica Unita di",
"UA": "Ucraina",
"UG": "Uganda",
"UM": "Isole Minori Esterne degli Stati Uniti",
"US": "Stati Uniti",
"UY": "Uruguay",
"UZ": "Uzbekistan",
"VA": "Santa Sede (Stato della Città del Vaticano)",
"VC": "Saint Vincent e le Grenadine",
"VE": "Venezuela",
"VG": "Isole Vergini Britanniche",
"VI": "Isole Vergini Americane",
"VN": "Vietnam",
"VU": "Vanuatu",
"WF": "Wallis e Futuna",
"WS": "Samoa",
"XK": "Kosovo",
"YE": "Yemen",
"YT": "Mayotte",
"ZA": "Sudafrica",
"ZM": "Zambia",
"ZW": "Zimbabwe",
"ZZ": "Internazionale"
} | {
"pile_set_name": "Github"
} |
package yio.tro.antiyoy.menu.scenes.gameplay;
import yio.tro.antiyoy.menu.Animation;
import yio.tro.antiyoy.menu.MenuControllerYio;
import yio.tro.antiyoy.menu.diplomatic_dialogs.PrepareForAttackPropositionDialog;
import yio.tro.antiyoy.menu.slider.SliderBehavior;
import yio.tro.antiyoy.menu.slider.SliderYio;
import yio.tro.antiyoy.stuff.GraphicsYio;
import yio.tro.antiyoy.stuff.RectangleYio;
public class ScenePrepareForAttackProposition extends AbstractModalScene {

    // Dialog presented to the player; created lazily on the first create()
    public PrepareForAttackPropositionDialog dialog;

    // Slider used to pick the amount of money offered
    public SliderYio moneySlider;

    // Money amounts selectable on the slider, indexed by slider position
    private int[] moneyValues;


    public ScenePrepareForAttackProposition(MenuControllerYio menuControllerYio) {
        super(menuControllerYio);
        initMoneyValues();
        dialog = null;
        moneySlider = null;
    }


    private void initMoneyValues() {
        moneyValues = new int[]{
                0, 10, 25, 50, 100, 150, 200, 300, 400, 500,
                750, 1000, 2500, 5000, 10000
        };
    }


    @Override
    public void create() {
        boolean firstLaunch = (dialog == null);
        if (firstLaunch) {
            initDialog();
            initSlider();
        }

        dialog.appear();
        moneySlider.appear();
        moneySlider.setValueIndex(4);
    }


    private void initDialog() {
        dialog = new PrepareForAttackPropositionDialog(menuControllerYio);
        dialog.setPosition(generateRectangle(0, 0.15, 1, GraphicsYio.convertToHeight(0.9)));
        menuControllerYio.addElementToScene(dialog);
    }


    public int convertSliderIndexIntoMoneyValue(int sliderIndex) {
        return moneyValues[sliderIndex];
    }


    public int getCurrentChosenMoneyValue() {
        int currentIndex = moneySlider.getValueIndex();
        return convertSliderIndexIntoMoneyValue(currentIndex);
    }


    private void initSlider() {
        if (moneySlider != null) return;

        double sliderWidth = 0.8;
        RectangleYio sliderPosition = generateRectangle((1 - sliderWidth) / 2, 0, sliderWidth, 0);

        moneySlider = new SliderYio(menuControllerYio, -1);
        moneySlider.setValues(0, 0, moneyValues.length - 1, Animation.down);
        moneySlider.setPosition(sliderPosition);
        moneySlider.setParentElement(dialog, GraphicsYio.convertToHeight(0.45));
        moneySlider.setTitle("money");
        moneySlider.setInternalSegmentsHidden(true);
        menuControllerYio.addElementToScene(moneySlider);
        moneySlider.setVerticalTouchOffset(0.05f * GraphicsYio.height);
        moneySlider.setTitleOffset(0.125f * GraphicsYio.width);
        moneySlider.setBehavior(new SliderBehavior() {
            @Override
            public String getValueString(SliderYio sliderYio) {
                return "$" + convertSliderIndexIntoMoneyValue(sliderYio.getValueIndex());
            }
        });
    }


    @Override
    public void hide() {
        dialog.destroy();
        moneySlider.destroy();
    }
}
| {
"pile_set_name": "Github"
} |
{
"children": [
{
"attr": {
"value": "1"
},
"type": "text"
}
],
"type": "container"
} | {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html>
<!-- Test: if select is required and all selected options have their value set
     to the empty string, :-moz-ui-valid should not apply. -->
<link rel='stylesheet' type='text/css' href='style.css'>
<body>
<select class='notvalid' required multiple>
<option selected></option>
<option selected value="">foo</option>
</select>
</body>
</html>
| {
"pile_set_name": "Github"
} |
{{<base}}
{{$content}}
<p>This is override page 2 content.</p>
{{/content}}
{{$footer_base}}
{{/footer_base}}
{{/base}}
| {
"pile_set_name": "Github"
} |
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard.
//
#import "UIView.h"
@class UIActivityIndicatorView, UIButton, UIImageView, UILabel;
// Overlay view shown while recording audio: record icon, signal-level view,
// cancel ("garbage") control, warning state and a timeout countdown label.
// This is a class-dump-generated header; member semantics are inferred from
// names -- TODO confirm against the implementation.
@interface RecordView : UIView
{
    UIView *_recordLeftView;
    UIImageView *_recordImageView;     // record/microphone image
    UIImageView *_recordSignalView;    // level indicator, driven by SetPeakPower:
    UIButton *_recordGarbageView;      // cancel/trash control
    UIImageView *_recordWarningView;
    UIImageView *_recordBkgView;
    UILabel *_timeoutCountingLabel;    // countdown shown via ShowRecordingTimeoutCount:isShow:
    UILabel *_warningMessageLabel;
    UIActivityIndicatorView *_indicatorView;
    _Bool _bPreparing;
    _Bool _bTimeOutCounting;
}

@property(retain, nonatomic) UILabel *timeoutCountingLabel; // @synthesize timeoutCountingLabel=_timeoutCountingLabel;
@property(nonatomic) _Bool bPreparing; // @synthesize bPreparing=_bPreparing;
@property(retain, nonatomic) UIImageView *recordBkgView; // @synthesize recordBkgView=_recordBkgView;
@property(retain, nonatomic) UILabel *warningMessageLabel; // @synthesize warningMessageLabel=_warningMessageLabel;
@property(retain, nonatomic) UIImageView *recordWarningView; // @synthesize recordWarningView=_recordWarningView;
@property(retain, nonatomic) UIButton *recordGarbageView; // @synthesize recordGarbageView=_recordGarbageView;
@property(retain, nonatomic) UIImageView *recordSignalView; // @synthesize recordSignalView=_recordSignalView;
@property(retain, nonatomic) UIImageView *recordImageView; // @synthesize recordImageView=_recordImageView;

- (void).cxx_destruct;
- (void)SetPeakPower:(float)arg1;
- (_Bool)isRecordBkgShow;
- (void)ShowRecordingTimeoutCount:(long long)arg1 isShow:(_Bool)arg2;
- (void)HideRecordAnimated:(_Bool)arg1;
- (void)StartTooLongTipsAnimated:(_Bool)arg1;
- (void)StartTooShortTipsAnimated:(_Bool)arg1;
- (void)StartRecordCancelAnimated:(_Bool)arg1;
- (void)StartRecordAnimated:(_Bool)arg1;
- (void)ShowPreparing:(_Bool)arg1;
- (void)reset;
- (id)initWithFrame:(struct CGRect)arg1;
- (id)init;
@end
| {
"pile_set_name": "Github"
} |
/*
* SET VERSION NUMBERS BEFORE MAKING MAKEFILES; also, you'll need to install
* util/scripts/bsdinstall.sh before doing a "make install"
*/
#define SystemV YES
#define OSName HP-UX 9.0
#define OSMajorVersion 9
#define OSMinorVersion 0
#define StandardDefines -DSYSV
#define LintOpts -ax -DSYSV
#define OptimizedCDebugFlags
#define OptimizedCplusplusDebugFlags
#define CplusplusCcCmd CC
#define ExtraLibraries -lC
#define LnCmd ln -s
#define MvCmd mv -f
#define ExecableScripts YES
#define HasSockets YES
#define HasVFork YES
#define HasVoidSignalReturn YES
#define RemoveTargetProgramByMoving YES
/* This defines the server you want */
#define XhpServer Xhp
#define BuildServer YES
#define NeedBerklibInXlib YES /* snag Berklib for Xlib */
/**/# platform: $XConsortium: hp.cf,v 1.21 89/10/11 18:55:02 jim Exp $
/**/# operating system: OSName
/*
****************************************************************************
*/
/* define this as you like for server compilation */
/*
* The normal malloc in libc returns a real pointer on malloc of 0;
* the malloc in libmalloc returns null on malloc of 0.
*/
/* #define ServerMallocLib YES *//* use -lmalloc for server */
/* take out -DMALLOC_0_RETURNS_NULL if you have ServerMallocLib NO */
#define UNCOMPRESSPATH /usr/bin/uncompress
#ifdef hp9000s300
#define HardwareDefines -Dhp9000s300
#else
#define HardwareDefines
#endif
#if OSMajorVersion >= 7 || (OSMajorVersion == 6 && OSMinorVersion >= 2)
#define OsDefines -DHAS_IFREQ
#else
#define OsDefines
#endif
#if OSMajorVersion < 6 || (OSMajorVersion == 6 && OSMinorVersion < 2)
#define ConnectionFlags -DTCPCONN /* no unix sockets */
#endif
#define ExtensionDefines -DSHAPE -DMULTIBUFFER -DMITSHM -DMITMISC
/*
* Xhp-specific parameters
*/
#define WritableBlackWhite /* want BlackPixel and WhitePixel rw */
#define ServerDefines StandardDefines ExtensionDefines -DXDMCP -DXOS -DMERGE OsDefines HardwareDefines
#define InstallCmd $$TAEBIN/all/bsdinst_hp800
| {
"pile_set_name": "Github"
} |
package net.corda.core.flows
import co.paralleluniverse.fibers.Suspendable
import net.corda.core.crypto.TransactionSignature
import net.corda.core.crypto.isFulfilledBy
import net.corda.core.crypto.toStringShort
import net.corda.core.identity.AbstractParty
import net.corda.core.identity.AnonymousParty
import net.corda.core.identity.Party
import net.corda.core.identity.groupPublicKeysByWellKnownParty
import net.corda.core.node.ServiceHub
import net.corda.core.transactions.SignedTransaction
import net.corda.core.transactions.WireTransaction
import net.corda.core.utilities.ProgressTracker
import net.corda.core.utilities.unwrap
import java.security.PublicKey
/**
* The [CollectSignaturesFlow] is used to automate the collection of counterparty signatures for a given transaction.
*
* You would typically use this flow after you have built a transaction with the TransactionBuilder and signed it with
* your key pair. If there are additional signatures to collect then they can be collected using this flow. Signatures
* are collected based upon the [WireTransaction.requiredSigningKeys] property which contains the union of all the PublicKeys
* listed in the transaction's commands as well as a notary's public key, if required. This flow returns a
* [SignedTransaction] which can then be passed to the [FinalityFlow] for notarisation. The other side of this flow is
* the [SignTransactionFlow].
*
* **WARNING**: This flow ONLY works with [ServiceHub.legalIdentityKey]s and WILL break if used with randomly generated
* keys by the [ServiceHub.keyManagementService].
*
* Usage:
*
* - Call the [CollectSignaturesFlow] flow as a [subFlow] and pass it a [SignedTransaction] which has at least been
* signed by the transaction creator (and possibly an oracle, if required)
* - The flow expects that the calling node has signed the provided transaction, if not the flow will fail
* - The flow will also fail if:
* 1. The provided transaction is invalid
* 2. Any of the required signing parties cannot be found in the [ServiceHub.networkMapCache] of the initiator
* 3. If the wrong key has been used by a counterparty to sign the transaction
* 4. The counterparty rejects the provided transaction
* - The flow will return a [SignedTransaction] with all the counterparty signatures (but not the notary's!)
* - If the provided transaction has already been signed by all counterparties then this flow simply returns the
* provided transaction without contacting any counterparties
* - Call the [FinalityFlow] with the return value of this flow
*
* Example - issuing a multi-lateral agreement which requires N signatures:
*
* val builder = TransactionBuilder(notaryRef)
* val issueCommand = Command(Agreement.Commands.Issue(), state.participants)
*
* builder.withItems(state, issueCommand)
* builder.toWireTransaction().toLedgerTransaction(serviceHub).verify()
*
* // Transaction creator signs transaction.
* val ptx = serviceHub.signInitialTransaction(builder)
*
* // Call to CollectSignaturesFlow.
* // The returned signed transaction will have all signatures appended apart from the notary's.
* val stx = subFlow(CollectSignaturesFlow(ptx))
*
* @param partiallySignedTx Transaction to collect the remaining signatures for
* @param sessionsToCollectFrom A session for every party we need to collect a signature from. Must be an exact match.
* @param myOptionalKeys set of keys in the transaction which are owned by this node. This includes keys used on commands, not
* just in the states. If null, the default well known identity of the node is used.
*/
// TODO: AbstractStateReplacementFlow needs updating to use this flow.
class CollectSignaturesFlow @JvmOverloads constructor(val partiallySignedTx: SignedTransaction,
val sessionsToCollectFrom: Collection<FlowSession>,
val myOptionalKeys: Iterable<PublicKey>?,
override val progressTracker: ProgressTracker = CollectSignaturesFlow.tracker()) : FlowLogic<SignedTransaction>() {
@JvmOverloads
constructor(
partiallySignedTx: SignedTransaction,
sessionsToCollectFrom: Collection<FlowSession>,
progressTracker: ProgressTracker = CollectSignaturesFlow.tracker()
) : this(partiallySignedTx, sessionsToCollectFrom, null, progressTracker)
companion object {
object COLLECTING : ProgressTracker.Step("Collecting signatures from counterparties.")
object VERIFYING : ProgressTracker.Step("Verifying collected signatures.")
@JvmStatic
fun tracker() = ProgressTracker(COLLECTING, VERIFYING)
// TODO: Make the progress tracker adapt to the number of counterparties to collect from.
}
@Suspendable
override fun call(): SignedTransaction {
// Check the signatures which have already been provided and that the transaction is valid.
// Usually just the Initiator and possibly an oracle would have signed at this point.
val myKeys: Iterable<PublicKey> = myOptionalKeys ?: listOf(ourIdentity.owningKey)
val signed = partiallySignedTx.sigs.map { it.by }
val notSigned = partiallySignedTx.tx.requiredSigningKeys - signed
// One of the signatures collected so far MUST be from the initiator of this flow.
require(partiallySignedTx.sigs.any { it.by in myKeys }) {
"The Initiator of CollectSignaturesFlow must have signed the transaction."
}
// The signatures must be valid and the transaction must be valid.
partiallySignedTx.verifySignaturesExcept(notSigned)
partiallySignedTx.tx.toLedgerTransaction(serviceHub).verify()
// Determine who still needs to sign.
progressTracker.currentStep = COLLECTING
val notaryKey = partiallySignedTx.tx.notary?.owningKey
// If present, we need to exclude the notary's PublicKey as the notary signature is collected separately with
// the FinalityFlow.
val unsigned = if (notaryKey != null) notSigned - notaryKey else notSigned
// If the unsigned counterparties list is empty then we don't need to collect any more signatures here.
if (unsigned.isEmpty()) return partiallySignedTx
val wellKnownSessions = sessionsToCollectFrom.filter { it.destination is Party }
val anonymousSessions = sessionsToCollectFrom.filter { it.destination is AnonymousParty }
require(wellKnownSessions.size + anonymousSessions.size == sessionsToCollectFrom.size) {
"Unrecognized Destination type used to initiate a flow session"
}
val wellKnownPartyToSessionMap: Map<Party, List<FlowSession>> = wellKnownSessions.groupBy { (it.destination as Party) }
val anonymousPartyToSessionMap: Map<AnonymousParty, List<FlowSession>> = anonymousSessions
.groupBy { (it.destination as AnonymousParty) }
//check that there is at most one session for each not well known party
for (entry in anonymousPartyToSessionMap) {
require(entry.value.size == 1) {
"There are multiple sessions initiated for Anonymous Party ${entry.key.owningKey.toStringShort()}"
}
}
//all keys that were used to initate a session must be sent to that session
val keysToSendToAnonymousSessions: Set<PublicKey> = unsigned.intersect(anonymousPartyToSessionMap.keys.map { it.owningKey })
//all keys that are left over MUST map back to a
val keysThatMustMapToAWellKnownSession: Set<PublicKey> = unsigned - keysToSendToAnonymousSessions
//if a key does not have a well known identity associated with it, it does not map to a wellKnown session
val keysThatDoNotMapToAWellKnownSession: List<PublicKey> = keysThatMustMapToAWellKnownSession
.filter { serviceHub.identityService.wellKnownPartyFromAnonymous(AnonymousParty(it)) == null }
//ensure that no keys are impossible to map to a session
require(keysThatDoNotMapToAWellKnownSession.isEmpty()) {
" Unable to match key(s): $keysThatDoNotMapToAWellKnownSession to a session to collect signatures from"
}
//we now know that all the keys are either related to a specific session due to being used as a Destination for that session
//OR map back to a wellKnown party
//now we must check that each wellKnown party has a session passed for it
val groupedByPartyKeys = groupPublicKeysByWellKnownParty(serviceHub, keysThatMustMapToAWellKnownSession)
for (entry in groupedByPartyKeys) {
require(wellKnownPartyToSessionMap.contains(entry.key)) {
"${entry.key} is a required signer, but no session has been passed in for them"
}
}
//so we now know that all keys are linked to a session in some way
//we need to check that there are no extra sessions
val extraNotWellKnownSessions = anonymousSessions.filterNot { (it.destination as AnonymousParty).owningKey in unsigned }
val extraWellKnownSessions = wellKnownSessions.filterNot { it.counterparty in groupedByPartyKeys }
require(extraNotWellKnownSessions.isEmpty() && extraWellKnownSessions.isEmpty()) {
"The Initiator of CollectSignaturesFlow must pass in exactly the sessions required to sign the transaction, " +
"the following extra sessions were passed in: " +
(extraWellKnownSessions.map { it.counterparty.name.toString() } +
extraNotWellKnownSessions.map { (it.destination as AbstractParty).owningKey.toString() })
}
//OK let's collect some signatures!
val sigsFromNotWellKnownSessions = anonymousSessions.flatMap { flowSession ->
//anonymous sessions will only ever sign for their own key
subFlow(CollectSignatureFlow(partiallySignedTx, flowSession, (flowSession.destination as AbstractParty).owningKey))
}
val sigsFromWellKnownSessions = wellKnownSessions.flatMap { flowSession ->
val keysToAskThisSessionFor = groupedByPartyKeys[flowSession.counterparty] ?: emptyList()
subFlow(CollectSignatureFlow(partiallySignedTx, flowSession, keysToAskThisSessionFor))
}
val stx = partiallySignedTx + (sigsFromNotWellKnownSessions + sigsFromWellKnownSessions).toSet()
// Verify all but the notary's signature if the transaction requires a notary, otherwise verify all signatures.
progressTracker.currentStep = VERIFYING
if (notaryKey != null) stx.verifySignaturesExcept(notaryKey) else stx.verifyRequiredSignatures()
return stx
}
}
// DOCSTART 1
/**
* Get and check the required signature.
*
* @param partiallySignedTx the transaction to sign.
* @param session the [FlowSession] to connect to to get the signature.
* @param signingKeys the list of keys the party should use to sign the transaction.
*/
@Suspendable
class CollectSignatureFlow(val partiallySignedTx: SignedTransaction, val session: FlowSession, val signingKeys: List<PublicKey>) : FlowLogic<List<TransactionSignature>>() {
// Convenience constructor taking the signing keys as a vararg rather than a List.
constructor(partiallySignedTx: SignedTransaction, session: FlowSession, vararg signingKeys: PublicKey) :
this(partiallySignedTx, session, listOf(*signingKeys))
@Suspendable
override fun call(): List<TransactionSignature> {
// SendTransactionFlow allows counterparty to access our data to resolve the transaction.
subFlow(SendTransactionFlow(session, partiallySignedTx))
// Send the key we expect the counterparty to sign with - this is important where they may have several
// keys to sign with, as it makes it faster for them to identify the key to sign with, and more straight forward
// for us to check we have the expected signature returned.
session.send(signingKeys)
return session.receive<List<TransactionSignature>>().unwrap { signatures ->
// Exactly one signature must come back per requested key; a mismatch is a protocol violation.
require(signatures.size == signingKeys.size) { "Need signature for each signing key" }
// Each returned signature must have been produced by the key at the same index in our request.
signatures.forEachIndexed { index, signature ->
require(signingKeys[index].isFulfilledBy(signature.by)) { "Not signed by the required signing key." }
}
signatures
}
}
}
// DOCEND 1
/**
* The [SignTransactionFlow] should be called in response to the [CollectSignaturesFlow]. It automates the signing of
* a transaction providing the transaction:
*
* 1. Should actually be signed by the [Party] invoking this flow
* 2. Is valid as per the contracts referenced in the transaction
* 3. Has been, at least, signed by the counterparty which created it
* 4. Conforms to custom checking provided in the [checkTransaction] method of the [SignTransactionFlow]
*
* Usage:
*
* - Subclass [SignTransactionFlow] - this can be done inside an existing flow (as shown below)
* - Override the [checkTransaction] method to add some custom verification logic
* - Call the flow via [FlowLogic.subFlow]
* - The flow returns the transaction signed with the additional signature.
*
* Example - checking and signing a transaction involving a [net.corda.core.contracts.DummyContract], see
* CollectSignaturesFlowTests.kt for further examples:
*
* class Responder(val otherPartySession: FlowSession): FlowLogic<SignedTransaction>() {
* @Suspendable override fun call(): SignedTransaction {
* // [SignTransactionFlow] sub-classed as a singleton object.
* val flow = object : SignTransactionFlow(otherPartySession) {
* @Suspendable override fun checkTransaction(stx: SignedTransaction) = requireThat {
* val tx = stx.tx
* val magicNumberState = tx.outputs.single().data as DummyContract.MultiOwnerState
* "Must be 1337 or greater" using (magicNumberState.magicNumber >= 1337)
* }
* }
*
* // Invoke the subFlow, in response to the counterparty calling [CollectSignaturesFlow].
* val expectedTxId = subFlow(flow).id
*
* return subFlow(ReceiveFinalityFlow(otherPartySession, expectedTxId))
* }
* }
*
* @param otherSideSession The session which is providing you a transaction to sign.
*/
abstract class SignTransactionFlow @JvmOverloads constructor(val otherSideSession: FlowSession,
override val progressTracker: ProgressTracker = SignTransactionFlow.tracker()) : FlowLogic<SignedTransaction>() {
companion object {
object RECEIVING : ProgressTracker.Step("Receiving transaction proposal for signing.")
object VERIFYING : ProgressTracker.Step("Verifying transaction proposal.")
object SIGNING : ProgressTracker.Step("Signing transaction proposal.")
@JvmStatic
fun tracker() = ProgressTracker(RECEIVING, VERIFYING, SIGNING)
}
@Suspendable
override fun call(): SignedTransaction {
progressTracker.currentStep = RECEIVING
// Receive transaction and resolve dependencies, check sufficient signatures is disabled as we don't have all signatures.
val stx = subFlow(ReceiveTransactionFlow(otherSideSession, checkSufficientSignatures = false))
// Receive the signing key that the party requesting the signature expects us to sign with. Having this provided
// means we only have to check we own that one key, rather than matching all keys in the transaction against all
// keys we own.
val signingKeys = otherSideSession.receive<List<PublicKey>>().unwrap { keys ->
// TODO: We should have a faster way of verifying we own a single key
serviceHub.keyManagementService.filterMyKeys(keys)
}
progressTracker.currentStep = VERIFYING
// Check that the Responder actually needs to sign.
checkMySignaturesRequired(stx, signingKeys)
// Check the signatures which have already been provided. Usually the Initiators and possibly an Oracle's.
checkSignatures(stx)
// Run the referenced contracts' verify logic over the fully resolved transaction.
stx.tx.toLedgerTransaction(serviceHub).verify()
// Perform some custom verification over the transaction.
try {
checkTransaction(stx)
} catch (e: Exception) {
// Validation-style failures are converted to FlowException so the counterparty is told why signing
// was refused; any other exception propagates unchanged.
if (e is IllegalStateException || e is IllegalArgumentException || e is AssertionError)
throw FlowException(e)
else
throw e
}
// Sign and send back our signature to the Initiator.
progressTracker.currentStep = SIGNING
val mySignatures = signingKeys.map { key ->
serviceHub.createSignature(stx, key)
}
otherSideSession.send(mySignatures)
// Return the additionally signed transaction.
return stx + mySignatures
}
// Verifies every signature already attached to the proposal, tolerating only the required signers
// that have not signed yet.
@Suspendable
private fun checkSignatures(stx: SignedTransaction) {
val signed = stx.sigs.map { it.by }
val allSigners = stx.tx.requiredSigningKeys
val notSigned = allSigners - signed
stx.verifySignaturesExcept(notSigned)
}
/**
 * The [checkTransaction] method allows the caller of this flow to provide some additional checks over the proposed
 * transaction received from the counterparty. For example:
 *
 * - Ensuring that the transaction you are receiving is the transaction you *EXPECT* to receive. I.e. it has the
 *   expected type and number of inputs and outputs
 * - Checking that the properties of the outputs are as you would expect. Linking into any reference data sources
 *   might be appropriate here
 * - Checking that the transaction is not incorrectly spending (perhaps maliciously) one of your asset states, as
 *   potentially the transaction creator has access to some of your state references
 *
 * **WARNING**: If appropriate checks, such as the ones listed above, are not defined then it is likely that your
 * node will sign any transaction if it conforms to the contract code in the transaction's referenced contracts.
 *
 * [IllegalArgumentException], [IllegalStateException] and [AssertionError] will be caught and rethrown as flow
 * exceptions i.e. the other side will be given information about what exact check failed.
 *
 * @param stx a partially signed transaction received from your counterparty.
 * @throws FlowException if the proposed transaction fails the checks.
 */
@Suspendable
@Throws(FlowException::class)
protected abstract fun checkTransaction(stx: SignedTransaction)
// Rejects the proposal if the initiator asked us to sign with any key that is not actually a
// required signer of the transaction.
@Suspendable
private fun checkMySignaturesRequired(stx: SignedTransaction, signingKeys: Iterable<PublicKey>) {
require(signingKeys.all { it in stx.tx.requiredSigningKeys }) {
"A signature was requested for a key that isn't part of the required signing keys for transaction ${stx.id}"
}
}
}
| {
"pile_set_name": "Github"
} |
package com.gs.fw.common.mithra.generator;
import com.gs.fw.common.mithra.generator.filesystem.*;
import com.gs.fw.common.mithra.generator.metamodel.*;
import com.gs.fw.common.mithra.generator.util.*;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Semaphore;
import java.util.zip.CRC32;
/**
 * Parses Mithra object definitions: a class-list XML file plus the per-object XML
 * files it references. Produces {@link MithraObjectTypeWrapper}s (and embedded-value /
 * interface wrappers) for code generation. Per-object parsing is submitted to a
 * shared executor; a CRC32 checksum of every file read is maintained so callers can
 * detect (via {@link #getChecksum()}) whether the inputs changed between runs.
 */
public class MithraXMLObjectTypeParser implements MithraObjectTypeParser
{
    protected String mithraClassListXml;
    // Rolling checksum over every XML file read; reported by getChecksum().
    private CRC32 crc32 = new CRC32();
    private static final int IO_THREADS = 1;
    private AwaitingThreadExecutor executor;
    // First exception thrown by a worker task; surfaced by waitForExecutorWithCheck().
    private Throwable executorError;
    private ChopAndStickResource chopAndStickResource = new ChopAndStickResource(new Semaphore(Runtime.getRuntime().availableProcessors()),
            new Semaphore(IO_THREADS), new SerialResource());
    private Map<String, MithraObjectTypeWrapper> mithraObjects = new ConcurrentHashMap<String, MithraObjectTypeWrapper>();
    private List<MithraObjectTypeWrapper> sortedMithraObjects;
    private Map<String, MithraEmbeddedValueObjectTypeWrapper> mithraEmbeddedValueObjects = new ConcurrentHashMap<String, MithraEmbeddedValueObjectTypeWrapper>();
    private List<MithraEmbeddedValueObjectTypeWrapper> sortedMithraEmbeddedValueObjects;
    private Map<String, MithraInterfaceType> mithraInterfaces = new ConcurrentHashMap<String, MithraInterfaceType>();
    private boolean generateFileHeaders = false;
    private boolean ignoreNonGeneratedAbstractClasses = false;
    private boolean ignoreTransactionalMethods = false;
    private boolean ignorePackageNamingConvention = false;
    private boolean defaultFinalGetters = false;
    private boolean forceOffHeap = false;
    protected FauxFileSystem fauxFileSystem;
    // Per-thread file buffer, reused across reads so worker threads don't reallocate.
    private ThreadLocal<FullFileBuffer> fullFileBufferThreadLocal = new ThreadLocal<FullFileBuffer>();
    private Logger logger;

    public MithraXMLObjectTypeParser(String mithraClassListXml)
    {
        this.mithraClassListXml = mithraClassListXml;
    }

    public void setMithraClassListXml(String mithraClassListXml)
    {
        this.mithraClassListXml = mithraClassListXml;
    }

    public void setLogger(Logger logger)
    {
        this.logger = logger;
    }

    @Override
    public void setForceOffHeap(boolean forceOffHeap)
    {
        this.forceOffHeap = forceOffHeap;
    }

    @Override
    public void setDefaultFinalGetters(boolean defaultFinalGetters)
    {
        this.defaultFinalGetters = defaultFinalGetters;
    }

    @Override
    public void setFauxFileSystem(FauxFileSystem fauxFileSystem)
    {
        this.fauxFileSystem = fauxFileSystem;
    }

    public Map<String, MithraObjectTypeWrapper> getMithraObjects()
    {
        return this.mithraObjects;
    }

    public Map<String, MithraEmbeddedValueObjectTypeWrapper> getMithraEmbeddedValueObjects()
    {
        return this.mithraEmbeddedValueObjects;
    }

    /**
     * @return the CRC32 of all parsed input, zero-padded to 8 hex characters.
     */
    @Override
    public String getChecksum()
    {
        String result = Long.toHexString(crc32.getValue());
        while(result.length() < 8)
        {
            result = "0" + result;
        }
        return result;
    }

    public Map<String,MithraInterfaceType> getMithraInterfaces()
    {
        return mithraInterfaces;
    }

    /**
     * Parses the configured class-list XML and every object it references.
     *
     * @return the path of the class-list file that was parsed.
     * @throws MithraGeneratorException wrapping any parse/IO failure.
     */
    public String parse() throws MithraGeneratorException
    {
        try
        {
            FauxFile file = this.fauxFileSystem.newFile(mithraClassListXml);
            parseMithraXml(file.getName(), null, new DirectoryFileProvider(this.fauxFileSystem, file.getParent()));
            return file.getPath();
        }
        catch (Throwable e)
        {
            throw new MithraGeneratorException(e);
        }
    }

    /**
     * Parses one class-list XML file and all normal/pure/temp/embedded/interface
     * resources it declares. Logs a per-category summary when done.
     */
    public void parseMithraXml(String fileName, String importSource, FileProvider fileProvider) throws FileNotFoundException
    {
        MithraType result;
        InputStream mithraFileIs = null;
        try
        {
            FileInputStreamWithSize streamWithSize = fileProvider.getFileInputStream(fileName);
            FullFileBuffer ffb = new FullFileBuffer();
            ffb.bufferFile(streamWithSize.getInputStream(), (int) streamWithSize.getSize());
            ffb.updateCrc(crc32);
            mithraFileIs = ffb.getBufferedInputStream();
            Object obj = new MithraGeneratorUnmarshaller().parse(mithraFileIs, fileName);
            this.logger.debug(obj.getClass().getName() + ": " + MithraType.class.getName());
            result = (MithraType)obj;
            long start = System.currentTimeMillis();
            int normalObjects = this.parseMithraObjects(result, importSource, fileProvider);
            int pureObjects = this.parseMithraPureObjects(result, importSource, fileProvider);
            int tempObjects = this.parseMithraTempObjects(result, importSource, fileProvider);
            int embeddedObjects = this.parseMithraEmbeddedValueObjects(result, importSource, fileProvider);
            int mithraInterfaceObjects = this.parseMithraInterfaceObjects(result, importSource, fileProvider);
            String msg = fileName + ": parsed ";
            msg = concatParsed(msg, normalObjects, "normal");
            msg = concatParsed(msg, pureObjects, "pure");
            msg = concatParsed(msg, tempObjects, "temp");
            msg = concatParsed(msg, embeddedObjects, "embedded");
            msg = concatParsed(msg, mithraInterfaceObjects, "mithraInterface");
            msg += " Mithra objects in "+(System.currentTimeMillis() - start)+" ms.";
            this.logger.info(msg);
        }
        catch (MithraGeneratorParserException e)
        {
            throw new MithraGeneratorException("Unable to parse "+ fileName, e);
        }
        catch (IOException e)
        {
            throw new MithraGeneratorException("Unable to read file "+ fileName, e);
        }
        finally
        {
            closeIs(mithraFileIs);
            fileProvider.close();
        }
    }

    // Parses all <MithraObjectResource> entries in parallel; per-resource flags fall
    // back to the file-level defaults on MithraType when not set on the resource.
    private int parseMithraObjects(final MithraType mithraType, final String importSource,
            final FileProvider fileProvider) throws FileNotFoundException
    {
        List<MithraObjectResourceType> mithraObjectList = mithraType.getMithraObjectResources();
        chopAndStickResource.resetSerialResource();
        for (int i = 0; i <mithraObjectList.size(); i++)
        {
            final MithraObjectResourceType mithraObjectResourceType = mithraObjectList.get(i);
            final String objectName = mithraObjectResourceType.getName();
            getExecutor().submit(new GeneratorTask(i)
            {
                public void run()
                {
                    MithraBaseObjectType mithraObject = parseMithraObject(objectName, mithraObjects, fileProvider, this, "normal");
                    if (mithraObject != null)
                    {
                        String objectFileName = objectName + ".xml";
                        boolean isGenerateInterfaces = !mithraObjectResourceType.isGenerateInterfacesSet() ? mithraType.isGenerateInterfaces() : mithraObjectResourceType.isGenerateInterfaces();
                        boolean enableOffHeap = !mithraObjectResourceType.isEnableOffHeapSet() ? mithraType.isEnableOffHeap() || forceOffHeap : mithraObjectResourceType.isEnableOffHeap();
                        MithraObjectTypeWrapper wrapper = new MithraObjectTypeWrapper(mithraObject, objectFileName, importSource, isGenerateInterfaces, ignorePackageNamingConvention, MithraXMLObjectTypeParser.this.logger);
                        wrapper.setGenerateFileHeaders(generateFileHeaders);
                        wrapper.setReplicated(mithraObjectResourceType.isReplicated());
                        wrapper.setIgnoreNonGeneratedAbstractClasses(ignoreNonGeneratedAbstractClasses);
                        wrapper.setIgnoreTransactionalMethods(ignoreTransactionalMethods);
                        wrapper.setReadOnlyInterfaces(mithraObjectResourceType.isReadOnlyInterfacesSet() ? mithraObjectResourceType.isReadOnlyInterfaces() : mithraType.isReadOnlyInterfaces());
                        wrapper.setDefaultFinalGetters(defaultFinalGetters);
                        wrapper.setEnableOffHeap(enableOffHeap);
                        mithraObjects.put(mithraObjectResourceType.getName(), wrapper);
                    }
                }
            });
        }
        waitForExecutorWithCheck();
        return mithraObjectList.size();
    }

    // Parses all <MithraPureObjectResource> entries; resulting wrappers are marked pure.
    private int parseMithraPureObjects(final MithraType mithraType, final String importSource,
            final FileProvider fileProvider) throws FileNotFoundException
    {
        List<MithraPureObjectResourceType> mithraPureObjectList = mithraType.getMithraPureObjectResources();
        if (!mithraPureObjectList.isEmpty())
        {
            chopAndStickResource.resetSerialResource();
            for (int i=0; i< mithraPureObjectList.size();i++)
            {
                final MithraPureObjectResourceType mithraPureObjectResourceType = mithraPureObjectList.get(i);
                getExecutor().submit(new GeneratorTask(i)
                {
                    public void run()
                    {
                        String objectName = mithraPureObjectResourceType.getName();
                        MithraBaseObjectType mithraObject = parseMithraObject(objectName, mithraObjects, fileProvider, this, "pure");
                        if (mithraObject != null)
                        {
                            String objectFileName = objectName + ".xml";
                            boolean enableOffHeap = !mithraPureObjectResourceType.isEnableOffHeapSet() ? mithraType.isEnableOffHeap() || forceOffHeap : mithraPureObjectResourceType.isEnableOffHeap();
                            MithraObjectTypeWrapper wrapper = new MithraObjectTypeWrapper(mithraObject, objectFileName, importSource, false, ignorePackageNamingConvention, MithraXMLObjectTypeParser.this.logger);
                            wrapper.setGenerateFileHeaders(generateFileHeaders);
                            wrapper.setIgnoreNonGeneratedAbstractClasses(ignoreNonGeneratedAbstractClasses);
                            wrapper.setIgnoreTransactionalMethods(ignoreTransactionalMethods);
                            wrapper.setPure(true);
                            wrapper.setEnableOffHeap(enableOffHeap);
                            mithraObjects.put(mithraPureObjectResourceType.getName(), wrapper);
                        }
                    }
                });
            }
            waitForExecutorWithCheck();
        }
        return mithraPureObjectList.size();
    }

    // Parses all <MithraTempObjectResource> entries; resulting wrappers are marked temporary.
    private int parseMithraTempObjects(final MithraType mithraType, final String importSource,
            final FileProvider fileProvider) throws FileNotFoundException
    {
        List<MithraTempObjectResourceType> mithraTempObjectList = mithraType.getMithraTempObjectResources();
        if (mithraTempObjectList.size() > 0)
        {
            chopAndStickResource.resetSerialResource();
            for (int i=0; i<mithraTempObjectList.size();i++)
            {
                final MithraTempObjectResourceType mithraTempObjectResourceType = mithraTempObjectList.get(i);
                getExecutor().submit(new GeneratorTask(i)
                {
                    public void run()
                    {
                        String objectName = mithraTempObjectResourceType.getName();
                        MithraBaseObjectType mithraObject = parseMithraObject(objectName, mithraObjects, fileProvider, this, "temp");
                        if (mithraObject != null)
                        {
                            String objectFileName = objectName + ".xml";
                            MithraObjectTypeWrapper wrapper = new MithraObjectTypeWrapper(mithraObject, objectFileName, importSource, false, ignorePackageNamingConvention, MithraXMLObjectTypeParser.this.logger);
                            wrapper.setGenerateFileHeaders(generateFileHeaders);
                            wrapper.setIgnoreNonGeneratedAbstractClasses(ignoreNonGeneratedAbstractClasses);
                            wrapper.setIgnoreTransactionalMethods(ignoreTransactionalMethods);
                            wrapper.setTemporary(true);
                            mithraObjects.put(mithraTempObjectResourceType.getName(), wrapper);
                        }
                    }
                });
            }
            waitForExecutorWithCheck();
        }
        return mithraTempObjectList.size();
    }

    // Parses all <MithraEmbeddedValueObjectResource> entries.
    private int parseMithraEmbeddedValueObjects(final MithraType mithraType, final String importSource,
            final FileProvider fileProvider) throws FileNotFoundException
    {
        List<MithraEmbeddedValueObjectResourceType> mithraEmbeddedValueObjectList = mithraType.getMithraEmbeddedValueObjectResources();
        if (!mithraEmbeddedValueObjectList.isEmpty())
        {
            chopAndStickResource.resetSerialResource();
            for (int i=0;i<mithraEmbeddedValueObjectList.size();i++)
            {
                final MithraEmbeddedValueObjectResourceType mithraEmbeddedValueObjectResourceType = mithraEmbeddedValueObjectList.get(i);
                getExecutor().submit(new GeneratorTask(i)
                {
                    public void run()
                    {
                        String objectName = mithraEmbeddedValueObjectResourceType.getName();
                        MithraEmbeddedValueObjectType evo = (MithraEmbeddedValueObjectType) parseMithraBaseObject(objectName, mithraEmbeddedValueObjects, fileProvider, this);
                        if (evo != null)
                        {
                            String objectFileName = objectName + ".xml";
                            MithraEmbeddedValueObjectTypeWrapper wrapper = new MithraEmbeddedValueObjectTypeWrapper(evo, objectFileName, importSource);
                            wrapper.setIgnoreNonGeneratedAbstractClasses(ignoreNonGeneratedAbstractClasses);
                            mithraEmbeddedValueObjects.put(mithraEmbeddedValueObjectResourceType.getName(), wrapper);
                        }
                    }
                });
            }
            waitForExecutorWithCheck();
        }
        return mithraEmbeddedValueObjectList.size();
    }

    // Parses a single MithraInterface XML and post-initializes it.
    private MithraInterfaceType parseMithraInterfaceType(String objectName, Map objectMap,
            FileProvider fileProvider,
            GeneratorTask task, boolean isReadOnlyInterfaces,
            String importedSource)
    {
        MithraInterfaceType mithraObject = (MithraInterfaceType) parseMithraType(objectName, objectMap, fileProvider, task);
        mithraObject.setReadOnlyInterfaces(isReadOnlyInterfaces);
        mithraObject.setImportedSource(importedSource);
        checkClassName(objectName, mithraObject.getClassName());
        mithraObject.postInitialize(objectName);
        return mithraObject;
    }

    // Thin wrapper around parseMithraBaseObject; errorType is currently unused but
    // kept so call sites document which category ("normal"/"pure"/"temp") they parse.
    private MithraBaseObjectType parseMithraObject(String objectName, Map objectMap,
            FileProvider fileProvider,
            GeneratorTask task, String errorType)
    {
        return parseMithraBaseObject(objectName, objectMap, fileProvider, task);
    }

    // Enforces that an object's XML file name matches the class name declared inside it.
    private void checkClassName(String objectName, String className)
    {
        if (objectName.contains("/"))
        {
            objectName = objectName.substring(objectName.lastIndexOf('/') + 1);
        }
        if (!objectName.equals(className))
        {
            throw new MithraGeneratorException("XML filename: '" + objectName + "' must match class name specified: '" + className + "'");
        }
    }

    // Parses all <MithraInterfaceResource> entries in parallel.
    private int parseMithraInterfaceObjects(final MithraType mithraType, final String importSource,
            final FileProvider fileProvider) throws FileNotFoundException
    {
        List<MithraInterfaceResourceType> mithraObjectList = mithraType.getMithraInterfaceResources();
        chopAndStickResource.resetSerialResource();
        for (int i = 0; i < mithraObjectList.size(); i++)
        {
            final MithraInterfaceResourceType mithraObjectResourceType = mithraObjectList.get(i);
            final String objectName = mithraObjectResourceType.getName();
            getExecutor().submit(new GeneratorTask(i)
            {
                public void run()
                {
                    MithraInterfaceType mithraObject = parseMithraInterfaceType(objectName, mithraInterfaces, fileProvider, this, mithraObjectResourceType.isReadOnlyInterfaces(), importSource);
                    if (mithraObject != null)
                    {
                        mithraInterfaces.put(mithraObjectResourceType.getName(), mithraObject);
                    }
                }
            });
        }
        waitForExecutorWithCheck();
        return mithraObjectList.size();
    }

    private MithraBaseObjectType parseMithraBaseObject(String objectName, Map objectMap,
            FileProvider fileProvider,
            GeneratorTask task)
    {
        MithraBaseObjectType mithraObject = (MithraBaseObjectType) parseMithraType(objectName, objectMap, fileProvider, task);
        checkClassName(objectName, mithraObject.getClassName());
        return mithraObject;
    }

    /**
     * Reads and unmarshals one object XML file, interleaving IO, serial (CRC update)
     * and CPU (parse) phases through the ChopAndStickResource so concurrent tasks
     * don't oversubscribe any one resource. Returns null when the object is excluded.
     */
    private Object parseMithraType(String objectName, Map objectMap, FileProvider fileProvider, GeneratorTask task)
    {
        Object mithraObject = null;
        this.logger.info("Reading " + objectName);
        if (!fileProvider.excludeObject(objectName))
        {
            if (objectMap.containsKey(objectName))
            {
                throw new MithraGeneratorException("Attempted to add object " + objectName + " twice");
            }
            String objectFileName = objectName + ".xml";
            InputStream objectFileIs = null;
            boolean serialAquired = false;
            try
            {
                FileInputStreamWithSize streamWithSize = fileProvider.getFileInputStream(objectFileName);
                FullFileBuffer ffb = getFullFileBuffer();
                chopAndStickResource.acquireIoResource();
                try
                {
                    ffb.bufferFile(streamWithSize.getInputStream(), (int) streamWithSize.getSize());
                }
                finally
                {
                    chopAndStickResource.releaseIoResource();
                }
                // CRC must be updated in task-submission order, hence the serial resource.
                task.acquireSerialResource();
                serialAquired = true;
                try
                {
                    ffb.updateCrc(crc32);
                }
                finally
                {
                    task.releaseSerialResource();
                }
                chopAndStickResource.acquireCpuResource();
                try
                {
                    objectFileIs = streamWithSize.getInputStream();
                    mithraObject = new MithraGeneratorUnmarshaller().parse(ffb.getBufferedInputStream(), objectFileName);
                }
                finally
                {
                    chopAndStickResource.releaseCpuResource();
                }
            }
            catch (FileNotFoundException e)
            {
                throw new MithraGeneratorException("Unable to find " + objectFileName, e);
            }
            catch (MithraGeneratorParserException e)
            {
                throw new MithraGeneratorException("Unable to parse " + objectFileName+" "+e.getMessage(), e);
            }
            catch (IOException e)
            {
                throw new MithraGeneratorException("Unable to read " + objectFileName, e);
            }
            finally
            {
                // Even on failure the serial resource must be cycled once so later
                // tasks waiting on the ordered CRC update are not deadlocked.
                if (!serialAquired)
                {
                    task.acquireSerialResource();
                    task.releaseSerialResource();
                }
                closeIs(objectFileIs);
            }
        }
        else
        {
            this.logger.info("Skipping " + objectName + ", excluded");
        }
        return mithraObject;
    }

    private String concatParsed(String msg, int count, String type)
    {
        if (count > 0)
        {
            msg += count + " " +type+", ";
        }
        return msg;
    }

    // Blocks until all submitted tasks finish, rethrowing the first worker failure.
    private void waitForExecutorWithCheck()
    {
        getExecutor().waitUntilDone();
        if (executorError != null)
        {
            throw new MithraGeneratorException("exception while generating", executorError);
        }
    }

    private void closeIs(InputStream is)
    {
        if (is != null)
        {
            try
            {
                is.close();
            }
            catch (IOException e)
            {
                throw new MithraGeneratorException("Exception closing InputStream",e);
            }
        }
    }

    public ChopAndStickResource getChopAndStickResource()
    {
        return chopAndStickResource;
    }

    // Lazily creates the shared executor; any task failure shuts the pool down and
    // is recorded in executorError for waitForExecutorWithCheck().
    public AwaitingThreadExecutor getExecutor()
    {
        if (executor == null)
        {
            executor = new AwaitingThreadExecutor(Runtime.getRuntime().availableProcessors()+IO_THREADS, "Mithra Generator");
            executor.setExceptionHandler(new AutoShutdownThreadExecutor.ExceptionHandler() {
                public void handleException(AutoShutdownThreadExecutor executor, Runnable target, Throwable exception)
                {
                    executor.shutdownNow();
                    MithraXMLObjectTypeParser.this.logger.error("Error in runnable target. Shutting down queue "+exception.getClass().getName()+" :"+exception.getMessage());
                    executorError = exception;
                }
            });
        }
        return executor;
    }

    // Orders wrappers by inheritance depth first, then class name, so parents sort
    // before their subclasses.
    private class ParentClassComparator implements Comparator
    {
        public int compare(Object o1, Object o2)
        {
            MithraObjectTypeWrapper left = (MithraObjectTypeWrapper) o1;
            MithraObjectTypeWrapper right = (MithraObjectTypeWrapper) o2;
            Map<String, MithraObjectTypeWrapper> mithraObjects = MithraXMLObjectTypeParser.this.getMithraObjects();
            int result = left.getHierarchyDepth(mithraObjects) - right.getHierarchyDepth(mithraObjects);
            if (result == 0)
            {
                result = left.getClassName().compareTo(right.getClassName());
            }
            return result;
        }
    }

    // A runnable bound to a slot in the serial resource, so CRC updates happen in
    // submission order regardless of which worker thread runs the task.
    public abstract class GeneratorTask implements Runnable
    {
        private int resourceNumber;

        public GeneratorTask(int resourceNumber)
        {
            this.resourceNumber = resourceNumber;
        }

        public void acquireSerialResource()
        {
            getChopAndStickResource().acquireSerialResource(resourceNumber);
        }

        public void releaseSerialResource()
        {
            getChopAndStickResource().releaseSerialResource();
        }
    }

    private void createSortedList()
    {
        this.sortedMithraObjects = new ArrayList<MithraObjectTypeWrapper>(this.mithraObjects.values());
        Collections.sort(this.sortedMithraObjects, new ParentClassComparator());
    }

    private void createSortedEmbeddedValueObjectList()
    {
        this.sortedMithraEmbeddedValueObjects = new ArrayList<MithraEmbeddedValueObjectTypeWrapper>(this.mithraEmbeddedValueObjects.values());
    }

    public List<MithraEmbeddedValueObjectTypeWrapper> getSortedMithraEmbeddedValueObjects()
    {
        return sortedMithraEmbeddedValueObjects;
    }

    // Returns this thread's reusable buffer, creating it on first use.
    private FullFileBuffer getFullFileBuffer()
    {
        FullFileBuffer result = fullFileBufferThreadLocal.get();
        if (result == null)
        {
            result = new FullFileBuffer();
            fullFileBufferThreadLocal.set(result);
        }
        return result;
    }
}
| {
"pile_set_name": "Github"
} |
// Copyright 2014 Emilie Gillet.
//
// Author: Emilie Gillet ([email protected])
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
// See http://creativecommons.org/licenses/MIT/ for more information.
//
// -----------------------------------------------------------------------------
//
// User interface
#ifndef CLOUDS_UI_H_
#define CLOUDS_UI_H_
#include "stmlib/stmlib.h"
#include "stmlib/ui/event_queue.h"
#include "clouds/drivers/leds.h"
#include "clouds/drivers/switches.h"
namespace clouds {
// States for Ui::mode_ — one entry per UI page/display state.
enum UiMode {
UI_MODE_VU_METER,
UI_MODE_BLEND_METER,
UI_MODE_QUALITY,
UI_MODE_BLENDING,
UI_MODE_PLAYBACK_MODE,
UI_MODE_LOAD,
UI_MODE_SAVE,
UI_MODE_PANIC,
UI_MODE_CALIBRATION_1,
UI_MODE_CALIBRATION_2,
UI_MODE_SAVING,
UI_MODE_SPLASH,
UI_MODE_LAST  // Sentinel: number of modes, not a real state.
};
// Logical indices for the hardware switches scanned by the Switches driver.
enum SwitchIndex {
SWITCH_MODE,
SWITCH_WRITE,
SWITCH_FREEZE
};
// Commands accepted by Ui::HandleFactoryTestingRequest().
enum FactoryTestingCommand {
FACTORY_TESTING_READ_POT,
FACTORY_TESTING_READ_CV,
FACTORY_TESTING_READ_GATE,
FACTORY_TESTING_SET_BYPASS,
FACTORY_TESTING_CALIBRATE
};
class GranularProcessor;
class CvScaler;
class Meter;
class Settings;
// Front-panel user interface: scans switches, drives the LEDs and dispatches
// UI events to the granular processor / settings.
class Ui {
public:
Ui() { }
~Ui() { }
// Wires up the collaborating objects; must be called before Poll()/DoEvents().
void Init(
Settings* settings,
CvScaler* cv_scaler,
GranularProcessor* processor,
Meter* meter);
// Scans inputs and refreshes LEDs. NOTE(review): presumably invoked
// periodically (e.g. from a timer interrupt) - confirm against the caller.
void Poll();
// Processes queued UI events (switch presses/releases).
void DoEvents();
void FlushEvents();
// Switches the UI into the panic display state.
void Panic() {
mode_ = UI_MODE_PANIC;
}
// Executes one FactoryTestingCommand and returns its result byte.
uint8_t HandleFactoryTestingRequest(uint8_t command);
private:
void OnSwitchPressed(const stmlib::Event& e);
void OnSwitchReleased(const stmlib::Event& e);
void OnSecretHandshake();
void CalibrateC1();
void CalibrateC3();
void SaveState();
void PaintLeds();
void LoadSampleMemory();
void SaveSampleMemory();
// Fixed-capacity queue of pending UI events.
stmlib::EventQueue<16> queue_;
Settings* settings_;
CvScaler* cv_scaler_;
Leds leds_;
Switches switches_;
// Per-switch press timestamps; used to distinguish short from long presses.
uint32_t press_time_[kNumSwitches];
uint32_t long_press_time_[kNumSwitches];
// Current UI page/state (see UiMode).
UiMode mode_;
GranularProcessor* processor_;
Meter* meter_;
// Memory slot targeted by the load/save pages.
uint8_t load_save_location_;
DISALLOW_COPY_AND_ASSIGN(Ui);
};
} // namespace clouds
#endif // CLOUDS_UI_H_
| {
"pile_set_name": "Github"
} |
/**
*
*/
define(
["dojo/_base/declare", "dojo/_base/lang", "esri/geometry/Extent",
"esri/geometry/Point"
], function(declare, lang, Extent, Point) {
return declare(null, {
BASE32_CODES: "0123456789bcdefghjkmnpqrstuvwxyz",
BASE32_CODES_DICT: {},
ENCODE_AUTO: 'auto',
SIGFIG_HASH_LENGTH: [0, 5, 7, 8, 11, 12, 13, 15, 16, 17,
18
],
constructor: function() {
console.log("constructed");
for (var i = 0; i < this.BASE32_CODES.length; i++) {
this.BASE32_CODES_DICT[this.BASE32_CODES.charAt(
i)] = i;
}
console.log("constructed complete");
},
// Encode a latitude/longitude pair into a base-32 geohash string of
// `numberOfChars` characters (default 9). When numberOfChars is ENCODE_AUTO
// both coordinates must be strings; precision is derived from their decimal
// significant figures via SIGFIG_HASH_LENGTH.
encode: function(latitude, longitude, numberOfChars) {
if (numberOfChars === this.ENCODE_AUTO) {
if (typeof(latitude) === 'number' || typeof(
longitude) === 'number') {
throw new Error(
'string notation required for auto precision.'
);
}
var decSigFigsLat = latitude.split('.')[1].length;
var decSigFigsLong = longitude.split('.')[1]
.length;
var numberOfSigFigs = Math.max(
decSigFigsLat, decSigFigsLong);
numberOfChars = this.SIGFIG_HASH_LENGTH[
numberOfSigFigs];
} else if (numberOfChars === undefined) {
numberOfChars = 9;
}
// Interleaved binary search: even bit positions halve the longitude
// range, odd positions halve the latitude range. Every 5 accumulated
// bits are emitted as one base-32 character.
var chars = [],
bits = 0,
bitsTotal = 0,
hash_value = 0,
maxLat = 90,
minLat = -90,
maxLon = 180,
minLon = -180,
mid;
while (chars.length < numberOfChars) {
if (bitsTotal % 2 === 0) {
mid = (maxLon + minLon) / 2;
if (longitude > mid) {
hash_value = (hash_value << 1) + 1;
minLon = mid;
} else {
hash_value = (hash_value << 1) + 0;
maxLon = mid;
}
} else {
mid = (maxLat + minLat) / 2;
if (latitude > mid) {
hash_value = (hash_value << 1) + 1;
minLat = mid;
} else {
hash_value = (hash_value << 1) + 0;
maxLat = mid;
}
}
bits++;
bitsTotal++;
if (bits === 5) {
var code = this.BASE32_CODES[hash_value];
chars.push(code);
bits = 0;
hash_value = 0;
}
}
return chars.join('');
},
encode_int: function(latitude, longitude, bitDepth) {
bitDepth = bitDepth || 52;
var bitsTotal = 0,
maxLat = 90,
minLat = -90,
maxLon = 180,
minLon = -180,
mid,
combinedBits = 0;
while (bitsTotal < bitDepth) {
combinedBits *= 2;
if (bitsTotal % 2 === 0) {
mid = (maxLon + minLon) / 2;
if (longitude > mid) {
combinedBits += 1;
minLon = mid;
} else {
maxLon = mid;
}
} else {
mid = (maxLat + minLat) / 2;
if (latitude > mid) {
combinedBits += 1;
minLat = mid;
} else {
maxLat = mid;
}
}
bitsTotal++;
}
return combinedBits;
},
decode_bbox: function(hash_string) {
var isLon = true,
maxLat = 90,
minLat = -90,
maxLon = 180,
minLon = -180,
mid;
var hashValue = 0;
for (var i = 0, l = hash_string.length; i < l; i++) {
var code = hash_string[i].toLowerCase();
hashValue = this.BASE32_CODES_DICT[code];
for (var bits = 4; bits >= 0; bits--) {
var bit = (hashValue >> bits) & 1;
if (isLon) {
mid = (maxLon + minLon) / 2;
if (bit === 1) {
minLon = mid;
} else {
maxLon = mid;
}
} else {
mid = (maxLat + minLat) / 2;
if (bit === 1) {
minLat = mid;
} else {
maxLat = mid;
}
}
isLon = !isLon;
}
}
return [minLat, minLon, maxLat, maxLon];
},
decode_bbox_int: function(hashInt, bitDepth) {
bitDepth = bitDepth || 52;
var maxLat = 90,
minLat = -90,
maxLon = 180,
minLon = -180;
var latBit = 0,
lonBit = 0;
var step = bitDepth / 2;
for (var i = 0; i < step; i++) {
lonBit = this.get_bit(hashInt, ((step - i) *
2) - 1);
latBit = this.get_bit(hashInt, ((step - i) *
2) - 2);
if (latBit === 0) {
maxLat = (maxLat + minLat) / 2;
} else {
minLat = (maxLat + minLat) / 2;
}
if (lonBit === 0) {
maxLon = (maxLon + minLon) / 2;
} else {
minLon = (maxLon + minLon) / 2;
}
}
return [minLat, minLon, maxLat, maxLon];
},
get_bit: function(bits, position) {
return (bits / Math.pow(2, position)) & 0x01;
},
decode: function(hashString) {
var bbox = this.decode_bbox(hashString);
var lat = (bbox[0] + bbox[2]) / 2;
var lon = (bbox[1] + bbox[3]) / 2;
var latErr = bbox[2] - lat;
var lonErr = bbox[3] - lon;
return {
latitude: lat,
longitude: lon,
error: {
latitude: latErr,
longitude: lonErr
}
};
},
decode_int: function(hash_int, bitDepth) {
var bbox = this.decode_bbox_int(hash_int,
bitDepth);
var lat = (bbox[0] + bbox[2]) / 2;
var lon = (bbox[1] + bbox[3]) / 2;
var latErr = bbox[2] - lat;
var lonErr = bbox[3] - lon;
return {
latitude: lat,
longitude: lon,
error: {
latitude: latErr,
longitude: lonErr
}
};
},
neighbor: function(hashString, direction) {
var lonLat = this.decode(hashString);
var neighborLat = lonLat.latitude + direction[0] *
lonLat.error.latitude * 2;
var neighborLon = lonLat.longitude + direction[
1] * lonLat.error.longitude * 2;
return this.encode(neighborLat, neighborLon,
hashString.length);
},
neighbor_int: function(hash_int, direction, bitDepth) {
bitDepth = bitDepth || 52;
var lonlat = this.decode_int(hash_int, bitDepth);
var neighbor_lat = lonlat.latitude + direction[
0] * lonlat.error.latitude * 2;
var neighbor_lon = lonlat.longitude + direction[
1] * lonlat.error.longitude * 2;
return this.encode_int(neighbor_lat,
neighbor_lon, bitDepth);
},
neighbors: function(hash_string) {
var hashstringLength = hash_string.length;
var lonlat = this.decode(hash_string);
var lat = lonlat.latitude;
var lon = lonlat.longitude;
var latErr = lonlat.error.latitude * 2;
var lonErr = lonlat.error.longitude * 2;
var neighbor_lat,
neighbor_lon;
var neighborHashList = [
encodeNeighbor(1, 0),
encodeNeighbor(1, 1),
encodeNeighbor(0, 1),
encodeNeighbor(-1, 1),
encodeNeighbor(-1, 0),
encodeNeighbor(-1, -1),
encodeNeighbor(0, -1),
encodeNeighbor(1, -1)
];
function encodeNeighbor(neighborLatDir,
neighborLonDir) {
neighbor_lat = lat + neighborLatDir *
latErr;
neighbor_lon = lon + neighborLonDir *
lonErr;
return encode(neighbor_lat,
neighbor_lon, hashstringLength);
}
return neighborHashList;
},
neighbors_int: function(hash_int, bitDepth) {
bitDepth = bitDepth || 52;
var lonlat = this.decode_int(hash_int, bitDepth);
var lat = lonlat.latitude;
var lon = lonlat.longitude;
var latErr = lonlat.error.latitude * 2;
var lonErr = lonlat.error.longitude * 2;
var neighbor_lat,
neighbor_lon;
var neighborHashIntList = [
encodeNeighbor_int(1, 0),
encodeNeighbor_int(1, 1),
encodeNeighbor_int(0, 1),
encodeNeighbor_int(-1, 1),
encodeNeighbor_int(-1, 0),
encodeNeighbor_int(-1, -1),
encodeNeighbor_int(0, -1),
encodeNeighbor_int(1, -1)
];
function encodeNeighbor_int(neighborLatDir,
neighborLonDir) {
neighbor_lat = lat + neighborLatDir *
latErr;
neighbor_lon = lon + neighborLonDir *
lonErr;
return encode_int(neighbor_lat,
neighbor_lon, bitDepth);
}
return neighborHashIntList;
},
bboxes: function(minLat, minLon, maxLat, maxLon,
numberOfChars) {
numberOfChars = numberOfChars || 9;
var hashSouthWest = this.encode(minLat, minLon,numberOfChars);
var hashNorthEast = this.encode(maxLat, maxLon,numberOfChars);
var latLon = this.decode(hashSouthWest);
var perLat = latLon.error.latitude * 2;
var perLon = latLon.error.longitude * 2;
var boxSouthWest = this.decode_bbox(
hashSouthWest);
var boxNorthEast = this.decode_bbox(
hashNorthEast);
var latStep = Math.round((boxNorthEast[0] -
boxSouthWest[0]) / perLat);
var lonStep = Math.round((boxNorthEast[1] -
boxSouthWest[1]) / perLon);
var hashList = [];
for (var lat = 0; lat <= latStep; lat++) {
for (var lon = 0; lon <= lonStep; lon++) {
hashList.push(this.neighbor(
hashSouthWest, [lat, lon]));
}
}
return hashList;
},
bboxes_int: function(minLat, minLon, maxLat, maxLon,
bitDepth) {
bitDepth = bitDepth || 52;
var hashSouthWest = encode_int(minLat, minLon,
bitDepth);
var hashNorthEast = encode_int(maxLat, maxLon,
bitDepth);
var latlon = this.decode_int(hashSouthWest,
bitDepth);
var perLat = latlon.error.latitude * 2;
var perLon = latlon.error.longitude * 2;
var boxSouthWest = this.decode_bbox_int(
hashSouthWest, bitDepth);
var boxNorthEast = this.decode_bbox_int(
hashNorthEast, bitDepth);
var latStep = Math.round((boxNorthEast[0] -
boxSouthWest[0]) / perLat);
var lonStep = Math.round((boxNorthEast[1] -
boxSouthWest[1]) / perLon);
var hashList = [];
for (var lat = 0; lat <= latStep; lat++) {
for (var lon = 0; lon <= lonStep; lon++) {
hashList.push(this.neighbor_int(
hashSouthWest, [lat, lon],
bitDepth));
}
}
return hashList;
}
});
}); | {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2007 - 2008 by Damien Di Fede <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Library General Public License as published
* by the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
package ddf.minim.effects;
/**
 * HighPassSP is a single pole high pass filter. It is not super high quality,
 * but it gets the job done.
 *
 * @author Damien Di Fede
 */
public class HighPassSP extends IIRFilter
{
  /**
   * Constructs a high pass filter with a cutoff frequency of <code>freq</code>
   * that will be used to filter audio recorded at <code>sampleRate</code>.
   *
   * @param freq the cutoff frequency
   * @param sampleRate the sample rate of audio that will be filtered
   */
  public HighPassSP(float freq, float sampleRate)
  {
    super(freq, sampleRate);
  }

  /**
   * Computes the feed-forward (a) and feed-back (b) coefficients for a
   * single-pole high pass response at the current cutoff frequency.
   */
  protected void calcCoeff()
  {
    // Cutoff normalized to the sample rate: fc / fs.
    float normalizedCutoff = frequency() / sampleRate();
    // Pole location derived from the normalized cutoff.
    float decay = (float) Math.exp(-2 * Math.PI * normalizedCutoff);
    float gain = (1 + decay) / 2;
    a = new float[] { gain, -gain };
    b = new float[] { decay };
  }
}
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: d5a51525b27e3ee4aadbeb39cbcf0750
timeCreated: 1485107929
licenseType: Store
TextureImporter:
fileIDToRecycleName: {}
serializedVersion: 4
mipmaps:
mipMapMode: 0
enableMipMap: 0
sRGBTexture: 0
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
filterMode: 0
aniso: -1
mipBias: -1
wrapMode: 0
nPOTScale: 1
lightmap: 0
compressionQuality: 50
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spritePixelsToUnits: 100
alphaUsage: 2
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: 10
textureShape: 1
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- buildTarget: DefaultTexturePlatform
maxTextureSize: 64
textureFormat: -1
textureCompression: 0
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
- buildTarget: Standalone
maxTextureSize: 64
textureFormat: -1
textureCompression: 0
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
spritePackingTag:
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!--
  Warning screen: title, body text, and a Re-approve / Close button row.
  Fixes: removed the redundant duplicate xmlns declaration on the inner
  LinearLayout, replaced deprecated fill_parent with match_parent, and
  switched text sizes from dp to sp so they respect user font scaling.
-->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:orientation="vertical"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:gravity="center|top"
    android:background="#fff"
    >
    <TextView
        android:id="@+id/warning_title"
        android:paddingTop="20dp"
        android:textColor="#000"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:textSize="15sp"
        android:layout_marginTop="10dp"
        android:text="@string/warning_title"
        android:textStyle="bold"
        android:gravity="center"
        />
    <TextView
        android:id="@+id/warning_body"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:textSize="13sp"
        android:textColor="#000"
        android:layout_marginTop="10dp"
        android:paddingBottom="10dp"
        android:layout_marginBottom="10dp"
        android:text="@string/warning_body"
        android:gravity="center"
        android:layout_marginRight="10dp"
        android:layout_marginLeft="10dp"
        />
    <!-- Button row; namespace is inherited from the root element. -->
    <LinearLayout
        android:orientation="horizontal"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:gravity="center|top"
        android:paddingBottom="20dp"
        >
        <Button
            android:id="@+id/button_ok"
            android:layout_width="100dp"
            android:layout_height="30dp"
            android:text="@string/warning_re_approve"
            android:textColor="#fff"
            android:fontFamily="Avenir Next LT Pro"
            android:textSize="14sp"
            android:textStyle="bold"
            android:layout_marginRight="10dp"
            android:layout_marginLeft="10dp"
            android:background="#2A5E93"
            android:layout_marginTop="10dp"
            android:paddingBottom="10dp"
            android:layout_marginBottom="10dp"
            />
        <Button
            android:id="@+id/button_close"
            android:layout_width="100dp"
            android:layout_height="30dp"
            android:text="@string/warning_close"
            android:textColor="#fff"
            android:fontFamily="Avenir Next LT Pro"
            android:textSize="12sp"
            android:textStyle="bold"
            android:layout_marginRight="10dp"
            android:layout_marginLeft="10dp"
            android:background="#2A5E93"
            android:layout_marginTop="10dp"
            android:layout_marginBottom="10dp"
            android:paddingBottom="10dp"
            />
    </LinearLayout>
</LinearLayout>
| {
"pile_set_name": "Github"
} |
{
"name": "Skyfall",
"year": 2012,
"runtime": 143,
"categories": [
"action",
"adventure",
"thriller"
],
"release-date": "2012-11-09",
"director": "Sam Mendes",
"writer": [
"Neal Purvis",
"Robert Wade",
"John Logan"
],
"actors": [
"Daniel Craig",
"Judi Dench",
"Javier Bardem",
"Ralph Fiennes"
],
"storyline": "Bond's loyalty to M is tested when her past comes back to haunt her. When MI6 comes under attack, 007 must track down and destroy the threat, no matter how personal the cost."
}
| {
"pile_set_name": "Github"
} |
// Register the ES6 Object.assign polyfill module on the core-js core object.
require('../../modules/es6.object.assign');
// Re-export the (possibly polyfilled) Object.assign implementation.
module.exports = require('../../modules/$.core').Object.assign;
"pile_set_name": "Github"
} |
// Copyright 2018 The Starlark in Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::errors::SyntaxError;
use codemap::Span;
use codemap_diagnostic::{Diagnostic, Level, SpanLabel, SpanStyle};
use std::char;
use std::collections::linked_list::IntoIter;
use std::collections::LinkedList;
use std::fmt;
// TODO: move that code in some common error code list?
// CL prefix = Critical Lexing
const LEX_ERROR_CODE: &str = "CL00"; // character not valid at this location
const INDENT_ERROR_CODE: &str = "CL01"; // inconsistent indentation
const UNFINISHED_STRING_LITERAL_CODE: &str = "CL02"; // string literal never closed
const INVALID_ESCAPE_SEQUENCE_CODE: &str = "CL03"; // bad backslash escape in a string
/// Errors that can be generated during lexing
#[doc(hidden)]
#[derive(Debug, Clone, PartialEq, Copy)]
pub enum LexerError {
    /// Dedent to a width that matches no enclosing block; payload is the
    /// (start, end) byte offsets of the offending span.
    Indentation(u64, u64),
    /// A character that cannot start or continue any token; payload is its byte offset.
    InvalidCharacter(u64),
    /// A string literal hit a newline or end of input before its closing quote.
    UnfinishedStringLiteral(u64, u64),
    /// A backslash escape inside a string that the lexer does not accept.
    InvalidEscapeSequence(u64, u64),
    /// An error produced elsewhere, carried with a ready-made span, code and label.
    WrappedError {
        span: Span,
        code: &'static str,
        label: &'static str,
    },
}
impl SyntaxError for LexerError {
    /// Convert the error to a codemap diagnostic.
    ///
    /// To build this diagnostic, the method needs the file span corresponding to the parsed
    /// file.
    fn to_diagnostic(self, file_span: Span) -> Diagnostic {
        // Primary label: pinpoint the offending span inside the file.
        let sl = SpanLabel {
            span: match self {
                LexerError::Indentation(x, y)
                | LexerError::UnfinishedStringLiteral(x, y)
                | LexerError::InvalidEscapeSequence(x, y) => file_span.subspan(x, y),
                // Single-offset errors become a zero-length span.
                LexerError::InvalidCharacter(x) => file_span.subspan(x, x),
                // Wrapped errors already carry an absolute span.
                LexerError::WrappedError { span, .. } => span,
            },
            style: SpanStyle::Primary,
            label: Some(
                match self {
                    LexerError::Indentation(..) => "Incorrect indentation",
                    LexerError::InvalidCharacter(..) => "Character not valid at present location",
                    LexerError::UnfinishedStringLiteral(..) => "Unfinished string literal",
                    LexerError::InvalidEscapeSequence(..) => "Invalid string escape sequence",
                    LexerError::WrappedError { label, .. } => label,
                }
                .to_owned(),
            ),
        };
        Diagnostic {
            level: Level::Error,
            message: "Parse error".to_owned(),
            // Stable error code; see the CL* constants above.
            code: Some(
                match self {
                    LexerError::Indentation(..) => INDENT_ERROR_CODE,
                    LexerError::InvalidCharacter(..) => LEX_ERROR_CODE,
                    LexerError::UnfinishedStringLiteral(..) => UNFINISHED_STRING_LITERAL_CODE,
                    LexerError::InvalidEscapeSequence(..) => INVALID_ESCAPE_SEQUENCE_CODE,
                    LexerError::WrappedError { code, .. } => code,
                }
                .to_owned(),
            ),
            spans: vec![sl],
        }
    }
}
/// All token that can be generated by the lexer
#[doc(hidden)]
#[derive(Debug, Clone, PartialEq)]
pub enum Token {
    // Indentation block & meaningful spaces
    Indent,  // New indentation block
    Dedent,  // Leaving an indentation block
    Newline, // Newline outside a string
    // Keywords
    And,      // "and" keyword
    Else,     // "else" keyword
    Load,     // "load" keyword
    Break,    // "break" keyword
    For,      // "for" keyword
    Not,      // "not" keyword
    NotIn,    // "not in" keyword (taken as keyword)
    Continue, // "continue" keyword
    If,       // "if" keyword
    Or,       // "or" keyword
    Def,      // "def" keyword
    In,       // "in" keyword
    Pass,     // "pass" keyword
    Elif,     // "elif" keyword
    Return,   // "return" keyword
    // Symbols
    Comma,            // ','
    Semicolon,        // ';'
    Colon,            // ':'
    PlusEqual,        // '+='
    MinusEqual,       // '-='
    StarEqual,        // '*='
    SlashEqual,       // '/='
    DoubleSlashEqual, // '//='
    PercentEqual,     // '%='
    DoubleEqual,      // '=='
    BangEqual,        // '!='
    LowerEqual,       // '<='
    GreaterEqual,     // '>='
    Doublestar,       // '**'
    Equal,            // '='
    LowerThan,        // '<'
    GreaterThan,      // '>'
    Minus,            // '-'
    Plus,             // '+'
    Star,             // '*'
    Percent,          // '%'
    Slash,            // '/'
    DoubleSlash,      // '//'
    Dot,              // '.'
    Pipe,             // '|'
    // Brackets
    OpeningBracket,      // '['
    OpeningCurlyBracket, // '{'
    OpeningParenthesis,  // '('
    ClosingBracket,      // ']'
    ClosingCurlyBracket, // '}'
    ClosingParenthesis,  // ')'
    // Tokens carrying a payload
    Reserved(String),      // One of the reserved keywords
    Identifier(String),    // An identifier
    IntegerLiteral(i64),   // An integer literal (123, 0x1, 0b1011, 0755, ...)
    StringLiteral(String), // A string literal
}
impl fmt::Display for Token {
    /// Render the token as the human-readable description used in parser
    /// error messages (e.g. "symbol ','", "keyword 'if'").
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Payload-carrying tokens are formatted on the spot; every other
        // token maps to a fixed description written out at the end.
        let description = match self {
            Token::Reserved(ref s) => return write!(f, "reserved keyword '{}'", s),
            Token::Identifier(ref s) => return write!(f, "identifier '{}'", s),
            Token::IntegerLiteral(ref i) => return write!(f, "integer literal '{}'", i),
            Token::StringLiteral(ref s) => return write!(f, "string literal '{}'", s),
            Token::Indent => "new indentation block",
            Token::Dedent => "end of indentation block",
            Token::Newline => "new line",
            Token::And => "keyword 'and'",
            Token::Else => "keyword 'else'",
            Token::Load => "keyword 'load'",
            Token::Break => "keyword 'break'",
            Token::For => "keyword 'for'",
            Token::Not => "keyword 'not'",
            Token::NotIn => "keyword 'not in'",
            Token::Continue => "keyword 'continue'",
            Token::If => "keyword 'if'",
            Token::Or => "keyword 'or'",
            Token::Def => "keyword 'def'",
            Token::In => "keyword 'in'",
            Token::Pass => "keyword 'pass'",
            Token::Elif => "keyword 'elif'",
            Token::Return => "keyword 'return'",
            Token::Comma => "symbol ','",
            Token::Semicolon => "symbol ';'",
            Token::Colon => "symbol ':'",
            Token::PlusEqual => "symbol '+='",
            Token::MinusEqual => "symbol '-='",
            Token::StarEqual => "symbol '*='",
            Token::SlashEqual => "symbol '/='",
            Token::DoubleSlashEqual => "symbol '//='",
            Token::PercentEqual => "symbol '%='",
            Token::DoubleEqual => "symbol '=='",
            Token::BangEqual => "symbol '!='",
            Token::LowerEqual => "symbol '<='",
            Token::GreaterEqual => "symbol '>='",
            Token::Doublestar => "symbol '**'",
            Token::Equal => "symbol '='",
            Token::LowerThan => "symbol '<'",
            Token::GreaterThan => "symbol '>'",
            Token::Minus => "symbol '-'",
            Token::Plus => "symbol '+'",
            Token::Star => "symbol '*'",
            Token::Percent => "symbol '%'",
            Token::Slash => "symbol '/'",
            Token::DoubleSlash => "symbol '//'",
            Token::Dot => "symbol '.'",
            Token::Pipe => "symbol '|'",
            Token::OpeningBracket => "symbol '['",
            Token::OpeningCurlyBracket => "symbol '{'",
            Token::OpeningParenthesis => "symbol '('",
            Token::ClosingBracket => "symbol ']'",
            Token::ClosingCurlyBracket => "symbol '}'",
            Token::ClosingParenthesis => "symbol ')'",
        };
        f.write_str(description)
    }
}
// A single lexer output: (start offset, token, end offset), or a lexing error.
#[doc(hidden)]
pub type LexerItem = Result<(u64, Token, u64), LexerError>;

// Convenience trait alias: anything convertible into an iterator of LexerItem.
#[doc(hidden)]
pub trait LexerIntoIter<T: Iterator<Item = LexerItem>>:
    IntoIterator<Item = LexerItem, IntoIter = T>
{
}

// Blanket implementation: every suitable IntoIterator is a LexerIntoIter.
impl<T1: Iterator<Item = LexerItem>, T2: IntoIterator<Item = LexerItem, IntoIter = T1>>
    LexerIntoIter<T1> for T2
{
}
/// An iterator over a string slice that convert it to a list of token, i.e. the lexer.
#[derive(Debug)]
#[doc(hidden)]
pub struct Lexer {
    /// Text currently being lexed (can be swapped out via `replace_input`).
    input: String,
    /// Byte offset of the next char in `input`
    pos_bytes: usize,
    /// Absolute offset added to positions so spans stay monotonic across
    /// `replace_input` calls (interactive mode).
    offset: u64,
    /// When true, end of input emits the trailing Newline/Dedent tokens.
    process_end_of_file: bool,
    /// Whether the last consumed character was a newline (drives indentation).
    last_new_line: bool,
    /// Start offset of the token currently being scanned (set by `begin`).
    last_pos: u64,
    /// The last (offset, char) returned by `pop`, if any.
    last_next: Option<(u64, char)>,
    /// Widths of the open indentation blocks; front is the innermost one.
    indentation_stack: LinkedList<u32>,
    /// Bracket nesting depth; newline tokens are suppressed while > 0.
    parentheses: i32,
    /// Tokens queued to be returned before consuming more input.
    backlog: LinkedList<LexerItem>,
}
/// An iterator that buffer a Lexer in order to wait for end of block / parentheses.
/// Two consecutive new lines are considered also the end of input to buffer.
#[doc(hidden)]
pub struct BufferedLexer {
    /// Tokens lexed so far, waiting to be handed to the parser.
    backlog: LinkedList<LexerItem>,
    /// The underlying incremental lexer.
    lexer: Lexer,
    /// True when the last significant token was a ':' (a block must follow).
    last_colon: bool,
}
impl BufferedLexer {
    /// Build a buffered lexer over `input`.
    ///
    /// EOF processing is disabled on the inner lexer so that partial input
    /// (interactive mode) does not flush pending dedents prematurely.
    pub fn new(input: &str) -> Self {
        let mut r = BufferedLexer {
            backlog: LinkedList::new(),
            lexer: Lexer::new(input),
            last_colon: false,
        };
        r.lexer.process_eof(false);
        r.consume();
        r
    }

    /// Drain the inner lexer into the backlog, tracking whether the last
    /// significant token was a colon.
    fn consume(&mut self) {
        loop {
            match self.lexer.next() {
                Some(Ok((i, Token::Colon, j))) => {
                    self.last_colon = true;
                    self.backlog.push_back(Ok((i, Token::Colon, j)));
                }
                // Newlines are buffered without clearing the colon flag.
                Some(Ok((i, Token::Newline, j))) => {
                    self.backlog.push_back(Ok((i, Token::Newline, j)));
                }
                Some(x) => {
                    self.last_colon = false;
                    self.backlog.push_back(x);
                }
                None => return,
            }
        }
    }

    /// True when the buffered input is syntactically incomplete and more
    /// input should be requested: a trailing colon, an open indentation
    /// block, or unbalanced parentheses.
    pub fn need_more(&self) -> bool {
        self.last_colon || !self.lexer.indentation_stack.is_empty() || self.lexer.parentheses > 0
    }

    /// Feed more input. An empty string or a lone newline is treated as
    /// end of file so that pending dedents get emitted.
    pub fn input(&mut self, input: &str) {
        if input.is_empty() || (input.len() == 1 && Lexer::is_nl(input.chars().next().unwrap())) {
            self.lexer.process_eof(true);
        }
        self.lexer.replace_input(input);
        self.consume();
    }
}
impl IntoIterator for BufferedLexer {
    type Item = LexerItem;
    type IntoIter = IntoIter<LexerItem>;

    /// Consume the buffered lexer, yielding the tokens accumulated so far.
    fn into_iter(self) -> Self::IntoIter {
        self.backlog.into_iter()
    }
}
impl Lexer {
    /// Create a new lexer from a string slice
    pub fn new(input: &str) -> Self {
        let input = input.to_owned();
        Lexer {
            input,
            pos_bytes: 0,
            offset: 0,
            process_end_of_file: true,
            last_new_line: true,
            last_pos: 0,
            last_next: None,
            indentation_stack: LinkedList::new(),
            parentheses: 0,
            backlog: LinkedList::new(),
        }
    }

    /// Mark this Lexer to process or not the end of iterator as end of file
    fn process_eof(&mut self, process: bool) {
        self.process_end_of_file = process
    }

    /// Replace the input by a new one, useful in interactive mode.
    ///
    /// `offset` is advanced to the absolute position just past the old
    /// input so spans in the new input remain monotonically increasing.
    fn replace_input(&mut self, input: &str) {
        self.offset = if let Some((p, _)) = self.peek() {
            p
        } else if let Some((i, c)) = self.last_next {
            // One past the last char we handed out (its UTF-8 width).
            i + (c.len_utf8() as u64)
        } else {
            self.last_pos
        };
        assert!(self.offset >= self.last_pos);
        self.input = input.to_owned();
        self.pos_bytes = 0;
    }

    /// Whether `c` is one of the newline characters the lexer recognizes
    /// (LF, CR, LINE SEPARATOR, PARAGRAPH SEPARATOR).
    fn is_nl(c: char) -> bool {
        match c {
            '\n' | '\r' | '\u{2028}' | '\u{2029}' => true,
            _ => false,
        }
    }

    /// Look at the next character and its absolute offset without consuming it.
    fn peek(&mut self) -> Option<(u64, char)> {
        match self.input[self.pos_bytes..].chars().next() {
            Some(c) => Some((self.pos_bytes as u64 + self.offset, c)),
            None => None,
        }
    }

    /// Consume the next character, returning it with its absolute offset and
    /// updating `last_new_line` / `last_next`.
    fn pop(&mut self) -> Option<(u64, char)> {
        let mut char_indices = self.input[self.pos_bytes..].char_indices();
        self.last_next = match char_indices.next() {
            Some((_, c)) => {
                let pos = self.pos_bytes;
                // Advance to the start of the following char (or end of input).
                self.pos_bytes = match char_indices.next() {
                    Some((len, _)) => self.pos_bytes + len,
                    None => self.input.len(),
                };
                self.last_new_line = Lexer::is_nl(c);
                Some((pos as u64 + self.offset, c))
            }
            None => {
                self.last_new_line = false;
                None
            }
        };
        self.last_next
    }

    /// Abort lexing after an error: exhaust the input and clear all nesting
    /// state so the next `next()` call stops cleanly.
    fn terminate(&mut self) {
        self.pos_bytes = self.input.len();
        self.indentation_stack.clear();
        self.parentheses = 0;
    }

    /// Consume and return the next character, or '\0' at end of input.
    fn next_char(&mut self) -> char {
        self.pop().unwrap_or((0, '\0')).1
    }

    /// Peek the next character, or '\0' at end of input.
    fn peek_char(&mut self) -> char {
        self.peek().unwrap_or((0, '\0')).1
    }

    /// Handle end of input: emit one final Newline, then one Dedent per
    /// still-open indentation block (when EOF processing is enabled).
    fn return_none(&mut self) -> Option<<Self as Iterator>::Item> {
        // Emit a newline and N DEDENT at EOF
        let p = self.end_pos();
        if !self.last_new_line {
            self.last_new_line = true;
            Some(Ok((p.1, Token::Newline, p.1)))
        } else if self.ihead() > 0 && self.process_end_of_file {
            self.indentation_stack.pop_front();
            Some(Ok((p.1, Token::Dedent, p.1)))
        } else {
            None
        }
    }

    /// Width of the innermost open indentation block (0 at top level).
    fn ihead(&self) -> u32 {
        if self.indentation_stack.is_empty() {
            0
        } else {
            *self.indentation_stack.front().unwrap()
        }
    }

    /// Record the current position as the start of the token being scanned.
    fn begin(&mut self) {
        if let Some((i, ..)) = self.peek() {
            self.last_pos = i;
        }
    }

    /// The (start, end) span of the token being scanned; end falls back to
    /// one past the last consumed char, or the start itself, at EOF.
    fn end_pos(&mut self) -> (u64, u64) {
        if let Some((end, ..)) = self.peek() {
            (self.last_pos, end)
        } else if let Some((i, c)) = self.last_next {
            (self.last_pos, i + (c.len_utf8() as u64))
        } else {
            (self.last_pos, self.last_pos)
        }
    }

    /// Finish the current token with span [begin, current position).
    fn end(&mut self, res: Token) -> Option<<Self as Iterator>::Item> {
        let p = self.end_pos();
        assert!(p.0 <= p.1, "{} > {}", p.0, p.1);
        Some(Ok((p.0, res, p.1)))
    }

    /// Consume one more character, then finish the current token.
    fn consume(&mut self, res: Token) -> Option<<Self as Iterator>::Item> {
        self.pop();
        self.end(res)
    }

    /// Emit an InvalidCharacter error at the current position.
    fn invalid(&mut self) -> Option<<Self as Iterator>::Item> {
        let p = self.end_pos();
        Some(Err(LexerError::InvalidCharacter(p.1)))
    }

    /// Core of `Iterator::next`: drain the backlog first, then lex one
    /// token; on error, terminate so iteration stops on the next call.
    fn internal_next(&mut self) -> Option<<Self as Iterator>::Item> {
        if !self.backlog.is_empty() {
            return self.backlog.pop_front();
        }
        if self.peek().is_none() {
            return self.return_none();
        }
        let r = self.consume_token();
        if let Some(Err(_)) = r {
            // In case of errors, consume the whole input so we stop on next call
            self.terminate();
        } else if r.is_none() {
            return self.return_none();
        }
        r
    }
}
impl Iterator for Lexer {
    type Item = LexerItem;

    // With the "trace" feature enabled, log every token for debugging.
    #[cfg(feature = "trace")]
    fn next(&mut self) -> Option<Self::Item> {
        let r = self.internal_next();
        println!("[TOKEN] {:?}", r);
        r
    }

    #[cfg(not(feature = "trace"))]
    fn next(&mut self) -> Option<Self::Item> {
        self.internal_next()
    }
}
// Consumers to actually consume token
impl Lexer {
fn token_from_identifier(identifier: &str) -> Token {
match identifier {
"and" => Token::And,
"else" => Token::Else,
"load" => Token::Load,
"break" => Token::Break,
"for" => Token::For,
"not" => Token::Not,
"continue" => Token::Continue,
"if" => Token::If,
"or" => Token::Or,
"def" => Token::Def,
"in" => Token::In,
"pass" => Token::Pass,
"elif" => Token::Elif,
"return" => Token::Return,
"as" | "import" | "assert" | "is" | "class" | "nonlocal" | "del" | "raise"
| "except" | "try" | "finally" | "while" | "from" | "with" | "global" | "yield" => {
Token::Reserved(identifier.to_owned())
}
_ => Token::Identifier(identifier.to_owned()),
}
}
fn skip_comment(&mut self) {
assert_eq!(self.next_char(), '#');
loop {
match self.peek_char() {
'\n' | '\r' | '\u{2028}' | '\u{2029}' | '\0' => return,
_ => {
self.pop();
}
}
}
}
    /// Skip whitespace and comments between tokens.
    ///
    /// When `newline` is true (i.e. inside parentheses) newline characters
    /// are skipped too; otherwise they are left for the caller to tokenize.
    /// Returns Some(error) for a backslash not followed by '\n', None otherwise.
    fn skip_spaces(&mut self, newline: bool) -> Option<<Self as Iterator>::Item> {
        loop {
            match self.peek_char() {
                '\n' | '\r' | '\u{2028}' | '\u{2029}' => {
                    if newline {
                        self.pop();
                    } else {
                        return None;
                    }
                }
                // Backslash is only valid as a line continuation.
                // NOTE(review): only a bare '\n' is accepted after it;
                // a "\\\r\n" sequence would be rejected here — confirm intended.
                '\\' => {
                    self.pop();
                    if self.peek_char() != '\n' {
                        return self.invalid();
                    } else {
                        self.pop();
                    }
                }
                '\t' | ' ' => {
                    self.pop();
                }
                '#' => self.skip_comment(),
                _ => return None,
            };
        }
    }
fn consume_spaces(&mut self) -> u32 {
let mut result = 0;
loop {
match self.peek_char() {
'\t' => result += 8 - (result % 8),
' ' => result += 1,
_ => return result,
};
self.pop();
}
}
    /// Measure the indentation at the start of a line and emit Indent or
    /// Dedent tokens as needed.
    ///
    /// Blank lines and comment-only lines do not affect indentation.
    /// Returns None when the level is unchanged, and an Indentation error
    /// when dedenting to a width that matches no enclosing block.
    fn consume_indentation(&mut self) -> Option<<Self as Iterator>::Item> {
        loop {
            self.begin();
            let spaces = self.consume_spaces();
            let p = self.peek_char();
            if Lexer::is_nl(p) {
                // ignore because it is an empty line, but still return new line
                return None;
            } else if p == '#' {
                // Ignore the comment and start again
                self.skip_comment();
                self.consume_nl();
                continue;
            } else if spaces > self.ihead() {
                // Deeper than the current block: open a new one.
                self.indentation_stack.push_front(spaces);
                return self.end(Token::Indent);
            } else if spaces == self.ihead() {
                return None;
            } else {
                // Shallower: close blocks until the widths match again.
                let mut step = 0;
                while spaces < self.ihead() {
                    self.indentation_stack.pop_front();
                    step += 1;
                }
                if spaces == self.ihead() {
                    // One Dedent is returned now; the rest go to the backlog.
                    let r = self.end(Token::Dedent);
                    while step > 1 {
                        self.backlog.push_front(r.clone().unwrap());
                        step -= 1;
                    }
                    return r;
                } else {
                    // The new width matches no enclosing block.
                    let p = self.end_pos();
                    return Some(Err(LexerError::Indentation(p.0, p.1)));
                }
            }
        }
    }
fn consume_nl(&mut self) -> Option<<Self as Iterator>::Item> {
self.begin();
match (self.next_char(), self.peek_char()) {
('\n', '\r') | ('\r', '\n') => self.consume(Token::Newline),
_ => self.end(Token::Newline),
}
}
    /// Consume the rest of an identifier whose prefix `head` has already
    /// been read, and map it to a keyword/reserved/identifier token.
    ///
    /// Also merges "not" followed by "in" into the single NotIn token,
    /// buffering the lookahead token when the merge does not apply.
    fn consume_identifier_queue(&mut self, head: &str) -> Option<<Self as Iterator>::Item> {
        let mut result = head.to_owned();
        while self.peek_char().is_alphabetic()
            || self.peek_char().is_digit(10)
            || self.peek_char() == '_'
        {
            result.push(self.next_char());
        }
        assert!(!result.is_empty());
        let r = self.end(Self::token_from_identifier(&result));
        match r {
            Some(Ok((b, Token::Not, ..))) => {
                // Special handling of "not in"
                self.consume_spaces();
                if self.peek_char() == 'i' {
                    match self.consume_identifier() {
                        // "not" + "in" fuse into one NotIn token spanning both.
                        Some(Ok((.., Token::In, e))) => Some(Ok((b, Token::NotIn, e))),
                        // Some other identifier: keep it for the next call.
                        Some(next_id) => {
                            self.backlog.push_front(next_id);
                            r
                        }
                        None => r, // This should never happen but it is safe to just return r.
                    }
                } else {
                    r
                }
            }
            _ => r,
        }
    }
    /// Consume an identifier starting at the current position.
    /// The caller guarantees the first character is not a digit.
    fn consume_identifier(&mut self) -> Option<<Self as Iterator>::Item> {
        self.begin();
        assert!(!self.peek_char().is_digit(10));
        self.consume_identifier_queue("")
    }
fn consume_int_r(&mut self, radix: u32) -> Result<i64, ()> {
let mut number = String::new();
while self.peek_char().is_digit(radix) {
number.push(self.next_char());
}
let val = i64::from_str_radix(&number, radix);
if val.is_err() {
Err(())
} else {
Ok(val.unwrap())
}
}
fn consume_int_radix(&mut self, radix: u32) -> Option<<Self as Iterator>::Item> {
let val = self.consume_int_r(radix);
if val.is_err() {
self.invalid()
} else {
self.end(Token::IntegerLiteral(val.unwrap()))
}
}
    /// Consume an integer literal: decimal, 0x/0X hex, 0b/0B binary,
    /// 0o/0O octal, legacy C-style 0-prefixed octal, or a bare 0.
    fn consume_int(&mut self) -> Option<<Self as Iterator>::Item> {
        self.begin();
        let cur = self.peek_char();
        if cur == '0' {
            self.pop();
            let cur = self.peek_char();
            match cur {
                'o' | 'O' => {
                    self.pop();
                    self.consume_int_radix(8)
                }
                // Legacy octal: '0' directly followed by octal digits (0755).
                '0'..='7' => self.consume_int_radix(8),
                'x' | 'X' => {
                    self.pop();
                    self.consume_int_radix(16)
                }
                'b' | 'B' => {
                    self.pop();
                    self.consume_int_radix(2)
                }
                // A lone 0 followed by a non-numeric char is just zero.
                c if !c.is_numeric() => self.end(Token::IntegerLiteral(0)),
                // '0' followed by 8/9 (or other numeric char): invalid.
                _ => self.invalid(),
            }
        } else {
            self.consume_int_radix(10)
        }
    }
    /// Decode one backslash escape sequence inside a string literal.
    ///
    /// Returns Ok(Some(c)) for the decoded character, Ok(None) when the
    /// escape produces no character (backslash-newline inside a
    /// triple-quoted string), or an InvalidEscapeSequence error.
    ///
    /// NOTE(review): `char::from_u32(..).unwrap()` panics when the numeric
    /// escape is not a valid Unicode scalar value (e.g. \u{d800}); this
    /// should probably surface as an error instead — confirm intended.
    fn consume_escape_sequence(&mut self, triple: bool) -> Result<Option<char>, LexerError> {
        if let Some((pos, c)) = self.pop() {
            assert_eq!(c, '\\');
            if let Some((pos2, c2)) = self.peek() {
                match c2 {
                    'n' => {
                        self.pop();
                        Ok(Some('\n'))
                    }
                    'r' => {
                        self.pop();
                        Ok(Some('\r'))
                    }
                    't' => {
                        self.pop();
                        Ok(Some('\t'))
                    }
                    // "\0" alone is NUL; "\0..." with octal digits is an
                    // octal character escape.
                    '0' => {
                        self.pop();
                        if self.peek_char().is_digit(8) {
                            if let Ok(r) = self.consume_int_r(8) {
                                Ok(Some(char::from_u32(r as u32).unwrap()))
                            } else {
                                let p = self.end_pos();
                                Err(LexerError::InvalidEscapeSequence(pos, p.1))
                            }
                        } else {
                            Ok(Some('\0'))
                        }
                    }
                    // Hexadecimal character escape: \xNN...
                    'x' => {
                        self.pop();
                        if let Ok(r) = self.consume_int_r(16) {
                            Ok(Some(char::from_u32(r as u32).unwrap()))
                        } else {
                            let p = self.end_pos();
                            Err(LexerError::InvalidEscapeSequence(pos, p.1))
                        }
                    }
                    // Octal escapes must start with 0; \1..\9 are rejected.
                    '1'..='9' => {
                        self.pop();
                        Err(LexerError::InvalidEscapeSequence(pos, pos2 + 1))
                    }
                    // Backslash-newline: only legal in triple-quoted strings,
                    // where it expands to nothing (line continuation).
                    '\n' => {
                        self.pop();
                        if triple {
                            Ok(None)
                        } else {
                            Err(LexerError::InvalidEscapeSequence(pos, pos2 + 1))
                        }
                    }
                    // Unicode escape: \u{XXXX}.
                    'u' => {
                        self.pop();
                        let c = self.next_char();
                        if c != '{' {
                            let p = self.end_pos();
                            Err(LexerError::InvalidEscapeSequence(pos, p.1))
                        } else if let Ok(r) = self.consume_int_r(16) {
                            let c = self.next_char();
                            if c != '}' {
                                let p = self.end_pos();
                                Err(LexerError::InvalidEscapeSequence(pos, p.1))
                            } else {
                                Ok(Some(char::from_u32(r as u32).unwrap()))
                            }
                        } else {
                            let p = self.end_pos();
                            Err(LexerError::InvalidEscapeSequence(pos, p.1))
                        }
                    }
                    // Escaped quote or backslash yields the character itself.
                    '"' | '\'' | '\\' => {
                        self.pop();
                        Ok(Some(c2))
                    }
                    // Unknown escape: the backslash is kept literally and the
                    // following char is left for the caller.
                    _ => Ok(Some('\\')),
                }
            } else {
                // Backslash at end of input.
                Err(LexerError::InvalidEscapeSequence(pos, pos + 1))
            }
        } else {
            panic!("This is a bug");
        }
    }
    /// Consume a single- or triple-quoted string literal starting at the
    /// opening quote character ('"' or '\'').
    ///
    /// When `raw` is true (r"..." strings), escape processing is disabled
    /// except that a backslash-escaped quote becomes the quote itself.
    fn consume_string(&mut self, raw: bool) -> Option<<Self as Iterator>::Item> {
        self.begin();
        let mut res = String::new();
        let quote = self.next_char();
        let mut triple = false;
        // Detect "" (empty string) vs """ (start of a triple-quoted string).
        if self.peek_char() == quote {
            self.next_char();
            if self.peek_char() == quote {
                self.next_char();
                triple = true;
            } else {
                // Exactly two quotes: the empty string literal.
                return self.end(Token::StringLiteral(res));
            }
        }
        loop {
            match self.peek_char() {
                '\\' => {
                    if raw {
                        // Raw string: only an escaped quote is special.
                        self.pop();
                        if self.peek_char() == quote {
                            self.pop();
                            res.push(quote);
                        } else {
                            res.push('\\');
                        }
                    } else {
                        match self.consume_escape_sequence(triple) {
                            Ok(Some(x)) => res.push(x),
                            // Line continuation: contributes nothing.
                            Ok(None) => {}
                            Err(c) => return Some(Result::Err(c)),
                        }
                    }
                }
                // Bare newlines are only allowed inside triple-quoted strings.
                '\n' | '\r' | '\u{2028}' | '\u{2029}' => {
                    if triple {
                        res.push(self.next_char());
                    } else {
                        let p = self.end_pos();
                        return Some(Err(LexerError::UnfinishedStringLiteral(p.0, p.1)));
                    }
                }
                // End of input before the closing quote.
                '\0' => {
                    let p = self.end_pos();
                    return Some(Err(LexerError::UnfinishedStringLiteral(p.0, p.1)));
                }
                x if x == quote => {
                    self.pop();
                    if triple {
                        // Need three consecutive quotes to terminate; fewer
                        // are kept as literal content.
                        let n = self.next_char();
                        if n == quote {
                            if self.next_char() == quote {
                                break;
                            } else {
                                res.push(quote);
                                res.push(quote);
                            }
                        } else {
                            res.push(quote);
                            res.push(n);
                        }
                    } else {
                        break;
                    }
                }
                x => {
                    self.pop();
                    res.push(x);
                }
            }
        }
        self.end(Token::StringLiteral(res))
    }
    /// Produces the next token from the input stream.
    ///
    /// At the start of a logical line outside any brackets, indentation is
    /// handled first (which may emit Indent/Dedent tokens); otherwise plain
    /// whitespace is skipped — and newlines too while inside brackets, since
    /// `self.parentheses` tracks open `(`/`[`/`{` nesting. Returns `None` at
    /// end of input.
    fn consume_token(&mut self) -> Option<<Self as Iterator>::Item> {
        if self.last_new_line && self.parentheses == 0 {
            if let Some(r) = self.consume_indentation() {
                return Some(r);
            }
        } else {
            let skip_newline = self.parentheses > 0;
            if let Some(x) = self.skip_spaces(skip_newline) {
                return Some(x);
            }
        }
        self.begin();
        match self.peek_char() {
            // End of input.
            '\0' => None,
            '\n' | '\r' | '\u{2028}' | '\u{2029}' => self.consume_nl(),
            '\'' | '"' => self.consume_string(false),
            // Either a raw string (r'...' / r"...") or an identifier that
            // happens to start with 'r'.
            'r' => {
                self.pop();
                let p = self.peek_char();
                if p == '\'' || p == '"' {
                    self.consume_string(true)
                } else {
                    self.consume_identifier_queue("r")
                }
            }
            '0'..='9' => self.consume_int(),
            '_' => self.consume_identifier(),
            c if c.is_alphabetic() => self.consume_identifier(),
            ',' => self.consume(Token::Comma),
            ';' => self.consume(Token::Semicolon),
            ':' => self.consume(Token::Colon),
            // Operators below peek one character ahead to distinguish the
            // compound form (e.g. `+=`) from the simple one (`+`).
            '+' => {
                self.pop();
                if self.peek_char() == '=' {
                    self.consume(Token::PlusEqual)
                } else {
                    self.end(Token::Plus)
                }
            }
            '-' => {
                self.pop();
                if self.peek_char() == '=' {
                    self.consume(Token::MinusEqual)
                } else {
                    self.end(Token::Minus)
                }
            }
            '*' => {
                self.pop();
                match self.peek_char() {
                    '=' => self.consume(Token::StarEqual),
                    '*' => self.consume(Token::Doublestar),
                    _ => self.end(Token::Star),
                }
            }
            // `/`, `/=`, `//` and `//=` all start here.
            '/' => {
                self.pop();
                if self.peek_char() == '=' {
                    self.consume(Token::SlashEqual)
                } else if self.peek_char() == '/' {
                    self.pop();
                    if self.peek_char() == '=' {
                        self.consume(Token::DoubleSlashEqual)
                    } else {
                        self.end(Token::DoubleSlash)
                    }
                } else {
                    self.end(Token::Slash)
                }
            }
            '%' => {
                self.pop();
                if self.peek_char() == '=' {
                    self.consume(Token::PercentEqual)
                } else {
                    self.end(Token::Percent)
                }
            }
            '=' => {
                self.pop();
                if self.peek_char() == '=' {
                    self.consume(Token::DoubleEqual)
                } else {
                    self.end(Token::Equal)
                }
            }
            // A lone `!` is not a token; only `!=` is valid.
            '!' => {
                self.pop();
                if self.peek_char() == '=' {
                    self.consume(Token::BangEqual)
                } else {
                    self.invalid()
                }
            }
            '<' => {
                self.pop();
                if self.peek_char() == '=' {
                    self.consume(Token::LowerEqual)
                } else {
                    self.end(Token::LowerThan)
                }
            }
            '>' => {
                self.pop();
                if self.peek_char() == '=' {
                    self.consume(Token::GreaterEqual)
                } else {
                    self.end(Token::GreaterThan)
                }
            }
            '|' => self.consume(Token::Pipe),
            '.' => self.consume(Token::Dot),
            // Bracket tokens also adjust the nesting depth used to decide
            // whether newlines are significant.
            '[' => {
                self.parentheses += 1;
                self.consume(Token::OpeningBracket)
            }
            ']' => {
                self.parentheses -= 1;
                self.consume(Token::ClosingBracket)
            }
            '(' => {
                self.parentheses += 1;
                self.consume(Token::OpeningParenthesis)
            }
            ')' => {
                self.parentheses -= 1;
                self.consume(Token::ClosingParenthesis)
            }
            '{' => {
                self.parentheses += 1;
                self.consume(Token::OpeningCurlyBracket)
            }
            '}' => {
                self.parentheses -= 1;
                self.consume(Token::ClosingCurlyBracket)
            }
            _ => self.invalid(),
        }
    }
}
#[cfg(test)]
mod tests {
use super::Token;
use crate::syntax::errors::SyntaxError;
use codemap;
use codemap_diagnostic;
use std::fs;
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
    /// Feeds the input chunks one at a time to a `BufferedLexer`, asserting
    /// that the lexer requests more input before each chunk, then collects
    /// the resulting tokens while checking that their spans are ordered and
    /// well-formed. Lexer errors are turned into diagnostics and reported via
    /// `assert_diagnostics!`.
    fn collect_result_buffered(s: Vec<&'static str>) -> Vec<Token> {
        let codemap = Arc::new(Mutex::new(codemap::CodeMap::new()));
        let mut diagnostics = Vec::new();
        let mut result = Vec::new();
        // Register the concatenated input so diagnostics carry real spans.
        let content = s.iter().fold("".to_string(), |a, it| a + it);
        let file_span = {
            codemap
                .lock()
                .unwrap()
                .add_file("<test>".to_owned(), content)
                .span
        };
        let mut lexer = super::BufferedLexer::new(s[0]);
        for v in s.iter().skip(1) {
            assert!(lexer.need_more(), "Should need more before '{}'", v,);
            lexer.input(&v)
        }
        assert!(!lexer.need_more());
        // `pos` tracks the end of the previous token: spans must be
        // non-overlapping and monotonically increasing.
        let mut pos = 0;
        for x in lexer.into_iter() {
            if x.is_err() {
                diagnostics.push(x.err().unwrap().to_diagnostic(file_span));
            } else {
                let (i, t, j) = x.unwrap();
                let span_incorrect = format!("Span of {:?} incorrect", t);
                assert!(pos <= i, "{}: {} > {}", span_incorrect, pos, i);
                result.push(t);
                assert!(i <= j, "{}: {} > {}", span_incorrect, i, j);
                pos = j;
            }
        }
        assert_diagnostics!(diagnostics, codemap);
        result
    }
    /// Lexes `s` with the plain (non-buffered) `Lexer` and returns the token
    /// stream, asserting that token spans are ordered and well-formed. Any
    /// lexer error becomes a diagnostic checked by `assert_diagnostics!`.
    fn collect_result(s: &'static str) -> Vec<Token> {
        let codemap = Arc::new(Mutex::new(codemap::CodeMap::new()));
        let mut diagnostics = Vec::new();
        let mut result = Vec::new();
        // Register the input so diagnostics carry real spans.
        let file_span = {
            codemap
                .lock()
                .unwrap()
                .add_file("<test>".to_owned(), s.to_owned())
                .span
        };
        // `pos` tracks the end of the previous token's span.
        let mut pos = 0;
        super::Lexer::new(s).for_each(|x| {
            if x.is_err() {
                diagnostics.push(x.err().unwrap().to_diagnostic(file_span));
            } else {
                let (i, t, j) = x.unwrap();
                let span_incorrect = format!("Span of {:?} incorrect", t);
                assert!(pos <= i, "{}: {} > {}", span_incorrect, pos, i);
                result.push(t);
                assert!(i <= j, "{}: {} > {}", span_incorrect, i, j);
                pos = j;
            }
        });
        assert_diagnostics!(diagnostics, codemap);
        result
    }
#[test]
fn test_int_lit() {
let get_result = |s: &'static str| -> Vec<i64> {
collect_result(s)
.iter()
.filter_map(|v| match v {
Token::IntegerLiteral(r) => Some(*r),
Token::Newline => None,
_ => panic!("{:?} is not a integer literal", v),
})
.collect()
};
assert_eq!(vec![0, 123], get_result("0 123"));
assert_eq!(vec![0x7f, 0x7f], get_result("0x7F 0x7f"));
assert_eq!(vec![0b1011, 0b1011], get_result("0B1011 0b1011"));
assert_eq!(vec![0o755, 0o755, 0o755], get_result("0o755 0O755 0755"));
}
#[test]
fn test_indentation() {
let r = collect_result(
"
+
-
/
*
=
%
.
+=
",
);
assert_eq!(
&[
Token::Newline,
Token::Plus,
Token::Newline,
Token::Indent,
Token::Minus,
Token::Newline,
Token::Indent,
Token::Slash,
Token::Newline,
Token::Star,
Token::Newline,
Token::Dedent,
Token::Equal,
Token::Newline,
Token::Indent,
Token::Percent,
Token::Newline,
Token::Indent,
Token::Dot,
Token::Newline,
Token::Dedent,
Token::Dedent,
Token::Dedent,
Token::PlusEqual,
Token::Newline,
],
&r[..]
);
}
#[test]
fn test_symbols() {
let r = collect_result(
", ; : += -= *= /= //= %= == != <= >= ** = < > - + * % / // . { } [ ] ( ) |",
);
assert_eq!(
&[
Token::Comma,
Token::Semicolon,
Token::Colon,
Token::PlusEqual,
Token::MinusEqual,
Token::StarEqual,
Token::SlashEqual,
Token::DoubleSlashEqual,
Token::PercentEqual,
Token::DoubleEqual,
Token::BangEqual,
Token::LowerEqual,
Token::GreaterEqual,
Token::Doublestar,
Token::Equal,
Token::LowerThan,
Token::GreaterThan,
Token::Minus,
Token::Plus,
Token::Star,
Token::Percent,
Token::Slash,
Token::DoubleSlash,
Token::Dot,
Token::OpeningCurlyBracket,
Token::ClosingCurlyBracket,
Token::OpeningBracket,
Token::ClosingBracket,
Token::OpeningParenthesis,
Token::ClosingParenthesis,
Token::Pipe,
Token::Newline,
],
&r[..]
);
}
#[test]
fn test_keywords() {
let r = collect_result(
"and else load break for not not in continue if or def in pass elif return",
);
assert_eq!(
&[
Token::And,
Token::Else,
Token::Load,
Token::Break,
Token::For,
Token::Not,
Token::NotIn,
Token::Continue,
Token::If,
Token::Or,
Token::Def,
Token::In,
Token::Pass,
Token::Elif,
Token::Return,
Token::Newline,
],
&r[..]
);
}
// Regression test for https://github.com/google/starlark-rust/issues/44.
#[test]
fn test_number_collated_with_keywords_or_identifier() {
let r = collect_result(
"0in 1and 2else 3load 4break 5for 6not 7not in 8continue 10identifier11",
);
assert_eq!(
&[
Token::IntegerLiteral(0),
Token::In,
Token::IntegerLiteral(1),
Token::And,
Token::IntegerLiteral(2),
Token::Else,
Token::IntegerLiteral(3),
Token::Load,
Token::IntegerLiteral(4),
Token::Break,
Token::IntegerLiteral(5),
Token::For,
Token::IntegerLiteral(6),
Token::Not,
Token::IntegerLiteral(7),
Token::NotIn,
Token::IntegerLiteral(8),
Token::Continue,
Token::IntegerLiteral(10),
Token::Identifier("identifier11".to_owned()),
Token::Newline,
],
&r[..]
);
}
#[test]
fn test_reserved() {
let r = collect_result(
"as import assert is class nonlocal del raise except try finally \
while from with global yield",
);
assert_eq!(
&[
Token::Reserved("as".to_owned()),
Token::Reserved("import".to_owned()),
Token::Reserved("assert".to_owned()),
Token::Reserved("is".to_owned()),
Token::Reserved("class".to_owned()),
Token::Reserved("nonlocal".to_owned()),
Token::Reserved("del".to_owned()),
Token::Reserved("raise".to_owned()),
Token::Reserved("except".to_owned()),
Token::Reserved("try".to_owned()),
Token::Reserved("finally".to_owned()),
Token::Reserved("while".to_owned()),
Token::Reserved("from".to_owned()),
Token::Reserved("with".to_owned()),
Token::Reserved("global".to_owned()),
Token::Reserved("yield".to_owned()),
Token::Newline,
],
&r[..]
);
}
#[test]
fn test_comment() {
// Comment should be ignored
assert!(collect_result("# a comment\n").is_empty());
assert!(collect_result(" # a comment\n").is_empty());
let r = collect_result("a # a comment\n");
assert_eq!(&[Token::Identifier("a".to_owned()), Token::Newline], &r[..]);
// But it should not eat everything
let r = collect_result("[\n# a comment\n]");
assert_eq!(
&[Token::OpeningBracket, Token::ClosingBracket, Token::Newline],
&r[..]
);
}
#[test]
fn test_identifier() {
let r = collect_result("a identifier CAPS _CAPS _0123");
assert_eq!(
&[
Token::Identifier("a".to_owned()),
Token::Identifier("identifier".to_owned()),
Token::Identifier("CAPS".to_owned()),
Token::Identifier("_CAPS".to_owned()),
Token::Identifier("_0123".to_owned()),
Token::Newline,
],
&r[..]
);
}
#[test]
fn test_string_lit() {
let r = collect_result("'123' \"123\" '' \"\" '\\'' \"\\\"\" '\"' \"'\" '\\n' '\\w'");
assert_eq!(
&[
Token::StringLiteral("123".to_owned()),
Token::StringLiteral("123".to_owned()),
Token::StringLiteral("".to_owned()),
Token::StringLiteral("".to_owned()),
Token::StringLiteral("'".to_owned()),
Token::StringLiteral("\"".to_owned()),
Token::StringLiteral("\"".to_owned()),
Token::StringLiteral("'".to_owned()),
Token::StringLiteral("\n".to_owned()),
Token::StringLiteral("\\w".to_owned()),
Token::Newline,
],
&r[..]
);
// unfinished string literal
assert_eq!(
super::Lexer::new("'\n'").next().unwrap(),
Err(super::LexerError::UnfinishedStringLiteral(0, 1))
);
assert_eq!(
super::Lexer::new("\"\n\"").next().unwrap(),
Err(super::LexerError::UnfinishedStringLiteral(0, 1))
);
// Multiline string
let r =
collect_result("'''''' '''\\n''' '''\n''' \"\"\"\"\"\" \"\"\"\\n\"\"\" \"\"\"\n\"\"\"");
assert_eq!(
&[
Token::StringLiteral("".to_owned()),
Token::StringLiteral("\n".to_owned()),
Token::StringLiteral("\n".to_owned()),
Token::StringLiteral("".to_owned()),
Token::StringLiteral("\n".to_owned()),
Token::StringLiteral("\n".to_owned()),
Token::Newline,
],
&r[..]
);
// Raw string
let r = collect_result("r'' r\"\" r'\\'' r\"\\\"\" r'\"' r\"'\" r'\\n'");
assert_eq!(
&[
Token::StringLiteral("".to_owned()),
Token::StringLiteral("".to_owned()),
Token::StringLiteral("'".to_owned()),
Token::StringLiteral("\"".to_owned()),
Token::StringLiteral("\"".to_owned()),
Token::StringLiteral("'".to_owned()),
Token::StringLiteral("\\n".to_owned()),
Token::Newline,
],
&r[..]
);
let r = collect_result(r#""""foo"bar""""#);
assert_eq!(
&[Token::StringLiteral("foo\"bar".to_owned()), Token::Newline],
&r[..]
);
let r = collect_result(r#""""foo'bar""""#);
assert_eq!(
&[Token::StringLiteral("foo\'bar".to_owned()), Token::Newline],
&r[..]
);
let r = collect_result(r#"'''foo'bar'''"#);
assert_eq!(
&[Token::StringLiteral("foo\'bar".to_owned()), Token::Newline],
&r[..]
);
let r = collect_result(r#"'''foo\"bar'''"#);
assert_eq!(
&[Token::StringLiteral("foo\"bar".to_owned()), Token::Newline],
&r[..]
);
}
#[test]
fn test_simple_example() {
let r = collect_result(
"\"\"\"A docstring.\"\"\"
def _impl(ctx):
# Print Hello, World!
print('Hello, World!')
",
);
assert_eq!(
&[
Token::StringLiteral("A docstring.".to_owned()),
Token::Newline,
Token::Newline,
Token::Def,
Token::Identifier("_impl".to_owned()),
Token::OpeningParenthesis,
Token::Identifier("ctx".to_owned()),
Token::ClosingParenthesis,
Token::Colon,
Token::Newline,
Token::Indent,
Token::Identifier("print".to_owned()),
Token::OpeningParenthesis,
Token::StringLiteral("Hello, World!".to_owned()),
Token::ClosingParenthesis,
Token::Newline,
Token::Dedent,
],
&r[..]
);
}
#[test]
fn test_escape_newline() {
let r = collect_result("a \\\nb");
assert_eq!(
&[
Token::Identifier("a".to_owned()),
Token::Identifier("b".to_owned()),
Token::Newline,
],
&r[..]
);
}
#[test]
fn test_span() {
let expected = vec![
(0, Token::Newline, 1),
(1, Token::Def, 4),
(5, Token::Identifier("test".to_owned()), 9),
(9, Token::OpeningParenthesis, 10),
(10, Token::Identifier("a".to_owned()), 11),
(11, Token::ClosingParenthesis, 12),
(12, Token::Colon, 13),
(13, Token::Newline, 14),
(14, Token::Indent, 16),
(16, Token::Identifier("fail".to_owned()), 20),
(20, Token::OpeningParenthesis, 21),
(21, Token::Identifier("a".to_owned()), 22),
(22, Token::ClosingParenthesis, 23),
(23, Token::Newline, 24),
(24, Token::Newline, 25),
(25, Token::Dedent, 25),
(25, Token::Identifier("test".to_owned()), 29),
(29, Token::OpeningParenthesis, 30),
(30, Token::StringLiteral("abc".to_owned()), 35),
(35, Token::ClosingParenthesis, 36),
(36, Token::Newline, 37),
];
let actual: Vec<(u64, Token, u64)> = super::Lexer::new(
r#"
def test(a):
fail(a)
test("abc")
"#,
)
.map(Result::unwrap)
.collect();
assert_eq!(expected, actual);
}
#[test]
fn test_buffered() {
let r = collect_result_buffered(vec!["\"\"\"A docstring.\"\"\"\n"]);
assert_eq!(
&[
Token::StringLiteral("A docstring.".to_owned()),
Token::Newline,
],
&r[..]
);
let r = collect_result_buffered(vec!["\n"]);
assert_eq!(&[Token::Newline], &r[..]);
let r = collect_result_buffered(vec![
"def _impl(ctx):\n",
" # Print Hello, World!\n",
" print('Hello, World!')\n",
"\n",
]);
assert_eq!(
&[
Token::Def,
Token::Identifier("_impl".to_owned()),
Token::OpeningParenthesis,
Token::Identifier("ctx".to_owned()),
Token::ClosingParenthesis,
Token::Colon,
Token::Newline,
Token::Indent,
Token::Identifier("print".to_owned()),
Token::OpeningParenthesis,
Token::StringLiteral("Hello, World!".to_owned()),
Token::ClosingParenthesis,
Token::Newline,
Token::Newline,
Token::Dedent,
],
&r[..]
);
}
#[test]
fn smoke_test() {
let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
let codemap = Arc::new(Mutex::new(codemap::CodeMap::new()));
let mut diagnostics = Vec::new();
d.push("src/syntax/testcases");
let paths = fs::read_dir(d.as_path()).unwrap();
for p in paths {
let entry = p.unwrap();
let filename = entry.file_name().into_string().unwrap();
if filename.ends_with(".bzl") {
let mut content = String::new();
let mut file = File::open(entry.path()).unwrap();
file.read_to_string(&mut content).unwrap();
let file_span = {
codemap
.lock()
.unwrap()
.add_file(filename, content.clone())
.span
};
super::Lexer::new(&content).for_each(|x| {
if x.is_err() {
diagnostics.push(x.err().unwrap().to_diagnostic(file_span));
}
});
}
}
assert_diagnostics!(diagnostics, codemap);
}
}
| {
"pile_set_name": "Github"
} |
The best baselines are obtainable via the following configuration:
## MNIST => MNIST_M
Accuracy:
MNIST-Train: 99.9
MNIST_M-Train: 63.9
MNIST_M-Valid: 63.9
MNIST_M-Test: 63.6
Learning Rate = 0.0001
Weight Decay = 0.0
Number of Steps: 105,000
## MNIST => USPS
Accuracy:
MNIST-Train: 100.0
USPS-Train: 82.8
USPS-Valid: 82.8
USPS-Test: 78.9
Learning Rate = 0.0001
Weight Decay = 0.0
Number of Steps: 22,000
## MNIST_M => MNIST
Accuracy:
MNIST_M-Train: 100
MNIST-Train: 98.5
MNIST-Valid: 98.5
MNIST-Test: 98.1
Learning Rate = 0.001
Weight Decay = 0.0
Number of Steps: 604,400
## MNIST_M => MNIST_M
Accuracy:
MNIST_M-Train: 100.0
MNIST_M-Valid: 96.6
MNIST_M-Test: 96.4
Learning Rate = 0.001
Weight Decay = 0.0
Number of Steps: 139,400
## USPS => USPS
Accuracy:
USPS-Train: 100.0
USPS-Valid: 100.0
USPS-Test: 96.5
Learning Rate = 0.001
Weight Decay = 0.0
Number of Steps: 67,000
| {
"pile_set_name": "Github"
} |
#include "PreloadInjector.h"
#include "lib/InspectorServer.h"
#include <QtCore/QDebug>
#include <QtCore/QFileInfo>
#include <QtCore/QProcess>
#include <QtCore/QProcessEnvironment>
#include <unistd.h>
bool PreloadInjector::startAndInject(const QString& program, const QStringList& args,
const QString& libraryPath, const QString& entryPoint, int* pid)
{
*pid = 0;
QProcess* process = new QProcess;
QObject::connect(process, SIGNAL(finished(int,QProcess::ExitStatus)), process, SLOT(deleteLater()));
process->setProcessChannelMode(QProcess::ForwardedChannels);
QProcessEnvironment env = QProcessEnvironment::systemEnvironment();
QString libPath = QFileInfo(libraryPath).absoluteFilePath();
if (!QFile::exists(libPath))
{
return false;
}
#ifdef Q_OS_MAC
QString var = "DYLD_INSERT_LIBRARIES";
QStringList currentLibs = env.value(var).split(':', QString::SkipEmptyParts);
currentLibs.prepend(libraryPath);
env.insert(var, currentLibs.join(":"));
#elif defined(Q_OS_LINUX)
QString var = "LD_PRELOAD";
QStringList currentLibs = env.value(var).split(' ', QString::SkipEmptyParts);
currentLibs.prepend(libraryPath);
env.insert(var, currentLibs.join(" "));
#else
#error Platform not supported
#endif
process->setProcessEnvironment(env);
process->start(program, args);
if (!process->waitForStarted())
{
qWarning() << "Failed to start" << program;
return false;
}
*pid = process->pid();
QString socketPath = InspectorServer::socketName(*pid);
while (!QFile::exists(socketPath))
{
#ifdef Q_OS_UNIX
usleep(100 * 1000);
#endif
}
return true;
}
// Attaching to an already-running process is not possible with the
// preload-based injector: the dynamic linker only honours
// DYLD_INSERT_LIBRARIES / LD_PRELOAD at process startup. This overload
// therefore always reports failure.
bool PreloadInjector::inject(int pid, const QString& libraryPath, const QString& entryPoint)
{
    Q_UNUSED(pid);
    Q_UNUSED(libraryPath);
    Q_UNUSED(entryPoint);
    return false;
}
| {
"pile_set_name": "Github"
} |
EESchema-DOCLIB Version 2.0
#
#End Doc Library
| {
"pile_set_name": "Github"
} |
#! /bin/sh
# Usage: <script> <model-id> <temperature> <k> <sample-n> <run-prefix>
# Runs caption evaluation on the full COCO test split with greedy top-k
# sampling, loading the best checkpoint and infos from log_<model-id>/.
# Enable job control for the spawned evaluation process.
set -m
id=$1
# $2 = softmax temperature, $3 = k for top-k sampling, $4 = samples per image,
# $5 = prefix for the run id written into the eval results.
python eval.py --image_root /share/data/vision-greg/coco/ --batch_size 100 --dump_images 0 --num_images -1 --split test --model log_$id/model-best.pth --language_eval 0 --beam_size 5 --sample_n $4 --temperature $2 --sample_method greedy --sample_n_method top$3 --infos_path log_$id/infos_$id-best.pkl --id $5$id"_tk_"$2_$3_$4
| {
"pile_set_name": "Github"
} |
using NUnit.Allure.Attributes;
using NUnit.Allure.Core;
using NUnit.Framework;
namespace TestAutomationReportingAllure
{
[TestFixture]
[AllureNUnit]
[AllureSuite("CalculatorTests")]
[AllureDisplayIgnored]
class CalculatorMultipleTests
{
[Test(Description = "Performing Division on two float variables. ")]
[AllureTag("CI")]
[AllureOwner("Anton")]
[AllureSubSuite("Division")]
public void ThrowException_When_DivisionOn0()
{
var calculator = new Calculator();
float actualResult = calculator.Division(2, 0);
Assert.AreEqual(4, actualResult);
}
[Test(Description = "Performing Division on two float variables. ")]
[AllureTag("CI")]
[AllureOwner("Anton")]
[AllureSubSuite("Division")]
public void Return2_When_Division2On1()
{
var calculator = new Calculator();
float actualResult = calculator.Division(2, 1);
Assert.AreEqual(4, actualResult);
}
}
}
| {
"pile_set_name": "Github"
} |
{
"images" : [
{
"idiom" : "universal",
"filename" : "cm2_btm_icn_music.png",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "[email protected]",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "[email protected]",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
},
"properties" : {
"template-rendering-intent" : "original"
}
} | {
"pile_set_name": "Github"
} |
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1
import (
api "k8s.io/client-go/pkg/api"
v1 "k8s.io/client-go/pkg/api/v1"
watch "k8s.io/client-go/pkg/watch"
rest "k8s.io/client-go/rest"
)
// ConfigMapsGetter has a method to return a ConfigMapInterface.
// A group's client should implement this interface.
type ConfigMapsGetter interface {
ConfigMaps(namespace string) ConfigMapInterface
}
// ConfigMapInterface has methods to work with ConfigMap resources.
type ConfigMapInterface interface {
Create(*v1.ConfigMap) (*v1.ConfigMap, error)
Update(*v1.ConfigMap) (*v1.ConfigMap, error)
Delete(name string, options *v1.DeleteOptions) error
DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error
Get(name string) (*v1.ConfigMap, error)
List(opts v1.ListOptions) (*v1.ConfigMapList, error)
Watch(opts v1.ListOptions) (watch.Interface, error)
Patch(name string, pt api.PatchType, data []byte, subresources ...string) (result *v1.ConfigMap, err error)
ConfigMapExpansion
}
// configMaps implements ConfigMapInterface
// NOTE(review): this file is client-gen generated Kubernetes client code —
// regenerate it rather than hand-editing.
type configMaps struct {
	client rest.Interface // REST client shared with the parent CoreV1Client
	ns     string         // namespace every request is scoped to
}

// newConfigMaps returns a ConfigMaps
func newConfigMaps(c *CoreV1Client, namespace string) *configMaps {
	return &configMaps{
		client: c.RESTClient(),
		ns:     namespace,
	}
}
// Create takes the representation of a configMap and creates it. Returns the server's representation of the configMap, and an error, if there is any.
func (c *configMaps) Create(configMap *v1.ConfigMap) (result *v1.ConfigMap, err error) {
result = &v1.ConfigMap{}
err = c.client.Post().
Namespace(c.ns).
Resource("configmaps").
Body(configMap).
Do().
Into(result)
return
}
// Update takes the representation of a configMap and updates it. Returns the server's representation of the configMap, and an error, if there is any.
func (c *configMaps) Update(configMap *v1.ConfigMap) (result *v1.ConfigMap, err error) {
result = &v1.ConfigMap{}
err = c.client.Put().
Namespace(c.ns).
Resource("configmaps").
Name(configMap.Name).
Body(configMap).
Do().
Into(result)
return
}
// Delete takes name of the configMap and deletes it. Returns an error if one occurs.
func (c *configMaps) Delete(name string, options *v1.DeleteOptions) error {
return c.client.Delete().
Namespace(c.ns).
Resource("configmaps").
Name(name).
Body(options).
Do().
Error()
}
// DeleteCollection deletes a collection of objects.
func (c *configMaps) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {
return c.client.Delete().
Namespace(c.ns).
Resource("configmaps").
VersionedParams(&listOptions, api.ParameterCodec).
Body(options).
Do().
Error()
}
// Get takes name of the configMap, and returns the corresponding configMap object, and an error if there is any.
func (c *configMaps) Get(name string) (result *v1.ConfigMap, err error) {
result = &v1.ConfigMap{}
err = c.client.Get().
Namespace(c.ns).
Resource("configmaps").
Name(name).
Do().
Into(result)
return
}
// List takes label and field selectors, and returns the list of ConfigMaps that match those selectors.
func (c *configMaps) List(opts v1.ListOptions) (result *v1.ConfigMapList, err error) {
result = &v1.ConfigMapList{}
err = c.client.Get().
Namespace(c.ns).
Resource("configmaps").
VersionedParams(&opts, api.ParameterCodec).
Do().
Into(result)
return
}
// Watch returns a watch.Interface that watches the requested configMaps.
func (c *configMaps) Watch(opts v1.ListOptions) (watch.Interface, error) {
return c.client.Get().
Prefix("watch").
Namespace(c.ns).
Resource("configmaps").
VersionedParams(&opts, api.ParameterCodec).
Watch()
}
// Patch applies the patch and returns the patched configMap.
func (c *configMaps) Patch(name string, pt api.PatchType, data []byte, subresources ...string) (result *v1.ConfigMap, err error) {
result = &v1.ConfigMap{}
err = c.client.Patch(pt).
Namespace(c.ns).
Resource("configmaps").
SubResource(subresources...).
Name(name).
Body(data).
Do().
Into(result)
return
}
| {
"pile_set_name": "Github"
} |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Threading.Tasks;
using Microsoft.Common.Core.Services;
using Microsoft.Common.Core.Test.Utility;
using Microsoft.Languages.Editor.Test.Utility;
using Microsoft.Markdown.Editor.ContentTypes;
using Microsoft.UnitTests.Core.XUnit;
using Xunit;
namespace Microsoft.R.Editor.Application.Test.Markdown {
    [ExcludeFromCodeCoverage]
    [Collection(CollectionNames.NonParallel)]
    public class RmdClassificationTest {
        // When flipped to true, test runs overwrite the checked-in *.colors
        // baseline files in the source tree instead of comparing against
        // them. Must remain false in committed code.
        private static bool _regenerateBaselineFiles = false;
        private readonly IServiceContainer _services;
        private readonly EditorHostMethodFixture _editorHost;
        private readonly EditorAppTestFilesFixture _files;
        public RmdClassificationTest(IServiceContainer services, EditorHostMethodFixture editorHost, EditorAppTestFilesFixture files) {
            _services = services;
            _editorHost = editorHost;
            _files = files;
        }
        [CompositeTest]
        [Category.Interactive]
        [InlineData("01.rmd")]
        public async Task RColors(string fileName) {
            // Open the R Markdown file in an interactive editor host, let the
            // classifier settle (idle loop), then compare the produced
            // classification spans against the stored baseline.
            var content = _files.LoadDestinationFile(fileName);
            using (var script = await _editorHost.StartScript(_services, content, fileName, MdContentTypeDefinition.ContentType, null)) {
                script.DoIdle(500);
                var spans = script.GetClassificationSpans();
                var actual = ClassificationWriter.WriteClassifications(spans);
                VerifyClassifications(fileName, actual);
            }
        }
        // Compares 'actual' against the .colors baseline next to the test
        // file, or — in regeneration mode — rewrites the source-tree baseline
        // so the updated file can be committed.
        private void VerifyClassifications(string testFileName, string actual) {
            var testFilePath = _files.GetDestinationPath(testFileName);
            var baselineFile = testFilePath + ".colors";
            if (_regenerateBaselineFiles) {
                baselineFile = Path.Combine(_files.SourcePath, testFileName) + ".colors";
                TestFiles.UpdateBaseline(baselineFile, actual);
            } else {
                TestFiles.CompareToBaseLine(baselineFile, actual);
            }
        }
    }
}
| {
"pile_set_name": "Github"
} |
(*** hide ***)
#r "netstandard"
#I "../../bin/net45"
#load "Deedle.fsx"
#I "../../packages/MathNet.Numerics/lib/net40"
#load "../../packages/FSharp.Charting/FSharp.Charting.fsx"
open System
open FSharp.Data
open Deedle
open FSharp.Charting
let root = __SOURCE_DIRECTORY__ + "/data/"
(**
Working with series and time series data in F#
==============================================
In this section, we look at F# data frame library features that are useful when working
with time series data or, more generally, any ordered series. Although we mainly look at
operations on the `Series` type, many of the operations can be applied to data frame `Frame`
containing multiple series. Furthermore, data frame provides an elegant way for aligning and
joining series.
You can also get this page as an [F# script file](https://github.com/fslaborg/Deedle/blob/master/docs/content/series.fsx)
from GitHub and run the samples interactively.
Generating input data
---------------------
For the purpose of this tutorial, we'll need some input data. For simplicity, we use
following function which generates random prices using the geometric Brownian motion.
The code is adapted from the [financial tutorial on Try F#](http://www.tryfsharp.org/Learn/financial-computing#simulating-and-analyzing).
*)
// Use Math.NET for probability distributions
#r "MathNet.Numerics.dll"
open MathNet.Numerics.Distributions
/// Generates price using geometric Brownian motion
/// - 'seed' specifies the seed for random number generator
/// - 'drift' and 'volatility' set properties of the price movement
/// - 'initial' and 'start' specify the initial price and date
/// - 'span' specifies time span between individual observations
/// - 'count' is the number of required values to generate
let randomPrice seed drift volatility initial start span count =
(*[omit:(Implementation omitted)]*)
let dist = Normal(0.0, 1.0, RandomSource=Random(seed))
let dt = (span:TimeSpan).TotalDays / 250.0
let driftExp = (drift - 0.5 * pown volatility 2) * dt
let randExp = volatility * (sqrt dt)
((start:DateTimeOffset), initial) |> Seq.unfold (fun (dt, price) ->
let price = price * exp (driftExp + randExp * dist.Sample())
Some((dt, price), (dt + span, price))) |> Seq.take count(*[/omit]*)
// 12:00 AM today, in current time zone
let today = DateTimeOffset(DateTime.Today)
let stock1 = randomPrice 1 0.1 3.0 20.0 today
let stock2 = randomPrice 2 0.2 1.5 22.0 today
(**
The implementation of the function is not particularly important for the purpose of this
page, but you can find it in the [script file with full source](https://github.com/fslaborg/Deedle/blob/master/docs/content/series.fsx).
Once we have the function, we define a date `today` (representing today's midnight) and
two helper functions that set basic properties for the `randomPrice` function.
To get random prices, we now only need to call `stock1` or `stock2` with `TimeSpan` and
the required number of prices:
*)
(*** define-output: stocks ***)
Chart.Combine
[ stock1 (TimeSpan(0, 1, 0)) 1000 |> Chart.FastLine
stock2 (TimeSpan(0, 1, 0)) 1000 |> Chart.FastLine ]
(**
The above snippet generates 1k of prices in one minute intervals and plots them using the
[F# Charting library](https://github.com/fsharp/FSharp.Charting). When you run the code
and tweak the chart look, you should see something like this:
*)
(*** include-it: stocks ***)
(**
<a name="alignment"></a>
Data alignment and zipping
--------------------------
One of the key features of the data frame library for working with time series data is
_automatic alignment_ based on the keys. When we have multiple time series with date
as the key (here, we use `DateTimeOffset`, but any type of date will do), we can combine
multiple series and align them automatically to specified date keys.
To demonstrate this feature, we generate random prices in 60 minute, 30 minute and
65 minute intervals:
*)
let s1 = stock1 (TimeSpan(1, 0, 0)) 6 |> series
// [fsi:val s1 : Series<DateTimeOffset,float> =]
// [fsi: series [ 12:00:00 AM => 20.76; 1:00:00 AM => 21.11; 2:00:00 AM => 22.51 ]
// [fsi: 3:00:00 AM => 23.88; 4:00:00 AM => 23.23; 5:00:00 AM => 22.68 ] ]
let s2 =stock2 (TimeSpan(0, 30, 0)) 12 |> series
// [fsi:val s2 : Series<DateTimeOffset,float> =]
// [fsi: series [ 12:00:00 AM => 21.61; 12:30:00 AM => 21.64; 1:00:00 AM => 21.86 ]
// [fsi: 1:30:00 AM => 22.22; 2:00:00 AM => 22.35; 2:30:00 AM => 22.76 ]
// [fsi: 3:00:00 AM => 22.68; 3:30:00 AM => 22.64; 4:00:00 AM => 22.90 ]
// [fsi: 4:30:00 AM => 23.40; 5:00:00 AM => 23.33; 5:30:00 AM => 23.43] ]
let s3 = stock1 (TimeSpan(1, 5, 0)) 6 |> series
// [fsi:val s3 : Series<DateTimeOffset,float> =]
// [fsi: series [ 12:00:00 AM => 21.37; 1:05:00 AM => 22.73; 2:10:00 AM => 22.08 ]
// [fsi: 3:15:00 AM => 23.92; 4:20:00 AM => 22.72; 5:25:00 AM => 22.79 ]
(**
### Zipping time series
Let's first look at operations that are available on the `Series<K, V>` type. A series
exposes `Zip` operation that can combine multiple series into a single series of pairs.
This is not as convenient as working with data frames (which we'll see later), but it
is useful if you only need to work with one or two columns without missing values:
*)
// Match values from right series to keys of the left one
// (this creates series with no missing values)
s1.Zip(s2, JoinKind.Left)
// [fsi:val it : Series<DateTimeOffset,float opt * float opt>]
// [fsi: 12:00:00 AM -> (21.32, 21.61) ]
// [fsi: 1:00:00 AM -> (22.62, 21.86) ]
// [fsi: 2:00:00 AM -> (22.00, 22.35) ]
// [fsi: (...)]
// Match values from the left series to keys of the right one
// (right has higher resolution, so half of left values are missing)
s1.Zip(s2, JoinKind.Right)
// [fsi:val it : Series<DateTimeOffset,float opt * float opt>]
// [fsi: 12:00:00 AM -> (21.32, 21.61) ]
// [fsi: 12:30:00 AM -> (<missing>, 21.64) ]
// [fsi: 1:00:00 AM -> (22.62, 21.86) ]
// [fsi: (...)]
// Use left series key and find the nearest previous
// (smaller) value from the right series
s1.Zip(s2, JoinKind.Left, Lookup.ExactOrSmaller)
// [fsi:val it : Series<DateTimeOffset,float opt * float opt>]
// [fsi: 12:00:00 AM -04:00 -> (21.32, 21.61) ]
// [fsi: 1:00:00 AM -04:00 -> (22.62, 21.86) ]
// [fsi: 2:00:00 AM -04:00 -> (22.00, 22.35) ]
// [fsi: (...)]
(**
Using `Zip` on series is somewhat complicated. The result is a series of tuples, but each
component of the tuple may be missing. To represent this, the library uses the `T opt` type
(a type alias for `OptionalValue<T>`). This is not necessary when we use data frame to
work with multiple columns.
### Joining data frames
When we store data in data frames, we do not need to use tuples to represent combined values.
Instead, we can simply use data frame with multiple columns. To see how this works, let's first
create three data frames containing the three series from the previous section:
*)
// Contains value for each hour
let f1 = Frame.ofColumns ["S1" => s1]
// Contains value every 30 minutes
let f2 = Frame.ofColumns ["S2" => s2]
// Contains values with 65 minute offsets
let f3 = Frame.ofColumns ["S3" => s3]
(**
Similarly to `Series<K, V>`, the type `Frame<R, C>` has an instance method `Join` that can be
used for joining (for unordered) or aligning (for ordered) data. The same operation is also
exposed as `Frame.join` and `Frame.joinAlign` functions, but it is usually more convenient to use
the member syntax in this case:
*)
// Union keys from both frames and align corresponding values
f1.Join(f2, JoinKind.Outer)
// [fsi:val it : Frame<DateTimeOffset,string> =]
// [fsi: S1 S2 ]
// [fsi: 12:00:00 AM -> 21.32 21.61 ]
// [fsi: 12:30:00 AM -> <missing> 21.64 ]
// [fsi: 1:00:00 AM -> 22.62 21.86 ]
// [fsi: (...)]
// Take only keys where both frames contain all values
// (We get only a single row, because 'f3' is off by 5 minutes)
f2.Join(f3, JoinKind.Inner)
// [fsi:val it : Frame<DateTimeOffset,string> =]
// [fsi: S2 S3 ]
// [fsi: 12:00:00 AM -> 21.61 21.37 ]
// Take keys from the left frame and find corresponding values
// from the right frame, or value for a nearest smaller date
// ($21.37 is repeated for all values between 12:00 and 1:05)
f2.Join(f3, JoinKind.Left, Lookup.ExactOrSmaller)
// [fsi:val it : Frame<DateTimeOffset,string> =]
// [fsi: S2 S3 ]
// [fsi: 12:00:00 AM -> 21.61 21.37 ]
// [fsi: 12:30:00 AM -> 21.64 21.37 ]
// [fsi: 1:00:00 AM -> 21.86 21.37 ]
// [fsi: 1:30:00 AM -> 22.22 22.73 ]
// [fsi: (...)]
// If we perform left join as previously, but specify exact
// matching, then most of the values are missing
f2.Join(f3, JoinKind.Left, Lookup.Exact)
// [fsi:val it : Frame<DateTimeOffset,string> =]
// [fsi: S2 S3 ]
// [fsi: 12:00:00 AM -> 21.61 21.37]
// [fsi: 12:30:00 AM -> 21.64 <missing> ]
// [fsi: 1:00:00 AM -> 21.86 <missing> ]
// [fsi: (...)]
// Equivalent to line 2, using function syntax
Frame.join JoinKind.Outer f1 f2
// Equivalent to line 20, using function syntax
Frame.joinAlign JoinKind.Left Lookup.ExactOrSmaller f1 f2
(**
The automatic alignment is extremely useful when you have multiple data series with different
offsets between individual observations. You can choose your set of keys (dates) and then easily
align other data to match the keys. Another alternative to using `Join` explicitly is to create
a new frame with just keys that you are interested in (using `Frame.ofRowKeys`) and then use
the `AddSeries` member (or the `df?New <- s` syntax) to add series. This will automatically left
join the new series to match the current row keys.
When aligning data, you may or may not want to create data frame with missing values. If your
observations do not happen at exact time, then using `Lookup.ExactOrSmaller` or `Lookup.ExactOrGreater`
is a great way to avoid mismatch.
If you have observations that happen e.g. at two times faster rate (one series is hourly and
another is half-hourly), then you can create data frame with missing values using `Lookup.Exact`
(the default value) and then handle missing values explicitly (as [discussed here](frame.html#missing)).
<a name="windowing"></a>
Windowing, chunking and pairwise
--------------------------------
Windowing and chunking are two operations on ordered series that allow aggregating
the values of series into groups. Both of these operations work on consecutive elements,
which contrast with [grouping](tutorial.html#grouping) that does not use order.
### Sliding windows
Sliding window creates windows of certain size (or certain condition). The window
"slides" over the input series and provides a view on a part of the series. The
key thing is that a single element will typically appear in multiple windows.
*)
// Create input series with 6 observations
let lf = stock1 (TimeSpan(0, 1, 0)) 6 |> series
// Create series of series representing individual windows
lf |> Series.window 4
// Aggregate each window using 'Stats.mean'
lf |> Series.windowInto 4 Stats.mean
// Get first value in each window
lf |> Series.windowInto 4 Series.firstValue
(**
The functions used above create window of size 4 that moves from the left to right.
Given input `[1,2,3,4,5,6]`, this produces the following three windows:
`[1,2,3,4]`, `[2,3,4,5]` and `[3,4,5,6]`. By default, the `Series.window` function
automatically chooses the key of the last element of the window as the key for
the whole window (we'll see how to change this soon):
*)
// Calculate means for sliding windows
let lfm1 = lf |> Series.windowInto 4 Stats.mean
// Construct dataframe to show aligned results
Frame.ofColumns [ "Orig" => lf; "Means" => lfm1 ]
// [fsi:val it : Frame<DateTimeOffset,string> =]
// [fsi: Means Orig ]
// [fsi: 12:00:00 AM -> <missing> 20.16]
// [fsi: 12:01:00 AM -> <missing> 20.32]
// [fsi: 12:02:00 AM -> <missing> 20.25]
// [fsi: 12:03:00 AM -> 20.30 20.45]
// [fsi: 12:04:00 AM -> 20.34 20.32]
// [fsi: 12:05:00 AM -> 20.34 20.33]
(**
What if we want to avoid creating `<missing>` values? One approach is to
specify that we want to generate windows of smaller sizes at the beginning
or at the end of the series. This way, we get _incomplete_ windows that look like
`[1]`, `[1,2]`, `[1,2,3]` followed by the three _complete_ windows shown above:
*)
let lfm2 =
// Create sliding windows with incomplete windows at the beginning
lf |> Series.windowSizeInto (4, Boundary.AtBeginning) (fun ds ->
Stats.mean ds.Data)
Frame.ofColumns [ "Orig" => lf; "Means" => lfm2 ]
// [fsi:val it : Frame<DateTimeOffset,string> =]
// [fsi: Means Orig ]
// [fsi: 12:00:00 AM -> 20.16 20.16]
// [fsi: 12:01:00 AM -> 20.24 20.32]
// [fsi: 12:02:00 AM -> 20.24 20.25]
// [fsi: 12:03:00 AM -> 20.30 20.45]
// [fsi: 12:04:00 AM -> 20.34 20.32]
// [fsi: 12:05:00 AM -> 20.34 20.33]
(**
As you can see, the values in the first column are equal, because the first
`Mean` value is just the average of singleton series.
When you specify `Boundary.AtBeginning` (this example) or `Boundary.Skip`
(default value used in the previous example), the function uses the last key
of the window as the key of the aggregated value. When you specify
`Boundary.AtEnding`, the first key is used, so the values can be nicely
aligned with original values. When you want to specify custom key selector,
you can use a more general function `Series.aggregate`.
In the previous sample, the code that performs aggregation is no longer
just a simple function like `Stats.mean`, but a lambda that takes `ds`,
which is of type `DataSegment<T>`. This type informs us whether the window
is complete or not. For example:
*)
// Simple series with characters
let st = Series.ofValues [ 'a' .. 'e' ]
st |> Series.windowSizeInto (3, Boundary.AtEnding) (function
| DataSegment.Complete(ser) ->
// Return complete windows as uppercase strings
String(ser |> Series.values |> Array.ofSeq).ToUpper()
| DataSegment.Incomplete(ser) ->
// Return incomplete windows as padded lowercase strings
String(ser |> Series.values |> Array.ofSeq).PadRight(3, '-') )
// [fsi:val it : Series<int,string> =]
// [fsi: 0 -> ABC ]
// [fsi: 1 -> BCD ]
// [fsi: 2 -> CDE ]
// [fsi: 3 -> de- ]
// [fsi: 4 -> e-- ]
(**
### Window size conditions
The previous examples generated windows of fixed size. However, there are two other
options for specifying when a window ends.
- The first option is to specify the maximal
_distance_ between the first and the last key
- The second option is to specify a function that is called with the first
and the last key; a window ends when the function returns false.
The two functions are `Series.windowDist` and `Series.windowWhile` (together
with versions suffixed with `Into` that call a provided function to aggregate
each window):
*)
// Generate prices for each hour over 30 days
let hourly = stock1 (TimeSpan(1, 0, 0)) (30*24) |> series
// Generate windows of size 1 day (if the source was
// irregular, windows would have varying size)
hourly |> Series.windowDist (TimeSpan(24, 0, 0))
// Generate windows such that date in each window is the same
// (windows start every hour and end at the end of the day)
hourly |> Series.windowWhile (fun d1 d2 -> d1.Date = d2.Date)
(**
### Chunking series
Chunking is similar to windowing, but it creates non-overlapping chunks,
rather than (overlapping) sliding windows. The size of chunk can be specified
in the same three ways as for sliding windows (fixed size, distance on keys
and condition):
*)
// Generate per-second observations over 10 minutes
let hf = stock1 (TimeSpan(0, 0, 1)) 600 |> series
// Create 10 second chunks with (possible) incomplete
// chunk of smaller size at the end.
hf |> Series.chunkSize (10, Boundary.AtEnding)
// Create 10 second chunks using time span and get
// the first observation for each chunk (downsample)
hf |> Series.chunkDistInto (TimeSpan(0, 0, 10)) Series.firstValue
// Create chunks where hh:mm component is the same
// (containing observations for all seconds in the minute)
hf |> Series.chunkWhile (fun k1 k2 ->
(k1.Hour, k1.Minute) = (k2.Hour, k2.Minute))
(**
The above examples use various chunking functions in a very similar way, mainly
because the randomly generated input is very uniform. However, they all behave
differently for inputs with non-uniform keys.
Using `chunkSize` means that the chunks have the same size, but may correspond
to time series of different time spans. Using `chunkDist` guarantees that there
is a maximal time span over each chunk, but it does not guarantee when a chunk
starts. That is something which can be achieved using `chunkWhile`.
Finally, all of the aggregations discussed so far are just special cases of
`Series.aggregate` which takes a discriminated union that specifies the kind
of aggregation ([see API reference](reference/fsharp-dataframe-aggregation-1.html)).
However, in practice it is more convenient to use the helpers presented here -
in some rare cases, you might need to use `Series.aggregate` as it provides
a few other options.
### Pairwise
A special form of windowing is building a series of pairs containing a current
and previous value from the input series (in other words, the key for each pair
is the key of the later element). For example:
*)
// Create a series of pairs from earlier 'hf' input
hf |> Series.pairwise
// Calculate differences between the current and previous values
hf |> Series.pairwiseWith (fun k (v1, v2) -> v2 - v1)
(**
The `pairwise` operation always returns a series that has no value for
the first key in the input series. If you want more complex behavior, you
will usually need to replace `pairwise` with `window`. For example, you might
want to get a series that contains the first value as the first element,
followed by differences. This has the nice property that summing rows,
starting from the first one gives you the current price:
*)
// Sliding window with incomplete segment at the beginning
hf |> Series.windowSizeInto (2, Boundary.AtBeginning) (function
// Return the first value for the first segment
| DataSegment.Incomplete s -> s.GetAt(0)
// Calculate difference for all later segments
| DataSegment.Complete s -> s.GetAt(1) - s.GetAt(0))
(**
<a name="sampling"></a>
Sampling and resampling time series
-----------------------------------
Given a time series with high-frequency prices, sampling or resampling makes
it possible to get time series with representative values at lower frequency.
The library uses the following terminology:
- **Lookup** means that we find values at specified key; if a key is not
available, we can look for value associated with the nearest smaller or
the nearest greater key.
- **Resampling** means that we aggregate values into chunks based
on a specified collection of keys (e.g. explicitly provided times), or
based on some relation between keys (e.g. date times having the same date).
- **Uniform resampling** is similar to resampling, but we specify keys by
providing functions that generate a uniform sequence of keys (e.g. days),
the operation also fills value for days that have no corresponding
observations in the input sequence.
Finally, the library also provides a few helper functions that are specifically
designed for series with keys of types `DateTime` and `DateTimeOffset`.
### Lookup
Given a series `hf`, you can get a value at a specified key using `hf.Get(key)`
or using `hf |> Series.get key`. However, it is also possible to find values
for larger number of keys at once. The instance member for doing this
is `hf.GetItems(..)`. Moreover, both `Get` and `GetItems` take an optional
parameter that specifies the behavior when the exact key is not found.
Using the function syntax, you can use `Series.getAll` for exact key
lookup and `Series.lookupAll` when you want more flexible lookup:
*)
// Generate a bit less than 24 hours of data with 13.7sec offsets
let mf = stock1 (TimeSpan.FromSeconds(13.7)) 6300 |> series
// Generate keys for all minutes in 24 hours
let keys = [ for m in 0.0 .. 24.0*60.0-1.0 -> today.AddMinutes(m) ]
// Find value for a given key, or nearest greater key with value
mf |> Series.lookupAll keys Lookup.ExactOrGreater
// [fsi:val it : Series<DateTimeOffset,float> =]
// [fsi: 12:00:00 AM -> 20.07 ]
// [fsi: 12:01:00 AM -> 19.98 ]
// [fsi: ... -> ... ]
// [fsi: 11:58:00 PM -> 19.03 ]
// [fsi: 11:59:00 PM -> <missing> ]
// Find value for nearest smaller key
// (This returns value for 11:59:00 PM as well)
mf |> Series.lookupAll keys Lookup.ExactOrSmaller
// Find values for exact key
// (This only works for the first key)
mf |> Series.lookupAll keys Lookup.Exact
(**
Lookup operations only return one value for each key, so they are useful for
quick sampling of large (or high-frequency) data. When we want to calculate
a new value based on multiple values, we need to use resampling.
### Resampling
Series supports two kinds of resamplings. The first kind is similar to lookup
in that we have to explicitly specify keys. The difference is that resampling
does not find just the nearest key, but all smaller or greater keys. For example:
*)
// For each key, collect values for greater keys until the
// next one (chunk for 11:59:00 PM is empty)
mf |> Series.resample keys Direction.Forward
// For each key, collect values for smaller keys until the
// previous one (the first chunk will be singleton series)
mf |> Series.resample keys Direction.Backward
// Aggregate each chunk of preceding values using mean
mf |> Series.resampleInto keys Direction.Backward
(fun k s -> Stats.mean s)
// Resampling is also available via the member syntax
mf.Resample(keys, Direction.Forward)
(**
The second kind of resampling is based on a projection from existing keys in
the series. The operation then collects chunks such that the projection returns
equal keys. This is very similar to `Series.groupBy`, but resampling assumes
that the projection preserves the ordering of the keys, and so it only aggregates
consequent keys.
The typical scenario is when you have time series with date time information
(here `DateTimeOffset`) and want to get information for each day (we use
`DateTime` with empty time to represent dates):
*)
// Generate 2.5 months of data in 1.7 hour offsets
let ds = stock1 (TimeSpan.FromHours(1.7)) 1000 |> series
// Sample by day (of type 'DateTime')
ds |> Series.resampleEquiv (fun d -> d.Date)
// Sample by day (of type 'DateTime')
ds.ResampleEquivalence(fun d -> d.Date)
(**
The same operation can be easily implemented using `Series.chunkWhile`, but as
it is often used in the context of sampling, it is included in the library as a
primitive. Moreover, we'll see that it is closely related to uniform resampling.
Note that the resulting series has different type of keys than the source. The
source has keys `DateTimeOffset` (representing date with time) while the resulting
keys are of the type returned by the projection (here, `DateTime` representing just
dates).
### Uniform resampling
In the previous section, we looked at `resampleEquiv`, which is useful if you want
to sample time series by keys with "lower resolution" - for example, sample date time
observations by date. However, the function discussed in the previous section only
generates values for which there are keys in the input sequence - if there is no
observation for an entire day, then the day will not be included in the result.
If you want to create sampling that assigns value to each key in the range specified
by the input sequence, then you can use _uniform resampling_.
The idea is that uniform resampling applies the key projection to the smallest and
greatest key of the input (e.g. gets date of the first and last observation) and then
it generates all keys in the projected space (e.g. all dates). Then it picks the
best value for each of the generated key.
*)
// Create input data with non-uniformly distributed keys
// (1 value for 10/3, three for 10/4 and two for 10/6)
let days =
[ "10/3/2013 12:00:00"; "10/4/2013 15:00:00"
"10/4/2013 18:00:00"; "10/4/2013 19:00:00"
"10/6/2013 15:00:00"; "10/6/2013 21:00:00" ]
let nu =
stock1 (TimeSpan(24,0,0)) 10 |> series
|> Series.indexWith days |> Series.mapKeys DateTimeOffset.Parse
// Generate uniform resampling based on dates. Fill
// missing chunks with nearest smaller observations.
let sampled =
nu |> Series.resampleUniform Lookup.ExactOrSmaller
(fun dt -> dt.Date) (fun dt -> dt.AddDays(1.0))
// Same thing using the C#-friendly member syntax
// (Lookup.ExactOrSmaller is the default value)
nu.ResampleUniform((fun dt -> dt.Date), (fun dt -> dt.AddDays(1.0)))
// Turn into frame with multiple columns for each day
// (to format the result in a readable way)
sampled
|> Series.mapValues Series.indexOrdinally
|> Frame.ofRows
// [fsi:val it : Frame<DateTime,int> =]
// [fsi: 0 1 2 ]
// [fsi:10/3/2013 -> 21.45 <missing> <missing> ]
// [fsi:10/4/2013 -> 21.63 19.83 17.51]
// [fsi:10/5/2013 -> 17.51 <missing> <missing> ]
// [fsi:10/6/2013 -> 18.80 20.93 <missing> ]
(**
To perform the uniform resampling, we need to specify how to project (resampled) keys
from original keys (we return the `Date`), how to calculate the next key (add 1 day)
and how to fill missing values.
After performing the resampling, we turn the data into a data frame, so that we can
nicely see the results. The individual chunks have the actual observation times as keys,
so we replace those with just integers (using `Series.indexOrdinally`). The result contains
a simple ordered row of observations for each day.
The important thing is that there is an observation for each day - even for 10/5/2013
which does not have any corresponding observations in the input. We call the resampling
function with `Lookup.ExactOrSmaller`, so the value 17.51 is picked from the last observation
of the previous day (`Lookup.ExactOrGreater` would pick 18.80 and `Lookup.Exact` would give
us an empty series for that date).
### Sampling time series
Perhaps the most common sampling operation that you might want to do is to sample time series
by a specified `TimeSpan`. Although this can be easily done by using some of the functions above,
the library provides helper functions exactly for this purpose:
*)
// Generate 1k observations with 1.7 hour offsets
let pr = stock1 (TimeSpan.FromHours(1.7)) 1000 |> series
// Sample at 2 hour intervals; 'Backward' specifies that
// we collect all previous values into a chunk.
pr |> Series.sampleTime (TimeSpan(2, 0, 0)) Direction.Backward
// Same thing using member syntax - 'Backward' is the default
pr.Sample(TimeSpan(2, 0, 0))
// Get the most recent value, sampled at 2 hour intervals
pr |> Series.sampleTimeInto
(TimeSpan(2, 0, 0)) Direction.Backward Series.lastValue
(**
<a name="stats"></a>
Calculations and statistics
---------------------------
In the final section of this tutorial, we look at writing some calculations over time series. Many of the
functions demonstrated here can be also used on unordered data frames and series.
### Shifting and differences
First of all, let's look at functions that we need when we need to compare subsequent values in
the series. We already demonstrated how to do this using `Series.pairwise`. In many cases,
the same thing can be done using an operation that operates over the entire series.
The two useful functions here are:
 - `Series.diff` calculates the difference between the current and n-_th_ previous element
- `Series.shift` shifts the values of a series by a specified offset
The following snippet illustrates how both functions work:
*)
// Generate sample data with 1.7 hour offsets
let sample = stock1 (TimeSpan.FromHours(1.7)) 6 |> series
// Calculates: new[i] = s[i] - s[i-1]
let diff1 = sample |> Series.diff 1
// Diff in the opposite direction
let diffM1 = sample |> Series.diff -1
// Shift series values by 1
let shift1 = sample |> Series.shift 1
// Align all results in a frame to see the results
let df =
[ "Shift +1" => shift1
"Diff +1" => diff1
"Diff" => sample - shift1
"Orig" => sample ] |> Frame.ofColumns
// [fsi:val it : Frame<DateTimeOffset,string> =]
// [fsi: Diff Diff +1 Orig Shift +1 ]
// [fsi: 12:00:00 AM -> <missing> <missing> 21.73 <missing> ]
// [fsi: 1:42:00 AM -> 1.73 1.73 23.47 21.73 ]
// [fsi: 3:24:00 AM -> -0.83 -0.83 22.63 23.47 ]
// [fsi: 5:06:00 AM -> 2.37 2.37 25.01 22.63 ]
// [fsi: 6:48:00 AM -> -1.57 -1.57 23.43 25.01 ]
// [fsi: 8:30:00 AM -> 0.09 0.09 23.52 23.43 ]
(**
In the above snippet, we first calculate the difference using the `Series.diff` function.
Then we also show how to do that using `Series.shift` and binary operator applied
to two series (`sample - shift`). The following section provides more details.
So far, we also used the functional notation (e.g. `sample |> Series.diff 1`), but
all operations can be called using the member syntax - very often, this gives you
a shorter syntax. This is also shown in the next few snippets.
### Operators and functions
Time series also supports a large number of standard F# functions such as `log` and `abs`.
You can also use standard numerical operators to apply some operation to all elements
of the series.
Because series are indexed, we can also apply binary operators to two series. This
automatically aligns the series and then applies the operation on corresponding elements.
*)
// Subtract previous value from the current value
sample - sample.Shift(1)
// Calculate logarithm of such differences
log (sample - sample.Shift(1))
// Calculate square of differences
sample.Diff(1) ** 2.0
// Calculate average of value and two immediate neighbors
(sample.Shift(-1) + sample + sample.Shift(2)) / 3.0
// Get absolute value of differences
abs (sample - sample.Shift(1))
// Get absolute value of distance from the mean
abs (sample - (Stats.mean sample))
(**
The time series library provides a large number of functions that can be applied in this
way. These include trigonometric functions (`sin`, `cos`, ...), rounding functions
(`round`, `floor`, `ceil`), exponentials and logarithms (`exp`, `log`, `log10`) and more.
In general, whenever there is a built-in numerical F# function that can be used on
standard types, the time series library should support it too.
However, what can you do when you write a custom function to do some calculation and
want to apply it to all series elements? Let's have a look:
*)
// Truncate value to interval [-1.0, +1.0]
let adjust v = min 1.0 (max -1.0 v)
// Apply adjustment to all values of the series
adjust $ sample.Diff(1)
// The $ operator is a shorthand for
sample.Diff(1) |> Series.mapValues adjust
(**
In general, the best way to apply custom functions to all values in a series is to
align the series (using either `Series.join` or `Series.joinAlign`) into a single series
containing tuples and then apply `Series.mapValues`. The library also provides the `$` operator
that simplifies the last step - `f $ s` applies the function `f` to all values of the series `s`.
### Data frame operations
Finally, many of the time series operations demonstrated above can be applied to entire
data frames as well. This is particularly useful if you have data frame that contains multiple
aligned time series of similar structure (for example, if you have multiple stock prices or
open-high-low-close values for a given stock).
The following snippet is a quick overview of what you can do:
*)
/// Multiply all numeric columns by a given constant
df * 0.65
// Apply function to all columns in all series
let conv x = min x 20.0
df |> Frame.mapRowValues (fun os -> conv $ os.As<float>())
|> Frame.ofRows
// Sum each column and divide results by a constant
Stats.sum df / 6.0
// Divide sum by mean of each frame column
Stats.sum df / Stats.mean df
| {
"pile_set_name": "Github"
} |
--- config/mh-darwin.orig 2016-06-15 13:58:17.000000000 -0500
+++ config/mh-darwin 2017-02-11 09:37:32.000000000 -0600
@@ -30,11 +30,7 @@
SHLIB.cc= $(CXX) -dynamiclib -dynamic $(CXXFLAGS) $(LDFLAGS) $(LD_SOOPTIONS)
## Compiler switches to embed a library name and version information
-ifeq ($(ENABLE_RPATH),YES)
LD_SONAME = -Wl,-compatibility_version -Wl,$(SO_TARGET_VERSION_MAJOR) -Wl,-current_version -Wl,$(SO_TARGET_VERSION) -install_name $(libdir)/$(notdir $(MIDDLE_SO_TARGET))
-else
-LD_SONAME = -Wl,-compatibility_version -Wl,$(SO_TARGET_VERSION_MAJOR) -Wl,-current_version -Wl,$(SO_TARGET_VERSION) -install_name $(notdir $(MIDDLE_SO_TARGET))
-endif
## Compiler switch to embed a runtime search path
LD_RPATH=
| {
"pile_set_name": "Github"
} |
package com.sequenceiq.cloudbreak.cloud.model.component;
import com.sequenceiq.cloudbreak.cloud.model.ClouderaManagerProduct;
import com.sequenceiq.cloudbreak.cloud.model.catalog.Image;
import com.sequenceiq.cloudbreak.cloud.model.catalog.Images;
import com.sequenceiq.cloudbreak.cloud.model.catalog.StackDetails;
import com.sequenceiq.cloudbreak.cloud.model.catalog.StackRepoDetails;
import com.sequenceiq.cloudbreak.common.mappable.CloudPlatform;
import com.sequenceiq.cloudbreak.core.CloudbreakImageCatalogException;
import com.sequenceiq.cloudbreak.service.image.ImageCatalogService;
import com.sequenceiq.cloudbreak.service.image.PreWarmParcelParser;
import com.sequenceiq.cloudbreak.service.image.StatedImages;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static java.util.Arrays.asList;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class ImageBasedDefaultCDHEntriesTest {
private static final String IMAGE_VERSION = "7.2.0";
private static final String REPO_VERSION = "repo version";
private static final String OS = "redhat7";
private static final String OS_URL = "http://cloudera-build-us-west-1.vpc.cloudera.com/s3/build/2365123/cm7/7.1.0/redhat7/yum/";
private static final List<String> PRE_WARM_CSD = asList("csd");
private static final String PLATFORM = CloudPlatform.AWS.name();
private static final String IMAGE_CATALOG_NAME = "imageCatalogName";
@Mock
private Images images;
@Mock
private Images emptyImages;
@Mock
private ImageCatalogService imageCatalogService;
@Mock
private PreWarmParcelParser preWarmParcelParser;
@InjectMocks
private ImageBasedDefaultCDHEntries victim;
@BeforeEach
public void initTests() {
MockitoAnnotations.initMocks(this);
}
@Test
public void shouldReturnImageBasedDefaultCDHInfoMapByDefaultCdhImages() {
List<Image> imageList = getImages();
when(images.getCdhImages()).thenReturn(imageList);
Map<String, ImageBasedDefaultCDHInfo> actual = victim.getEntries(images);
Image image = imageList.stream().filter(Image::isDefaultImage).findFirst().get();
verify(image, actual.get(IMAGE_VERSION));
}
@Test
public void shouldReturnImageBasedDefaultCDHInfoMapByPlatformAndImageCatalog() throws CloudbreakImageCatalogException {
List<Image> imageList = getImages();
when(images.getCdhImages()).thenReturn(imageList);
StatedImages statedImages = StatedImages.statedImages(images, null, null);
when(imageCatalogService.getImages(0L, IMAGE_CATALOG_NAME, PLATFORM)).thenReturn(statedImages);
Map<String, ImageBasedDefaultCDHInfo> actual = victim.getEntries(0L, PLATFORM, IMAGE_CATALOG_NAME);
Image image = imageList.stream().filter(Image::isDefaultImage).findFirst().get();
verify(image, actual.get(IMAGE_VERSION));
}
@Test
public void shouldFallBackToAwsInCaseOfMissingCdhImages() throws CloudbreakImageCatalogException {
List<Image> imageList = getImages();
when(images.getCdhImages()).thenReturn(imageList);
when(emptyImages.getCdhImages()).thenReturn(Collections.emptyList());
StatedImages statedImages = StatedImages.statedImages(images, null, null);
StatedImages emptyStatedImages = StatedImages.statedImages(emptyImages, null, null);
when(imageCatalogService.getImages(0L, IMAGE_CATALOG_NAME, CloudPlatform.YARN.name())).thenReturn(emptyStatedImages);
when(imageCatalogService.getImages(0L, IMAGE_CATALOG_NAME, PLATFORM)).thenReturn(statedImages);
Map<String, ImageBasedDefaultCDHInfo> actual = victim.getEntries(0L, CloudPlatform.YARN.name(), IMAGE_CATALOG_NAME);
Image image = imageList.stream().filter(Image::isDefaultImage).findFirst().get();
verify(image, actual.get(IMAGE_VERSION));
}
private List<Image> getImages() {
StackRepoDetails stackRepoDetails = new StackRepoDetails(getRepo(), null);
StackDetails stackDetails = new StackDetails(null, stackRepoDetails, null);
List<List<String>> parcels = getParcels();
Image defaultImage = mock(Image.class);
when(defaultImage.isDefaultImage()).thenReturn(true);
when(defaultImage.getVersion()).thenReturn(IMAGE_VERSION);
when(defaultImage.getStackDetails()).thenReturn(stackDetails);
when(defaultImage.getPreWarmParcels()).thenReturn(parcels);
when(defaultImage.getPreWarmCsd()).thenReturn(PRE_WARM_CSD);
Image nonDefaultImage = mock(Image.class);
when(nonDefaultImage.isDefaultImage()).thenReturn(false);
//Default image added double times to test
//the algorithm is not failing on multiple default images for the same version
return asList(defaultImage, defaultImage, nonDefaultImage);
}
private Map<String, String> getRepo() {
Map<String, String> repo = new HashMap<>();
repo.put(com.sequenceiq.cloudbreak.cloud.model.component.StackRepoDetails.REPOSITORY_VERSION, REPO_VERSION);
repo.put(OS, OS_URL);
return repo;
}
private List<List<String>> getParcels() {
List<String> parcel = asList("parcel");
when(preWarmParcelParser.parseProductFromParcel(parcel)).thenReturn(Optional.of(mock(ClouderaManagerProduct.class)));
return asList(parcel);
}
private void verify(Image image, ImageBasedDefaultCDHInfo imageBasedDefaultCDHInfo) {
assertNotNull(imageBasedDefaultCDHInfo);
assertEquals(image, imageBasedDefaultCDHInfo.getImage());
assertEquals(REPO_VERSION, imageBasedDefaultCDHInfo.getDefaultCDHInfo().getVersion());
assertEquals(REPO_VERSION, imageBasedDefaultCDHInfo.getDefaultCDHInfo().getVersion());
assertEquals(OS_URL, imageBasedDefaultCDHInfo.getDefaultCDHInfo().getRepo().getStack().get(OS));
assertEquals(1, imageBasedDefaultCDHInfo.getDefaultCDHInfo().getParcels().size());
assertEquals(PRE_WARM_CSD, imageBasedDefaultCDHInfo.getDefaultCDHInfo().getCsd());
}
} | {
"pile_set_name": "Github"
} |
/*
Copyright 2014 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package fuzz
import (
"fmt"
"math/rand"
"reflect"
"time"
)
// fuzzFuncMap is a map from a type to a fuzzFunc that handles that type.
// Values are reflect.Value wrappers around the registered functions.
type fuzzFuncMap map[reflect.Type]reflect.Value

// Fuzzer knows how to fill any object with random fields.
type Fuzzer struct {
	fuzzFuncs        fuzzFuncMap // custom fuzzing functions registered via Funcs
	defaultFuzzFuncs fuzzFuncMap // built-in fuzzing functions; New pre-registers one for *time.Time
	r                *rand.Rand  // source of randomness for generated values
	nilChance        float64     // chance of producing nil values (see NilChance; presumably for nillable kinds — confirm in Fuzz impl)
	minElements      int         // lower bound on generated collection sizes (see NumElements)
	maxElements      int         // upper bound on generated collection sizes (see NumElements)
}
// New returns a new Fuzzer. Customize your Fuzzer further by calling Funcs,
// RandSource, NilChance, or NumElements in any order.
func New() *Fuzzer {
	// Seed from the wall clock by default; callers wanting reproducibility
	// should follow up with RandSource.
	seeded := rand.New(rand.NewSource(time.Now().UnixNano()))
	return &Fuzzer{
		defaultFuzzFuncs: fuzzFuncMap{
			reflect.TypeOf(&time.Time{}): reflect.ValueOf(fuzzTime),
		},
		fuzzFuncs:   fuzzFuncMap{},
		r:           seeded,
		nilChance:   .2,
		minElements: 1,
		maxElements: 10,
	}
}
// Funcs adds each entry in fuzzFuncs as a custom fuzzing function.
//
// Each entry in fuzzFuncs must be a function taking two parameters.
// The first parameter must be a pointer or map. It is the variable that
// function will fill with random data. The second parameter must be a
// fuzz.Continue, which will provide a source of randomness and a way
// to automatically continue fuzzing smaller pieces of the first parameter.
//
// These functions are called sensibly, e.g., if you wanted custom string
// fuzzing, the function `func(s *string, c fuzz.Continue)` would get
// called and passed the address of strings. Maps and pointers will always
// be made/new'd for you, ignoring the NilChange option. For slices, it
// doesn't make much sense to pre-create them--Fuzzer doesn't know how
// long you want your slice--so take a pointer to a slice, and make it
// yourself. (If you don't want your map/pointer type pre-made, take a
// pointer to it, and make it yourself.) See the examples for a range of
// custom functions.
//
// Invalid entries panic; each required shape is func(ptrOrMap, fuzz.Continue).
func (f *Fuzzer) Funcs(fuzzFuncs ...interface{}) *Fuzzer {
	for i := range fuzzFuncs {
		v := reflect.ValueOf(fuzzFuncs[i])
		if v.Kind() != reflect.Func {
			panic("Need only funcs!")
		}
		t := v.Type()
		// Exactly two inputs, no outputs.
		if t.NumIn() != 2 || t.NumOut() != 0 {
			panic("Need 2 in and 0 out params!")
		}
		argT := t.In(0)
		// First argument must be writable through: pointer or map.
		switch argT.Kind() {
		case reflect.Ptr, reflect.Map:
		default:
			panic("fuzzFunc must take pointer or map type")
		}
		if t.In(1) != reflect.TypeOf(Continue{}) {
			panic("fuzzFunc's second parameter must be type fuzz.Continue")
		}
		// Register (or overwrite) the handler for this argument type.
		f.fuzzFuncs[argT] = v
	}
	return f
}
// RandSource causes f to get values from the given source of randomness.
// Use if you want deterministic fuzzing. Returns f to allow chaining.
func (f *Fuzzer) RandSource(s rand.Source) *Fuzzer {
	f.r = rand.New(s)
	return f
}
// NilChance sets the probability of creating a nil pointer, map, or slice to
// 'p'. 'p' should be between 0 (no nils) and 1 (all nils), inclusive.
// Panics on out-of-range input. Returns f to allow chaining.
func (f *Fuzzer) NilChance(p float64) *Fuzzer {
	if p < 0 || p > 1 {
		panic("p should be between 0 and 1, inclusive.")
	}
	f.nilChance = p
	return f
}
// NumElements sets the minimum and maximum number of elements that will be
// added to a non-nil map or slice. Panics when the bounds are inverted or
// negative. Returns f to allow chaining.
func (f *Fuzzer) NumElements(atLeast, atMost int) *Fuzzer {
	switch {
	case atLeast > atMost:
		panic("atLeast must be <= atMost")
	case atLeast < 0:
		panic("atLeast must be >= 0")
	}
	f.minElements = atLeast
	f.maxElements = atMost
	return f
}
// genElementCount returns a random element count in the inclusive range
// [minElements, maxElements].
func (f *Fuzzer) genElementCount() int {
	if f.minElements == f.maxElements {
		return f.minElements
	}
	// +1 makes the upper bound reachable: rand.Intn(n) yields [0, n), so
	// without it maxElements could never be produced, contradicting the
	// inclusive bounds documented on NumElements.
	return f.minElements + f.r.Intn(f.maxElements-f.minElements+1)
}
// genShouldFill reports whether a nilable value (pointer/map/slice) should be
// populated rather than left nil, according to nilChance.
func (f *Fuzzer) genShouldFill() bool {
	return f.r.Float64() > f.nilChance
}
// Fuzz recursively fills all of obj's fields with something random. First
// this tries to find a custom fuzz function (see Funcs). If there is no
// custom function, this tests whether the object implements fuzz.Interface
// and, if so, calls Fuzz on it to fuzz itself. If that fails, this will see
// if there is a default fuzz function provided by this package. If all of
// that fails, this will generate random values for all primitive fields and
// then recurse for all non-primitives.
//
// Not safe for cyclic or tree-like structs!
//
// obj must be a pointer. Only exported (public) fields can be set (thanks, golang :/ )
// Intended for tests, so will panic on bad input or unimplemented fields.
func (f *Fuzzer) Fuzz(obj interface{}) {
	rv := reflect.ValueOf(obj)
	if rv.Kind() != reflect.Ptr {
		panic("needed ptr!")
	}
	f.doFuzz(rv.Elem(), 0)
}
// FuzzNoCustom is just like Fuzz, except that any custom fuzz function for
// obj's type will not be called and obj will not be tested for fuzz.Interface
// conformance. This applies only to obj and not other instances of obj's
// type.
// Not safe for cyclic or tree-like structs!
// obj must be a pointer. Only exported (public) fields can be set (thanks, golang :/ )
// Intended for tests, so will panic on bad input or unimplemented fields.
func (f *Fuzzer) FuzzNoCustom(obj interface{}) {
	rv := reflect.ValueOf(obj)
	if rv.Kind() != reflect.Ptr {
		panic("needed ptr!")
	}
	f.doFuzz(rv.Elem(), flagNoCustomFuzz)
}
const (
	// Do not try to find a custom fuzz function. Does not apply recursively:
	// children of the value remain eligible for custom fuzzing.
	flagNoCustomFuzz uint64 = 1 << iota
)
// doFuzz fills v with random data. flags may include flagNoCustomFuzz to
// skip custom fuzz functions for v itself (children are still eligible).
// Panics on kinds it cannot handle (chan, func, interface, complex, ...).
func (f *Fuzzer) doFuzz(v reflect.Value, flags uint64) {
	// Unsettable values (e.g. unexported struct fields) are silently skipped.
	if !v.CanSet() {
		return
	}

	if flags&flagNoCustomFuzz == 0 {
		// Check for both pointer and non-pointer custom functions.
		if v.CanAddr() && f.tryCustom(v.Addr()) {
			return
		}
		if f.tryCustom(v) {
			return
		}
	}

	// Primitive kinds (bool, ints, floats, string, ...) are handled by the
	// fill-function table.
	if fn, ok := fillFuncMap[v.Kind()]; ok {
		fn(v, f.r)
		return
	}
	switch v.Kind() {
	case reflect.Map:
		if f.genShouldFill() {
			v.Set(reflect.MakeMap(v.Type()))
			n := f.genElementCount()
			for i := 0; i < n; i++ {
				// Fuzz a fresh key and value, then insert.
				key := reflect.New(v.Type().Key()).Elem()
				f.doFuzz(key, 0)
				val := reflect.New(v.Type().Elem()).Elem()
				f.doFuzz(val, 0)
				v.SetMapIndex(key, val)
			}
			return
		}
		// Chose nil: reset to the zero (nil) map.
		v.Set(reflect.Zero(v.Type()))
	case reflect.Ptr:
		if f.genShouldFill() {
			v.Set(reflect.New(v.Type().Elem()))
			f.doFuzz(v.Elem(), 0)
			return
		}
		// Chose nil: reset to the zero (nil) pointer.
		v.Set(reflect.Zero(v.Type()))
	case reflect.Slice:
		if f.genShouldFill() {
			n := f.genElementCount()
			v.Set(reflect.MakeSlice(v.Type(), n, n))
			for i := 0; i < n; i++ {
				f.doFuzz(v.Index(i), 0)
			}
			return
		}
		// Chose nil: reset to the zero (nil) slice.
		v.Set(reflect.Zero(v.Type()))
	case reflect.Array:
		if f.genShouldFill() {
			// Arrays have fixed length; fuzz every element in place.
			n := v.Len()
			for i := 0; i < n; i++ {
				f.doFuzz(v.Index(i), 0)
			}
			return
		}
		v.Set(reflect.Zero(v.Type()))
	case reflect.Struct:
		for i := 0; i < v.NumField(); i++ {
			f.doFuzz(v.Field(i), 0)
		}
	case reflect.Chan:
		fallthrough
	case reflect.Func:
		fallthrough
	case reflect.Interface:
		fallthrough
	default:
		panic(fmt.Sprintf("Can't handle %#v", v.Interface()))
	}
}
// tryCustom searches for custom handlers, and returns true iff it finds a match
// and successfully randomizes v.
//
// Lookup order: user-registered fuzz functions, then the value's own
// fuzz.Interface implementation, then the package defaults (e.g. time.Time).
func (f *Fuzzer) tryCustom(v reflect.Value) bool {
	// First: see if we have a fuzz function for it.
	doCustom, ok := f.fuzzFuncs[v.Type()]
	if !ok {
		// Second: see if it can fuzz itself.
		if v.CanInterface() {
			intf := v.Interface()
			if fuzzable, ok := intf.(Interface); ok {
				fuzzable.Fuzz(Continue{f: f, Rand: f.r})
				return true
			}
		}
		// Finally: see if there is a default fuzz function.
		doCustom, ok = f.defaultFuzzFuncs[v.Type()]
		if !ok {
			return false
		}
	}

	switch v.Kind() {
	case reflect.Ptr:
		// Allocate a target for nil pointers so the handler has something to
		// write through; bail out if the pointer itself cannot be set.
		if v.IsNil() {
			if !v.CanSet() {
				return false
			}
			v.Set(reflect.New(v.Type().Elem()))
		}
	case reflect.Map:
		// Same for nil maps: pre-make them before handing off.
		if v.IsNil() {
			if !v.CanSet() {
				return false
			}
			v.Set(reflect.MakeMap(v.Type()))
		}
	default:
		// Custom handlers only accept pointer or map arguments (see Funcs).
		return false
	}
	doCustom.Call([]reflect.Value{v, reflect.ValueOf(Continue{
		f:    f,
		Rand: f.r,
	})})
	return true
}
// Interface represents an object that knows how to fuzz itself. Any time we
// find a type that implements this interface we will delegate the act of
// fuzzing itself.
type Interface interface {
	// Fuzz fills the receiver with random data, using c for randomness and
	// for recursing into members.
	Fuzz(c Continue)
}

// Continue can be passed to custom fuzzing functions to allow them to use
// the correct source of randomness and to continue fuzzing their members.
type Continue struct {
	f *Fuzzer // owning Fuzzer, so recursion keeps its settings

	// For convenience, Continue implements rand.Rand via embedding.
	// Use this for generating any randomness if you want your fuzzing
	// to be repeatable for a given seed.
	*rand.Rand
}
// Fuzz continues fuzzing obj with the parent Fuzzer's settings.
// obj must be a pointer; panics otherwise.
func (c Continue) Fuzz(obj interface{}) {
	rv := reflect.ValueOf(obj)
	if rv.Kind() != reflect.Ptr {
		panic("needed ptr!")
	}
	c.f.doFuzz(rv.Elem(), 0)
}
// FuzzNoCustom continues fuzzing obj, except that any custom fuzz function for
// obj's type will not be called and obj will not be tested for fuzz.Interface
// conformance. This applies only to obj and not other instances of obj's
// type. obj must be a pointer; panics otherwise.
func (c Continue) FuzzNoCustom(obj interface{}) {
	rv := reflect.ValueOf(obj)
	if rv.Kind() != reflect.Ptr {
		panic("needed ptr!")
	}
	c.f.doFuzz(rv.Elem(), flagNoCustomFuzz)
}
// RandString makes a random string up to 20 characters long. The returned string
// may include a variety of (valid) UTF-8 encodings. Thin wrapper over the
// package-level randString using this Continue's RNG.
func (c Continue) RandString() string {
	return randString(c.Rand)
}

// RandUint64 makes random 64 bit numbers.
// Weirdly, rand doesn't have a function that gives you 64 random bits.
func (c Continue) RandUint64() uint64 {
	return randUint64(c.Rand)
}

// RandBool returns true or false randomly.
func (c Continue) RandBool() bool {
	return randBool(c.Rand)
}
// fuzzInt sets v (any signed integer kind) to a random value; the uint64 is
// reinterpreted as int64, so negative values are produced too.
func fuzzInt(v reflect.Value, r *rand.Rand) {
	v.SetInt(int64(randUint64(r)))
}

// fuzzUint sets v (any unsigned integer kind) to a random value.
func fuzzUint(v reflect.Value, r *rand.Rand) {
	v.SetUint(randUint64(r))
}
// fuzzTime is the default fuzz function for *time.Time (see New).
func fuzzTime(t *time.Time, c Continue) {
	var sec, nsec int64
	// Allow for about 1000 years of random time values, which keeps things
	// like JSON parsing reasonably happy.
	sec = c.Rand.Int63n(1000 * 365 * 24 * 60 * 60)
	c.Fuzz(&nsec)
	*t = time.Unix(sec, nsec)
}
// fillFuncMap maps primitive reflect.Kinds to functions that set a value of
// that kind to something random. Complex and unsafe-pointer kinds are listed
// but deliberately panic: there is no sensible default for them.
var fillFuncMap = map[reflect.Kind]func(reflect.Value, *rand.Rand){
	reflect.Bool: func(v reflect.Value, r *rand.Rand) {
		v.SetBool(randBool(r))
	},
	reflect.Int:     fuzzInt,
	reflect.Int8:    fuzzInt,
	reflect.Int16:   fuzzInt,
	reflect.Int32:   fuzzInt,
	reflect.Int64:   fuzzInt,
	reflect.Uint:    fuzzUint,
	reflect.Uint8:   fuzzUint,
	reflect.Uint16:  fuzzUint,
	reflect.Uint32:  fuzzUint,
	reflect.Uint64:  fuzzUint,
	reflect.Uintptr: fuzzUint,
	reflect.Float32: func(v reflect.Value, r *rand.Rand) {
		v.SetFloat(float64(r.Float32()))
	},
	reflect.Float64: func(v reflect.Value, r *rand.Rand) {
		v.SetFloat(r.Float64())
	},
	reflect.Complex64: func(v reflect.Value, r *rand.Rand) {
		panic("unimplemented")
	},
	reflect.Complex128: func(v reflect.Value, r *rand.Rand) {
		panic("unimplemented")
	},
	reflect.String: func(v reflect.Value, r *rand.Rand) {
		v.SetString(randString(r))
	},
	reflect.UnsafePointer: func(v reflect.Value, r *rand.Rand) {
		panic("unimplemented")
	},
}
// randBool returns true or false randomly.
func randBool(r *rand.Rand) bool {
if r.Int()&1 == 1 {
return true
}
return false
}
type charRange struct {
first, last rune
}
// choose returns a random unicode character from the given range, using the
// given randomness source.
func (r *charRange) choose(rand *rand.Rand) rune {
count := int64(r.last - r.first)
return r.first + rune(rand.Int63n(count))
}
// unicodeRanges lists the code-point ranges randString samples from, chosen
// to cover 1-, 2- and 3-byte UTF-8 encodings.
var unicodeRanges = []charRange{
	{' ', '~'},           // ASCII characters
	{'\u00a0', '\u02af'}, // Multi-byte encoded characters
	{'\u4e00', '\u9fff'}, // Common CJK (even longer encodings)
}

// randString makes a random string of fewer than 20 characters (Intn(20)
// yields lengths 0-19, so the empty string is possible). The returned string
// may include a variety of (valid) UTF-8 encodings.
func randString(r *rand.Rand) string {
	n := r.Intn(20)
	runes := make([]rune, n)
	for i := range runes {
		runes[i] = unicodeRanges[r.Intn(len(unicodeRanges))].choose(r)
	}
	return string(runes)
}
// randUint64 makes random 64 bit numbers.
// Weirdly, rand doesn't have a function that gives you 64 random bits.
func randUint64(r *rand.Rand) uint64 {
return uint64(r.Uint32())<<32 | uint64(r.Uint32())
}
| {
"pile_set_name": "Github"
} |
require 'action_controller'
require 'action_controller/test_process'
require 'will_paginate'
WillPaginate.enable_actionpack

# Minimal route set for the view tests: a paginated dummy controller, a
# dot-separated page format, and an ibocorp route that requires a numeric
# :page and defaults it to 1.
ActionController::Routing::Routes.draw do |map|
  map.connect 'dummy/page/:page', :controller => 'dummy'
  map.connect 'dummy/dots/page.:page', :controller => 'dummy', :action => 'dots'
  map.connect 'ibocorp/:page', :controller => 'ibocorp',
    :requirements => { :page => /\d+/ },
    :defaults => { :page => 1 }
  map.connect ':controller/:action/:id'
end

# Caching would interfere with rendering assertions.
ActionController::Base.perform_caching = false
# Base class for will_paginate view-helper tests. Renders the helper inside a
# minimal ActionView/controller harness and provides assertion utilities for
# inspecting the generated pagination markup.
class WillPaginate::ViewTestCase < Test::Unit::TestCase
  def setup
    super
    @controller = DummyController.new
    @request = @controller.request
    @html_result = nil
    @template = '<%= will_paginate collection, options %>'
    @view = ActionView::Base.new
    @view.assigns['controller'] = @controller
    @view.assigns['_request'] = @request
    @view.assigns['_params'] = @request.params
  end

  # Keeps Test::Unit from complaining that this base class defines no tests.
  def test_no_complain; end

  protected

  # Renders @template with the given collection/options. A Hash collection is
  # expanded into a one-page paginated fixture. Yields the main pagination DIV
  # to the block when one is given.
  def paginate(collection = {}, options = {}, &block)
    if collection.instance_of? Hash
      page_options = { :page => 1, :total_entries => 11, :per_page => 4 }.merge(collection)
      collection = [1].paginate(page_options)
    end
    locals = { :collection => collection, :options => options }
    if defined? ActionView::InlineTemplate
      # Rails 2.1
      args = [ ActionView::InlineTemplate.new(@view, @template, locals) ]
    else
      # older Rails versions
      args = [nil, @template, nil, locals]
    end
    @html_result = @view.render_template(*args)
    @html_document = HTML::Document.new(@html_result, true, false)
    if block_given?
      classname = options[:class] || WillPaginate::ViewHelpers.pagination_options[:class]
      assert_select("div.#{classname}", 1, 'no main DIV', &block)
    end
  end

  # Root of the last rendered document; named so assert_select can find it.
  def response_from_page_or_rjs
    @html_document.root
  end

  # Asserts the page links carry the expected page numbers, in order; a nil
  # entry means the link had no such query parameter.
  def validate_page_numbers expected, links, param_name = :page
    param_pattern = /\W#{CGI.escape(param_name.to_s)}=([^&]*)/
    assert_equal(expected, links.map { |e|
      e['href'] =~ param_pattern
      $1 ? $1.to_i : $1
    })
  end

  # Asserts every pagination link href matches +pattern+; when +numbers+ is
  # given, also checks the captured page numbers against it.
  def assert_links_match pattern, links = nil, numbers = nil
    links ||= assert_select 'div.pagination a[href]' do |elements|
      elements
    end
    pages = [] if numbers
    links.each do |el|
      assert_match pattern, el['href']
      if numbers
        el['href'] =~ pattern
        pages << ($1.nil?? nil : $1.to_i)
      end
    end
    assert_equal numbers, pages, "page numbers don't match" if numbers
  end

  # Asserts that no pagination link href matches +pattern+.
  def assert_no_links_match pattern
    assert_select 'div.pagination a[href]' do |elements|
      elements.each do |el|
        assert_no_match pattern, el['href']
      end
    end
  end
end
# Stand-in for an ActionController request: tracks a simulated HTTP method,
# a mutable params hash, and the path parameters the URL rewriter consults.
class DummyRequest
  attr_accessor :symbolized_path_parameters

  def initialize
    @get = true
    @params = {}
    @symbolized_path_parameters = { :controller => 'foo', :action => 'bar' }
  end

  # Merges +more+ into the stored params when given; returns the params hash.
  def params(more = nil)
    @params.merge!(more) if more
    @params
  end

  # True until #post is called.
  def get?
    @get
  end

  # Switch the simulated HTTP method to POST.
  def post
    @get = false
  end

  def relative_url_root
    ''
  end
end
# Minimal controller double: exposes a DummyRequest and rewrites URLs through
# ActionController::UrlRewriter, which is all the pagination helper needs.
class DummyController
  attr_reader :request
  attr_accessor :controller_name

  def initialize
    @request = DummyRequest.new
    @url = ActionController::UrlRewriter.new(@request, @request.params)
  end

  # Delegates to the request's params hash.
  def params
    @request.params
  end

  # Builds a URL from the given params via the rewriter.
  def url_for(params)
    @url.rewrite(params)
  end
end
# Adds inner_text to the HTML scanner node classes so assertions can read the
# visible text of parsed pagination markup.
module HTML
  Node.class_eval do
    # Concatenated text of all child nodes.
    def inner_text
      children.map(&:inner_text).join('')
    end
  end

  Text.class_eval do
    # A text node's inner text is its own content.
    def inner_text
      self.to_s
    end
  end

  Tag.class_eval do
    # Childless tags (e.g. <br/>) contribute no text.
    def inner_text
      childless?? '' : super
    end
  end
end
| {
"pile_set_name": "Github"
} |
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 8 (0x8)
Signature Algorithm: sha224WithRSAEncryption
Issuer: C=NL, O=PolarSSL, CN=PolarSSL Test CA
Validity
Not Before: Feb 12 14:44:07 2011 GMT
Not After : Feb 12 14:44:07 2021 GMT
Subject: C=NL, O=PolarSSL, CN=PolarSSL Cert SHA224
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public Key: (2048 bit)
Modulus (2048 bit):
00:b9:3c:4a:c5:c8:a3:8e:90:17:a4:9e:52:aa:71:
75:26:61:80:e7:c7:b5:6d:8c:ff:aa:b6:41:26:b7:
be:11:ad:5c:73:16:0c:64:11:48:04:ff:d6:e1:3b:
05:db:89:bb:b3:97:09:d5:1c:14:dd:68:87:39:b0:
3d:71:cb:e2:76:d0:1a:d8:18:2d:80:1b:54:f6:e5:
44:9a:f1:cb:af:61:2e:df:49:0d:9d:09:b7:ed:b1:
fd:3c:fd:3c:fa:24:cf:5d:bf:7c:e4:53:e7:25:b5:
ea:44:22:e9:26:d3:ea:20:94:9e:e6:61:67:ba:2e:
07:67:0b:03:2f:a2:09:ed:f0:33:8f:0b:ce:10:ef:
67:a4:c6:08:da:c1:ed:c2:3f:d7:4a:dd:15:3d:f9:
5e:1c:81:60:46:3e:b5:b3:3d:2f:a6:de:47:1c:bc:
92:ae:eb:df:27:6b:16:56:b7:dc:ec:d1:55:57:a5:
6e:ec:75:25:f5:b7:7b:df:ab:d2:3a:5a:91:98:7d:
97:17:0b:13:0a:a7:6b:4a:8b:c1:47:30:fb:3a:f8:
41:04:d5:c1:df:b8:1d:bf:7b:01:a5:65:a2:e0:1e:
36:b7:a6:5c:cc:30:5a:f8:cd:6f:cd:f1:19:62:25:
ca:01:e3:35:7f:fa:20:f5:dc:fd:69:b2:6a:00:7d:
17:f7
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints:
CA:FALSE
X509v3 Subject Key Identifier:
7D:E4:9C:6B:E6:F9:71:7D:46:D2:12:3D:AD:6B:1D:FD:C2:AA:78:4C
X509v3 Authority Key Identifier:
keyid:B4:5A:E4:A5:B3:DE:D2:52:F6:B9:D5:A6:95:0F:EB:3E:BC:C7:FD:FF
Signature Algorithm: sha224WithRSAEncryption
b8:9b:0a:d1:b4:d1:a4:ce:05:39:42:7a:3b:7b:5e:fd:97:57:
8a:36:60:42:39:d0:e6:0c:9c:7e:2f:2b:be:ef:e7:45:34:77:
48:7a:10:4a:fd:76:ca:42:39:25:3c:fa:19:f8:63:6c:e7:36:
27:9a:ec:06:ce:e4:f7:2c:2e:c6:36:c1:25:bd:ab:09:aa:e2:
da:4e:de:ae:b5:f5:ba:9e:90:24:52:34:96:96:61:4c:26:b5:
57:65:b1:10:ed:13:2b:54:90:ce:d3:21:cb:8c:d3:4c:6c:e5:
e1:78:22:16:3f:e1:be:f1:ee:5d:39:48:a1:e6:80:46:f4:46:
f2:79:03:3e:f1:fc:51:47:d9:05:e8:85:81:1b:0b:4f:fa:85:
9d:ce:e7:76:5a:6f:da:98:9f:43:f1:f3:2f:2f:57:28:aa:70:
14:82:7f:d5:69:14:8c:f9:82:b6:2f:a6:df:b5:6b:0e:43:c9:
96:91:64:3d:8b:a8:17:15:9a:88:42:a4:d0:90:c0:a3:a2:e1:
dd:f6:95:6d:3b:9d:71:a6:1e:9e:2c:1e:db:f6:5f:93:43:2c:
ed:53:70:55:50:56:df:cd:96:6c:d5:91:0f:b1:a7:f4:b7:17:
9d:1f:0b:f6:0b:f8:fe:e7:7c:de:c1:20:b7:fc:69:13:ba:e2:
61:9b:a5:62
-----BEGIN CERTIFICATE-----
MIIDQjCCAiqgAwIBAgIBCDANBgkqhkiG9w0BAQ4FADA7MQswCQYDVQQGEwJOTDER
MA8GA1UEChMIUG9sYXJTU0wxGTAXBgNVBAMTEFBvbGFyU1NMIFRlc3QgQ0EwHhcN
MTEwMjEyMTQ0NDA3WhcNMjEwMjEyMTQ0NDA3WjA/MQswCQYDVQQGEwJOTDERMA8G
A1UEChMIUG9sYXJTU0wxHTAbBgNVBAMTFFBvbGFyU1NMIENlcnQgU0hBMjI0MIIB
IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuTxKxcijjpAXpJ5SqnF1JmGA
58e1bYz/qrZBJre+Ea1ccxYMZBFIBP/W4TsF24m7s5cJ1RwU3WiHObA9ccvidtAa
2BgtgBtU9uVEmvHLr2Eu30kNnQm37bH9PP08+iTPXb985FPnJbXqRCLpJtPqIJSe
5mFnui4HZwsDL6IJ7fAzjwvOEO9npMYI2sHtwj/XSt0VPfleHIFgRj61sz0vpt5H
HLySruvfJ2sWVrfc7NFVV6Vu7HUl9bd736vSOlqRmH2XFwsTCqdrSovBRzD7OvhB
BNXB37gdv3sBpWWi4B42t6ZczDBa+M1vzfEZYiXKAeM1f/og9dz9abJqAH0X9wID
AQABo00wSzAJBgNVHRMEAjAAMB0GA1UdDgQWBBR95Jxr5vlxfUbSEj2tax39wqp4
TDAfBgNVHSMEGDAWgBS0WuSls97SUva51aaVD+s+vMf9/zANBgkqhkiG9w0BAQ4F
AAOCAQEAuJsK0bTRpM4FOUJ6O3te/ZdXijZgQjnQ5gycfi8rvu/nRTR3SHoQSv12
ykI5JTz6GfhjbOc2J5rsBs7k9ywuxjbBJb2rCari2k7errX1up6QJFI0lpZhTCa1
V2WxEO0TK1SQztMhy4zTTGzl4XgiFj/hvvHuXTlIoeaARvRG8nkDPvH8UUfZBeiF
gRsLT/qFnc7ndlpv2pifQ/HzLy9XKKpwFIJ/1WkUjPmCti+m37VrDkPJlpFkPYuo
FxWaiEKk0JDAo6Lh3faVbTudcaYeniwe2/Zfk0Ms7VNwVVBW382WbNWRD7Gn9LcX
nR8L9gv4/ud83sEgt/xpE7riYZulYg==
-----END CERTIFICATE-----
| {
"pile_set_name": "Github"
} |
// MIXINS
//------------------------------------------------
//------------------------------------------------

// Breakpoints
//------------------------------------------------
// Wraps @content in a max-width media query for the named breakpoint.
// $point: one of xxlarge | xlarge | large | medium | small, mapped to the
// corresponding $bp-* width variable. Unknown values emit nothing.
@mixin bp($point) {
  @if $point==xxlarge {
    @media (max-width: $bp-xxlarge) {
      @content;
    }
  }
  @if $point==xlarge {
    @media (max-width: $bp-xlarge) {
      @content;
    }
  }
  @if $point==large {
    @media (max-width: $bp-large) {
      @content;
    }
  }
  @if $point==medium {
    @media (max-width: $bp-medium) {
      @content;
    }
  }
  @if $point==small {
    @media (max-width: $bp-small) {
      @content;
    }
  }
}
// Images
//------------------------------------------------
// Vendor-prefixed background-size: cover (scales the image to fill the box).
@mixin image-size {
  -webkit-background-size: cover;
  -moz-background-size: cover;
  -o-background-size: cover;
  background-size: cover;
}
// Font
//------------------------------------------------
// Toggles WebKit/Firefox font smoothing: pass `on` (the default) for
// antialiased rendering, anything else to restore the browser defaults.
@mixin font-smoothing($value: on) {
  @if $value == on {
    -webkit-font-smoothing: antialiased;
    -moz-osx-font-smoothing: grayscale;
  }
  @else {
    -webkit-font-smoothing: subpixel-antialiased;
    -moz-osx-font-smoothing: auto;
  }
}
// Border
//------------------------------------------------
// Vendor-prefixed border-radius with the given radius value.
@mixin border-radius($radius) {
  border-radius: $radius;
  -webkit-border-radius: $radius;
  -moz-border-radius: $radius;
  -ms-border-radius: $radius;
  -o-border-radius: $radius;
}
// User select
//------------------------------------------------
// Disables text selection (and the iOS long-press callout) across browsers.
@mixin no-select {
  -webkit-touch-callout: none;
  -webkit-user-select: none;
  -khtml-user-select: none;
  -moz-user-select: none;
  -ms-user-select: none;
  user-select: none;
}
// Box Shadow
//------------------------------------------------
// Vendor-prefixed box-shadow; $params is passed through verbatim.
@mixin box-shadow($params) {
  -webkit-box-shadow: $params;
  -moz-box-shadow: $params;
  box-shadow: $params;
}
| {
"pile_set_name": "Github"
} |
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.
package com.microsoft.ml.spark.cognitive

/** Axis-aligned rectangle described by its top-left corner and extent. */
case class Rectangle(left: Int, top: Int, width: Int, height: Int)

/** Alternate rectangle encoding: x/y for the corner, w/h for the extent. */
case class Rectangle2(x: Int, y: Int, w: Int, h: Int)
| {
"pile_set_name": "Github"
} |
\documentclass[landscape, 11pt, svgnames]{article}
\usepackage[showframe]{geometry}
\usepackage[T1]{fontenc}
\usepackage[utf8]{inputenc}
\usepackage[english]{babel}
\usepackage{listings}
\usepackage{tikz-uml}
\usepackage{amsfonts, amsmath, amsthm, amssymb}
\geometry{
paperwidth=65cm,
paperheight=112cm,
margin=1cm
}
\date{\today}
\title{OpenST Protocol sequence diagrams v0.9.3}
\author{Benjamin Bollen}
\lstdefinelanguage{tikzuml}{language=[LaTeX]TeX, classoffset=0, morekeywords={umlbasiccomponent, umlprovidedinterface, umlrequiredinterface, umldelegatewayconnector, umlassemblyconnector, umlVHVassemblyconnector, umlHVHassemblyconnector, umlnote, umlusecase, umlactor, umlinherit, umlassoc, umlVHextend, umlinclude, umlstateinitial, umlbasicstate, umltrans, umlstatefinal, umlVHtrans, umlHVtrans, umldatabase, umlmulti, umlobject, umlfpart, umlcreatecall, umlclass, umlvirt, umlunicompo, umlimport, umlaggreg}, keywordstyle=\color{DarkBlue}, classoffset=1, morekeywords={umlcomponent, umlsystem, umlstate, umlseqdiag, umlcall, umlcallself, umlfragment, umlpackage}, keywordstyle=\color{DarkRed}, classoffset=0, sensitive=true, morecomment=[l]{\%}}
\begin{document}
\begin{minipage}[b]{0.55\linewidth}
\Huge \color{NavyBlue} \textbf{OpenST Protocol v0.9.3 } \color{Black}\\ % Title
\huge\textit{sequence diagrams for stake and mint - Benjamin Bollen \& Pranay Valson, last edit \today}\\[1cm] % Subtitle
\end{minipage}
\begin{tikzpicture}
\begin{umlseqdiag}
\umlactor[class=Address]{Staker}
\umlactor[class=Worker]{Facilitator}
\umlactor[class=Worker]{Hunter}
\umlboundary[class=ERC20]{OST}
\umlboundary[class=SK]{Branded Token Gateway}
\umlcontrol[class=SK]{OpenSTValue}
\umlobject[class=SK]{SimpleStake}
\umlobject[class=SK]{CoreUC}
%\umlcontrol[class=SK]{RegistrarVC}
\umlboundary[class=Web3]{Value Chain}
\umlactor[class=Worker, fill=purple!40]{OstDotCom}
\umlboundary[class=Web3, fill=blue!40]{Utility Chain}
%\umlcontrol[class=SK, fill=blue!40]{RegistrarUC}
\umlobject[class=SK, fill=blue!40]{CoreVC}
\umlcontrol[class=SK, fill=blue!40]{OpenSTUtility}
\umlboundary[class=ERC20, fill=blue!40]{Branded Token}
\umlactor[class=Address, fill=blue!40]{Beneficiary} %Token Holder contract in v0.9.4
%%%
%%% Staker initiates stake
%%%
% staker approves branded token gateway contract on OST for amount
\begin{umlcall}[op={: approve(gateway, amountST)}]{Staker}{OST}
\end{umlcall}
% worker approves worker contract for bounty amount
% \begin{umlcall}[dt=9,op={: approve(worker, bounty)}]{Facilitator}{Workers}
% \end{umlcall}
% staker requests stake to gateway
\begin{umlcall}[dt=10, op={: requestStake(amountST, beneficiary)}]{Staker}{Branded Token Gateway}
% gateway pulls amount from staker on OST
\begin{umlcall}[fill=green!20, dt=5, op={: transferFrom(staker, gateway, amountST)}, return=<<OST(amountST)>>]{Branded Token Gateway}{OST}
% pull OST from Facilitator
\begin{umlcall}[dt=5, fill=green!20, type=return, op={<<OST(amountST)>>}]{Staker}{OST}
\end{umlcall}
\end{umlcall}
% emit StakeRequested event which Facilitator listens to
\begin{umlcall}[dt=5, type=return, op={emit StakeRequested(staker, amountST, beneficiary)}]{Branded Token Gateway}{Facilitator}
\end{umlcall}
\end{umlcall}
% Facilitator evaluates request against policy (monetary and KYC/AML)
\begin{umlfragment}[type=alt, label=accept, name=policy, inner xsep=2]
\begin{umlcall}[dt=10, op={: approve(gateway, bounty)}]{Facilitator}{OST}
\end{umlcall}
% Facilitator accepts request
\begin{umlcall}[op={: acceptStakeRequest(staker, hashLock)}]{Facilitator}{Branded Token Gateway} %return={ emit StakingRequestAccepted(staker, amountST, amountUT, nonce, unlockHeight, stakingIntentHash)}
% Pull Facilitator's bounty into Gate
\begin{umlcall}[fill=green!20, op={: transferFrom(facilitator, gateway, bounty)}, return={<<OST(bounty)>>}]{Branded Token Gateway}{OST}
% pull OST for bounty from Facilitator
\begin{umlcall}[dt=5, fill=green!20, op={<<OST(bounty)>>},type=return]{Facilitator}{OST}
\end{umlcall}
\end{umlcall}
% Approve OpenSTValue as spender for Gate
\begin{umlcall}[op={: approve(openSTValue, amountST)}]{Branded Token Gateway}{OST}
\end{umlcall}
%emit acceptStaking
\begin{umlcall}[type=return, op={emit StakeRequestAccepted(staker, amountST, amountUT, nonce, unlockHeight, stakingIntentHash)}]{Branded Token Gateway}{Facilitator}
\end{umlcall}
% Gate calls on to OpenSTValue to stake (later abstract to library call)
\begin{umlcall}[op={: stake(uuid, amountST, beneficiary, hashLock, staker)}]{Branded Token Gateway}{OpenSTValue} %
% check stakingAccount
\begin{umlcallself}[op={require(stakingAccount == 0x || (stakingAccount == msg.sender \&\& msg.sender == gateway))}]{OpenSTValue}
\end{umlcallself}
% OpenSTValue pulls amount plus bounty from Facilitator to
% its OST account balance
\begin{umlcall}[dt=4, op={: transferFrom(gateway, OpenSTValue, amountST)}, fill=green!20, return=<<OST(amountST)>>]{OpenSTValue}{OST}
% pull OST for pre-fund amount from Facilitator
\begin{umlcall}[dt=20, fill=green!20, type ={return},op ={<<OST(amountST)>>}]{Branded Token Gateway}{OST}
\end{umlcall}
\end{umlcall}
% store StakingIntentHash in contract storage
\begin{umlcallself}[op={store StakingIntentHash},]{OpenSTValue}
\end{umlcallself}
% HTLC(facilitator, amount+bounty)Facilitator
% \begin{umlcallself}[dt=0, op={HTLC(staker, amount)},]{OpenSTValue}
%\end{umlcallself}
% emit StakingIntentDeclared
\begin{umlcall}[type=return, op={emit StakingIntentDeclared(uuid, staker, nonce, intentKeyHash, beneficiary, amountST, amountUT, unlockHeight, StakingIntentHash, chainId)}]{OpenSTValue}{Facilitator}
\end{umlcall}
\end{umlcall}
% HTLC(facilitator, bounty)
%\begin{umlcallself}[op={HTLC(facilitator, bounty)}]{Branded Token Gateway}
%\end{umlcallself}
\end{umlcall}
% Facilitator rejects request
\umlfpart[reject]
\begin{umlcall}[dt=5, op={: rejectStakeRequest(staker, reason)}]{Facilitator}{Branded Token Gateway}
% gateway checks msg.sender == whitelister worker address and there is no hashlock
\begin{umlcallself}[op={require(msg.sender=worker \&\& no HTLC)}]{Branded Token Gateway}
\end{umlcallself}
% transfer amount back to staker
\begin{umlcall}[fill=green!20, op={: transfer(staker, amountST)}]{Branded Token Gateway}{OST}
% return OST to staker
\begin{umlcall}[type=return, fill=green!20, op=<<OST(amountST)>>]{OST}{Staker}
% emit stake request rejected
\begin{umlcall}[dt=10, type=return, op={emit StakeRequestRejected(staker, amountST, reason)}]{Branded Token Gateway}{Facilitator}
\end{umlcall}
\end{umlcall}
\end{umlcall}
\end{umlcall}
\end{umlfragment}
\umlnote[x=2, y=-7]{policy}{evaluate request against policy (KYC/AML)}
\umlnote[x=30,y=-5, width=200]{policy}{to accept the staking request the facilitator generates a secret, random unlockSecret, and publishes hashLock=Hash(unlockSecret)}
% optionally, staker can initiate revert after timeout and no action from Facilitator
\begin{umlfragment}[type=opt]
% staker reverts Stake Request after timeout
\begin{umlcall}[dt=12, op={: revertStakeRequest()}, fill=green!20]{Staker}{Branded Token Gateway}
% gateway checks amount is not locked under HTLC or time-lock is not yet expired
\begin{umlcallself}[op={require(msg.sender=staker \&\& no HTLC)}]{Branded Token Gateway}
\end{umlcallself}
% transfer amount back to staker
\begin{umlcall}[fill=green!20, op={: transfer(staker, amountST)}]{Branded Token Gateway}{OST}
% return OST to staker
\begin{umlcall}[type=return, fill=green!20, op=<<OST(amountST)>>]{OST}{Staker}
\end{umlcall}
% emit rejectStakeRequest event
\begin{umlcall}[type=return, op={: emit StakeRequestReverted(staker, OST(amountST))}]{OST}{Facilitator}
\end{umlcall}
\end{umlcall}
\end{umlcall}
\end{umlfragment}
%%%
%%% OpenST Mosaic (OstDotCom reports respective state root of )
%%%
\begin{umlfragment}[type=loop, name=mosaic]
\begin{umlcall}[dt=205, op={new block(blockHeightUC, stateRootUC)}]{Utility Chain}{OstDotCom}
% OstDotCom report state root of Utility chain on CoreUC on value chain
%\begin{umlcall}[op={: report(UC, blockHeight, stateRoot)}]{OstDotCom}{RegistrarVC}
\begin{umlcall}[op={: commitStateRoot(blockHeightUC, stateRootUC)}]{OstDotCom}{CoreUC}
% make sure block height is greater than the lastestStateRootBlockHeight previously stored
\begin{umlcallself}[dt= 8, op={require(blockHeightUC > latestSateRootBlockHeightUC)}]{CoreUC}
\end{umlcallself}
% store the state root in the contract's mapping
\begin{umlcallself}[dt=7,op={store latest valid stateRootUC}]{CoreUC}
\end{umlcallself}
% state root of utility chain got reported on value chain
\begin{umlcall}[dt=13, type=return, op={emit StateRootCommitted(blockHeightUC, stateRootUC)}]{CoreUC}{Facilitator}
\end{umlcall}
\end{umlcall}
%\end{umlcall}
\end{umlcall}
% OstDotCom report state root of Value chain on CoreVC on utility chain
\begin{umlcall}[dt=212, op={new block(blockHeightVC, stateRootVC)}]{Value Chain}{OstDotCom}
%\begin{umlcall}[op={: report(VC, blockHeight, stateRoot}]{OstDotCom}{RegistrarUC}
% reporting through registrar is instant commit
\begin{umlcall}[op={: commitStateRoot(blockHeightVC, stateRootVC)}]{OstDotCom}{CoreVC}
% make sure block height is greater than the lastestStateRootBlockHeight previously stored
\begin{umlcallself}[op={require(blockHeightVC > latestSateRootBlockHeightVC)}]{CoreVC}
\end{umlcallself}
% store the state root in the contract's mapping
\begin{umlcallself}[op={store latest valid stateRootVC}]{CoreVC}
\end{umlcallself}
% state root of value chain got reported on utility chain
\begin{umlcall}[dt=10, type=return, op={emit StateRootCommitted(blockHeightVC, stateRootVC)}]{CoreVC}{Facilitator}
\end{umlcall}
\end{umlcall}
%\end{umlcall}
\end{umlcall}
\end{umlfragment}
%%%
%%% Facilitator has observed a committed state root that includes the StakingIntentHash
%%%
% Facilitator submits claim for StakingIntentHash by presenting Merkle proof
\begin{umlcall}[dt=15, op={: proveOpenST(blockHeightVC, rlpEncodedAccount, rlpParentNodes)}, return={emit OpenSTProven(blockHeightVC, storageRootVC, hashedAccount)}]{Facilitator}{CoreVC}
% OpenSTUtility checks StakingIntentHash against committed state root
% \begin{umlcall}[op={: getStateRoot(blockHeight)}, return={stateRoot @ blockHeight}]{OpenSTUtility}{CoreVC}
%\end{umlcall}
% OpenSTUtility validate merkle proof
\begin{umlcallself}[op={require(verify(hashedAccount, encodedOpenSTRemotePath, rlpParentNodes, stateRootVC))}]{CoreVC}
\end{umlcallself}
% OpenSTUtility store valid StakingIntentHash
\begin{umlcallself}[op={store latest valid storageRootVC}]{CoreVC}
\end{umlcallself}
\end{umlcall}
\umlnote[x=31, y=-28, width=180]{mosaic}{Placeholder for OpenST Mosaic game}
% Facilitator submits pre-image data for StakingIntentHash
\begin{umlcall}[dt=15, op={: confirmStakingIntent(uuid, staker, stakerNonce, beneficiary, amountST, amountUT, stakingUnlockHeight, hashLock, rlpParentNodes)}, return={emit StakingIntentConfirmed(stakingIntentHash, staker, beneficiary, amountST, amountUT, expirationHeight, blockHeight, storageRootVC)}]{Facilitator}{OpenSTUtility}
% check stake nonces of staker is less than staker nonce
% \begin{umlcallself}[op={require(nonces[staker] < stakerNonce)}]{OpenSTUtility}
% \end{umlcallself}
% check stakingUnlockHeight is greater than 0
\begin{umlcallself}[op={require(stakingUnlockHeight > 0)}]{OpenSTUtility}
\end{umlcallself}
% calculate the stakingIntentHash
%\begin{umlcallself}[op={stakingIntentHash = H(uuid, staker, stakerNonce, beneficiary, amountST, amountUT, stakingUnlockHeight, hashLock)}]{OpenSTUtility}
%\end{umlcallself}
% check calculated stakingIntentHash is equal to provided stakingIntentHash
\begin{umlcallself}[op={require(merkleVerificationOfStake(StakingIntentHash, rlpParentNodes, storageRoot)}]{OpenSTUtility}
\end{umlcallself}
%store the mints struct
\begin{umlcallself}[op={store mints[stakingIntentHash]}]{OpenSTUtility}
\end{umlcallself}
% OpenSTUtility asserts valid pre-image data for StakingIntentHash
%\begin{umlcallself}[op={assert valid pre-image data}]{OpenSTUtility}
%\end{umlcallself}
% OpenSTUtility checks StakingIntentHash against committed state root
%\begin{umlcall}[op={: getLatestHeight()}, return={latestHeight}]{OpenSTUtility}{CoreVC}
%\end{umlcall}
% OpenSTUtility asserts grace period before unlockHeight
%\begin{umlcallself}[op={assert grace period before unlockHeight}]{OpenSTUtility}
%\end{umlcallself}
% OpenSTUtility store mint object with StakingIntentHash and expiration Height
%\begin{umlcallself}[op={store mint with expirationHeight}]{OpenSTUtility}
%\end{umlcallself}
\end{umlcall}
%%%
%%% Facilitator has moved both value and utility chain to the first stage
%%% and can now either proceed or revert by revealing the hash lock secret or await timeout
%%%
% Correctly initialised staking information on both systems
\begin{umlfragment}[type=alt, label=proceed, name=phasetwo, inner xsep=1.5]
\begin{umlcall}[dt=8, fill=green!20, op={: processStaking(StakingIntentHash, unlockSecret)}]{Facilitator}{Branded Token Gateway}
% check hashlock
\begin{umlcallself}[op={require(H(unlockSecret) = hashLock)}]{Branded Token Gateway}
\end{umlcallself}
% check msg.sender = worker
%\begin{umlcallself}[op={require(msg.sender = worker)}]{Branded Token Gateway}
%\end{umlcallself}
% Facilitator first calls processStaking to ensure the bounty is return to him
\begin{umlcall}[dt=4, op={: processStaking(StakingIntentHash, unlockSecret)}]{Branded Token Gateway}{OpenSTValue}
% check stakingAccount
%\begin{umlcallself}[op={require(msg.sender = gateway = stakingAccount || stakingAccount = 0x)}]{OpenSTValue}
%\end{umlcallself}
% check hashlock
\begin{umlcallself}[op={require(H(unlockSecret) = hashLock)}]{OpenSTValue}
\end{umlcallself}
% transfer amount OST to SimpleStake
\begin{umlcall}[fill=green!20, op={: transfer(simpleStake, amountST)}]{OpenSTValue}{OST}
\begin{umlcall}[dt=5, fill=green!20, type=return, op={<<OST(amountST)>>}]{OST}{SimpleStake}
\end{umlcall}
\end{umlcall}
\begin{umlcall}[dt=5, type=return, op={emit ProcessedStake(StakingIntentHash, staker, amountST, amountUT, unlockSecret)}]{OpenSTValue}{Hunter}
\end{umlcall}
\end{umlcall}
% transfer bounty from gateway to facilitator
\begin{umlcall}[dt=5, fill=green!20, op={: transfer(facilitator, bounty)}]{Branded Token Gateway}{OST}
\begin{umlcall}[fill=green!20, type=return, op={<<OST(bounty)>>}]{OST}{Facilitator}
% \begin{umlcall}[fill=green!20, type=return, op={<<OST(bounty)>>}]{Workers}{Facilitator}
%\end{umlcall}
\end{umlcall}
\end{umlcall}
\end{umlcall}
% Facilitator then calls processMinting to mint the utility tokens
\begin{umlcall}[op={: processMinting(StakingIntentHash, unlockSecret)}]{Facilitator}{OpenSTUtility}
% check hashlock
\begin{umlcallself}[op={require(H(unlockSecret) = hashLock)}]{OpenSTUtility}
\end{umlcallself}
% require mint not yet expired
\begin{umlcallself}[op={require(expirationHeight > block.number)}]{OpenSTUtility}
\end{umlcallself}
% mint Branded Tokens
\begin{umlcall}[fill=green!20, op={: mint(beneficiary, amountUT)}]{OpenSTUtility}{Branded Token}
% store claim
\begin{umlcallself}[op={store claim}]{Branded Token}
\end{umlcallself}
\end{umlcall}
\begin{umlcall}[type=return, op={emit ProcessedMint(uuid, StakingIntentHash, tokenAddress, staker, beneficiary, amountUT, unlockSecret)}]{OpenSTUtility}{Hunter}
\end{umlcall}
\end{umlcall}
% Facilitator (or anyone) can call claim to transfer UT to token holder
\begin{umlcall}[dt=5, op={: claim(beneficiary)}]{Facilitator}{Branded Token}
\begin{umlcall}[fill=green!20, type=return, op={<<UT(amountUT)>>}]{Branded Token}{Beneficiary}
\end{umlcall}
\end{umlcall}
%%%
%%% Hunter
%%%
\begin{umlfragment}[type=opt, name=bounty, label={ensure completion}, inner xsep=15]
% process staking on gateway
\begin{umlcall}[dt=20, fill=green!20, op={: processStaking(StakingIntentHash, unlockSecret)}]{Hunter}{Branded Token Gateway}
% check hashlock
\begin{umlcallself}[op={require(H(unlockSecret) = hashLock)}]{Branded Token Gateway}
\end{umlcallself}
% check msg.sender = worker
% \begin{umlcallself}[op={require(msg.sender = worker)}]{Branded Token Gateway}
%\end{umlcallself}
% Facilitator first calls processStaking to ensure the bounty is return to him
\begin{umlcall}[dt=4, op={: processStaking(StakingIntentHash, unlockSecret)}]{Branded Token Gateway}{OpenSTValue}
% check stakingAccount
% \begin{umlcallself}[op={require(msg.sender = gateway = stakingAccount || stakingAccount = 0x)}]{OpenSTValue}
%\end{umlcallself}
% check hashlock
\begin{umlcallself}[op={require(H(unlockSecret) = hashLock)}]{OpenSTValue}
\end{umlcallself}
% transfer amount OST to SimpleStake
\begin{umlcall}[fill=green!20, op={: transfer(simpleStake, amountST)}]{OpenSTValue}{OST}
\begin{umlcall}[dt=5, fill=green!20, type=return, op={<<OST(amountST)>>}]{OST}{SimpleStake}
\end{umlcall}
\end{umlcall}
\begin{umlcall}[dt=5, type=return, op={emit ProcessedStake(StakingIntentHash, simpleStake, staker, amountST, amountUT, unlockSecret)}]{OpenSTValue}{Facilitator}
\end{umlcall}
\end{umlcall}
% transfer bounty from gateway to facilitator
\begin{umlcall}[dt=5, fill=green!20, op={: transfer(hunter, bounty)}]{Branded Token Gateway}{OST}
\begin{umlcall}[fill=green!20, type=return, op={<<OST(bounty)>>}]{OST}{Hunter}
\end{umlcall}
\end{umlcall}
\end{umlcall}
\end{umlfragment}
\umlnote[x=12, y=-66, width=150]{bounty}{If stake was left unprocessed, unlock secret is known through mint. Bounty is always transferred to msg.sender of processStaking()}
\umlfpart[revert]
\begin{umlcall}[dt=20, fill=green!20, op={: revertStaking(StakingIntentHash)}]{Facilitator}{Branded Token Gateway}
% check unlockHeight is in the past
\begin{umlcallself}[op={require(unlockHeight <= block.number)}]{Branded Token Gateway}
\end{umlcallself}
% check msg.sender = worker
%\begin{umlcallself}[op={require(msg.sender = worker)}]{Branded Token Gateway}
%\end{umlcallself}
% Facilitator reverts stake to get back amount and bounty
\begin{umlcall}[fill=green!20, op={: revertStaking(StakingIntentHash)}]{Branded Token Gateway}{OpenSTValue}
% check stakingAccount
% \begin{umlcallself}[op={require(msg.sender = gateway = stakingAccount || stakingAccount = 0x)}]{OpenSTValue}
%\end{umlcallself}
% check unlockHeight
\begin{umlcallself}[op={require(unlockHeight <= block.number)}]{OpenSTValue}
\end{umlcallself}
% transfer amount and bounty OST to facilitator
\begin{umlcall}[fill=green!20, op={: transfer(staker, amountST)}]{OpenSTValue}{OST}
\begin{umlcall}[fill=green!20, type=return, op={<<OST(amountST)>>}]{OST}{Staker}
\end{umlcall}
\begin{umlcall}[dt=10, type=return, op={emit RevertedStake(StakingIntentHash, staker, amountST, amountUT)}]{OpenSTValue}{Facilitator}
\end{umlcall}
\end{umlcall}
\end{umlcall}
% transfer bounty from gateway to facilitator
\begin{umlcall}[dt=5, fill=green!20, op={: transfer(facilitator, bounty)}]{Branded Token Gateway}{OST}
\begin{umlcall}[fill=green!20, type=return, op={<<OST(bounty)>>}]{OST}{Facilitator}
%\begin{umlcall}[fill=green!20, type=return, op={<<OST(bounty)>>}]{Workers}{Facilitator}
%\end{umlcall}
\end{umlcall}
\end{umlcall}
\end{umlcall}
\begin{umlcall}[op={: revertMinting(StakingIntentHash)}]{Facilitator}{OpenSTUtility}
% require mint not yet expired
\begin{umlcallself}[op={require(expirationHeight <= block.number)}]{OpenSTUtility}
\end{umlcallself}
\begin{umlcallself}[op={delete mints[StakingIntentHash]}]{OpenSTUtility}
\end{umlcallself}
\begin{umlcall}[dt=5, type=return, op={emit RevertedMint(uuid, staker, beneficiary, amountUT)}]{OpenSTUtility}{Facilitator}
\end{umlcall}
\end{umlcall}
\end{umlfragment}
\umlnote[x=1, y=-55, width=100]{phasetwo}{With both value chain and utility chain configured correctly for StakingIntentHash, Facilitator can proceed by revealing the unlock secret, or revert by awaiting the unlock height }
\umlnote[x=35, y=-102, width=120]{phasetwo}{Any actor can call revertStaking and the bounty is returned to the facilitator}
\end{umlseqdiag}
\end{tikzpicture}
\begin{minipage}[b]{0.55\linewidth}
\Huge \color{NavyBlue} \textbf{OpenST Protocol v0.9.3 } \color{Black}\\ % Title
\huge\textit{sequence diagrams for redeem and unstake - Benjamin Bollen \& Pranay Valson, last edit \today}\\[1cm] % Subtitle
\end{minipage}
\begin{tikzpicture}
\begin{umlseqdiag}
\umlactor[class=Address]{Beneficiary}
\umlactor[class=Worker]{Facilitator}
\umlactor[class=Worker]{Hunter}
\umlboundary[class=ERC20]{OST}
\umlboundary[class=SK]{Branded Token Gateway}
\umlcontrol[class=SK]{OpenSTValue}
\umlobject[class=SK]{SimpleStake}
\umlobject[class=SK]{CoreUC}
%\umlcontrol[class=SK]{RegistrarVC}
\umlboundary[class=Web3]{Value Chain}
\umlactor[class=Worker, fill=purple!40]{OstDotCom}
\umlboundary[class=Web3, fill=blue!40]{Utility Chain}
%\umlcontrol[class=SK, fill=blue!40]{RegistrarUC}
\umlobject[class=SK, fill=blue!40]{CoreVC}
\umlcontrol[class=SK, fill=blue!40]{OpenSTUtility}
\umlboundary[class=ERC20, fill=blue!40]{Branded Token}
\umlactor[class=Address, fill=blue!40]{Redeemer} %Token Holder contract in v0.9.4
%%%
%%% redeemer initiates redeem
%%%
%start with redeem process on the VC side
% redeemer sets allowance for branded token with OpenSTUtility
\begin{umlcall}[dt =7, op={: approve(OpenSTUtility, amountUT)}]{Redeemer}{Branded Token}
\end{umlcall}
% redeemer calls redeem on OpenSTUtility
\begin{umlfragment}[type=alt, label=redeem, name=policy]
\begin{umlcall}[dt=10, op={: redeem(uuid, amountUT, nonce, beneficiary, hashLock)}]{Redeemer}{OpenSTUtility}
%require that allowance is there for OSTU for amount UT
\begin{umlcallself}[op={require (allowance(redeemer, OpenSTUtility) >= amountUT)}]{OpenSTUtility}
\end{umlcallself}
% transferfrom is called from openstutility on branded token
\begin{umlcall}[fill=green!20,op={: transferFrom(redeemer, OpenSTUtility, amountUT)}]{OpenSTUtility}{Branded Token}
% ut is taken from redeemer to branded token
\begin{umlcall}[dt=15,fill=green!20, type=return, op={<<UT(amountUT)>>}]{Redeemer}{Branded Token}
\end{umlcall}
% ut is the then transfferred to openstutility from branded token (which keeps balance)
\begin{umlcall}[dt=14,fill=green!20, type=return, op={<<UT(amountUT)>>}]{Branded Token}{OpenSTUtility}
\end{umlcall}
% store the intents in the storage
\begin{umlcallself}[dt=15, op={store intents[H(redeemer, nonce)] = redemptionIntentHash}]{OpenSTUtility}
\end{umlcallself}
% event is emitted with redemptionintentHash which the worker can listen to
\begin{umlcall}[type=return, op={emit RedemptionIntentDeclared(uuid, redemptionIntentHash, brandedToken, redeemer, nonce, beneficiary, amountUT, unlockHeight)}]{OpenSTUtility}{OstDotCom}
\end{umlcall}
%event can be heard by Redeemer
%\begin{umlcall}[type=return, op={emit RedemptionIntentDeclared}]{OpenSTUtility}{Redeemer}
%\end{umlcall}
\end{umlcall}
\end{umlcall}
\end{umlfragment} % Facilitator submits claim for StakingIntentHash by presenting Merkle proof
\begin{umlcall}[dt=90, op={: proveOpenST(blockHeightUC, rlpEncodedAccount, rlpParentNodes)}, return={emit OpenSTProven(blockHeightUC, storageRootUC, hashedAccount)}]{Facilitator}{CoreUC}
% OpenSTUtility checks StakingIntentHash against committed state root
% \begin{umlcall}[op={: getStateRoot(blockHeight)}, return={stateRoot @ blockHeight}]{OpenSTUtility}{CoreVC}
%\end{umlcall}
% OpenSTUtility validate merkle proof
\begin{umlcallself}[op={require(verify(hashedAccount, encodedOpenSTRemotePath, rlpParentNodes, stateRootUC))}]{CoreUC}
\end{umlcallself}
% OpenSTUtility store valid StakingIntentHash
\begin{umlcallself}[op={store latest valid storageRootUC}]{CoreUC}
\end{umlcallself}
\end{umlcall}
% confirm redemption intent begins on UC side
% beneficiary on VC side calls confirm redemption intent on openstvalue
\begin{umlcall}[dt= 45, op={: confirmRedemptionIntent(redeemer, redeemerNonce, beneficiary, amountUT, redemptionUnlockHeight, blockHeight, hashLock, rlpParentNodes)}]{OstDotCom}{OpenSTValue}
%require redemption unlock height > 0
\begin{umlcallself}[op={require(redemptionUnlockHeight > 0)}]{OpenSTValue}
\end{umlcallself}
%require that the redeemer nonce +1 == redeemer nonce passed
% \begin{umlcallself}[op={require(nonces[redeemer] +1 = redeemerNonce)}]{OpenSTValue}
%\end{umlcallself}
%calculate amountST from amountUT
\begin{umlcallself}[op={calculate amountST = (amountUT*conversionRate)}]{OpenSTValue}
\end{umlcallself}
%make sure there is more balance than what is calculated
%\begin{umlcallself}[op= {require balanceOf(UT.SimpleStake)>= amountST}]{OpenSTValue}
%\end{umlcallself}
%require verify Redemption hash storage
\begin{umlcallself}[op={require verifyRedemptionIntentHashStorage(uuid, redeemer, redeemerNonce, blockHeight, redemptionIntentHash, rlpParentNodes)}]{OpenSTValue}
\end{umlcallself}
% get storage root from UC side
\begin{umlcall}[op= {: getStorageRoot(blockHeight)}, return = {storageRoot}]{OpenSTValue}{CoreVC}
\end{umlcall}
%verify intent storage on VC side
\begin{umlcallself}[op= {require verifyIntentStorage(redeemer, redeemerNonce, storageRoot, redemptionIntentHash, rlpParentNodes)}]{OpenSTValue}
\end{umlcallself}
% store the unstakes in the unstakes struct with the redemptionhash as key
\begin{umlcallself}[op={store unstakes[redemptionIntentHash]}]{OpenSTValue}
\end{umlcallself}
%emit the remeption Intent confirmed event
\begin{umlcall}[type=return, op={emit RedemptionIntentConfirmed(redemptionIntentHash, redeemer, beneficiary, amountST, amountUT, expirationHeight)}]{OpenSTValue}{OstDotCom}
%emit the remeption Intent confirmed event
%\begin{umlcall}[type= return]{OpenSTValue}{Utility Chain}
%\end{umlcall}
\end{umlcall}
\end{umlcall}
\begin{umlfragment}[type=alt, label=proceed, name=policy, inner xsep=2]
% process redeem should begin now on UC side
% redeemer calls on process redeeming on openstutility
\begin{umlcall}[dt=100, op={: processRedeeming(redemptionIntentHash, unlockSecret)}]{Redeemer}{OpenSTUtility}
% require hashlock is equal to hash of unlock secret
\begin{umlcallself}[op={require (hashLock = H(unlockSecret))}]{OpenSTUtility}
\end{umlcallself}
% burn the UT tokens
\begin{umlcall}[fill=green!20, op={: burn(redeemer, amountUT)}]{OpenSTUtility}{Branded Token}
\end{umlcall}
%emit the process Redemption event
\begin{umlcall}[type= return, op={emit ProcessedRedemption(uuid, brandedToken, redeemer, beneficiary, amountUT, unlockSecret)}]{OpenSTUtility}{OstDotCom}
\end{umlcall}
%delete the redemptions struct from storage
\begin{umlcallself}[op={delete redemptions[redemptionIntentHash]}]{OpenSTUtility}
\end{umlcallself}
%delete the redemptions struct from storage
\begin{umlcallself}[op={delete intents[H(redeemer, nonce)]}]{OpenSTUtility}
\end{umlcallself}
\end{umlcall}
%revert unstaking could be initiated from the redeemer side
% beneficiary calls on processUnstaking with redemption intent hash
\begin{umlcall}[dt= 38, op={: processUnstaking(redemptionIntentHash, unlockSecret)}]{OstDotCom}{OpenSTValue}
% require that the hashlock is the same
\begin{umlcallself}[op={require(H(unlockSecret) = hashLock)}]{OpenSTValue}
\end{umlcallself}
%expiration height require
\begin{umlcallself}[op={require unstake expirationHeight > block number}]{OpenSTValue}
\end{umlcallself}
%call on simple stake from openSTValue to relase funds
\begin{umlcall}[op={: releaseTo(beneficiary, amountST)}]{OpenSTValue}{SimpleStake}
%release to transfers funds to Beneficiary
\begin{umlcall}[op={: transfer(beneficiary, amountST)}]{SimpleStake}{OST}
\end{umlcall}
%transfer the amount
\begin{umlcall}[type=return, fill=green!20, op={<<OST(amountST)>>}]{SimpleStake}{OST}
\begin{umlcall}[type=return, fill=green!20, op={<<OST(amountST)>>}]{OST}{Beneficiary}
\end{umlcall}
%emit event Processed Unstake
\begin{umlcall}[dt= 21, type=return, op={emit ProcessedUnstake(uuid, redemptionIntentHash, stakeAddress, redeemer, beneficiary, amountST, unlockSecret)}]{OpenSTValue}{OstDotCom}
\end{umlcall}
%\begin{umlcall}[type=return, op={emit ProcessedUnstake}]{OpenSTValue}{Beneficiary}
%\end{umlcall}
%delete the storage for unstakes
\begin{umlcallself}[op={delete unstakes [redemptionIntentHash]}]{OpenSTValue}
\end{umlcallself}
\end{umlcall}
\end{umlcall}
\end{umlcall}
\umlfpart[revert]
%revert unstaking could be initiated from the redeemer side
% beneficiary calls on processUnstaking with redemption intent hash
\begin{umlcall}[dt= 10, op={: revertUnstaking(redemptionIntentHash)}]{OstDotCom}{OpenSTValue}
% require that the hashlock is the same
\begin{umlcallself}[op={require(expirationHeight <= block.number)}]{OpenSTValue}
\end{umlcallself}
%expiration height require
%\begin{umlcallself}[op={require unstake expirationHeight > block number} ]{OpenSTValue}
%\end{umlcallself}
%call on simple stake from openSTValue to relase funds
%\begin{umlcall}[op={: releaseTo(beneficiary, amountST)}]{OpenSTValue}{SimpleStake}
%delete the storage for unstakes
\begin{umlcallself}[op={delete unstakes[redemptionIntentHash]}]{OpenSTValue}
\end{umlcallself}
%release to transfers funds to Beneficiary
%\begin{umlcall}[dt=16, op={: transfer(beneficiary, amountST)}]{OpenSTValue}{SimpleStake}
%transfer the amount
%\begin{umlcall}[type=return, fill=green!20, op={<<OST(amountST)>>}]{SimpleStake}{Beneficiary}
%emit event Processed Unstake
\begin{umlcall}[type=return, dt= 10, op={emit RevertedUnstake(uuid, redemptionIntentHash, redeemer, beneficiary, amountST)}]{OpenSTValue}{OstDotCom}
\end{umlcall}
%\begin{umlcall}[type=return, dt= 10, op={emit RevertedUnstake}]{OpenSTValue}{OstDotCom}
%\end{umlcall}
%\end{umlcall}
\end{umlcall}
\end{umlfragment}
\end{umlseqdiag}
\end{tikzpicture}
\end{document}
| {
"pile_set_name": "Github"
} |
// +build !ignore_autogenerated
/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// This file was autogenerated by deepcopy-gen. Do not edit it manually!
package v1beta1
import (
core_v1 "k8s.io/api/core/v1"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
runtime "k8s.io/apimachinery/pkg/runtime"
intstr "k8s.io/apimachinery/pkg/util/intstr"
)
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ControllerRevision) DeepCopyInto(out *ControllerRevision) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
	in.Data.DeepCopyInto(&out.Data)
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ControllerRevision.
// Returns nil when the receiver is nil, so it is safe to call on a nil pointer.
func (in *ControllerRevision) DeepCopy() *ControllerRevision {
	if in == nil {
		return nil
	}
	out := new(ControllerRevision)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
// NOTE: redundant else-after-return dropped (golint indent-error-flow); behavior unchanged.
func (in *ControllerRevision) DeepCopyObject() runtime.Object {
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ControllerRevisionList) DeepCopyInto(out *ControllerRevisionList) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	out.ListMeta = in.ListMeta
	if in.Items != nil {
		// Rebuild the slice element by element so the copy shares no backing storage.
		in, out := &in.Items, &out.Items
		*out = make([]ControllerRevision, len(*in))
		for i := range *in {
			(*in)[i].DeepCopyInto(&(*out)[i])
		}
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ControllerRevisionList.
// Returns nil when the receiver is nil.
func (in *ControllerRevisionList) DeepCopy() *ControllerRevisionList {
	if in == nil {
		return nil
	}
	out := new(ControllerRevisionList)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
// NOTE: redundant else-after-return dropped (golint indent-error-flow); behavior unchanged.
func (in *ControllerRevisionList) DeepCopyObject() runtime.Object {
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *Deployment) DeepCopyInto(out *Deployment) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
	in.Spec.DeepCopyInto(&out.Spec)
	in.Status.DeepCopyInto(&out.Status)
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Deployment.
// Returns nil when the receiver is nil.
func (in *Deployment) DeepCopy() *Deployment {
	if in == nil {
		return nil
	}
	out := new(Deployment)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
// NOTE: redundant else-after-return dropped (golint indent-error-flow); behavior unchanged.
func (in *Deployment) DeepCopyObject() runtime.Object {
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// The value assignment copies all scalar fields; the two timestamp fields are
// then deep-copied explicitly via their own DeepCopyInto methods.
func (in *DeploymentCondition) DeepCopyInto(out *DeploymentCondition) {
*out = *in
in.LastUpdateTime.DeepCopyInto(&out.LastUpdateTime)
in.LastTransitionTime.DeepCopyInto(&out.LastTransitionTime)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DeploymentCondition.
// Returns nil when the receiver is nil, so it is safe to call on a nil pointer.
func (in *DeploymentCondition) DeepCopy() *DeploymentCondition {
if in == nil {
return nil
}
out := new(DeploymentCondition)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *DeploymentList) DeepCopyInto(out *DeploymentList) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	out.ListMeta = in.ListMeta
	if in.Items != nil {
		// Rebuild the slice element by element so the copy shares no backing storage.
		in, out := &in.Items, &out.Items
		*out = make([]Deployment, len(*in))
		for i := range *in {
			(*in)[i].DeepCopyInto(&(*out)[i])
		}
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DeploymentList.
// Returns nil when the receiver is nil.
func (in *DeploymentList) DeepCopy() *DeploymentList {
	if in == nil {
		return nil
	}
	out := new(DeploymentList)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
// NOTE: redundant else-after-return dropped (golint indent-error-flow); behavior unchanged.
func (in *DeploymentList) DeepCopyObject() runtime.Object {
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *DeploymentRollback) DeepCopyInto(out *DeploymentRollback) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	if in.UpdatedAnnotations != nil {
		// Rebuild the map key by key so the copy shares no storage with the original.
		in, out := &in.UpdatedAnnotations, &out.UpdatedAnnotations
		*out = make(map[string]string, len(*in))
		for key, val := range *in {
			(*out)[key] = val
		}
	}
	out.RollbackTo = in.RollbackTo
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DeploymentRollback.
// Returns nil when the receiver is nil.
func (in *DeploymentRollback) DeepCopy() *DeploymentRollback {
	if in == nil {
		return nil
	}
	out := new(DeploymentRollback)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
// NOTE: redundant else-after-return dropped (golint indent-error-flow); behavior unchanged.
func (in *DeploymentRollback) DeepCopyObject() runtime.Object {
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// NOTE: the generator's inner `if *in == nil` branches were dead code — the outer
// field guard already proves the pointer is non-nil — and have been removed,
// matching the output of newer deepcopy-gen versions. Behavior is unchanged.
func (in *DeploymentSpec) DeepCopyInto(out *DeploymentSpec) {
	*out = *in
	if in.Replicas != nil {
		in, out := &in.Replicas, &out.Replicas
		*out = new(int32)
		**out = **in
	}
	if in.Selector != nil {
		in, out := &in.Selector, &out.Selector
		*out = new(v1.LabelSelector)
		(*in).DeepCopyInto(*out)
	}
	in.Template.DeepCopyInto(&out.Template)
	in.Strategy.DeepCopyInto(&out.Strategy)
	if in.RevisionHistoryLimit != nil {
		in, out := &in.RevisionHistoryLimit, &out.RevisionHistoryLimit
		*out = new(int32)
		**out = **in
	}
	if in.RollbackTo != nil {
		in, out := &in.RollbackTo, &out.RollbackTo
		*out = new(RollbackConfig)
		**out = **in
	}
	if in.ProgressDeadlineSeconds != nil {
		in, out := &in.ProgressDeadlineSeconds, &out.ProgressDeadlineSeconds
		*out = new(int32)
		**out = **in
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DeploymentSpec.
// Returns nil when the receiver is nil.
func (in *DeploymentSpec) DeepCopy() *DeploymentSpec {
	if in == nil {
		return nil
	}
	out := new(DeploymentSpec)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// NOTE: the generator's dead inner nil check on CollisionCount was removed
// (the outer guard already proves non-nil); behavior is unchanged.
func (in *DeploymentStatus) DeepCopyInto(out *DeploymentStatus) {
	*out = *in
	if in.Conditions != nil {
		in, out := &in.Conditions, &out.Conditions
		*out = make([]DeploymentCondition, len(*in))
		for i := range *in {
			(*in)[i].DeepCopyInto(&(*out)[i])
		}
	}
	if in.CollisionCount != nil {
		in, out := &in.CollisionCount, &out.CollisionCount
		*out = new(int32)
		**out = **in
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DeploymentStatus.
// Returns nil when the receiver is nil.
func (in *DeploymentStatus) DeepCopy() *DeploymentStatus {
	if in == nil {
		return nil
	}
	out := new(DeploymentStatus)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// NOTE: the generator's dead inner nil check on RollingUpdate was removed
// (the outer guard already proves non-nil); behavior is unchanged.
func (in *DeploymentStrategy) DeepCopyInto(out *DeploymentStrategy) {
	*out = *in
	if in.RollingUpdate != nil {
		in, out := &in.RollingUpdate, &out.RollingUpdate
		*out = new(RollingUpdateDeployment)
		(*in).DeepCopyInto(*out)
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DeploymentStrategy.
// Returns nil when the receiver is nil.
func (in *DeploymentStrategy) DeepCopy() *DeploymentStrategy {
	if in == nil {
		return nil
	}
	out := new(DeploymentStrategy)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// A plain value assignment is a complete deep copy here: deepcopy-gen emitted no
// per-field copies, which indicates the struct presumably holds no pointer,
// slice, or map fields — confirm against the type definition if it changes.
func (in *RollbackConfig) DeepCopyInto(out *RollbackConfig) {
*out = *in
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RollbackConfig.
// Returns nil when the receiver is nil, so it is safe to call on a nil pointer.
func (in *RollbackConfig) DeepCopy() *RollbackConfig {
if in == nil {
return nil
}
out := new(RollbackConfig)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// NOTE: the generator's dead inner nil checks were removed (the outer guards
// already prove non-nil); behavior is unchanged.
func (in *RollingUpdateDeployment) DeepCopyInto(out *RollingUpdateDeployment) {
	*out = *in
	if in.MaxUnavailable != nil {
		in, out := &in.MaxUnavailable, &out.MaxUnavailable
		*out = new(intstr.IntOrString)
		**out = **in
	}
	if in.MaxSurge != nil {
		in, out := &in.MaxSurge, &out.MaxSurge
		*out = new(intstr.IntOrString)
		**out = **in
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RollingUpdateDeployment.
// Returns nil when the receiver is nil.
func (in *RollingUpdateDeployment) DeepCopy() *RollingUpdateDeployment {
	if in == nil {
		return nil
	}
	out := new(RollingUpdateDeployment)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// NOTE: the generator's dead inner nil check on Partition was removed
// (the outer guard already proves non-nil); behavior is unchanged.
func (in *RollingUpdateStatefulSetStrategy) DeepCopyInto(out *RollingUpdateStatefulSetStrategy) {
	*out = *in
	if in.Partition != nil {
		in, out := &in.Partition, &out.Partition
		*out = new(int32)
		**out = **in
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RollingUpdateStatefulSetStrategy.
// Returns nil when the receiver is nil.
func (in *RollingUpdateStatefulSetStrategy) DeepCopy() *RollingUpdateStatefulSetStrategy {
	if in == nil {
		return nil
	}
	out := new(RollingUpdateStatefulSetStrategy)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *Scale) DeepCopyInto(out *Scale) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
	out.Spec = in.Spec
	in.Status.DeepCopyInto(&out.Status)
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Scale.
// Returns nil when the receiver is nil.
func (in *Scale) DeepCopy() *Scale {
	if in == nil {
		return nil
	}
	out := new(Scale)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
// NOTE: redundant else-after-return dropped (golint indent-error-flow); behavior unchanged.
func (in *Scale) DeepCopyObject() runtime.Object {
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// A plain value assignment is a complete deep copy here: deepcopy-gen emitted no
// per-field copies, which indicates the struct presumably holds no pointer,
// slice, or map fields — confirm against the type definition if it changes.
func (in *ScaleSpec) DeepCopyInto(out *ScaleSpec) {
*out = *in
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScaleSpec.
// Returns nil when the receiver is nil, so it is safe to call on a nil pointer.
func (in *ScaleSpec) DeepCopy() *ScaleSpec {
if in == nil {
return nil
}
out := new(ScaleSpec)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// The Selector map is rebuilt key by key so the copy shares no storage with the original.
func (in *ScaleStatus) DeepCopyInto(out *ScaleStatus) {
*out = *in
if in.Selector != nil {
in, out := &in.Selector, &out.Selector
*out = make(map[string]string, len(*in))
for key, val := range *in {
(*out)[key] = val
}
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScaleStatus.
// Returns nil when the receiver is nil, so it is safe to call on a nil pointer.
func (in *ScaleStatus) DeepCopy() *ScaleStatus {
if in == nil {
return nil
}
out := new(ScaleStatus)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *StatefulSet) DeepCopyInto(out *StatefulSet) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
	in.Spec.DeepCopyInto(&out.Spec)
	in.Status.DeepCopyInto(&out.Status)
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StatefulSet.
// Returns nil when the receiver is nil.
func (in *StatefulSet) DeepCopy() *StatefulSet {
	if in == nil {
		return nil
	}
	out := new(StatefulSet)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
// NOTE: redundant else-after-return dropped (golint indent-error-flow); behavior unchanged.
func (in *StatefulSet) DeepCopyObject() runtime.Object {
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// The value assignment copies all scalar fields; the timestamp field is then
// deep-copied explicitly via its own DeepCopyInto method.
func (in *StatefulSetCondition) DeepCopyInto(out *StatefulSetCondition) {
*out = *in
in.LastTransitionTime.DeepCopyInto(&out.LastTransitionTime)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StatefulSetCondition.
// Returns nil when the receiver is nil, so it is safe to call on a nil pointer.
func (in *StatefulSetCondition) DeepCopy() *StatefulSetCondition {
if in == nil {
return nil
}
out := new(StatefulSetCondition)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *StatefulSetList) DeepCopyInto(out *StatefulSetList) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	out.ListMeta = in.ListMeta
	if in.Items != nil {
		// Rebuild the slice element by element so the copy shares no backing storage.
		in, out := &in.Items, &out.Items
		*out = make([]StatefulSet, len(*in))
		for i := range *in {
			(*in)[i].DeepCopyInto(&(*out)[i])
		}
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StatefulSetList.
// Returns nil when the receiver is nil.
func (in *StatefulSetList) DeepCopy() *StatefulSetList {
	if in == nil {
		return nil
	}
	out := new(StatefulSetList)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
// NOTE: redundant else-after-return dropped (golint indent-error-flow); behavior unchanged.
func (in *StatefulSetList) DeepCopyObject() runtime.Object {
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *StatefulSetSpec) DeepCopyInto(out *StatefulSetSpec) {
	*out = *in
	// Each pointer field is re-allocated only when non-nil; the shallow copy
	// above already left nil pointers as nil.
	if in.Replicas != nil {
		out.Replicas = new(int32)
		*out.Replicas = *in.Replicas
	}
	if in.Selector != nil {
		out.Selector = new(v1.LabelSelector)
		in.Selector.DeepCopyInto(out.Selector)
	}
	in.Template.DeepCopyInto(&out.Template)
	if in.VolumeClaimTemplates != nil {
		claims := make([]core_v1.PersistentVolumeClaim, len(in.VolumeClaimTemplates))
		for i := range in.VolumeClaimTemplates {
			in.VolumeClaimTemplates[i].DeepCopyInto(&claims[i])
		}
		out.VolumeClaimTemplates = claims
	}
	in.UpdateStrategy.DeepCopyInto(&out.UpdateStrategy)
	if in.RevisionHistoryLimit != nil {
		out.RevisionHistoryLimit = new(int32)
		*out.RevisionHistoryLimit = *in.RevisionHistoryLimit
	}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StatefulSetSpec.
func (in *StatefulSetSpec) DeepCopy() *StatefulSetSpec {
	if in == nil {
		return nil
	}
	var cloned StatefulSetSpec
	in.DeepCopyInto(&cloned)
	return &cloned
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *StatefulSetStatus) DeepCopyInto(out *StatefulSetStatus) {
	*out = *in
	// Re-allocate pointer fields only when non-nil; nil values were already
	// carried over by the shallow copy above.
	if in.ObservedGeneration != nil {
		out.ObservedGeneration = new(int64)
		*out.ObservedGeneration = *in.ObservedGeneration
	}
	if in.CollisionCount != nil {
		out.CollisionCount = new(int32)
		*out.CollisionCount = *in.CollisionCount
	}
	if in.Conditions != nil {
		conds := make([]StatefulSetCondition, len(in.Conditions))
		for i := range in.Conditions {
			in.Conditions[i].DeepCopyInto(&conds[i])
		}
		out.Conditions = conds
	}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StatefulSetStatus.
func (in *StatefulSetStatus) DeepCopy() *StatefulSetStatus {
	if in == nil {
		return nil
	}
	var cloned StatefulSetStatus
	in.DeepCopyInto(&cloned)
	return &cloned
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *StatefulSetUpdateStrategy) DeepCopyInto(out *StatefulSetUpdateStrategy) {
	*out = *in
	if in.RollingUpdate != nil {
		out.RollingUpdate = new(RollingUpdateStatefulSetStrategy)
		in.RollingUpdate.DeepCopyInto(out.RollingUpdate)
	}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StatefulSetUpdateStrategy.
func (in *StatefulSetUpdateStrategy) DeepCopy() *StatefulSetUpdateStrategy {
	if in == nil {
		return nil
	}
	var cloned StatefulSetUpdateStrategy
	in.DeepCopyInto(&cloned)
	return &cloned
}
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.olingo.server.core.serializer.utils;
import org.apache.olingo.commons.api.format.ContentType;
public class ContentTypeHelper {

  private ContentTypeHelper() {
    // Private constructor for utility classes
  }

  /**
   * Checks whether the given content type is JSON-compatible and carries the
   * given odata.metadata parameter value (compared case-insensitively).
   */
  private static boolean hasODataMetadataValue(final ContentType contentType, final String value) {
    return contentType.isCompatible(ContentType.APPLICATION_JSON)
        && value.equalsIgnoreCase(contentType.getParameter(ContentType.PARAMETER_ODATA_METADATA));
  }

  /** Returns whether the content type requests odata.metadata=none. */
  public static boolean isODataMetadataNone(final ContentType contentType) {
    return hasODataMetadataValue(contentType, ContentType.VALUE_ODATA_METADATA_NONE);
  }

  /** Returns whether the content type requests odata.metadata=full. */
  public static boolean isODataMetadataFull(final ContentType contentType) {
    return hasODataMetadataValue(contentType, ContentType.VALUE_ODATA_METADATA_FULL);
  }

  /** Returns whether the IEEE754Compatible parameter is set to "true" (case-insensitive). */
  public static boolean isODataIEEE754Compatible(final ContentType contentType) {
    return Boolean.TRUE.toString().equalsIgnoreCase(
        contentType.getParameter(ContentType.PARAMETER_IEEE754_COMPATIBLE));
  }
}
| {
"pile_set_name": "Github"
} |
# Server port
server:
port: 7007
# Data source configuration
spring:
datasource:
url: ${blade.datasource.test.url}
username: ${blade.datasource.test.username}
password: ${blade.datasource.test.password}
| {
"pile_set_name": "Github"
} |
var baseIteratee = require('./_baseIteratee'),
    baseMean = require('./_baseMean');

/**
 * This method is like `_.mean` except that it accepts `iteratee` which is
 * invoked for each element in `array` to generate the value to be averaged.
 * The iteratee is invoked with one argument: (value).
 *
 * @static
 * @memberOf _
 * @since 4.7.0
 * @category Math
 * @param {Array} array The array to iterate over.
 * @param {Function} [iteratee=_.identity] The iteratee invoked per element.
 * @returns {number} Returns the mean.
 * @example
 *
 * var objects = [{ 'n': 4 }, { 'n': 2 }, { 'n': 8 }, { 'n': 6 }];
 *
 * _.meanBy(objects, function(o) { return o.n; });
 * // => 5
 *
 * // The `_.property` iteratee shorthand.
 * _.meanBy(objects, 'n');
 * // => 5
 */
function meanBy(array, iteratee) {
  // Normalize the iteratee (function, property shorthand, etc.) once,
  // then delegate the averaging to the shared base implementation.
  var getValue = baseIteratee(iteratee, 2);
  return baseMean(array, getValue);
}

module.exports = meanBy;
| {
"pile_set_name": "Github"
} |
<?php
declare(strict_types=1);
namespace Matthewbdaly\SMS\Drivers;
use GuzzleHttp\ClientInterface as GuzzleClient;
use GuzzleHttp\Exception\ClientException;
use GuzzleHttp\Exception\ConnectException;
use GuzzleHttp\Exception\RequestException;
use GuzzleHttp\Exception\ServerException;
use Psr\Http\Message\ResponseInterface;
use Matthewbdaly\SMS\Contracts\Driver;
use Matthewbdaly\SMS\Exceptions\DriverNotConfiguredException;
/**
 * Driver for Twilio.
 */
class Twilio implements Driver
{
    /**
     * Guzzle client.
     *
     * @var GuzzleClient
     */
    protected $client;

    /**
     * Guzzle response.
     *
     * @var ResponseInterface
     */
    protected $response;

    /**
     * Account ID.
     *
     * @var string
     */
    private $accountId;

    /**
     * API Token.
     *
     * @var string
     */
    private $apiToken;

    /**
     * Constructor.
     *
     * @param GuzzleClient      $client   The Guzzle Client instance.
     * @param ResponseInterface $response The response instance.
     * @param array             $config   The configuration array.
     * @throws DriverNotConfiguredException Driver not configured correctly.
     *
     * @return void
     */
    public function __construct(GuzzleClient $client, ResponseInterface $response, array $config)
    {
        $this->client = $client;
        $this->response = $response;
        if (! array_key_exists('account_id', $config) || ! array_key_exists('api_token', $config)) {
            throw new DriverNotConfiguredException();
        }
        $this->accountId = $config['account_id'];
        $this->apiToken = $config['api_token'];
    }

    /**
     * Get driver name.
     *
     * @return string
     */
    public function getDriver(): string
    {
        return 'Twilio';
    }

    /**
     * Get endpoint URL.
     *
     * @return string
     */
    public function getEndpoint(): string
    {
        return "https://api.twilio.com/2010-04-01/Accounts/$this->accountId/Messages.json";
    }

    /**
     * Send the SMS.
     *
     * @param array $message An array containing the message.
     *
     * @throws \Matthewbdaly\SMS\Exceptions\ClientException Client exception.
     * @throws \Matthewbdaly\SMS\Exceptions\ServerException Server exception.
     * @throws \Matthewbdaly\SMS\Exceptions\RequestException Request exception.
     * @throws \Matthewbdaly\SMS\Exceptions\ConnectException Connect exception.
     *
     * @return boolean
     */
    public function sendRequest(array $message): bool
    {
        try {
            // Map the generic message keys onto Twilio's expected parameters.
            $cleanMessage = [];
            $cleanMessage['To'] = $message['to'];
            $cleanMessage['From'] = $message['from'];
            $cleanMessage['Body'] = $message['content'];
            // The response is not inspected: Guzzle throws on HTTP error
            // statuses, so reaching the return below means the request
            // completed. (Previously the response was bound to an unused
            // local variable.)
            $this->client->request('POST', $this->getEndpoint(), [
                'form_params' => $cleanMessage,
                'auth' => [
                    $this->accountId,
                    $this->apiToken,
                ],
            ]);
        } catch (ClientException $e) {
            throw new \Matthewbdaly\SMS\Exceptions\ClientException();
        } catch (ServerException $e) {
            throw new \Matthewbdaly\SMS\Exceptions\ServerException();
        } catch (ConnectException $e) {
            throw new \Matthewbdaly\SMS\Exceptions\ConnectException();
        } catch (RequestException $e) {
            throw new \Matthewbdaly\SMS\Exceptions\RequestException();
        }
        return true;
    }
}
| {
"pile_set_name": "Github"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.