text
stringlengths 2
1.04M
| meta
dict |
---|---|
from .base import NewRelicBaseClient
from .exceptions import ItemAlreadyExistsError, ItemNotFoundError
from .synthetics import SyntheticsClient
class AlertClient(NewRelicBaseClient):
    """Client for the New Relic v2 Alerts REST API.

    Wraps CRUD operations for alert policies and for the Synthetics
    alert conditions attached to those policies.
    """

    def __init__(
        self, api_key,
        base_url='https://api.newrelic.com',
        timeout=10
    ):
        """
        :param api_key: New Relic REST API key
        :param base_url: base URL of the New Relic API endpoint
        :param timeout: HTTP timeout in seconds applied to every request
        """
        super(AlertClient, self).__init__(api_key, base_url, timeout)

    def get_alert_policies(self, name=None):
        """Return alert policies, optionally filtered by name.

        :param name: if given, ask the server to filter policies by this
            name via the ``filter[name]`` query parameter
        :returns: list of policy dicts
        """
        url = '{}/v2/alerts_policies.json'.format(self.base_url)
        if name:
            # Pass the filter as a dict so the HTTP layer URL-encodes
            # names containing spaces or special characters; the old
            # hand-built "filter[name]=..." string was sent unencoded.
            payload = {'filter[name]': name}
            r = self._get(
                url,
                headers=self.default_headers,
                timeout=self.timeout,
                params=payload
            )
        else:
            r = self._get(
                url,
                headers=self.default_headers,
                timeout=self.timeout
            )
        res = r.json()['policies']
        return res

    def create_alert_policy(
        self,
        policy_name,
        incident_preference=None,
        check_unique=True
    ):
        """Create an alert policy and return it.

        :param policy_name: name for the new policy
        :param incident_preference: optional incident rollup preference,
            forwarded verbatim to the API when given
        :param check_unique: New Relic allows multiple policies with the
            same name; when True, refuse to create a duplicate
        :raises ItemAlreadyExistsError: if ``check_unique`` is True and a
            policy with this name already exists
        :returns: the created policy dict
        """
        # New Relic API allows creating multiple alerts policies
        # with the same name. Give a possibility to disallow this in client
        if check_unique:
            policies = self.get_alert_policies(policy_name)
            if policies:
                raise ItemAlreadyExistsError(
                    'Alert policy with name "{}" already exists'
                    .format(policy_name)
                )
        url = '{}/v2/alerts_policies.json'.format(self.base_url)
        payload = {
            'policy': {
                'name': policy_name
            }
        }
        if incident_preference is not None:
            payload['policy']['incident_preference'] = incident_preference
        res = self._post(
            url,
            headers=self.default_headers,
            timeout=self.timeout,
            json=payload
        )
        return res.json()['policy']

    def delete_alert_policy(self, policy_name):
        """Delete the first alert policy matching ``policy_name``.

        :raises ItemNotFoundError: if no policy with that name exists
        """
        try:
            policy = self.get_alert_policies(policy_name)[0]
        except IndexError:
            raise ItemNotFoundError(
                'Alert Policy with name "{}" not found'.format(policy_name)
            )
        url = '{}/v2/alerts_policies/{}.json'.format(
            self.base_url, policy['id']
        )
        self._delete(
            url,
            headers=self.default_headers,
            timeout=self.timeout,
        )

    def get_alert_conditions(self, policy_name):
        """Return all alert conditions of the named policy.

        :raises ItemNotFoundError: if no policy with that name exists
        :returns: dict keyed by condition type
            (e.g. ``synthetics_conditions``)
        """
        try:
            policy = self.get_alert_policies(policy_name)[0]
        except IndexError:
            raise ItemNotFoundError(
                'Alert policy with name "{}" not found'.format(policy_name)
            )
        url = '{}/v2/alerts_synthetics_conditions.json'.format(self.base_url)
        # Dict form so the policy id is URL-encoded consistently with the
        # other requests made by this client.
        payload = {'policy_id': policy['id']}
        r = self._get(
            url,
            headers=self.default_headers,
            timeout=self.timeout,
            params=payload
        )
        return r.json()

    def create_synthetics_alert_condition(
        self,
        policy_name,
        condition_name,
        monitor_name,
        runbook_url=None,
        enabled=False,
        check_unique=True
    ):
        """Attach a Synthetics alert condition to a policy.

        :param policy_name: policy to attach the condition to
        :param condition_name: name for the new condition
        :param monitor_name: Synthetics monitor the condition watches
        :param runbook_url: optional runbook link stored on the condition
        :param enabled: whether the condition starts out enabled
        :param check_unique: when True, refuse to create a second
            condition for the same monitor within this policy
        :raises ItemNotFoundError: if the policy or monitor is missing
        :raises ItemAlreadyExistsError: if ``check_unique`` is True and a
            condition for this monitor already exists in the policy
        """
        try:
            policy = self.get_alert_policies(policy_name)[0]
        except IndexError:
            raise ItemNotFoundError(
                'Alert policy with name "{}" not found'.format(policy_name)
            )
        synthetics = SyntheticsClient(self.api_key)
        monitor = synthetics.get_monitor_by_name(monitor_name)
        if not monitor:
            raise ItemNotFoundError(
                'Monitor with name "{}" not found'.format(monitor_name)
            )
        # New Relic API allows creating multiple alerts conditions
        # from the same monitor using the same alert policy
        # to avoid creating lots of duplicate entries - disallow this
        if check_unique:
            alert_conditions = self.get_alert_conditions(policy_name)
            # we are only interested in synthetics conditions
            try:
                synth_conditions = alert_conditions['synthetics_conditions']
            except KeyError:
                # we don't have any alert conditions for synthetics
                # no duplicates then
                pass
            else:
                for condition in synth_conditions:
                    if condition['monitor_id'] == monitor['id']:
                        raise ItemAlreadyExistsError(
                            'Synthetics Alert Condition for monitor "{}" '
                            'is already present in policy "{}" with name "{}"'
                            .format(
                                monitor_name,
                                policy_name,
                                condition['name']
                            )
                        )
        url = (
            '{}/v2/alerts_synthetics_conditions/policies/{}.json'
            .format(self.base_url, policy['id'])
        )
        payload = {
            'synthetics_condition': {
                'name': condition_name,
                'monitor_id': monitor['id'],
                'enabled': enabled
            }
        }
        if runbook_url:
            payload['synthetics_condition']['runbook_url'] = runbook_url
        self._post(
            url,
            headers=self.default_headers,
            timeout=self.timeout,
            json=payload
        )

    def delete_synthetics_alert_conditions(self, policy_name, monitor_name):
        """Deletes all synthetics alert conditions that match pair
        policy_name:monitor_name

        Returns count of conditions deleted

        :raises ItemNotFoundError: if the monitor does not exist
        """
        synthetics = SyntheticsClient(self.api_key)
        monitor = synthetics.get_monitor_by_name(monitor_name)
        if not monitor:
            raise ItemNotFoundError(
                'Monitor with name "{}" not found'.format(monitor_name)
            )
        alert_conditions_deleted = 0
        alert_conditions = self.get_alert_conditions(policy_name)
        # we are only interested in synthetics conditions
        try:
            synthetics_conditions = alert_conditions['synthetics_conditions']
        except KeyError:
            # we don't have any alert conditions for synthetics
            # nothing to delete then
            pass
        else:
            for condition in synthetics_conditions:
                if condition['monitor_id'] == monitor['id']:
                    url = (
                        '{}/v2/alerts_synthetics_conditions/{}.json'
                        .format(self.base_url, condition['id'])
                    )
                    self._delete(
                        url,
                        headers=self.default_headers,
                        timeout=self.timeout
                    )
                    alert_conditions_deleted += 1
        return alert_conditions_deleted
| {
"content_hash": "ec2ce80cc528f9ef379d0ba1cf618e0f",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 78,
"avg_line_length": 34.229268292682924,
"alnum_prop": 0.5178851360980476,
"repo_name": "NativeInstruments/newrelic-cli",
"id": "fa7c34dbeabcfcba1e467b470f0416747ce0a2a6",
"size": "7017",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "newrelic_cli/alerts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "30"
},
{
"name": "Python",
"bytes": "77591"
},
{
"name": "Smarty",
"bytes": "63"
}
],
"symlink_target": ""
} |
[](https://travis-ci.org/Atanas%20Dragolov/PXQLinkedIn)
[](http://cocoapods.org/pods/PXQLinkedIn)
[](http://cocoapods.org/pods/PXQLinkedIn)
[](http://cocoapods.org/pods/PXQLinkedIn)
## Usage
To run the example project, clone the repo, and run `pod install` from the Example directory first.
## Requirements
## Installation
PXQLinkedIn is available through [CocoaPods](http://cocoapods.org). To install
it, simply add the following line to your Podfile:
```ruby
pod "PXQLinkedIn"
```
## Author
Atanas Dragolov, [email protected]
## License
PXQLinkedIn is available under the MIT license. See the LICENSE file for more info.
| {
"content_hash": "6182d15f79376f75e6551f17e7c58166",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 136,
"avg_line_length": 34.888888888888886,
"alnum_prop": 0.7600849256900213,
"repo_name": "saorsa/saorsa.PXQLinkedIn",
"id": "33d83aa145171b53ab0ae3f5a32d8fe561db3dec",
"size": "957",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "7548"
},
{
"name": "Objective-C",
"bytes": "476221"
},
{
"name": "Ruby",
"bytes": "1646"
},
{
"name": "Shell",
"bytes": "8344"
}
],
"symlink_target": ""
} |
Golang Appengine
The basic app consists of a basic REST endpoint returning {message:"Hello, World!"} and responds to the /hello endpoint.
go run server.go
http://localhost:3001/hello | {
"content_hash": "0f698478387dd25b7c96147d0417b700",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 109,
"avg_line_length": 24.857142857142858,
"alnum_prop": 0.7701149425287356,
"repo_name": "gibraltargolang/random_hacks_01",
"id": "de7a34f14786b59c0f23d90135695ff44e96e576",
"size": "174",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "golang/appengine/readme.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9300"
},
{
"name": "Go",
"bytes": "1099"
},
{
"name": "Java",
"bytes": "5802"
},
{
"name": "JavaScript",
"bytes": "2089"
},
{
"name": "Scala",
"bytes": "10993"
},
{
"name": "XSLT",
"bytes": "43014"
}
],
"symlink_target": ""
} |
from __future__ import print_function, division
import matplotlib
import logging
from sys import stdout
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import (Net, RealApplianceSource,
BLSTMLayer, DimshuffleLayer,
BidirectionalRecurrentLayer)
from neuralnilm.source import standardise, discretize, fdiff, power_and_fdiff
from neuralnilm.experiment import run_experiment, init_experiment
from neuralnilm.net import TrainingError
from neuralnilm.layers import MixtureDensityLayer
from neuralnilm.objectives import (scaled_cost, mdn_nll,
scaled_cost_ignore_inactive, ignore_inactive,
scaled_cost3)
from neuralnilm.plot import MDNPlotter
from lasagne.nonlinearities import sigmoid, rectify, tanh
from lasagne.objectives import mse
from lasagne.init import Uniform, Normal
from lasagne.layers import (LSTMLayer, DenseLayer, Conv1DLayer,
ReshapeLayer, FeaturePoolLayer, RecurrentLayer)
from lasagne.updates import nesterov_momentum, momentum
from functools import partial
import os
import __main__
from copy import deepcopy
from math import sqrt
import numpy as np
import theano.tensor as T
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
SAVE_PLOT_INTERVAL = 5000
GRADIENT_STEPS = 100
source_dict = dict(
filename='/data/dk3810/ukdale.h5',
appliances=[
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television',
'dish washer',
['washer dryer', 'washing machine']
],
max_appliance_powers=[300, 500, 200, 2500, 2400],
on_power_thresholds=[5] * 5,
max_input_power=5900,
min_on_durations=[60, 60, 60, 1800, 1800],
min_off_durations=[12, 12, 12, 1800, 600],
window=("2013-06-01", "2014-07-01"),
seq_length=512,
output_one_appliance=False,
boolean_targets=False,
train_buildings=[1],
validation_buildings=[1],
skip_probability=0.7,
one_target_per_seq=False,
n_seq_per_batch=16,
subsample_target=2,
include_diff=False,
clip_appliance_power=True,
target_is_prediction=False,
# independently_center_inputs = True,
standardise_input=True,
unit_variance_targets=True,
input_padding=8,
lag=0
# reshape_target_to_2D=True,
# input_stats={'mean': np.array([ 0.05526326], dtype=np.float32),
# 'std': np.array([ 0.12636775], dtype=np.float32)},
# target_stats={
# 'mean': np.array([ 0.04066789, 0.01881946,
# 0.24639061, 0.17608672, 0.10273963],
# dtype=np.float32),
# 'std': np.array([ 0.11449792, 0.07338708,
# 0.26608968, 0.33463112, 0.21250485],
# dtype=np.float32)}
)
N = 50
net_dict = dict(
save_plot_interval=SAVE_PLOT_INTERVAL,
# loss_function=partial(ignore_inactive, loss_func=mdn_nll, seq_length=SEQ_LENGTH),
# loss_function=lambda x, t: mdn_nll(x, t).mean(),
# loss_function=lambda x, t: mse(x, t).mean(),
# loss_function=partial(scaled_cost, loss_func=mse),
# loss_function=ignore_inactive,
loss_function=partial(scaled_cost3, ignore_inactive=False),
updates_func=momentum,
learning_rate=1e-1,
learning_rate_changes_by_iteration={
1000: 1e-2,
2000: 1e-3,
10000: 1e-4
# 500: 1e-3
# 4000: 1e-03,
# 6000: 5e-06,
# 7000: 1e-06
# 2000: 5e-06
# 3000: 1e-05
# 7000: 5e-06,
# 10000: 1e-06,
# 15000: 5e-07,
# 50000: 1e-07
},
do_save_activations=True,
auto_reshape=False
# plotter=MDNPlotter
)
"""
||||||||||
||||||||||
||||||||||
||||||||||
||||||||||
||||||||||
12345678901234567890
"""
def exp_a(name):
    """Build experiment 'a': a 1D-conv front end followed by three
    dense rectifier layers and a softplus output.

    :param name: experiment name used to label saved artefacts
    :returns: an un-trained Net instance
    """
    global source
    # NOTE(review): `source` is never assigned in this function — the
    # creation lines below are commented out, so this raises NameError
    # unless `source` is defined elsewhere (e.g. an earlier interactive
    # run) — TODO confirm intended usage.
    # source_dict_copy = deepcopy(source_dict)
    # source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    # Local widths; N here shadows the module-level N = 50.
    N = 1024
    NUM_FILTERS = 50
    net_dict_copy['layers_config'] = [
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time)
        },
        {
            'type': Conv1DLayer,  # convolve over the time axis
            'num_filters': NUM_FILTERS,
            'filter_length': 10,
            'stride': 2,
            'nonlinearity': rectify,
            # Scale initial weights by input length (He-style heuristic).
            'W': Normal(std=1/sqrt(source.seq_length))
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # back to (batch, time, features)
        },
        {
            'type': DenseLayer,
            'num_units': N,
            'W': Normal(std=1/sqrt(N * NUM_FILTERS)),
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': N,
            'W': Normal(std=1/sqrt(N)),
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': N,
            'W': Normal(std=1/sqrt(N)),
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            # Flattened output: one unit per (time step, appliance).
            'num_units': source.output_shape()[1] * source.output_shape()[2],
            'W': Normal(std=1/sqrt(N)),
            # softplus keeps predicted power values non-negative
            'nonlinearity': T.nnet.softplus
        }
    ]
    net = Net(**net_dict_copy)
    return net
def main():
    """Run each configured experiment letter in sequence.

    Each experiment is initialised via init_experiment (which returns
    the source line to evaluate), run until interrupted, and logged
    under its full name.
    """
    # Extend to list('abcdefghijklmnopqrstuvwxyz') to run every variant.
    experiment_ids = list('a')
    for suffix in experiment_ids:
        full_exp_name = NAME + suffix
        func_call = init_experiment(PATH, suffix, full_exp_name)
        logger = logging.getLogger(full_exp_name)
        try:
            # init_experiment hands back the constructor call to evaluate.
            run_experiment(eval(func_call), epochs=None)
        except KeyboardInterrupt:
            logger.info("KeyboardInterrupt")
            break
        except Exception:
            logger.exception("Exception")
            raise
        finally:
            logging.shutdown()


if __name__ == "__main__":
    main()
| {
"content_hash": "f8b03abcbc3004796ea02a2a0d2110de",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 86,
"avg_line_length": 31.00995024875622,
"alnum_prop": 0.5725974651050858,
"repo_name": "mmottahedi/neuralnilm_prototype",
"id": "8d27f232604f49dc75f6c2a0ac6c57117e57c4c8",
"size": "6233",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scripts/e343.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4536723"
}
],
"symlink_target": ""
} |
package com.greenpepper.confluence.utils.stylesheet;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.atlassian.confluence.importexport.resource.ResourceAccessor;
import com.atlassian.confluence.plugin.webresource.ConfluenceWebResourceManager;
import com.atlassian.confluence.spaces.Space;
import com.atlassian.confluence.themes.Theme;
import com.atlassian.confluence.themes.ThemeManager;
import com.atlassian.confluence.util.GeneralUtil;
import com.atlassian.confluence.util.velocity.VelocityUtils;
import com.atlassian.spring.container.ContainerManager;
import com.greenpepper.confluence.velocity.ConfluenceGreenPepper;
/**
 * Stylesheet extractor for Confluence versions from 2.8+.
 *
 * Renders the combined CSS (global + space theme) for a space, always
 * prepending the raw tables.css content so tables keep their borders
 * even when the {@code @import} URL in the combined CSS is unreachable.
 *
 * @author oaouattara
 * @version $Id: $Id
 */
public class DefaultStyleSheetExtractorImpl
    implements StyleSheetExtractor
{
    private static final Logger log = LoggerFactory.getLogger(StyleSheetExtractor.class);

    private ConfluenceGreenPepper gpUtil = new ConfluenceGreenPepper();

    // Lazily-loaded, cached content of /includes/css/tables.css.
    // NOTE(review): lazy init is not synchronized — concurrent callers may
    // load the resource twice; harmless since the result is identical,
    // but confirm if stricter guarantees are required.
    private String tableCssContent;

    /** {@inheritDoc} */
    public String renderStyleSheet(Space space)
    {
        StringBuilder css = new StringBuilder();
        css.append('\n');

        // We always put the tables.css content since the @import url contains the server URL that can be out of reach
        // and we want to make sure tables are rendered with borders (too confusing if not!)
        css.append(getTablesCssContent());
        css.append('\n');
        css.append(getCombinedCss(space));

        return css.toString();
    }

    /**
     * Returns the cached tables.css content, loading it from the
     * Confluence resource accessor on first use. On failure a CSS
     * comment placeholder is cached instead so we do not retry forever.
     */
    private String getTablesCssContent()
    {
        if (tableCssContent == null)
        {
            InputStream tableCssStream = null;

            try
            {
                ResourceAccessor resourceAccessor = (ResourceAccessor)ContainerManager.getComponent("resourceAccessor");
                tableCssStream = resourceAccessor.getResource("/includes/css/tables.css");
                tableCssContent = IOUtils.toString(tableCssStream);
            }
            catch (Exception ex)
            {
                log.error("Failed to get tables stylesheet. Omitting tables styles.", ex);
                tableCssContent = "/* Failed to get tables stylesheet. Omitting tables styles. */";
            }
            finally
            {
                IOUtils.closeQuietly(tableCssStream);
            }
        }

        return tableCssContent;
    }

    /**
     * Renders the combined-css.vm Velocity template for the given space
     * (or the global theme when {@code space} is null), producing the
     * theme-aware portion of the stylesheet.
     */
    private String getCombinedCss(final Space space)
    {
        final ThemeManager themeManager = (ThemeManager) ContainerManager.getComponent("themeManager");
        final String spaceKey = space == null ? "" : space.getKey();

        // Fall back to the global theme when no space key is available.
        Theme activeTheme = themeManager.getGlobalTheme();
        if (StringUtils.isNotEmpty(spaceKey))
        {
            activeTheme = themeManager.getSpaceTheme(spaceKey);
        }

        final ConfluenceWebResourceManager webResourceManager = (ConfluenceWebResourceManager)ContainerManager.getComponent("webResourceManager");

        Map<String, Object> contextMap = new HashMap<String, Object>();
        contextMap.put("spaceKey", spaceKey);
        contextMap.put("globalPrefix", gpUtil.getBaseUrl() + webResourceManager.getGlobalCssResourcePrefix());
        contextMap.put("prefix", gpUtil.getBaseUrl() + webResourceManager.getSpaceCssPrefix(spaceKey));
        contextMap.put("theme", activeTheme);
        contextMap.put("forWysiwyg", Boolean.TRUE);
        contextMap.put("generalUtil", new GeneralUtil());

        return VelocityUtils.getRenderedTemplate("styles/combined-css.vm", contextMap);
    }
}
| {
"content_hash": "ec149666e4985d7eab526da2792e3f5e",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 140,
"avg_line_length": 31.14814814814815,
"alnum_prop": 0.7699167657550535,
"repo_name": "strator-dev/greenpepper-open",
"id": "4f5e98bfd4fddb8497c86f5062ac487adec91fdd",
"size": "4170",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "confluence/greenpepper-confluence-code/src/main/java/com/greenpepper/confluence/utils/stylesheet/DefaultStyleSheetExtractorImpl.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "678261"
},
{
"name": "Java",
"bytes": "572376"
},
{
"name": "JavaScript",
"bytes": "63878"
},
{
"name": "Shell",
"bytes": "147"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "103987659501ea38591fb30d02e2b9bd",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "70db0897e6cf3be8af2ea64cec7d3d0aa28f020f",
"size": "184",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Saxifragales/Crassulaceae/Tylecodon/Tylecodon hallii/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
#pragma once
// ensure the MCU series is correct
#ifndef STM32PLUS_F4
#error This class can only be used with the STM32F4 series
#endif
namespace stm32plus {

  /**
   * Feature collection for this DMA channel. Parameterise this class with the features
   * that you want to use on this channel.
   *
   * Binds DMA2 / Stream 4 / Channel 6 on the STM32F4 together with the
   * stream-4 transfer-complete, half-transfer and transfer-error flags.
   */
  template<class... Features>
  class Dma2Channel6Stream4 : public Dma,
                              public Features... {

    public:

      /**
       * Constructor. Initialises the base Dma with this stream's
       * registers/flags, constructs each mixed-in feature with a
       * reference to this Dma, and switches on the DMA2 peripheral clock.
       */
      Dma2Channel6Stream4()
        : Dma(DMA2_Stream4,DMA_Channel_6,DMA_FLAG_TCIF4,DMA_FLAG_HTIF4,DMA_FLAG_TEIF4),
          Features(static_cast<Dma&>(*this))... {

        ClockControl<PERIPHERAL_DMA2>::On();
      }
  };

  /**
   * Types for the peripherals mapped to this channel
   * (per the STM32F4 DMA2 request map — TODO confirm against the
   * reference manual's request table for your exact part).
   */
  template<class... Features> using Timer1Channel4DmaChannel=Dma2Channel6Stream4<Features...>;
  template<class... Features> using Timer1TriggerStream4DmaChannel=Dma2Channel6Stream4<Features...>;
  template<class... Features> using Timer1ComDmaChannel=Dma2Channel6Stream4<Features...>;
}
| {
"content_hash": "f19fba4e5e665be045abfafa7f6cff94",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 100,
"avg_line_length": 24.295454545454547,
"alnum_prop": 0.6679139382600561,
"repo_name": "tokoro10g/stm32plus",
"id": "d54b193d132d2c175fda9f34f62d5513ead60ba7",
"size": "1249",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "lib/include/dma/f4/Dma2Channel6Stream4.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "87923"
},
{
"name": "C",
"bytes": "13033053"
},
{
"name": "C#",
"bytes": "119125"
},
{
"name": "C++",
"bytes": "5374380"
},
{
"name": "CMake",
"bytes": "3304"
},
{
"name": "HTML",
"bytes": "46121"
},
{
"name": "JavaScript",
"bytes": "7492"
},
{
"name": "Lua",
"bytes": "5557"
},
{
"name": "Makefile",
"bytes": "5662"
},
{
"name": "Pascal",
"bytes": "6685"
},
{
"name": "Perl",
"bytes": "24683"
},
{
"name": "Python",
"bytes": "21940"
}
],
"symlink_target": ""
} |
// -------------------------------------------------------------------------------------------------
//
// A technical indicator developed by Tushar Chande to numerically identify trends in candlestick charting.
// It is calculated by taking an N period moving average of the difference between the open and closing prices.
// A Qstick value greater than zero means that the majority of the last 'n' days have been up,
// indicating that buying pressure has been increasing.
//
// -------------------------------------------------------------------------------------------------
using cAlgo.API;
using cAlgo.API.Indicators;
namespace cAlgo.Indicators
{
    [Indicator(AccessRights = AccessRights.None)]
    public class QStick : Indicator
    {
        // Moving average taken over the candle-body series below.
        private MovingAverage _bodyAverage;

        // Per-bar close-minus-open difference (the "candle body").
        private IndicatorDataSeries _bodySeries;

        [Parameter(DefaultValue = 14)]
        public int Period { get; set; }

        [Parameter("MA Type", DefaultValue = MovingAverageType.Simple)]
        public MovingAverageType MaType { get; set; }

        [Output("QStick")]
        public IndicatorDataSeries Result { get; set; }

        protected override void Initialize()
        {
            // The MA is wired to the body series once; Calculate() only
            // has to keep the series up to date.
            _bodySeries = CreateDataSeries();
            _bodyAverage = Indicators.MovingAverage(_bodySeries, Period, MaType);
        }

        public override void Calculate(int index)
        {
            // Positive body => the bar closed above its open; averaging
            // these bodies over Period bars yields the QStick value.
            var body = MarketSeries.Close[index] - MarketSeries.Open[index];
            _bodySeries[index] = body;
            Result[index] = _bodyAverage.Result[index];
        }
    }
}
| {
"content_hash": "656cc87b95ba09ed2f3992c02274ad68",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 115,
"avg_line_length": 37.85,
"alnum_prop": 0.5647291941875826,
"repo_name": "abhacid/cAlgoBot",
"id": "9bbee836a82e97afbb308091f8178b360da66e07",
"size": "1516",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Sources/Indicators/QStick/QStick/QStick.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "4073176"
}
],
"symlink_target": ""
} |
package org.kman.tests.utils;
import android.content.Context;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.util.Log;
public class PackageUtils {

    private static final String TAG = "PackageUtils";

    // Cached versionCode; 0 means "not resolved yet".
    private static int gVersionCode;

    /**
     * Returns this application's versionCode, resolving it from the
     * PackageManager on first use and caching the result. Returns 0 if
     * the lookup fails (the failure is logged and retried on next call).
     */
    public static int getVersionCode(Context context) {
        synchronized (PackageUtils.class) {
            if (gVersionCode != 0) {
                return gVersionCode;
            }
            try {
                final PackageManager pm = context.getPackageManager();
                final PackageInfo pi = pm.getPackageInfo(
                        context.getPackageName(), PackageManager.GET_META_DATA);
                gVersionCode = pi.versionCode;
            } catch (Exception x) {
                Log.w(TAG, "Error get package meta", x);
            }
            return gVersionCode;
        }
    }
}
| {
"content_hash": "1de4c55507f8c2c593aaf2429fa9bf0d",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 89,
"avg_line_length": 26.79310344827586,
"alnum_prop": 0.7245817245817245,
"repo_name": "kmansoft/tests",
"id": "1a9666b4fa6f933850b8f8053297017183a169c3",
"size": "777",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Utils/src/org/kman/tests/utils/PackageUtils.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "103781"
}
],
"symlink_target": ""
} |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
namespace Microsoft.WindowsAzure.Management.SqlDatabase.Test.UnitTests.Database.Cmdlet
{
    using System;
    using System.Collections.ObjectModel;
    using System.Linq;
    using System.Management.Automation;
    using Microsoft.VisualStudio.TestTools.UnitTesting;
    using Microsoft.WindowsAzure.Management.SqlDatabase.Test.UnitTests.MockServer;
    using Microsoft.WindowsAzure.Management.Test.Utilities.Common;

    /// <summary>
    /// Unit tests for the Set-AzureSqlDatabase cmdlet. Each test drives
    /// PowerShell against a recorded mock HTTP session, validating the
    /// outgoing requests and the database object the cmdlet emits.
    /// </summary>
    [TestClass]
    public class SetAzureSqlDatabaseTests : TestBase
    {
        [TestCleanup]
        public void CleanupTest()
        {
            // Persist any newly recorded sessions for future playback.
            DatabaseTestHelper.SaveDefaultSessionCollection();
        }

        /// <summary>
        /// Changing MaxSizeGB on testdb1 should issue the expected OData
        /// requests and return the updated database with a 5 GB max size.
        /// </summary>
        [TestMethod]
        public void SetAzureSqlDatabaseSizeWithSqlAuth()
        {
            using (System.Management.Automation.PowerShell powershell =
                System.Management.Automation.PowerShell.Create())
            {
                // Create a context
                NewAzureSqlDatabaseServerContextTests.CreateServerContextSqlAuth(
                    powershell,
                    "$context");
                // Create 2 test databases
                NewAzureSqlDatabaseTests.CreateTestDatabasesWithSqlAuth(
                    powershell,
                    "$context");

                HttpSession testSession = DatabaseTestHelper.DefaultSessionCollection.GetSession(
                    "UnitTests.SetAzureSqlDatabaseSizeWithSqlAuth");
                DatabaseTestHelper.SetDefaultTestSessionSettings(testSession);
                // Validate each replayed request against the recording.
                testSession.RequestValidator =
                    new Action<HttpMessage, HttpMessage.Request>(
                    (expected, actual) =>
                    {
                        Assert.AreEqual(expected.RequestInfo.Method, actual.Method);
                        Assert.AreEqual(expected.RequestInfo.UserAgent, actual.UserAgent);
                        switch (expected.Index)
                        {
                            // Request 0-1: Set testdb1 with new MaxSize
                            case 0:
                            case 1:
                            // Request 2: Get updated testdb1
                            case 2:
                                DatabaseTestHelper.ValidateHeadersForODataRequest(
                                    expected.RequestInfo,
                                    actual);
                                break;
                            default:
                                Assert.Fail("No more requests expected.");
                                break;
                        }
                    });

                using (AsyncExceptionManager exceptionManager = new AsyncExceptionManager())
                {
                    // Create context with both ManageUrl and ServerName overriden
                    Collection<PSObject> database;
                    using (new MockHttpServer(
                        exceptionManager,
                        MockHttpServer.DefaultServerPrefixUri,
                        testSession))
                    {
                        database = powershell.InvokeBatchScript(
                            @"Set-AzureSqlDatabase " +
                            @"-Context $context " +
                            @"-DatabaseName testdb1 " +
                            @"-MaxSizeGB 5 " +
                            @"-Force " +
                            @"-PassThru");
                    }

                    Assert.AreEqual(0, powershell.Streams.Error.Count, "Errors during run!");
                    Assert.AreEqual(0, powershell.Streams.Warning.Count, "Warnings during run!");
                    powershell.Streams.ClearStreams();

                    Assert.IsTrue(
                        database.Single().BaseObject is Services.Server.Database,
                        "Expecting a Database object");
                    Services.Server.Database databaseObj =
                        (Services.Server.Database)database.Single().BaseObject;
                    Assert.AreEqual("testdb1", databaseObj.Name, "Expected db name to be testdb1");
                    Assert.AreEqual("Web", databaseObj.Edition, "Expected edition to be Web");
                    Assert.AreEqual(5, databaseObj.MaxSizeGB, "Expected max size to be 5 GB");
                }
            }
        }

        /// <summary>
        /// Renaming testdb1 to testdb3 should issue the expected OData
        /// requests and return the renamed database object.
        /// </summary>
        [TestMethod]
        public void SetAzureSqlDatabaseNameWithSqlAuth()
        {
            using (System.Management.Automation.PowerShell powershell =
                System.Management.Automation.PowerShell.Create())
            {
                // Create a context
                NewAzureSqlDatabaseServerContextTests.CreateServerContextSqlAuth(
                    powershell,
                    "$context");
                // Create 2 test databases
                NewAzureSqlDatabaseTests.CreateTestDatabasesWithSqlAuth(
                    powershell,
                    "$context");

                HttpSession testSession = DatabaseTestHelper.DefaultSessionCollection.GetSession(
                    "UnitTests.SetAzureSqlDatabaseNameWithSqlAuth");
                DatabaseTestHelper.SetDefaultTestSessionSettings(testSession);
                // Validate each replayed request against the recording.
                testSession.RequestValidator =
                    new Action<HttpMessage, HttpMessage.Request>(
                    (expected, actual) =>
                    {
                        Assert.AreEqual(expected.RequestInfo.Method, actual.Method);
                        Assert.AreEqual(expected.RequestInfo.UserAgent, actual.UserAgent);
                        switch (expected.Index)
                        {
                            // Request 1-2: Set testdb1 with new name of testdb2
                            case 0:
                            case 1:
                            // Request 3: Get updated testdb2
                            case 2:
                                DatabaseTestHelper.ValidateHeadersForODataRequest(
                                    expected.RequestInfo,
                                    actual);
                                break;
                            default:
                                Assert.Fail("No more requests expected.");
                                break;
                        }
                    });

                using (AsyncExceptionManager exceptionManager = new AsyncExceptionManager())
                {
                    // Create context with both ManageUrl and ServerName overriden
                    Collection<PSObject> database;
                    using (new MockHttpServer(
                        exceptionManager,
                        MockHttpServer.DefaultServerPrefixUri,
                        testSession))
                    {
                        database = powershell.InvokeBatchScript(
                            @"Set-AzureSqlDatabase " +
                            @"-Context $context " +
                            @"-DatabaseName testdb1 " +
                            @"-NewName testdb3 " +
                            @"-Force " +
                            @"-PassThru");
                    }

                    Assert.AreEqual(0, powershell.Streams.Error.Count, "Errors during run!");
                    Assert.AreEqual(0, powershell.Streams.Warning.Count, "Warnings during run!");
                    powershell.Streams.ClearStreams();

                    Assert.IsTrue(
                        database.Single().BaseObject is Services.Server.Database,
                        "Expecting a Database object");
                    Services.Server.Database databaseObj =
                        (Services.Server.Database)database.Single().BaseObject;
                    Assert.AreEqual("testdb3", databaseObj.Name, "Expected db name to be testdb3");
                    Assert.AreEqual("Web", databaseObj.Edition, "Expected edition to be Web");
                    Assert.AreEqual(1, databaseObj.MaxSizeGB, "Expected max size to be 1 GB");
                }
            }
        }
    }
}
| {
"content_hash": "9bc9d136272d5282cc2e482e2bca6f6a",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 99,
"avg_line_length": 47.630434782608695,
"alnum_prop": 0.5006846188954815,
"repo_name": "akromm/azure-sdk-tools",
"id": "5fba591b2293e5daa1322b1615843d0c669f490a",
"size": "8766",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "WindowsAzurePowershell/src/Management.SqlDatabase.Test/UnitTests/Database/Cmdlet/SetAzureSqlDatabaseTests.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "5452289"
},
{
"name": "JavaScript",
"bytes": "4979"
},
{
"name": "PHP",
"bytes": "41"
},
{
"name": "PowerShell",
"bytes": "218534"
},
{
"name": "Python",
"bytes": "12722"
},
{
"name": "Shell",
"bytes": "17079"
},
{
"name": "VHDL",
"bytes": "161784832"
}
],
"symlink_target": ""
} |
<?php
/**
 * PHPMailer language file.
 * Norwegian Version
 */

$PHPMAILER_LANG = array();

// NOTE: the previous version split some messages across two concatenated
// string literals with no space at the join, producing run-together words
// such as "enmottager" and "Følgendemottagere". Each message is now a
// single literal with correct spacing.
$PHPMAILER_LANG["provide_address"]      = 'Du må ha med minst en mottager adresse.';
$PHPMAILER_LANG["mailer_not_supported"] = ' mailer er ikke supportert.';
$PHPMAILER_LANG["execute"]              = 'Kunne ikke utføre: ';
$PHPMAILER_LANG["instantiate"]          = 'Kunne ikke instantiate mail funksjonen.';
$PHPMAILER_LANG["authenticate"]         = 'SMTP Feil: Kunne ikke authentisere.';
$PHPMAILER_LANG["from_failed"]          = 'Følgende Fra feilet: ';
$PHPMAILER_LANG["recipients_failed"]    = 'SMTP Feil: Følgende mottagere feilet: ';
$PHPMAILER_LANG["data_not_accepted"]    = 'SMTP Feil: Data ble ikke akseptert.';
$PHPMAILER_LANG["connect_host"]         = 'SMTP Feil: Kunne ikke koble til SMTP host.';
$PHPMAILER_LANG["file_access"]          = 'Kunne ikke få tilgang til filen: ';
$PHPMAILER_LANG["file_open"]            = 'Fil feil: Kunne ikke åpne filen: ';
$PHPMAILER_LANG["encoding"]             = 'Ukjent encoding: ';
"content_hash": "3a0a85cbfa539215135a12ecbcea34c2",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 79,
"avg_line_length": 47.09090909090909,
"alnum_prop": 0.6361003861003861,
"repo_name": "vincent03460/frx",
"id": "b99aba79cdb1b4254b3f3bd1b9bbdd1170d0b064",
"size": "1036",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lib/symfony/vendor/phpmailer/language/phpmailer.lang-no.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "1626"
},
{
"name": "CSS",
"bytes": "78929"
},
{
"name": "HTML",
"bytes": "34"
},
{
"name": "JavaScript",
"bytes": "194190"
},
{
"name": "PHP",
"bytes": "1810657"
},
{
"name": "Shell",
"bytes": "4594"
}
],
"symlink_target": ""
} |
<!doctype html>
<html>
<head>
<title>Mithril</title>
<link href="lib/prism/prism.css" rel="stylesheet" />
<link href="style.css" rel="stylesheet" />
</head>
<body>
<header>
<nav class="container">
<a href="index.html" class="logo"><span>○</span> Mithril</a>
<a href="getting-started.html">Guide</a>
<a href="mithril.html">API</a>
<a href="community.html">Community</a>
<a href="http://lhorie.github.io/mithril-blog">Blog</a>
<a href="mithril.min.zip">Download</a>
<a href="http://github.com/lhorie/mithril.js" target="_blank">Github</a>
</nav>
</header>
<main>
<section class="content">
<div class="container">
<div class="row">
<div class="col(3,3,12)">
<h2 id="api">API (v0.1.19)</h2>
<h3 id="core">Core</h3>
<ul>
<li><a href="mithril.html">m</a></li>
<li><a href="mithril.prop.html">m.prop</a></li>
<li><a href="mithril.withAttr.html">m.withAttr</a></li>
<li><a href="mithril.module.html">m.module</a></li>
<li><a href="mithril.trust.html">m.trust</a></li>
<li><a href="mithril.render.html">m.render</a></li>
<li><a href="mithril.redraw.html">m.redraw</a></li>
</ul>
<h3 id="routing">Routing</h3>
<ul>
<li><a href="mithril.route.html">m.route</a>
<ul>
<li><a href="mithril.route.html#defining-routes">m.route(rootElement, defaultRoute, routes)</a></li>
<li><a href="mithril.route.html#redirecting">m.route(path, params)</a></li>
<li><a href="mithril.route.html#reading-current-route">m.route()</a></li>
<li><a href="mithril.route.html#mode-abstraction">m.route(element)</a></li>
<li><a href="mithril.route.html#mode">m.route.mode</a></li>
<li><a href="mithril.route.html#param">m.route.param</a></li>
</ul>
</li>
</ul>
<h3 id="data">Data</h3>
<ul>
<li><a href="mithril.request.html">m.request</a></li>
<li><a href="mithril.deferred.html">m.deferred</a></li>
<li><a href="mithril.sync.html">m.sync</a></li>
<li><a href="mithril.computation.html">m.startComputation / m.endComputation</a></li>
</ul>
<h2 id="archive">History</h2>
<ul>
<li><a href="roadmap.html">Roadmap</a></li>
<li><a href="change-log.html">Change log</a></li>
</ul>
</div>
<div class="col(9,9,12)">
<h2 id="m-withattr">m.withAttr</h2>
<p>This is an event handler factory. It returns a method that can be bound to a DOM element's event listener.</p>
<p>Typically, it's used in conjunction with <a href="mithril.prop.html"><code>m.prop</code></a> to implement data binding in the view-to-model direction.</p>
<p>This method is provided to decouple the browser's event model from the controller/logic model.</p>
<p>You should use this method and implement similar ones when extracting values from a browser's Event object, instead of hard-coding the extraction code into controllers (or model methods).</p>
<hr>
<h3 id="usage">Usage</h3>
<pre><code class="lang-javascript">//standalone usage
document.body.onclick = m.withAttr("title", function(value) {
//alerts the title of the body element when it's clicked
alert(value);
})</code></pre>
<p>A contrived example of bi-directional data binding</p>
<pre><code class="lang-javascript">var user = {
model: function(name) {
this.name = m.prop(name);
},
controller: function() {
this.user = new user.model("John Doe");
},
view: function(controller) {
m.render("body", [
m("input", {onchange: m.withAttr("value", controller.user.name), value: controller.user.name()})
]);
}
};</code></pre>
<hr>
<h3 id="signature">Signature</h3>
<p><a href="how-to-read-signatures.html">How to read signatures</a></p>
<pre><code class="lang-clike">EventHandler withAttr(String property, void callback(any value))
where:
EventHandler :: void handler(Event e)</code></pre>
<ul>
<li><p><strong>String property</strong></p>
<p>Defines the property of the DOM element whose value will be passed to the callback.</p>
</li>
<li><p><strong>void callback(any value)</strong></p>
<p>This function will be called with the value of the defined property as an argument.</p>
<ul>
<li><p><strong>any value</strong></p>
<p>This is the value of the defined DOM element's property.</p>
</li>
</ul>
</li>
<li><p><strong>returns EventHandler handler</strong></p>
<p>This handler method can be assigned to properties like <code>onclick</code>, or passed as callbacks to <code>addEventListener</code>.</p>
</li>
</ul>
</div>
</div>
</div>
</section>
</main>
<footer>
<div class="container">
Released under the <a href="http://opensource.org/licenses/MIT" target="_blank">MIT license</a>
<br />© 2014 Leo Horie
</div>
</footer>
<script src="lib/prism/prism.js"></script>
</body>
</html> | {
"content_hash": "ab4d532b115be899d302d8d5bd8850d4",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 198,
"avg_line_length": 39.75590551181102,
"alnum_prop": 0.6193305605070311,
"repo_name": "vikbert/mithril.js",
"id": "532b25ea7eb44f857a4fb1bf7e82ee7463422873",
"size": "5049",
"binary": false,
"copies": "7",
"ref": "refs/heads/next",
"path": "archive/v0.1.19/mithril.withAttr.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "205484"
},
{
"name": "HTML",
"bytes": "11048806"
},
{
"name": "JavaScript",
"bytes": "3648866"
}
],
"symlink_target": ""
} |
"""This module exports the jscs plugin class."""
from SublimeLinter.lint import Linter
class Jscs(Linter):
    """Provides an interface to jscs (the JavaScript Code Style checker).

    SublimeLinter discovers this class automatically; all configuration is
    declarative via the class attributes below.
    """

    # Sublime Text syntaxes the linter activates for.
    syntax = ('javascript', 'html')
    # Command line; `-r checkstyle` makes jscs emit checkstyle-style XML
    # that the `regex` below can parse.
    cmd = 'jscs -r checkstyle'
    version_args = '--version'
    version_re = r'(?P<version>\d+\.\d+\.\d+)'
    version_requirement = '>= 1.0.10'  # 1.0.10 introduced checkstyle reporter
    # Parses one <error .../> element of the checkstyle report.
    regex = (
        r'^\s+?<error line="(?P<line>\d+)" '
        r'column="(?P<col>\d+)" '
        # jscs always reports with error severity; show as warning
        r'severity="(?P<warning>error)" '
        r'message="(?P<message>.+?)"'
    )
    # The checkstyle report spans multiple lines, so match line by line.
    multiline = True
    # In HTML files, only lint embedded <script> JavaScript.
    selectors = {'html': 'source.js.embedded.html'}
    tempfile_suffix = 'js'
    # Search for a .jscsrc config file from the linted file's directory up to ~.
    config_file = ('--config', '.jscsrc', '~')
| {
"content_hash": "4bcadf0fb54d11d64874858ae11d6391",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 78,
"avg_line_length": 30.8,
"alnum_prop": 0.574025974025974,
"repo_name": "Raynos/SublimeLinter-jscs",
"id": "5499b3ae747142c092a884029755d320c6b5564d",
"size": "948",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "linter.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
# Migration: creates the +bars+ table with a textual name, an integer +idf+
# column and the standard created_at/updated_at timestamps.
class CreateBars < ActiveRecord::Migration
  def change
    create_table :bars do |table|
      table.string :name
      table.integer :idf
      table.timestamps null: false
    end
  end
end
| {
"content_hash": "f023e8dd29f4e9bbec65a4442a15a0c1",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 42,
"avg_line_length": 17.8,
"alnum_prop": 0.6404494382022472,
"repo_name": "ricardobaumann/rails_generators",
"id": "ca57f4c8e239da61a2d268a21abcdf62fa3316bf",
"size": "178",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "db/migrate/20150428192606_create_bars.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3886"
},
{
"name": "CoffeeScript",
"bytes": "2532"
},
{
"name": "HTML",
"bytes": "29025"
},
{
"name": "JavaScript",
"bytes": "661"
},
{
"name": "Ruby",
"bytes": "70068"
}
],
"symlink_target": ""
} |
//----------------------------------------------------------------------------
// Creates, configures and initialises an AliGenEMCocktailV2 generator for
// electromagnetic cocktail production (photons / electrons / dielectrons
// from the selected mother particles) and returns it to the caller.
//
//   decayMode       : 1 = single photon, 2 = single electron, 3 = e+e- pair
//   selectedMothers : bit mask of mother particles passed to the cocktail
//   toFixEP         : fix the event plane instead of randomising it
//----------------------------------------------------------------------------
AliGenerator* AddMCEMCocktailV2( Int_t collisionsSystem = 200,
                                 Int_t centrality = 0,
                                 Int_t decayMode = 1,
                                 Int_t selectedMothers = 62591,
                                 TString paramFile = "",
                                 TString paramFileDir = "",
                                 Int_t numberOfParticles = 1000,
                                 Double_t minPt = 0.,
                                 Double_t maxPt = 20,
                                 Int_t pythiaErrorTolerance = 2000,
                                 Bool_t externalDecayer = 0,
                                 Bool_t decayLongLived = 0,
                                 Bool_t dynamicalPtRange = 0,
                                 Bool_t useYWeights = 0,
                                 TString paramV2FileDir = "",
                                 Bool_t toFixEP = 0,
                                 Double_t yGenRange = 1.0
                                 )
{
  // collisions systems defined:
  // 0   : pp 900 GeV
  // 100 : pp 2.76 TeV
  // 200 : pp 7 TeV
  // 300 : pPb 5.023 TeV
  // 400 : PbPb 2.76 TeV

  // load the libraries required by the Pythia-based decayer
  gSystem->Load("liblhapdf"); // Parton density functions
  gSystem->Load("libpythia6_4_25");
  gSystem->Load("libEGPythia6");
  gSystem->Load("libAliPythia6");

  // Create and Initialize Generator
  AliGenEMCocktailV2 *gener = new AliGenEMCocktailV2();

  //=======================================================================
  // Set External decayer
  TVirtualMCDecayer *decayer = new AliDecayerPythia();
  if (externalDecayer) decayer->AliDecayerPythia::SetDecayerExodus();
  if (decayLongLived) decayer->AliDecayerPythia::DecayLongLivedParticles();

  // input parametrisations and kinematic ranges
  gener->SetParametrizationFile(paramFile);
  gener->SetParametrizationFileDirectory(paramFileDir);
  gener->SetNPart(numberOfParticles); // source multiplicity per event
  gener->SetPtRange(minPt,maxPt);
  gener->SetFixedEventPlane(toFixEP) ;
  gener->SetDynamicalPtRange(dynamicalPtRange);
  gener->SetUseYWeighting(useYWeights);
  gener->SetYRange(-yGenRange,yGenRange);
  gener->SetPhiRange(0., 360.);
  // point-like production vertex at the origin, smeared per event
  gener->SetOrigin(0.,0.,0.);
  gener->SetSigma(0.,0.,0.);
  gener->SetVertexSmear(kPerEvent);
  gener->SetTrackingFlag(0);
  gener->SelectMotherParticles(selectedMothers);
  gener->SetCollisionSystem(collisionsSystem); //pp 7 TeV
  gener->SetCentrality(centrality); // kpp

  // optional directory holding v2 (flow) parametrisations
  if(paramV2FileDir.Length()>0)
    gener->SetParametrizationFileV2Directory(paramV2FileDir);

  (AliPythia::Instance())->SetMSTU(22, pythiaErrorTolerance); // tolerance for error due to rhos

  if (decayMode == 1){
    gener->SetDecayMode(kGammaEM); // kGammaEM => single photon
  } else if (decayMode == 2){
    gener->SetDecayMode(kElectronEM); // kElectronEM => single electron
  } else if (decayMode == 3){
    gener->SetDecayMode(kDiElectronEM); // kDiElectronEM => electron-positron
  }

  gener->SetDecayer(decayer);
  gener->SetWeightingMode(kNonAnalog); // select weighting:
  // kNonAnalog => weight ~ dN/dp_T
  // kAnalog => weight ~ 1

  gener->CreateCocktail();
  gener->Init();

  return gener;
}
| {
"content_hash": "33e233d7e3dfc0db5f1e994ff8a22673",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 98,
"avg_line_length": 44.883116883116884,
"alnum_prop": 0.5254629629629629,
"repo_name": "dstocco/AliPhysics",
"id": "6ef92493c87b4cbe443d330f8192758d4830644a",
"size": "3456",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "PWG/Cocktail/macros/AddMCEMCocktailV2.C",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "65492082"
},
{
"name": "C++",
"bytes": "113266180"
},
{
"name": "CMake",
"bytes": "585834"
},
{
"name": "CSS",
"bytes": "5189"
},
{
"name": "Fortran",
"bytes": "134275"
},
{
"name": "HTML",
"bytes": "34737"
},
{
"name": "JavaScript",
"bytes": "3536"
},
{
"name": "Makefile",
"bytes": "25080"
},
{
"name": "Objective-C",
"bytes": "169708"
},
{
"name": "Perl",
"bytes": "17128"
},
{
"name": "Python",
"bytes": "758511"
},
{
"name": "Shell",
"bytes": "956557"
},
{
"name": "TeX",
"bytes": "392122"
}
],
"symlink_target": ""
} |
package net.peterkuterna.android.apps.devoxxsched.util;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.TransitionDrawable;
import android.widget.ImageView;
public class ImageViewUtils {

    /**
     * Cross-fades an {@link ImageView} to a new {@link Bitmap}.
     *
     * @param iv     target view
     * @param bitmap bitmap to display
     */
    public static void setBitmapWithFade(ImageView iv, Bitmap bitmap) {
        setDrawableWithFade(iv, new BitmapDrawable(iv.getResources(), bitmap));
    }

    /**
     * Cross-fades an {@link ImageView} to a new {@link Drawable}. When the
     * view currently shows nothing, the drawable is set immediately without
     * any transition.
     *
     * @param iv       target view
     * @param drawable drawable to display
     */
    public static void setDrawableWithFade(ImageView iv, Drawable drawable) {
        Drawable existing = iv.getDrawable();
        if (existing == null) {
            iv.setImageDrawable(drawable);
            return;
        }
        TransitionDrawable transition = new TransitionDrawable(
                new Drawable[] { existing, drawable });
        transition.setCrossFadeEnabled(true);
        iv.setImageDrawable(transition);
        transition.startTransition(250);
    }
}
| {
"content_hash": "a6d3dc213e8398525f390fc22c968525",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 74,
"avg_line_length": 28.208333333333332,
"alnum_prop": 0.7474150664697193,
"repo_name": "epiphany27/devoxx-schedule",
"id": "68592e391f284794a8e6e956061af8cbd0853b56",
"size": "1951",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "devoxx-android-client/src/net/peterkuterna/android/apps/devoxxsched/util/ImageViewUtils.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1161"
},
{
"name": "Java",
"bytes": "3048209"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
    <title>Backpack pages for Chrome&trade;</title>
    <meta charset="utf-8">
    <link rel="stylesheet" href="css/bootstrap.min.css">
    <link rel="stylesheet" href="css/custom-theme/jquery-ui-1.8.22.custom.css">
    <link rel="stylesheet" href="css/application.css">
    <script src="js/jquery-1.7.2.min.js"></script>
    <script src="js/jquery-ui-1.8.22.custom.min.js"></script>
    <script src="js/bootstrap.min.js"></script>
    <script src="js/utils.js"></script>
    <script src="js/options.js"></script>
</head>
<body>
    <header>
        <h1>Backpack pages for Chrome</h1>
    </header>
    <!-- Extension options: Backpack username and SSL preference.
         Submission is handled by js/options.js, which also toggles the
         #save-success indicator. -->
    <form action="#" id="options-form">
        <label for="username">Backpack username</label>
        <input type="text" id="username" class="input-xlarge">
        <label for="use-ssl">Use SSL with my Backpack account
            <input type="checkbox" id="use-ssl">
        </label>
        <input type="submit" id="save" value="Save">
        <span id="save-success">
            <img src="img/success.png" alt="success">
        </span>
    </form>
</body>
</html>
"content_hash": "7b26661a96fe39d48992999def18ce98",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 77,
"avg_line_length": 33.25806451612903,
"alnum_prop": 0.6450048496605237,
"repo_name": "gaelian/backpack-pages-for-chrome",
"id": "61cbdf59e55ea821eec7df7b2f9dc87423eea966",
"size": "1033",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "options.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "11310"
}
],
"symlink_target": ""
} |
static const NSTimeInterval kDefaultCacheMaxCacheAge = 60 * 60 * 24 * 7; // 1 week

// Private interface: internal helper for (re)creating the on-disk store.
@interface AUCache ()
- (void)createStorageDirectoryIfNeeded;
@end

@implementation AUCache

// Process-wide singleton built lazily and thread-safely via dispatch_once.
+ (instancetype)sharedCache {
    static dispatch_once_t pred;
    static id __sharedCache;
    dispatch_once(&pred, ^{
        __sharedCache = [[self alloc] init];
    });
    return __sharedCache;
}

- (id)init {
    return [self initWithNamespace:@"default"];
}

// Designated initializer. The namespace isolates this cache's files under
// a "com.appunite.<namespace>" directory inside Library/Caches/AUCache.
- (id)initWithNamespace:(NSString *)aNamespace {
    self = [super init];
    if (self) {
        // save default max cache age
        _maxCacheAge = kDefaultCacheMaxCacheAge;
        // create memory cache
        _memoryCache = [NSMapTable mapTableWithKeyOptions:NSMapTableStrongMemory
                                             valueOptions:NSMapTableStrongMemory];
        // create path component
        NSString *pathComponent = [NSString stringWithFormat:@"com.appunite.%@", aNamespace];
        // use setter to create path if needed
        self.defaultCachePath = [[self cacheDirectory] stringByAppendingPathComponent:pathComponent];
        // prune outdated files when the application terminates
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(removeOutdatedDiskCache)
                                                     name:UIApplicationWillTerminateNotification
                                                   object:nil];
    }
    return self;
}

- (void)dealloc {
    // remove all observers
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    // remove outdated cache
    // NOTE(review): debug builds wipe the whole disk cache on dealloc,
    // release builds only prune expired entries — presumably intentional
    // so developers always start from a clean cache; confirm.
#ifndef DEBUG
    [self removeOutdatedDiskCache];
#else
    [self removeDiskCache];
#endif
}

#pragma mark - Class methods

// Derives a filesystem/dictionary-safe key from an arbitrary string (SHA-1).
+ (NSString *)uniqueKeyForString:(NSString *)string {
    return [string sha1Hash];
}

// Returns YES when an entry for `key` exists in any of the layers allowed
// by `cachePolicy` (memory is checked first, then the on-disk key set).
- (BOOL)isCacheForKey:(NSString *)key policy:(AUCachePolicy)cachePolicy {
    BOOL result = NO;
    // check policy
    if (cachePolicy & AUCachePolicyMemory) {
        // check if cache exist
        result = ([_memoryCache objectForKey:key] != nil);
    }
    // check policy
    if (!result && cachePolicy & AUCachePolicyDisk) {
        // _storedKeys is mutated from createStorageDirectoryIfNeeded;
        // guard reads with the same lock
        @synchronized(self) {
            // check if file exist
            result = ([_storedKeys containsObject:key]);
        }
    }
    return result;
}

// Deletes every file in the disk cache and recreates an empty store.
- (void)removeDiskCache {
    // get file manager
    NSFileManager *fileManager = [NSFileManager defaultManager];
    // get path
    NSString *path = self.defaultCachePath;
    // remove directory
    [fileManager removeItemAtPath:path error:nil];
    // recreate storage directory
    [self createStorageDirectoryIfNeeded];
}

// Deletes only files whose modification date is older than _maxCacheAge,
// then refreshes the in-memory key set from what remains on disk.
- (void)removeOutdatedDiskCache {
    // get file manager
    NSFileManager *fileManager = [NSFileManager defaultManager];
    // calculate expiration date
    NSDate *expirationDate = [NSDate dateWithTimeIntervalSinceNow:(-1 * _maxCacheAge)];
    // get path
    NSString *path = self.defaultCachePath;
    // get file enumerator
    NSDirectoryEnumerator *fileEnumerator = [fileManager enumeratorAtPath:path];
    // iterate all items
    for (NSString *fileName in fileEnumerator) {
        // get full item path
        NSString *filePath = [path stringByAppendingPathComponent:fileName];
        // get file attributes
        NSDictionary *attrs = [fileManager attributesOfItemAtPath:filePath error:nil];
        // compare date and remove item if needed
        if ([[attrs fileModificationDate] compare:expirationDate] == NSOrderedAscending) {
            [fileManager removeItemAtPath:filePath error:nil];
        }
    }
    // create dict (if needed), reload cached keys
    [self createStorageDirectoryIfNeeded];
}

// Drops every in-memory entry; the disk layer is untouched.
- (void)removeMemoryCache {
    [_memoryCache removeAllObjects];
}

#pragma mark - Setters

// Changing the cache path (re)creates the backing directory and reloads
// the set of keys stored on disk.
- (void)setDefaultCachePath:(NSString *)defaultCachePath {
    if (defaultCachePath != _defaultCachePath) {
        _defaultCachePath = defaultCachePath;
        // create storage directory if needed
        if (defaultCachePath != nil) {
            [self createStorageDirectoryIfNeeded];
        }
    }
}

// Ensures the on-disk directory exists and synchronises _storedKeys with
// the directory's current contents.
- (void)createStorageDirectoryIfNeeded {
    // create cache directory on disk
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if (![fileManager fileExistsAtPath:self.defaultCachePath isDirectory:nil]) {
        [fileManager createDirectoryAtPath:self.defaultCachePath
               withIntermediateDirectories:YES
                                attributes:nil
                                     error:nil];
    }
    @synchronized(self) {
        // load cached files key
        _storedKeys = [NSMutableSet setWithArray:[fileManager contentsOfDirectoryAtPath:self.defaultCachePath error:NULL]];
        // create new empty directory if needed
        if (!_storedKeys) {
            _storedKeys = [NSMutableSet new];
        }
    }
}

#pragma mark - Private

// Root directory for all AUCache namespaces.
- (NSString *)cacheDirectory {
    return [NSHomeDirectory() stringByAppendingPathComponent:@"Library/Caches/AUCache"];
}

@end
| {
"content_hash": "8bffb5da73389be64229654e1209bac8",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 123,
"avg_line_length": 28.7909604519774,
"alnum_prop": 0.630298273155416,
"repo_name": "appunite/AUImageCache",
"id": "c80d7e29c259fe3701131c229492040efcea68c0",
"size": "5288",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "AUImageCache/AUImageCache/AUCache.m",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Objective-C",
"bytes": "52684"
},
{
"name": "Ruby",
"bytes": "1209"
}
],
"symlink_target": ""
} |
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading.Tasks;
using System;
using MiHomeLib.Commands;
using MiHomeLib.Contracts;
namespace MiHomeLib
{
/// <summary>
/// <see cref="IMessageTransport"/> implementation that exchanges commands
/// with a gateway over multicast UDP (default group 224.0.0.50, port 9898).
/// </summary>
public class UdpTransport : IMessageTransport
{
    private readonly string _gwPassword;
    private readonly string _multicastAddress;
    private readonly int _serverPort;
    private readonly UdpClient _udpClient;

    /// <summary>Current gateway token used to derive the write-command key.</summary>
    public string Token { get; set; }

    public UdpTransport(string gwPassword, string multicastAddress = "224.0.0.50", int serverPort = 9898)
    {
        _gwPassword = gwPassword;
        _multicastAddress = multicastAddress;
        _serverPort = serverPort;

        // Bind to the server port on all interfaces and join the multicast
        // group so gateway broadcasts can be received.
        _udpClient = new UdpClient(new IPEndPoint(IPAddress.Any, _serverPort));
        _udpClient.JoinMulticastGroup(IPAddress.Parse(_multicastAddress));
    }

    /// <summary>Serialises the command as ASCII and multicasts it; returns the number of bytes sent.</summary>
    public int SendCommand(Command command)
    {
        var payload = Encoding.ASCII.GetBytes(command.ToString());
        var target = new IPEndPoint(IPAddress.Parse(_multicastAddress), _serverPort);
        return _udpClient.Send(payload, payload.Length, target);
    }

    /// <summary>Sends a signed "write" command for the device <paramref name="sid"/>.</summary>
    public int SendWriteCommand(string sid, string type, Command data)
    {
        var key = CryptoProvider.BuildKey(Token, _gwPassword).ToHex();
        return SendCommand(new WriteCommand(sid, type, key, data));
    }

    /// <summary>Awaits the next datagram and decodes its payload as ASCII text.</summary>
    public async Task<string> ReceiveAsync()
    {
        var result = await _udpClient.ReceiveAsync();
        return Encoding.ASCII.GetString(result.Buffer);
    }

    public void Dispose()
    {
        _udpClient?.Dispose();
    }
}
} | {
"content_hash": "f30ab709283a25c2cfa592a1dff8eb8d",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 123,
"avg_line_length": 30.392857142857142,
"alnum_prop": 0.6233842538190364,
"repo_name": "sergey-brutsky/mi-home",
"id": "acff9f118c96c13984665a1638e935e3054523d5",
"size": "1702",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MiHomeLib/Transport/UdpTransport.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "200840"
},
{
"name": "Makefile",
"bytes": "1106"
}
],
"symlink_target": ""
} |
# Prepares an ALM Lab Management environment: gathers the task inputs, invokes
# the bundled Java runner (almrestrunner, 'lep' command) and surfaces its
# output and exit code to the build. Optionally assigns the value the runner
# writes to res\updateVariable.txt into the build variable named by
# $assignMessage.

$varAlmserv = Get-VstsInput -Name 'varAlmserv' -Require
$varUserName = Get-VstsInput -Name 'varUserName' -Require
$varPass = Get-VstsInput -Name 'varPass'
$varDomain = Get-VstsInput -Name 'varDomain' -Require
$varProject = Get-VstsInput -Name 'varProject' -Require
$varEnvId = Get-VstsInput -Name 'varEnvId' -Require
$javaHomeSelection = Get-VstsInput -Name 'javaHomeSelection' -Require
$createNewNamed = Get-VstsInput -Name 'createNewNamed'
$assignMessage = Get-VstsInput -Name 'assignMessage'
$jdkUserInputPath = Get-VstsInput -Name 'jdkUserInputPath'
$varPathToJSON = Get-VstsInput -Name 'varPathToJSON'
$paramOnlyFirst = Get-VstsInput -Name 'paramOnlyFirst' -Require

# Optional AUT environment parameters 1..10: each row has an enable flag
# (AddParamN) plus type/name/value inputs.
for ($i = 1; $i -le 10; $i++)
{
    Set-Variable -Name "AddParam$i" -Value (Get-VstsInput -Name "AddParam$i")
    Set-Variable -Name "paramType$i" -Value (Get-VstsInput -Name "paramType$i")
    Set-Variable -Name "paramName$i" -Value (Get-VstsInput -Name "paramName$i")
    Set-Variable -Name "paramValue$i" -Value (Get-VstsInput -Name "paramValue$i")
}

$uftworkdir = $env:UFT_LAUNCHER
$stdout = "$uftworkdir\temp_build.log"
$stderr = "$uftworkdir\temp_error_build.log"

$jar = """$uftworkdir\bin\hpe.application.automation.tfs.almrestrunner-1.0-jar-with-dependencies.jar"""
$args = "-jar $jar lep ""$varAlmserv"" ""$varUserName"" ""pass:$varPass"" ""$varDomain"" ""$varProject"" ""$varEnvId"" ""$javaHomeSelection"" ""newnamed:$createNewNamed"" ""assign:$assignMessage"" ""useasexisting:$jdkUserInputPath"" ""jsonpath:$varPathToJSON"" ""$paramOnlyFirst"""

# Append the enabled parameter rows. Row N is only considered when all
# previous rows were enabled too (replicates the original nested-if chain).
# BUGFIX: the original chain referenced misspelled variables for rows 3 and 5
# ($paramTyp3/$paramNam3 and $paramTyp5/$paramNam5), silently passing empty
# type/name values for those rows; reading the variables by name fixes that.
for ($i = 1; $i -le 10; $i++)
{
    if ((Get-Variable -Name "AddParam$i" -ValueOnly) -ne $True)
    {
        break
    }
    $pType = Get-Variable -Name "paramType$i" -ValueOnly
    $pName = Get-Variable -Name "paramName$i" -ValueOnly
    $pValue = Get-Variable -Name "paramValue$i" -ValueOnly
    $args = "$($args) ""partype$($i):$pType"" ""parname$($i):$pName"" ""parval$($i):$pValue"""
}

# The Java runner writes the value to assign into this file; delete any
# leftover from a previous run so stale data is never picked up.
$updateVariableFile = Join-Path $env:UFT_LAUNCHER -ChildPath "res\updateVariable.txt"
if (Test-Path $updateVariableFile)
{
    Remove-Item $updateVariableFile
}

# Run the Java tool synchronously, capturing stdout/stderr to log files.
$process = (Start-Process java -ArgumentList $args -RedirectStandardOutput $stdout -RedirectStandardError $stderr -PassThru -Wait)

if ($process.ExitCode -ne 0)
{
    # Fail the task and surface both logs as build errors.
    $content = [IO.File]::ReadAllText($stdout)
    Write-Error ($content)
    $content = [IO.File]::ReadAllText($stderr)
    Write-Error ($content)
}
else
{
    if (Test-Path $stdout)
    {
        Get-Content $stdout
    }
    if (Test-Path $stderr)
    {
        Get-Content $stderr
    }
    if ($assignMessage)
    {
        # Propagate the runner's result into the requested build variable.
        if (Test-Path $updateVariableFile)
        {
            $content = [IO.File]::ReadAllText($updateVariableFile)
            Set-TaskVariable $assignMessage $content
            $varVal = Get-TaskVariable $distributedTaskContext $assignMessage
            Write-Host "Variable '$($assignMessage)' updated with a new value '$($varVal)'"
        }
    }
}

# Clean up the temporary log files.
if (Test-Path $stdout)
{
    Remove-Item $stdout
}
if (Test-Path $stderr)
{
    Remove-Item $stderr
}
| {
"content_hash": "f2c2f17f9a4ce893bf57c277fbe2fba8",
"timestamp": "",
"source": "github",
"line_count": 164,
"max_line_length": 282,
"avg_line_length": 34.0609756097561,
"alnum_prop": 0.6970998925886144,
"repo_name": "hpsa/ADM-TFS-Extension",
"id": "2ed6b9d3f234e5cb09e7dec50841f46caeaa55ab",
"size": "5606",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "UFTBuildTask/AlmLabEnvPrepareTask/localTask.ps1",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "40901"
},
{
"name": "JavaScript",
"bytes": "59179"
},
{
"name": "PowerShell",
"bytes": "12040"
},
{
"name": "TypeScript",
"bytes": "2991"
}
],
"symlink_target": ""
} |
import Ember from 'ember';
import among from './macros/among';
import allEqual from './macros/all-equal';
import encodeURIComponent from './macros/encode-uri-component';
import encodeURI from './macros/encode-uri';
import firstPresent from './macros/first-present';
import fmt from './macros/fmt';
import htmlEscape from './macros/html-escape';
import ifNull from './macros/if-null';
import notAmong from './macros/not-among';
import notEqual from './macros/not-equal';
import notMatch from './macros/not-match';
import promise from './macros/promise';
import safeString from './macros/safe-string';
import join from './macros/join';
import sumBy from './macros/sum-by';
import sum from './macros/sum';
import concat from './macros/concat';
import conditional from './macros/conditional';
import product from './macros/product';
import quotient from './macros/quotient';
import difference from './macros/difference';
import not from './macros/not';
import asFloat from './macros/as-float';
import asInt from './macros/as-int';
// Copies every own property of `source` onto `dest` WITHOUT overwriting keys
// that `dest` already defines (hence "reverse" merge). Mutates `dest` in place.
function reverseMerge(dest, source) {
  Object.keys(source).forEach(function (prop) {
    if (!dest.hasOwnProperty(prop)) {
      dest[prop] = source[prop];
    }
  });
}
// Library version reported to Ember's library inspector.
var VERSION = '1.3.1';

// Every computed-property macro, keyed by the name it is installed under
// on Ember.computed.
var Macros = {
  not: not,
  among: among,
  allEqual: allEqual,
  encodeURIComponent: encodeURIComponent,
  encodeURI: encodeURI,
  firstPresent: firstPresent,
  fmt: fmt,
  htmlEscape: htmlEscape,
  ifNull: ifNull,
  notAmong: notAmong,
  notEqual: notEqual,
  notMatch: notMatch,
  promise: promise,
  safeString: safeString,
  join: join,
  sumBy: sumBy,
  sum: sum,
  difference: difference,
  concat: concat,
  conditional: conditional,
  asFloat: asFloat,
  asInt: asInt,
  quotient: quotient,
  product: product
};

// Installs all macros onto Ember.computed without overwriting any macro
// Ember (or another addon) already defines there.
var install = function(){ reverseMerge(Ember.computed, Macros); };

// Register with the library inspector when the Ember build provides it.
if (Ember.libraries) {
  Ember.libraries.register('Ember-CPM', VERSION);
}

export {
  VERSION,
  Macros,
  install
};

export default {
  VERSION: VERSION,
  Macros: Macros,
  install: install
};
| {
"content_hash": "1989c02396bdcbdd8da95b02338cc5be",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 66,
"avg_line_length": 25.531645569620252,
"alnum_prop": 0.7139315815567675,
"repo_name": "lolmaus/ember-cpm",
"id": "f7e7870c076b1711186f928ed4b8b1cd8840a43b",
"size": "2017",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "addon/index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "31"
},
{
"name": "HTML",
"bytes": "1626"
},
{
"name": "JavaScript",
"bytes": "95897"
},
{
"name": "Shell",
"bytes": "1775"
}
],
"symlink_target": ""
} |
<CartCreateResponse xmlns="http://webservices.amazon.com/AWSECommerceService/2011-08-01">
<OperationRequest>
<HTTPHeaders>
<Header Name="UserAgent" Value="Python-urllib/2.6"/>
</HTTPHeaders>
<RequestId>1N5G10MR33W1J99KSMPB</RequestId>
<Arguments>
<Argument Name="Service" Value="AWSECommerceService"/>
<Argument Name="Item.1.Quantity" Value="2"/>
<Argument Name="Operation" Value="CartCreate"/>
<Argument Name="Timestamp" Value="2011-11-08T21:47:16Z"/>
<Argument Name="Version" Value="2011-08-01"/>
<Argument Name="AssociateTag" Value="XXXXXXXXXXXXXXX"/>
<Argument Name="Item.1.ASIN" Value="0201896842"/>
<Argument Name="Signature" Value="XXXXXXXXXXXXXXX"/>
<Argument Name="Item.2.ASIN" Value="0201896834"/>
<Argument Name="Item.2.Quantity" Value="1"/>
<Argument Name="AWSAccessKeyId" Value="XXXXXXXXXXXXXXX"/>
</Arguments>
<RequestProcessingTime>0.183966875076294</RequestProcessingTime>
</OperationRequest>
<Cart>
<Request>
<IsValid>True</IsValid>
<CartCreateRequest>
<Items>
<Item>
<ASIN>0201896842</ASIN>
<Quantity>2</Quantity>
</Item>
<Item>
<ASIN>0201896834</ASIN>
<Quantity>1</Quantity>
</Item>
</Items>
</CartCreateRequest>
</Request>
<CartId>187-0447201-3975268</CartId>
<HMAC>7pWaikL1bmSDEjP1UEUh1CyJUmg=</HMAC>
<URLEncodedHMAC>7pWaikL1bmSDEjP1UEUh1CyJUmg%3D</URLEncodedHMAC>
<PurchaseURL>https://www.amazon.ca/gp/cart/aws-merge.html?cart-id=187-0447201-3975268%26associate-id=redtoad-21%26hmac=7pWaikL1bmSDEjP1UEUh1CyJUmg=%26SubscriptionId=AKIAJTJID3GQ4PAFYOEQ%26MergeCart=False</PurchaseURL>
<SubTotal>
<Amount>16271</Amount>
<CurrencyCode>CAD</CurrencyCode>
<FormattedPrice>CDN$ 162.71</FormattedPrice>
</SubTotal>
<CartItems>
<SubTotal>
<Amount>16271</Amount>
<CurrencyCode>CAD</CurrencyCode>
<FormattedPrice>CDN$ 162.71</FormattedPrice>
</SubTotal>
<CartItem>
<CartItemId>U1BO8U3ZYW2PV6</CartItemId>
<ASIN>0201896842</ASIN>
<SellerNickname>Amazon.ca</SellerNickname>
<Quantity>2</Quantity>
<Title>Art of Computer Programming, Volume 2: Seminumerical Algorithms</Title>
<ProductGroup>Book</ProductGroup>
<Price>
<Amount>4976</Amount>
<CurrencyCode>CAD</CurrencyCode>
<FormattedPrice>CDN$ 49.76</FormattedPrice>
</Price>
<ItemTotal>
<Amount>9952</Amount>
<CurrencyCode>CAD</CurrencyCode>
<FormattedPrice>CDN$ 99.52</FormattedPrice>
</ItemTotal>
</CartItem>
<CartItem>
<CartItemId>U24U0TK50FRSUK</CartItemId>
<ASIN>0201896834</ASIN>
<SellerNickname>Amazon.ca</SellerNickname>
<Quantity>1</Quantity>
<Title>Art of Computer Programming, Volume 1: Fundamental Algorithms</Title>
<ProductGroup>Book</ProductGroup>
<Price>
<Amount>6319</Amount>
<CurrencyCode>CAD</CurrencyCode>
<FormattedPrice>CDN$ 63.19</FormattedPrice>
</Price>
<ItemTotal>
<Amount>6319</Amount>
<CurrencyCode>CAD</CurrencyCode>
<FormattedPrice>CDN$ 63.19</FormattedPrice>
</ItemTotal>
</CartItem>
</CartItems>
</Cart>
</CartCreateResponse>
| {
"content_hash": "8378faab6898e99686c7fb276ae681e9",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 221,
"avg_line_length": 37.81318681318681,
"alnum_prop": 0.6378959604766057,
"repo_name": "prats226/python-amazon-product-api-0.2.8",
"id": "a3540ac37594d081b65cfefcb8daeebdd5f74920",
"size": "3441",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/2011-08-01/CartModify-ca-modifying-empty-items-fails.xml",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "559563"
}
],
"symlink_target": ""
} |
/******************************************************************
iLBC Speech Coder ANSI-C Source Code
WebRtcIlbcfix_LsfInterpolate2PloyEnc.c
******************************************************************/
#include "defines.h"
#include "interpolate.h"
#include "lsf_to_poly.h"
/*----------------------------------------------------------------*
* lsf interpolator and conversion from lsf to a coefficients
* (subrutine to SimpleInterpolateLSF)
*---------------------------------------------------------------*/
void WebRtcIlbcfix_LsfInterpolate2PloyEnc(
    WebRtc_Word16 *a,    /* (o) lpc coefficients Q12 */
    WebRtc_Word16 *lsf1, /* (i) first set of lsf coefficients Q13 */
    WebRtc_Word16 *lsf2, /* (i) second set of lsf coefficients Q13 */
    WebRtc_Word16 coef,  /* (i) weighting coefficient to use between
                                lsf1 and lsf2 Q14 */
    WebRtc_Word16 length /* (i) length of coefficient vectors */
    ) {
  /* Stack-based scratch buffer for the interpolated LSF vector */
  WebRtc_Word16 interpolatedLsf[LPC_FILTERORDER];

  /* Weighted interpolation between the two LSF vectors */
  WebRtcIlbcfix_Interpolate(interpolatedLsf, lsf1, lsf2, coef, length);

  /* Convert the interpolated LSF representation to LPC coefficients */
  WebRtcIlbcfix_Lsf2Poly(a, interpolatedLsf);
}
| {
"content_hash": "ca665b48f46fc0a5dc5a11c8f62a31ac",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 69,
"avg_line_length": 32.8421052631579,
"alnum_prop": 0.5160256410256411,
"repo_name": "wilebeast/FireFox-OS",
"id": "3b0a34dd9b5f452e0cf6b15aeae8ac3d89917a0b",
"size": "1660",
"binary": false,
"copies": "38",
"ref": "refs/heads/master",
"path": "B2G/gecko/media/webrtc/trunk/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
```
usage: archive [-h] {configure,aws,local} ...
Bioinformatics archiving tools.
positional arguments:
{configure,aws,local}
configure Configure the data archiving tool.
aws AWS data archiving tool. Uploads files to AWS and
stores S3 metadata.
local Local data archiving tool. Uploads files to a local
archive target and stores metadata.
optional arguments:
-h, --help show this help message and exit
```
The `archive` tool has three sub programs:
### Configure
The `configure` tool should be run first to create some logging directories
and can be used to store your archiving resource (AWS or a local resource) information in a file so that the information
does not need to be reentered on the command line every time.
### AWS
The `aws` tool archives data to an Amazon Web Services S3 bucket that is configured to migrate data to the Glacier
low-cost archival storage system.
#### Usage
```
usage: archive aws [-h] --files FILES [--delete] [--bucket BUCKET]
optional arguments:
-h, --help show this help message and exit
--files FILES Files to be archived.
--delete Delete local files if archiving is successful.
--bucket BUCKET AWS S3 bucket name. Overrides bucket set in the
configuration file.
```
### Local
The `local` tool archives data to any local computer resource that is accessible via SSH/Rsync.
#### Usage
```
usage: archive local [-h] --files FILES [--delete] [--hostname HOSTNAME]
[--username USERNAME] [--path PATH]
optional arguments:
-h, --help show this help message and exit
--files FILES Files to be archived.
--delete Delete local files if archiving is successful.
--hostname HOSTNAME Local archive target computer/server hostname/IP.
--username USERNAME Local archive target computer/server username/login.
--path PATH Local archive target computer/server volume path.
```
| {
"content_hash": "49c09266bda67c37f592ca824b4e47b6",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 120,
"avg_line_length": 33.57377049180328,
"alnum_prop": 0.67138671875,
"repo_name": "danforthcenter/htcondor-tools",
"id": "16208fc1b8b8bfbb3b1d4af4788f6fd1f2d283e2",
"size": "2118",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "archive/README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "57703"
},
{
"name": "Shell",
"bytes": "7260"
},
{
"name": "TSQL",
"bytes": "2338"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Package com.cloudera.oryx.common.text (Oryx 2.4.1 API)</title>
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Package com.cloudera.oryx.common.text (Oryx 2.4.1 API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li class="navBarCell1Rev">Use</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?com/cloudera/oryx/common/text/package-use.html" target="_top">Frames</a></li>
<li><a href="package-use.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h1 title="Uses of Package com.cloudera.oryx.common.text" class="title">Uses of Package<br>com.cloudera.oryx.common.text</h1>
</div>
<div class="contentContainer">No usage of com.cloudera.oryx.common.text</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li class="navBarCell1Rev">Use</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?com/cloudera/oryx/common/text/package-use.html" target="_top">Frames</a></li>
<li><a href="package-use.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2014–2017. All rights reserved.</small></p>
</body>
</html>
| {
"content_hash": "1bb27ec86587456635d580dcfc16f467",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 125,
"avg_line_length": 33.56,
"alnum_prop": 0.6066746126340882,
"repo_name": "oncewang/oryx2",
"id": "836a22b8f9b2557cb2e381cf1b71874787fef927",
"size": "4195",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/apidocs/com/cloudera/oryx/common/text/package-use.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1217032"
},
{
"name": "Scala",
"bytes": "17280"
},
{
"name": "Shell",
"bytes": "15756"
}
],
"symlink_target": ""
} |
package provision
import (
"github.com/pkg/errors"
"github.com/tsuru/tsuru/db"
"gopkg.in/mgo.v2"
"gopkg.in/mgo.v2/bson"
)
// Errors returned by the pool management functions.
var (
	// ErrPublicDefaultPollCantHaveTeams is returned when trying to associate
	// teams with a public or default pool.
	ErrPublicDefaultPollCantHaveTeams = errors.New("Public/Default pool can't have teams.")
	// ErrDefaultPoolAlreadyExists is returned when a default pool already
	// exists and the operation was not forced.
	ErrDefaultPoolAlreadyExists = errors.New("Default pool already exists.")
	// ErrPoolNameIsRequired is returned when an empty pool name is given.
	ErrPoolNameIsRequired = errors.New("Pool name is required.")
	// ErrPoolNotFound is returned when the named pool does not exist.
	ErrPoolNotFound = errors.New("Pool does not exist.")
)
// Pool represents a named pool of provisioning resources. A pool may be
// restricted to a set of teams, open to everyone (Public), or act as the
// fallback when no other pool applies (Default — at most one may exist).
type Pool struct {
	Name        string `bson:"_id"` // unique name, used as the MongoDB document id
	Teams       []string            // teams allowed to use this pool (unused for public/default pools)
	Public      bool                // any team may use the pool
	Default     bool                // fallback pool; only one may be flagged default
	Provisioner string              // provisioner backing this pool; empty means the global default
}
// AddPoolOptions are the parameters accepted by AddPool.
type AddPoolOptions struct {
	Name        string // name of the new pool (required)
	Public      bool   // create the pool as public
	Default     bool   // create the pool as the default one
	Force       bool   // when Default is true, demote any existing default pool
	Provisioner string // provisioner backing the pool; empty means the global default
}
// UpdatePoolOptions are the parameters accepted by PoolUpdate. Nil pointer
// fields and the empty provisioner string leave the current value unchanged.
type UpdatePoolOptions struct {
	Default     *bool  // new value for the default flag, or nil to keep it
	Public      *bool  // new value for the public flag, or nil to keep it
	Force       bool   // when setting Default, demote any existing default pool
	Provisioner string // new provisioner name; empty keeps the current one
}
// GetProvisioner returns the provisioner configured for this pool, falling
// back to the globally configured default when the pool has none.
func (p *Pool) GetProvisioner() (Provisioner, error) {
	if p.Provisioner == "" {
		return GetDefault()
	}
	return Get(p.Provisioner)
}
// AddPool creates a new pool described by opts. It returns
// ErrPoolNameIsRequired when the name is empty. When opts.Default is set, it
// first enforces the single-default invariant via changeDefaultPool (which
// either demotes the current default or fails, depending on opts.Force).
func AddPool(opts AddPoolOptions) error {
	if opts.Name == "" {
		return ErrPoolNameIsRequired
	}
	conn, err := db.Conn()
	if err != nil {
		return err
	}
	defer conn.Close()
	if opts.Default {
		// Ensure at most one default pool exists before inserting.
		err = changeDefaultPool(opts.Force)
		if err != nil {
			return err
		}
	}
	pool := Pool{Name: opts.Name, Public: opts.Public, Default: opts.Default, Provisioner: opts.Provisioner}
	return conn.Pools().Insert(pool)
}
// changeDefaultPool enforces the single-default-pool invariant. If a default
// pool already exists it is demoted when force is true; otherwise
// ErrDefaultPoolAlreadyExists is returned. It is a no-op when no default
// pool exists.
func changeDefaultPool(force bool) error {
	conn, err := db.Conn()
	if err != nil {
		return err
	}
	defer conn.Close()
	p, err := listPools(bson.M{"default": true})
	if err != nil {
		return err
	}
	if len(p) > 0 {
		if !force {
			return ErrDefaultPoolAlreadyExists
		}
		// Demote the current default pool.
		return conn.Pools().UpdateId(p[0].Name, bson.M{"$set": bson.M{"default": false}})
	}
	return nil
}
// RemovePool deletes the pool named poolName, returning ErrPoolNotFound when
// no such pool exists.
func RemovePool(poolName string) error {
	conn, err := db.Conn()
	if err != nil {
		return err
	}
	defer conn.Close()
	err = conn.Pools().Remove(bson.M{"_id": poolName})
	if err == mgo.ErrNotFound {
		return ErrPoolNotFound
	}
	return err
}
// AddTeamsToPool associates the given teams with the pool named poolName.
// It fails with ErrPoolNotFound when the pool does not exist, with
// ErrPublicDefaultPollCantHaveTeams when the pool is public or default, and
// with a generic error when any of the teams is already associated with the
// pool (in which case nothing is added).
func AddTeamsToPool(poolName string, teams []string) error {
	conn, err := db.Conn()
	if err != nil {
		return err
	}
	defer conn.Close()
	var pool Pool
	err = conn.Pools().Find(bson.M{"_id": poolName}).One(&pool)
	if err == mgo.ErrNotFound {
		return ErrPoolNotFound
	}
	if err != nil {
		return err
	}
	if pool.Public || pool.Default {
		return ErrPublicDefaultPollCantHaveTeams
	}
	// Reject the whole operation if any of the new teams is already present.
	for _, newTeam := range teams {
		for _, team := range pool.Teams {
			if newTeam == team {
				return errors.New("Team already exists in pool.")
			}
		}
	}
	return conn.Pools().UpdateId(poolName, bson.M{"$push": bson.M{"teams": bson.M{"$each": teams}}})
}
// RemoveTeamsFromPool disassociates the given teams from the pool named
// poolName, returning ErrPoolNotFound when the pool does not exist. Teams
// not present in the pool are silently ignored ($pullAll semantics).
func RemoveTeamsFromPool(poolName string, teams []string) error {
	conn, err := db.Conn()
	if err != nil {
		return err
	}
	defer conn.Close()
	err = conn.Pools().UpdateId(poolName, bson.M{"$pullAll": bson.M{"teams": teams}})
	if err == mgo.ErrNotFound {
		return ErrPoolNotFound
	}
	return err
}
// ListPossiblePools returns the pools usable by the given teams: every
// public pool, the default pool, and any pool explicitly associated with one
// of the teams. A nil teams slice returns all pools.
func ListPossiblePools(teams []string) ([]Pool, error) {
	query := bson.M{}
	if teams != nil {
		byTeam := bson.M{
			"default": false,
			"public":  false,
			"teams":   bson.M{"$in": teams},
		}
		query["$or"] = []bson.M{
			{"public": true},
			{"default": true},
			byTeam,
		}
	}
	return listPools(query)
}
// ListPoolsForTeam returns the pools explicitly associated with the given team.
func ListPoolsForTeam(team string) ([]Pool, error) {
	return listPools(bson.M{"teams": team})
}
// listPools returns every pool matching the given MongoDB query. It returns
// an empty (non-nil) slice when nothing matches.
func listPools(query bson.M) ([]Pool, error) {
	conn, err := db.Conn()
	if err != nil {
		return nil, err
	}
	defer conn.Close()
	pools := []Pool{}
	err = conn.Pools().Find(query).All(&pools)
	if err != nil {
		return nil, err
	}
	return pools, nil
}
// GetPoolByName returns the pool named name, or ErrPoolNotFound when it does
// not exist.
func GetPoolByName(name string) (*Pool, error) {
	conn, err := db.Conn()
	if err != nil {
		return nil, err
	}
	defer conn.Close()
	var p Pool
	err = conn.Pools().FindId(name).One(&p)
	if err != nil {
		if err == mgo.ErrNotFound {
			return nil, ErrPoolNotFound
		}
		return nil, err
	}
	return &p, nil
}
// GetDefaultPool returns the pool flagged as default, or ErrPoolNotFound
// when no pool is flagged.
func GetDefaultPool() (*Pool, error) {
	conn, err := db.Conn()
	if err != nil {
		return nil, err
	}
	defer conn.Close()
	var result Pool
	err = conn.Pools().Find(bson.M{"default": true}).One(&result)
	switch err {
	case nil:
		return &result, nil
	case mgo.ErrNotFound:
		return nil, ErrPoolNotFound
	default:
		return nil, err
	}
}
// PoolUpdate changes the default/public flags and/or the provisioner of the
// pool named name, according to opts (nil pointers and the empty provisioner
// string mean "keep the current value"). When promoting a pool to default,
// the single-default invariant is enforced first via changeDefaultPool.
// It returns ErrPoolNotFound when the pool does not exist, and is a no-op
// when opts selects nothing to change.
func PoolUpdate(name string, opts UpdatePoolOptions) error {
	conn, err := db.Conn()
	if err != nil {
		return err
	}
	defer conn.Close()
	if opts.Default != nil && *opts.Default {
		// Demote (or refuse to replace, unless Force) any current default.
		err = changeDefaultPool(opts.Force)
		if err != nil {
			return err
		}
	}
	query := bson.M{}
	if opts.Default != nil {
		query["default"] = *opts.Default
	}
	if opts.Public != nil {
		query["public"] = *opts.Public
	}
	if opts.Provisioner != "" {
		query["provisioner"] = opts.Provisioner
	}
	if len(query) == 0 {
		// Nothing to change; avoid issuing an update with an empty $set
		// document, which MongoDB rejects as invalid.
		return nil
	}
	err = conn.Pools().UpdateId(name, bson.M{"$set": query})
	if err == mgo.ErrNotFound {
		return ErrPoolNotFound
	}
	return err
}
| {
"content_hash": "b1788a483f15b4a7b99d60ecd0f63b37",
"timestamp": "",
"source": "github",
"line_count": 231,
"max_line_length": 105,
"avg_line_length": 20.99134199134199,
"alnum_prop": 0.6436378634770056,
"repo_name": "gwmoura/tsuru",
"id": "dfb75abbe3fdabf672bfcd0b1eba61d480b9bb02",
"size": "5008",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "provision/pool.go",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Go",
"bytes": "3694151"
},
{
"name": "HTML",
"bytes": "315"
},
{
"name": "Makefile",
"bytes": "3447"
},
{
"name": "Shell",
"bytes": "12791"
}
],
"symlink_target": ""
} |
package org.gradle.cache.internal;
import com.google.common.collect.Sets;
import org.gradle.util.GradleVersion;
import java.util.SortedSet;
public class UsedGradleVersionsFromGradleUserHomeCaches implements UsedGradleVersions {
private final VersionSpecificCacheDirectoryScanner directoryScanner;
public UsedGradleVersionsFromGradleUserHomeCaches(CacheScopeMapping cacheScopeMapping) {
directoryScanner = new VersionSpecificCacheDirectoryScanner(cacheScopeMapping.getRootDirectory(null));
}
@Override
public SortedSet<GradleVersion> getUsedGradleVersions() {
SortedSet<GradleVersion> result = Sets.newTreeSet();
for (VersionSpecificCacheDirectory cacheDir : directoryScanner.getExistingDirectories()) {
result.add(cacheDir.getVersion());
}
return result;
}
}
| {
"content_hash": "32287f41801b7212b729ef51a2db3433",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 110,
"avg_line_length": 32.42307692307692,
"alnum_prop": 0.7722419928825622,
"repo_name": "robinverduijn/gradle",
"id": "965813d3ec59163a8634e300505a9eba3f87f7df",
"size": "1458",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "subprojects/core/src/main/java/org/gradle/cache/internal/UsedGradleVersionsFromGradleUserHomeCaches.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "277"
},
{
"name": "Brainfuck",
"bytes": "54"
},
{
"name": "C",
"bytes": "98580"
},
{
"name": "C++",
"bytes": "1805886"
},
{
"name": "CSS",
"bytes": "188237"
},
{
"name": "CoffeeScript",
"bytes": "620"
},
{
"name": "GAP",
"bytes": "424"
},
{
"name": "Gherkin",
"bytes": "191"
},
{
"name": "Groovy",
"bytes": "25537093"
},
{
"name": "HTML",
"bytes": "77104"
},
{
"name": "Java",
"bytes": "24906063"
},
{
"name": "JavaScript",
"bytes": "209481"
},
{
"name": "Kotlin",
"bytes": "2846791"
},
{
"name": "Objective-C",
"bytes": "840"
},
{
"name": "Objective-C++",
"bytes": "441"
},
{
"name": "Perl",
"bytes": "37849"
},
{
"name": "Python",
"bytes": "57"
},
{
"name": "Ruby",
"bytes": "16"
},
{
"name": "Scala",
"bytes": "29814"
},
{
"name": "Shell",
"bytes": "7212"
},
{
"name": "Swift",
"bytes": "6972"
},
{
"name": "XSLT",
"bytes": "42845"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<HTML>
<HEAD>
<TITLE> UPNP_E_INTERNAL_ERROR [-911]</TITLE>
<META NAME="GENERATOR" CONTENT="DOC++ 3.4.10">
</HEAD>
<BODY BGCOLOR="#ffffff">
<H2> <A HREF="#DOC.DOCU">UPNP_E_INTERNAL_ERROR [-911]</A></H2><A NAME="DOC.DOCU"></A>
<BLOCKQUOTE><TT>UPNP_E_INTERNAL_ERROR</TT> is the generic error code for internal
conditions not covered by other error codes.</BLOCKQUOTE>
<DL><DT><DD></DL><P><P><I><A HREF="index.html">Alphabetic index</A></I></P><HR>
<BR>
This page was generated with the help of <A HREF="http://docpp.sourceforge.net">DOC++</A>.
</BODY>
</HTML>
| {
"content_hash": "6600e94910fb5eb73dadf452e4e35640",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 90,
"avg_line_length": 37.23529411764706,
"alnum_prop": 0.660347551342812,
"repo_name": "commshare/pupnp",
"id": "b736bc58bec33c1c59dfb5cf7433e7112fd35e2f",
"size": "633",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "docs/dist/html/upnp/UPNP_E_INTERNAL_ERROR-911.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1589517"
},
{
"name": "C++",
"bytes": "5353"
},
{
"name": "Shell",
"bytes": "1328"
},
{
"name": "TeX",
"bytes": "58305"
}
],
"symlink_target": ""
} |
const HtmlRelation = require('../HtmlRelation');
/**
 * Relation from an HTML element to the inline CSS in its `style` attribute.
 * The attribute value is wrapped in a dummy `bogusselector { ... }` rule so
 * that it can be handled as a complete stylesheet.
 */
class HtmlStyleAttribute extends HtmlRelation {
  /**
   * Return a relation descriptor for `node` when it is an element carrying a
   * `style` attribute; otherwise return undefined.
   */
  static getRelationsFromNode(node) {
    if (node.nodeType === node.ELEMENT_NODE && node.matches('[style]')) {
      return {
        type: 'HtmlStyleAttribute',
        to: {
          type: 'Css',
          isExternalizable: false, // inline style cannot be moved to its own file
          text: `bogusselector {${node.getAttribute('style')}}`,
        },
        node,
      };
    }
  }

  // The raw style attribute value acts as this relation's href.
  get href() {
    return this.node.getAttribute('style');
  }

  set href(href) {
    return this.node.setAttribute('style', href);
  }

  /**
   * Write the (possibly rewritten) CSS back into the style attribute,
   * stripping the dummy selector wrapper, and mark the HTML asset dirty.
   */
  inlineHtmlRelation() {
    this.href = this.to.text.replace(/^bogusselector\s*\{\s*|\s*}\s*$/g, '');
    this.from.markDirty();
  }

  // Style attributes are always inline; attaching a new one is unsupported.
  attach() {
    throw new Error('HtmlStyleAttribute.attach: Not supported.');
  }

  // Remove the style attribute from the element and detach the relation.
  detach() {
    this.node.removeAttribute('style');
    this.node = undefined;
    return super.detach();
  }
}
// Static relation metadata: the target asset is CSS, and the href is inline
// (the CSS lives in the attribute value rather than at a URL).
Object.assign(HtmlStyleAttribute.prototype, {
  targetType: 'Css',
  _hrefType: 'inline',
});

module.exports = HtmlStyleAttribute;
| {
"content_hash": "cd028ee008d718c284e656db614df420",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 77,
"avg_line_length": 22.25531914893617,
"alnum_prop": 0.607074569789675,
"repo_name": "assetgraph/assetgraph",
"id": "4bf53d24237dce8a7f9117488ee2c9c3ea7c3b48",
"size": "1046",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/relations/Html/HtmlStyleAttribute.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "1140587"
}
],
"symlink_target": ""
} |
package initServlet;
import java.sql.Timestamp;
import java.util.List;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServlet;
import org.apache.commons.lang.StringUtils;
import org.springframework.web.context.support.WebApplicationContextUtils;
import com.swfarm.biz.chain.bo.Currency;
import com.swfarm.biz.chain.srv.ChainService;
import com.swfarm.biz.oa.bo.SiteInfoConfig;
import com.swfarm.biz.oa.bo.TabItem;
import com.swfarm.biz.oa.srv.ProjectPlanService;
import com.swfarm.biz.oa.srv.SysConfigService;
import com.swfarm.biz.oa.srv.UserService;
import com.swfarm.biz.product.srv.ProductService;
import com.swfarm.biz.schedule.srv.ScheduleService;
import com.swfarm.biz.warehouse.bo.AllocationProductVoucher;
import com.swfarm.biz.warehouse.srv.WarehouseService;
import com.swfarm.pub.framework.Config;
import com.swfarm.pub.framework.Env;
import com.swfarm.pub.framework.FormNumberCache;
import com.swfarm.pub.utils.DateUtils;
import com.swfarm.pub.utils.SpringUtils;
import com.swfarm.pub.utils.VelocityUtils;
/**
 * Startup servlet that bootstraps the application: it loads static
 * configuration, exposes the Spring context, caches site/currency settings
 * and form-number sequences, and optionally starts the job scheduler.
 */
public class InitServlet extends HttpServlet {

	/**
	 * Runs once at servlet container startup. The steps below are
	 * order-dependent: the Spring context must be published before any
	 * bean lookups, and the currency/form-number caches are primed before
	 * the scheduler may start.
	 */
	public void init() {
		// Load static configuration and publish the Spring context.
		Config.initConfig();
		ServletContext context = getServletContext();
		SpringUtils.setApplicationContext(WebApplicationContextUtils
				.getWebApplicationContext(context));
		// Site info: expose it to JSPs and copy FTP settings into Env.
		SysConfigService sysConfigService = (SysConfigService) SpringUtils
				.getBean("sysConfigService");
		SiteInfoConfig siteInfoConfig = sysConfigService.findSiteInfoConfig();
		if (siteInfoConfig != null) {
			context.setAttribute("siteInfoConfig", siteInfoConfig);
			if (StringUtils.isNotEmpty(siteInfoConfig.getFtpUrl())) {
				Env.FILE_SERVER_HOST = siteInfoConfig.getFtpUrl();
				Env.FILE_SERVER_PORT = siteInfoConfig.getFtpPort();
				Env.FILE_SERVER_USERNAME = siteInfoConfig.getUsername();
				Env.FILE_SERVER_PASSWORD = siteInfoConfig.getPassword();
			}
		}
		// Record the webapp root path, normalized to end with a separator.
		Env.APP_REAL_PATH = context.getRealPath("/");
		if ((!Env.APP_REAL_PATH.endsWith("/"))
				&& (!Env.APP_REAL_PATH.endsWith("\\"))) {
			Env.APP_REAL_PATH += "/";
		}
		// Publish every configured tab item as a servlet-context attribute,
		// keyed by the tab's name.
		UserService userService = (UserService) SpringUtils
				.getBean("userService");
		List tabItems = userService.findAllTabItems();
		for (int i = 0; i < tabItems.size(); i++) {
			TabItem tabItem = (TabItem) tabItems.get(i);
			context.setAttribute(tabItem.getName(), tabItem);
		}
		// Cache currency buying rates in Env. Rates are stored scaled by
		// 100, hence the division (null means the currency is not configured
		// and the Env default is kept).
		ChainService chainService = (ChainService) SpringUtils
				.getBean("chainService");
		Currency currency = chainService.findCurrencyByCode("USD");
		if (currency != null) {
			Env.CURRENCY_USD = Double.valueOf(currency.getBuyingRate()
					.doubleValue() / 100.0D);
		}
		currency = chainService.findCurrencyByCode("GBP");
		if (currency != null) {
			Env.CURRENCY_GBP = Double.valueOf(currency.getBuyingRate()
					.doubleValue() / 100.0D);
		}
		currency = chainService.findCurrencyByCode("EUR");
		if (currency != null) {
			Env.CURRENCY_EUR = Double.valueOf(currency.getBuyingRate()
					.doubleValue() / 100.0D);
		}
		currency = chainService.findCurrencyByCode("AUD");
		if (currency != null) {
			Env.CURRENCY_AUD = Double.valueOf(currency.getBuyingRate()
					.doubleValue() / 100.0D);
		}
		currency = chainService.findCurrencyByCode("PHP");
		if (currency != null) {
			Env.CURRENCY_PHP = Double.valueOf(currency.getBuyingRate()
					.doubleValue() / 100.0D);
		}
		// Initialize the Velocity template engine.
		VelocityUtils.init();
		// Prime the form-number cache with the current maximum number of
		// each voucher/document type, so new numbers continue the sequence.
		ProjectPlanService projectPlanService = (ProjectPlanService) SpringUtils
				.getBean("projectPlanService");
		ProductService productService = (ProductService) SpringUtils
				.getBean("productService");
		WarehouseService warehouseService = (WarehouseService) SpringUtils
				.getBean("warehouseService");
		FormNumberCache.putToNumberMap("PT", projectPlanService
				.getMaxProjectTaskNo());
		FormNumberCache.putToNumberMap("CG", chainService
				.getMaxPurchaseTaskNo());
		FormNumberCache.putToNumberMap("PRV", chainService
				.getMaxPurchaseRequestVoucherNo());
		FormNumberCache.putToNumberMap("ZJD", chainService
				.getMaxInspectionVoucherNo());
		FormNumberCache.putToNumberMap("SSSR", chainService
				.getMaxSkuSaleStatisticReportNo());
		FormNumberCache.putToNumberMap("PI", chainService
				.getMaxProformaInvoiceVoucherNo());
		FormNumberCache.putToNumberMap("TH", chainService
				.getMaxRefundVoucherNo());
		FormNumberCache.putToNumberMap("WCOV", warehouseService
				.getMaxWarehouseCheckOutVoucherNo());
		FormNumberCache.putToNumberMap("WCIV", warehouseService
				.getMaxWarehouseCheckInVoucherNo());
		FormNumberCache.putToNumberMap("WTV", warehouseService
				.getMaxWarehouseTransferVoucherNo());
		FormNumberCache.putToNumberMap(AllocationProductVoucher.PREFIX,
				warehouseService.getMaxAllocationProductVoucherNo());
		FormNumberCache.putToNumberMap("APVL", warehouseService
				.getMaxAllocationProductVoucherListNo());
		FormNumberCache.putToNumberMap("ARTICLE_NUMBER", productService
				.getMaxArticleNumber());
		FormNumberCache.putToNumberMap("CUSTOMER_CODE", chainService
				.getMaxCustomerCode());
		FormNumberCache.putToNumberMap("VENDOR_CODE", chainService
				.getMaxVendorCode());
		// Start the scheduler only on job-server nodes configured for
		// automatic startup.
		if ((Env.JOB_SERVER.booleanValue())
				&& (Env.JOB_SERVER_AUTO.booleanValue())) {
			ScheduleService scheduleService = (ScheduleService) SpringUtils
					.getBean("scheduleService");
			scheduleService.startScheduleService();
		}
		// Record the moment the server finished starting up.
		Env.SERVER_STARTTIME = new Timestamp(System.currentTimeMillis());
	}

	public void destroy() {
		super.destroy();
	}

	/**
	 * Ad-hoc manual check of the date formatting helper; the result is
	 * discarded.
	 * NOTE(review): in SimpleDateFormat patterns "SS" is the millisecond
	 * field; "ss" (seconds) was probably intended — confirm before reuse.
	 */
	public static void main(String[] args) {
		DateUtils.dateToString(new Timestamp(System.currentTimeMillis()),
				"yyyy/MM/dd HH:mm:SS");
	}
}
"content_hash": "cf5d85e391f554fa3a50b90ff073ccbd",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 74,
"avg_line_length": 40.212765957446805,
"alnum_prop": 0.7400352733686067,
"repo_name": "zhangqiang110/my4j",
"id": "35c43a53166a792dfee4bdcf4d279398dc064ca7",
"size": "5670",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pms/src/test/java/initServlet/InitServlet.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "41428"
}
],
"symlink_target": ""
} |
#if !defined(FUSION_LESS_EQUAL_05052005_1141)
#define FUSION_LESS_EQUAL_05052005_1141
#include <lslboost/mpl/bool.hpp>
#include <lslboost/fusion/iterator/deref.hpp>
#include <lslboost/fusion/iterator/next.hpp>
#include <lslboost/fusion/iterator/equal_to.hpp>
namespace lslboost { namespace fusion { namespace detail
{
    // Compile-time helper that compares two fusion sequences element-wise
    // with operator<=, giving lexicographic "less or equal" semantics.
    // The recursion walks both sequences in lockstep and terminates when the
    // end of the first sequence is reached.
    template <typename Seq1, typename Seq2>
    struct sequence_less_equal
    {
        typedef typename result_of::end<Seq1>::type end1_type;
        typedef typename result_of::end<Seq2>::type end2_type;

        // Base case: the end of the first sequence was reached, so the
        // (empty) remainder compares less-or-equal.
        template <typename I1, typename I2>
        static bool
        call(I1 const&, I2 const&, mpl::true_)
        {
            return true;
        }

        // Recursive case: true immediately when *a is strictly "less" than
        // *b (i.e. !(*b <= *a)); when the elements are equivalent, the
        // result is decided by the remaining tails.
        template <typename I1, typename I2>
        static bool
        call(I1 const& a, I2 const& b, mpl::false_)
        {
            return *a <= *b
                && (!(*b <= *a) || call(fusion::next(a), fusion::next(b)));
        }

        // Entry point: dispatch on whether iterator a has reached the end
        // of the first sequence.
        template <typename I1, typename I2>
        static bool
        call(I1 const& a, I2 const& b)
        {
            typename result_of::equal_to<I1, end1_type>::type eq;
            return call(a, b, eq);
        }
    };
}}}
#endif
| {
"content_hash": "cfba3305d94072118496d2b493810fef",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 75,
"avg_line_length": 27.209302325581394,
"alnum_prop": 0.5837606837606838,
"repo_name": "gazzlab/LSL-gazzlab-branch",
"id": "0247beae9c7e61b0620c88c0bbdd1197b052220d",
"size": "1574",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "liblsl/external/lslboost/fusion/sequence/comparison/detail/less_equal.hpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "63003"
},
{
"name": "C++",
"bytes": "53791427"
},
{
"name": "Objective-C",
"bytes": "39516"
},
{
"name": "Perl",
"bytes": "2051"
},
{
"name": "Python",
"bytes": "4119"
},
{
"name": "Shell",
"bytes": "2310"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
Index Fungorum
#### Published in
Hedwigia 65: 253 (1925)
#### Original name
Phyllosticta gentianellae C. Massal.
### Remarks
null | {
"content_hash": "fd999e04c0e53f3ed6d849745d9a429a",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 36,
"avg_line_length": 12.307692307692308,
"alnum_prop": 0.7125,
"repo_name": "mdoering/backbone",
"id": "3b8468a6d59e43d5250b74b394929c19963ad7a0",
"size": "227",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Ascomycota/Dothideomycetes/Asteromella/Asteromella gentianellae/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
require 'fileutils'
# Milkshake wires a set of collaborating components (environment, loader,
# linker, cache, ...) into a Rails application. All collaborators are
# lazily instantiated and memoized on the module.
module Milkshake
  # Rails version this codebase targets.
  RAILS_VERSION = "2.3.4"

  autoload :DependencyResolver, 'milkshake/dependency_resolver'
  autoload :Environment, 'milkshake/environment'
  autoload :Validator, 'milkshake/validator'
  autoload :Template, 'milkshake/template'
  autoload :Extender, 'milkshake/extender'
  autoload :Linker, 'milkshake/linker'
  autoload :Loader, 'milkshake/loader'
  autoload :Cache, 'milkshake/cache'
  autoload :App, 'milkshake/app'

  # Hooks into Rails internals (boot, initializer, migrations, ...).
  module RailsExtentions
    autoload :Configuration, 'milkshake/rails_extentions/configuration'
    autoload :Initializer, 'milkshake/rails_extentions/initializer'
    autoload :Migrator, 'milkshake/rails_extentions/migrations'
    autoload :GemBoot, 'milkshake/rails_extentions/boot'
    autoload :VendorBoot, 'milkshake/rails_extentions/boot'
  end

  # Hooks into Rubygems.
  module RubygemsExtentions
    autoload :Specification, 'milkshake/rubygems_extentions/specification'
  end

  class << self
    # Path to the YAML configuration file; defaults to
    # RAILS_ROOT/config/milkshake.yml (see #environment).
    attr_accessor :configuration_file
    # Path to the cache file; defaults to tmp/cache/milkshake.cache
    # (see #cache_file).
    attr_accessor :cache_file

    # Eagerly instantiate all collaborators, in dependency order.
    def load!
      cache
      validator
      environment
      loader
      linker
      extender
    end

    # Memoized Environment built from the cache and the configuration file.
    # NOTE: relies on the RAILS_ROOT constant being defined by Rails.
    def environment
      self.configuration_file ||= File.join(RAILS_ROOT, 'config', 'milkshake.yml')
      @environment ||= Environment.load(self.cache, self.configuration_file)
    end

    # Memoized Validator.
    def validator
      @validator ||= Validator.new(self.cache)
    end

    # Memoized Loader.
    def loader
      @loader ||= Loader.new(self.environment, self.cache)
    end

    # Memoized Linker.
    def linker
      @linker ||= Linker.new(self.environment, self.validator, self.cache)
    end

    # Memoized Extender.
    def extender
      @extender ||= Extender.new
    end

    # Memoized Cache backed by #cache_file.
    def cache
      @cache ||= Cache.new(self.cache_file)
    end

    # Default cache file location under the Rails tmp directory.
    def cache_file
      @cache_file ||= File.join(RAILS_ROOT, 'tmp', 'cache', 'milkshake.cache')
    end

    # Flush the cache to disk.
    def persist!
      cache.persist!
    end
  end
end
| {
"content_hash": "9d2d103fea40b98c4e0e4aa896ccd0e8",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 82,
"avg_line_length": 26.36842105263158,
"alnum_prop": 0.6372255489021956,
"repo_name": "historian/milkshake",
"id": "a92f1aa72ca639b9da6b3caa8108198940b9107d",
"size": "2005",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/milkshake.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "41151"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
namespace EFSqlTranslator.Translation.Extensions
{
    /// <summary> Provides extension methods for objects that implement <see cref="T:System.Collections.Generic.IEnumerable`1" />. </summary>
    public static class EnumerableExtensions
    {
        /// <summary> Returns the number of distinct values obtained by applying a projection to each element of the sequence. </summary>
        /// <typeparam name="TSource"> The type of the elements of <paramref name="source" />. </typeparam>
        /// <typeparam name="TOut"> The type of the values produced by <paramref name="selector" />. </typeparam>
        /// <param name="source"> A sequence whose projected values are to be counted. </param>
        /// <param name="selector"> A transform function applied to each element; the distinct results are counted. </param>
        /// <returns> The number of distinct values produced by applying <paramref name="selector" /> to the elements of <paramref name="source" />. </returns>
        public static int DistinctCount<TSource, TOut>(this IEnumerable<TSource> source, Func<TSource, TOut> selector)
        {
            return source.Select(selector).Distinct().Count();
        }
    }
}
| {
"content_hash": "48bb1876f2a0d50d4d712a84cf70cfec",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 155,
"avg_line_length": 54.8,
"alnum_prop": 0.708029197080292,
"repo_name": "ethanli83/EFSqlTranslator",
"id": "fd29df5f3fa222ff0b182f5791c026fab963164b",
"size": "1098",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "EFSqlTranslator.Translation/Extensions/EnumerableExtensions.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "320806"
}
],
"symlink_target": ""
} |
// Production environment settings.
module.exports = {
  // Presumably makes missing translation keys raise instead of silently
  // falling back — confirm against the i18n setup.
  throwMissingTranslationError: true,
  // Use HTML5 pushState-style URLs (no hash-fragment routing).
  html5Mode: true,
  // Base URL of the deployed site.
  baseUrl: "https://faucetto.io"
};
| {
"content_hash": "3317c32925c936b22284f92464221599",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 39,
"avg_line_length": 23.6,
"alnum_prop": 0.6779661016949152,
"repo_name": "Faucette/faucette-web",
"id": "651bada5c8f78c9a96d44260bf85c980ca82f75c",
"size": "118",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/settings/production.js",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "600"
},
{
"name": "HTML",
"bytes": "1068"
},
{
"name": "JavaScript",
"bytes": "78611"
}
],
"symlink_target": ""
} |
package vn.com.hiringviet.common;
/**
 * Record status values, each carrying a fixed integer code.
 */
public enum StatusEnum {

	/** Inactive record (code 0). */
	INACTIVE(0),

	/** Active record (code 1). */
	ACTIVE(1),

	/** Deleted record (code 9). */
	DELETE(9);

	// Integer code associated with this status.
	private int value;

	/**
	 * Instantiates a new status enum.
	 *
	 * @param value the integer code for this status
	 */
	private StatusEnum(int value) {
		this.value = value;
	}

	/**
	 * Gets the integer code of this status.
	 *
	 * @return the integer code
	 */
	public int getValue() {
		return this.value;
	}
}
| {
"content_hash": "1b3f532e1a97aa121cf866ea63e2c460",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 35,
"avg_line_length": 13.179487179487179,
"alnum_prop": 0.5933852140077821,
"repo_name": "aholake/hiringviet",
"id": "eed5c8c509e904cff29b6515d544d0fec9af8206",
"size": "514",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/vn/com/hiringviet/common/StatusEnum.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "377195"
},
{
"name": "HTML",
"bytes": "204009"
},
{
"name": "Java",
"bytes": "692220"
},
{
"name": "JavaScript",
"bytes": "377271"
},
{
"name": "PHP",
"bytes": "2157"
}
],
"symlink_target": ""
} |
package org.apache.commons.math4.legacy.stat.inference;
import org.apache.commons.statistics.distribution.NormalDistribution;
import org.apache.commons.math4.legacy.exception.ConvergenceException;
import org.apache.commons.math4.legacy.exception.DimensionMismatchException;
import org.apache.commons.math4.legacy.exception.MaxCountExceededException;
import org.apache.commons.math4.legacy.exception.NoDataException;
import org.apache.commons.math4.legacy.exception.NullArgumentException;
import org.apache.commons.math4.legacy.exception.NumberIsTooLargeException;
import org.apache.commons.math4.legacy.stat.ranking.NaNStrategy;
import org.apache.commons.math4.legacy.stat.ranking.NaturalRanking;
import org.apache.commons.math4.legacy.stat.ranking.TiesStrategy;
import org.apache.commons.math4.core.jdkmath.JdkMath;
/**
 * An implementation of the Wilcoxon signed-rank test.
 * <p>
 * Supports an exact p-value for small samples (x.length &lt;= 30) and a
 * normal approximation otherwise.
 */
public class WilcoxonSignedRankTest {
    /** Ranking algorithm. */
    private NaturalRanking naturalRanking;
    /**
     * Create a test instance where NaN's are left in place and ties get
     * the average of applicable ranks. Use this unless you are very sure
     * of what you are doing.
     */
    public WilcoxonSignedRankTest() {
        naturalRanking = new NaturalRanking(NaNStrategy.FIXED,
                TiesStrategy.AVERAGE);
    }
    /**
     * Create a test instance using the given strategies for NaN's and ties.
     * Only use this if you are sure of what you are doing.
     *
     * @param nanStrategy
     *            specifies the strategy that should be used for Double.NaN's
     * @param tiesStrategy
     *            specifies the strategy that should be used for ties
     */
    public WilcoxonSignedRankTest(final NaNStrategy nanStrategy,
                                  final TiesStrategy tiesStrategy) {
        naturalRanking = new NaturalRanking(nanStrategy, tiesStrategy);
    }
    /**
     * Ensures that the provided arrays fulfills the assumptions.
     *
     * @param x first sample
     * @param y second sample
     * @throws NullArgumentException if {@code x} or {@code y} are {@code null}.
     * @throws NoDataException if {@code x} or {@code y} are zero-length.
     * @throws DimensionMismatchException if {@code x} and {@code y} do not
     * have the same length.
     */
    private void ensureDataConformance(final double[] x, final double[] y)
        throws NullArgumentException, NoDataException, DimensionMismatchException {
        if (x == null ||
            y == null) {
                throw new NullArgumentException();
        }
        if (x.length == 0 ||
            y.length == 0) {
            throw new NoDataException();
        }
        if (y.length != x.length) {
            throw new DimensionMismatchException(y.length, x.length);
        }
    }
    /**
     * Calculates y[i] - x[i] for all i.
     *
     * @param x first sample
     * @param y second sample
     * @return z = y - x
     */
    private double[] calculateDifferences(final double[] x, final double[] y) {
        final double[] z = new double[x.length];
        for (int i = 0; i < x.length; ++i) {
            z[i] = y[i] - x[i];
        }
        return z;
    }
    /**
     * Calculates |z[i]| for all i.
     *
     * @param z sample
     * @return |z|
     * @throws NullArgumentException if {@code z} is {@code null}
     * @throws NoDataException if {@code z} is zero-length.
     */
    private double[] calculateAbsoluteDifferences(final double[] z)
        throws NullArgumentException, NoDataException {
        if (z == null) {
            throw new NullArgumentException();
        }
        if (z.length == 0) {
            throw new NoDataException();
        }
        final double[] zAbs = new double[z.length];
        for (int i = 0; i < z.length; ++i) {
            zAbs[i] = JdkMath.abs(z[i]);
        }
        return zAbs;
    }
    /**
     * Computes the <a
     * href="http://en.wikipedia.org/wiki/Wilcoxon_signed-rank_test">
     * Wilcoxon signed ranked statistic</a> comparing mean for two related
     * samples or repeated measurements on a single sample.
     * <p>
     * This statistic can be used to perform a Wilcoxon signed ranked test
     * evaluating the null hypothesis that the two related samples or repeated
     * measurements on a single sample has equal mean.
     * </p>
     * <p>
     * Let X<sub>i</sub> denote the i'th individual of the first sample and
     * Y<sub>i</sub> the related i'th individual in the second sample. Let
     * Z<sub>i</sub> = Y<sub>i</sub> - X<sub>i</sub>.
     * </p>
     * <p>
     * <strong>Preconditions</strong>:
     * <ul>
     * <li>The differences Z<sub>i</sub> must be independent.</li>
     * <li>Each Z<sub>i</sub> comes from a continuous population (they must be
     * identical) and is symmetric about a common median.</li>
     * <li>The values that X<sub>i</sub> and Y<sub>i</sub> represent are
     * ordered, so the comparisons greater than, less than, and equal to are
     * meaningful.</li>
     * </ul>
     *
     * @param x the first sample
     * @param y the second sample
     * @return wilcoxonSignedRank statistic (the larger of W+ and W-)
     * @throws NullArgumentException if {@code x} or {@code y} are {@code null}.
     * @throws NoDataException if {@code x} or {@code y} are zero-length.
     * @throws DimensionMismatchException if {@code x} and {@code y} do not
     * have the same length.
     */
    public double wilcoxonSignedRank(final double[] x, final double[] y)
        throws NullArgumentException, NoDataException, DimensionMismatchException {
        ensureDataConformance(x, y);
        // throws IllegalArgumentException if x and y are not correctly
        // specified
        final double[] z = calculateDifferences(x, y);
        final double[] zAbs = calculateAbsoluteDifferences(z);
        final double[] ranks = naturalRanking.rank(zAbs);
        // W+ = sum of the ranks of the positive differences
        double wPlus = 0;
        for (int i = 0; i < z.length; ++i) {
            if (z[i] > 0) {
                wPlus += ranks[i];
            }
        }
        // W- follows from W+ since all ranks sum to n(n+1)/2
        final int n = x.length;
        final double wMinus = (((double) (n * (n + 1))) / 2.0) - wPlus;
        return JdkMath.max(wPlus, wMinus);
    }
    /**
     * Algorithm inspired by
     * http://www.fon.hum.uva.nl/Service/Statistics/Signed_Rank_Algorihms.html#C
     * by Rob van Son, Institute of Phonetic Sciences &amp; IFOTT,
     * University of Amsterdam.
     *
     * @param wMax largest Wilcoxon signed rank value
     * @param n number of subjects (corresponding to x.length)
     * @return two-sided exact p-value
     */
    private double calculateExactPValue(final double wMax, final int n) {
        // Total number of outcomes (equal to 2^N but a lot faster)
        final int m = 1 << n;
        int largerRankSums = 0;
        for (int i = 0; i < m; ++i) {
            int rankSum = 0;
            // Generate all possible rank sums
            for (int j = 0; j < n; ++j) {
                // (i >> j) & 1 extract i's j-th bit from the right
                if (((i >> j) & 1) == 1) {
                    rankSum += j + 1;
                }
            }
            if (rankSum >= wMax) {
                ++largerRankSums;
            }
        }
        /*
         * largerRankSums / m gives the one-sided p-value, so it's multiplied
         * with 2 to get the two-sided p-value
         */
        return 2 * ((double) largerRankSums) / ((double) m);
    }
    /**
     * Normal approximation of the p-value, for samples too large for the
     * exact enumeration.
     *
     * @param wMin smallest Wilcoxon signed rank value
     * @param n number of subjects (corresponding to x.length)
     * @return two-sided asymptotic p-value
     */
    private double calculateAsymptoticPValue(final double wMin, final int n) {
        final double es = (double) (n * (n + 1)) / 4.0;
        /* Same as (but saves computations):
         * final double VarW = ((double) (N * (N + 1) * (2*N + 1))) / 24;
         */
        final double varS = es * ((double) (2 * n + 1) / 6.0);
        // - 0.5 is a continuity correction
        final double z = (wMin - es - 0.5) / JdkMath.sqrt(varS);
        // No try-catch or advertised exception because args are valid
        // pass a null rng to avoid unneeded overhead as we will not sample from this distribution
        final NormalDistribution standardNormal = NormalDistribution.of(0, 1);
        return 2*standardNormal.cumulativeProbability(z);
    }
    /**
     * Returns the <i>observed significance level</i>, or <a href=
     * "http://www.cas.lancs.ac.uk/glossary_v1.1/hyptest.html#pvalue">
     * p-value</a>, associated with a <a
     * href="http://en.wikipedia.org/wiki/Wilcoxon_signed-rank_test">
     * Wilcoxon signed ranked statistic</a> comparing mean for two related
     * samples or repeated measurements on a single sample.
     * <p>
     * Let X<sub>i</sub> denote the i'th individual of the first sample and
     * Y<sub>i</sub> the related i'th individual in the second sample. Let
     * Z<sub>i</sub> = Y<sub>i</sub> - X<sub>i</sub>.
     * </p>
     * <p>
     * <strong>Preconditions</strong>:
     * <ul>
     * <li>The differences Z<sub>i</sub> must be independent.</li>
     * <li>Each Z<sub>i</sub> comes from a continuous population (they must be
     * identical) and is symmetric about a common median.</li>
     * <li>The values that X<sub>i</sub> and Y<sub>i</sub> represent are
     * ordered, so the comparisons greater than, less than, and equal to are
     * meaningful.</li>
     * </ul>
     *
     * @param x the first sample
     * @param y the second sample
     * @param exactPValue
     *            if the exact p-value is wanted (only works for x.length &lt;= 30;
     *            if true and x.length &gt; 30, a NumberIsTooLargeException is
     *            thrown because exact calculations may take too long)
     * @return p-value
     * @throws NullArgumentException if {@code x} or {@code y} are {@code null}.
     * @throws NoDataException if {@code x} or {@code y} are zero-length.
     * @throws DimensionMismatchException if {@code x} and {@code y} do not
     * have the same length.
     * @throws NumberIsTooLargeException if {@code exactPValue} is {@code true}
     * and {@code x.length} > 30
     * @throws ConvergenceException if the p-value can not be computed due to
     * a convergence error
     * @throws MaxCountExceededException if the maximum number of iterations
     * is exceeded
     */
    public double wilcoxonSignedRankTest(final double[] x, final double[] y,
                                         final boolean exactPValue)
        throws NullArgumentException, NoDataException, DimensionMismatchException,
        NumberIsTooLargeException, ConvergenceException, MaxCountExceededException {
        ensureDataConformance(x, y);
        final int n = x.length;
        final double wMax = wilcoxonSignedRank(x, y);
        // exact enumeration is 2^n outcomes: refuse anything beyond n = 30
        if (exactPValue && n > 30) {
            throw new NumberIsTooLargeException(n, 30, true);
        }
        if (exactPValue) {
            return calculateExactPValue(wMax, n);
        } else {
            final double wMin = ( (double)(n*(n+1)) / 2.0 ) - wMax;
            return calculateAsymptoticPValue(wMin, n);
        }
    }
}
| {
"content_hash": "a3e13a1ae4353ad26432714550e58955",
"timestamp": "",
"source": "github",
"line_count": 308,
"max_line_length": 98,
"avg_line_length": 36.396103896103895,
"alnum_prop": 0.6073148974130241,
"repo_name": "apache/commons-math",
"id": "2f0b4c994231811f40f2bb3d00d28698f84ab16c",
"size": "12012",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "commons-math-legacy/src/main/java/org/apache/commons/math4/legacy/stat/inference/WilcoxonSignedRankTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3412"
},
{
"name": "Gnuplot",
"bytes": "3294"
},
{
"name": "HTML",
"bytes": "5012"
},
{
"name": "Java",
"bytes": "10237626"
},
{
"name": "R",
"bytes": "56667"
},
{
"name": "Shell",
"bytes": "2107"
},
{
"name": "XSLT",
"bytes": "2509"
}
],
"symlink_target": ""
} |
var path = require('path')
var webpack = require('webpack')
// Webpack build configuration. Development defaults; a NODE_ENV check further
// down in this file switches devtool/plugins for production builds.
module.exports = {
  entry: './src/main.js',
  output: {
    path: path.resolve(__dirname, './dist'),
    publicPath: '/dist/',
    filename: 'build.js'
  },
  module: {
    rules: [
      {
        test: /\.vue$/,
        loader: 'vue-loader',
        options: {
          loaders: {
            // Since sass-loader (weirdly) has SCSS as its default parse mode, we map
            // the "scss" and "sass" values for the lang attribute to the right configs here.
            // other preprocessors should work out of the box, no loader config like this necessary.
            'scss': 'vue-style-loader!css-loader!sass-loader',
            'sass': 'vue-style-loader!css-loader!sass-loader?indentedSyntax'
          }
          // other vue-loader options go here
        }
      },
      {
        test: /\.js$/,
        loader: 'babel-loader',
        exclude: /node_modules/
      },
      {
        // Static assets get content-hashed filenames for cache busting.
        test: /\.(png|jpg|gif|svg)$/,
        loader: 'file-loader',
        options: {
          name: '[name].[ext]?[hash]'
        }
      }
    ]
  },
  resolve: {
    alias: {
      // NOTE(review): 'vue$' maps to the ESM build — presumably the
      // runtime+compiler bundle is required for in-DOM templates; confirm.
      'vue$': 'vue/dist/vue.esm.js'
    }
  },
  devServer: {
    historyApiFallback: true,
    noInfo: true,
    port:3000 // dev server port
  },
  performance: {
    hints: false
  },
  devtool: '#eval-source-map'
}
// Production-only overrides: full source maps and minification plugins.
if (process.env.NODE_ENV === 'production') {
  module.exports.devtool = '#source-map'
  // http://vue-loader.vuejs.org/en/workflow/production.html
  var productionPlugins = [
    // Inline NODE_ENV so libraries can strip their dev-only branches.
    new webpack.DefinePlugin({
      'process.env': {
        NODE_ENV: '"production"'
      }
    }),
    // Minify, keeping source maps and silencing uglify warnings.
    new webpack.optimize.UglifyJsPlugin({
      sourceMap: true,
      compress: {
        warnings: false
      }
    }),
    new webpack.LoaderOptionsPlugin({
      minimize: true
    })
  ]
  module.exports.plugins = (module.exports.plugins || []).concat(productionPlugins)
}
| {
"content_hash": "898540ce463e0a701f92823884f393eb",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 100,
"avg_line_length": 24.355263157894736,
"alnum_prop": 0.5386277687736358,
"repo_name": "cheungzh/vue-study",
"id": "5a5cffea52aae94ed4c88ca30036647016eadcd7",
"size": "1851",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vue/webpack.config.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "355"
},
{
"name": "JavaScript",
"bytes": "2980"
},
{
"name": "Vue",
"bytes": "30593"
}
],
"symlink_target": ""
} |
package org.apache.wicket.request.resource.caching.version;
import java.io.Serializable;
import java.util.Map;
import org.apache.wicket.MetaDataKey;
import org.apache.wicket.ThreadContext;
import org.apache.wicket.request.cycle.RequestCycle;
import org.apache.wicket.request.resource.caching.IStaticCacheableResource;
import org.apache.wicket.util.lang.Args;
import org.apache.wicket.util.lang.Generics;
/**
 * Caches the results of a delegating {@link IResourceVersion} instance
 * for the lifetime of the current http request.
 *
 * @author Peter Ertl
 *
 * @since 1.5
 */
public class RequestCycleCachedResourceVersion implements IResourceVersion
{
	/** Key under which the per-request cache map is stored in the request cycle metadata. */
	private static final MetaDataKey<Map<Serializable, String>> CACHE_KEY =
		new MetaDataKey<Map<Serializable, String>>()
		{
			private static final long serialVersionUID = 1L;
		};

	/** Wrapped provider that actually computes the resource version. */
	private final IResourceVersion delegate;

	/**
	 * create request-scoped resource provider cache
	 *
	 * @param delegate
	 *            resource version provider whose results will be cached; must not be {@code null}
	 */
	public RequestCycleCachedResourceVersion(IResourceVersion delegate)
	{
		this.delegate = Args.notNull(delegate, "delegate");
	}

	@Override
	public String getVersion(IStaticCacheableResource resource)
	{
		final RequestCycle requestCycle = ThreadContext.getRequestCycle();

		// outside of a request cycle there is nothing to cache against:
		// just delegate directly
		if (requestCycle == null)
		{
			return delegate.getVersion(resource);
		}

		final Serializable key = resource.getCacheKey();
		Map<Serializable, String> cache = requestCycle.getMetaData(CACHE_KEY);

		if (cache == null)
		{
			// first lookup in this request cycle: create the cache map
			cache = Generics.newHashMap();
			requestCycle.setMetaData(CACHE_KEY, cache);
		}
		else if (cache.containsKey(key))
		{
			// cache hit (the stored value may legitimately be null)
			return cache.get(key);
		}

		// cache miss: ask the delegate and remember the answer
		final String version = delegate.getVersion(resource);
		cache.put(key, version);
		return version;
	}
}
| {
"content_hash": "a58f6f6a86d5b568c9b103a70bf7eb49",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 76,
"avg_line_length": 25.543478260869566,
"alnum_prop": 0.7221276595744681,
"repo_name": "martin-g/wicket-osgi",
"id": "dbcec9df633adfe00e69e1f52a96c92239ffa9bf",
"size": "3152",
"binary": false,
"copies": "3",
"ref": "refs/heads/wicket-osgi",
"path": "wicket-core/src/main/java/org/apache/wicket/request/resource/caching/version/RequestCycleCachedResourceVersion.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "24843"
},
{
"name": "Java",
"bytes": "10416259"
},
{
"name": "JavaScript",
"bytes": "297152"
},
{
"name": "Shell",
"bytes": "8011"
}
],
"symlink_target": ""
} |
package cloud::azure::storage::storageaccount::mode::filesharecount;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
# Build the per-instance prefix shown before each metric line,
# e.g. "Resource 'MYFILER' average ".
sub prefix_metric_output {
    my ($self, %options) = @_;

    my $instance = $options{instance_value};
    return sprintf("Resource '%s' %s ", $instance->{display}, $instance->{stat});
}
# Declare the counter templates for this mode: one instanced 'metric'
# group, with a counter per metric/aggregation pair.
sub set_counters {
    my ($self, %options) = @_;
    # type 1 = instanced counters; skipped_code -10 silently skips
    # aggregations that were not collected for a given instance.
    $self->{maps_counters_type} = [
        { name => 'metric', type => 1, cb_prefix_output => 'prefix_metric_output', message_multiple => "All count metrics are ok", skipped_code => { -10 => 1 } },
    ];
    # Register e.g. 'filesharecount-average' and 'filesharecount-total'
    # labels, each reading the '<metric>_<aggregation>' key filled in by
    # manage_selection().
    foreach my $aggregation ('average', 'total') {
        foreach my $metric ('FileShareCount') {
            my $metric_label = lc($metric);
            my $entry = { label => $metric_label . '-' . $aggregation, set => {
                                key_values => [ { name => $metric_label . '_' . $aggregation }, { name => 'display' }, { name => 'stat' } ],
                                output_template => $metric . ': %s',
                                perfdatas => [
                                    { label => $metric_label . '_' . $aggregation, value => $metric_label . '_' . $aggregation ,
                                      template => '%s', label_extra_instance => 1, instance_use => 'display',
                                      min => 0 },
                                ],
                            }
                        };
            push @{$self->{maps_counters}->{metric}}, $entry;
        }
    }
}
# Constructor: register the mode-specific command-line options
# (--resource may be repeated; --resource-group qualifies a bare name).
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;
    $options{options}->add_options(arguments =>
                                {
                                    "resource:s@"           => { name => 'resource' },
                                    "resource-group:s"      => { name => 'resource_group' },
                                });
    return $self;
}
# Validate options and derive the Azure query parameters
# (resource type/namespace, timeframe, interval, aggregations, metrics).
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::check_options(%options);

    # --resource is mandatory: either a bare name (plus --resource-group)
    # or a full resource id.
    if (!defined($self->{option_results}->{resource})) {
        $self->{output}->add_option_msg(short_msg => "Need to specify either --resource <name> with --resource-group option or --resource <id>.");
        $self->{output}->option_exit();
    }

    $self->{az_resource} = $self->{option_results}->{resource};
    $self->{az_resource_group} = $self->{option_results}->{resource_group} if (defined($self->{option_results}->{resource_group}));
    $self->{az_resource_type} = 'storageAccounts';
    $self->{az_resource_namespace} = 'Microsoft.Storage';
    # Defaults: 1h lookback with a 1h (ISO 8601 "PT1H") interval.
    $self->{az_timeframe} = defined($self->{option_results}->{timeframe}) ? $self->{option_results}->{timeframe} : 3600;
    $self->{az_interval} = defined($self->{option_results}->{interval}) ? $self->{option_results}->{interval} : "PT1H";
    # Default aggregation is Average; --aggregation overrides it
    # (values are normalized to e.g. 'Total').
    $self->{az_aggregations} = ['Average'];
    if (defined($self->{option_results}->{aggregation})) {
        $self->{az_aggregations} = [];
        foreach my $stat (@{$self->{option_results}->{aggregation}}) {
            if ($stat ne '') {
                push @{$self->{az_aggregations}}, ucfirst(lc($stat));
            }
        }
    }

    foreach my $metric ('FileShareCount') {
        push @{$self->{az_metrics}}, $metric;
    }
}
# Fetch the metric values from Azure Monitor for every requested resource
# and fill $self->{metric} with one entry per resource/aggregation.
sub manage_selection {
    my ($self, %options) = @_;

    my %metric_results;
    foreach my $resource (@{$self->{az_resource}}) {
        my $resource_group = $self->{az_resource_group};
        my $resource_name = $resource;
        # FileShareCount lives under the storage account's file service
        # sub-resource.
        my $namespace_full = '/fileServices/default';
        # If a full resource id was given, extract the group and the
        # account name from it (overriding --resource-group).
        if ($resource_name =~ /^\/subscriptions\/.*\/resourceGroups\/(.*)\/providers\/Microsoft\.Storage\/storageAccounts\/(.*)$/) {
            $resource_group = $1;
            $resource_name = $2;
        }

        ($metric_results{$resource_name}, undef, undef) = $options{custom}->azure_get_metrics(
            resource => $resource_name . $namespace_full,
            resource_group => $resource_group,
            resource_type => $self->{az_resource_type},
            resource_namespace => $self->{az_resource_namespace},
            metrics => $self->{az_metrics},
            aggregations => $self->{az_aggregations},
            timeframe => $self->{az_timeframe},
            interval => $self->{az_interval},
        );

        foreach my $metric (@{$self->{az_metrics}}) {
            my $metric_name = lc($metric);
            $metric_name =~ s/ /_/g;
            foreach my $aggregation (@{$self->{az_aggregations}}) {
                # Missing datapoints are skipped unless --zeroed forces a 0.
                next if (!defined($metric_results{$resource_name}->{$metric_name}->{lc($aggregation)}) && !defined($self->{option_results}->{zeroed}));

                $self->{metric}->{$resource_name . "_" . lc($aggregation)}->{display} = $resource_name;
                $self->{metric}->{$resource_name . "_" . lc($aggregation)}->{stat} = lc($aggregation);
                $self->{metric}->{$resource_name . "_" . lc($aggregation)}->{$metric_name . "_" . lc($aggregation)} = defined($metric_results{$resource_name}->{$metric_name}->{lc($aggregation)}) ? $metric_results{$resource_name}->{$metric_name}->{lc($aggregation)} : 0;
            }
        }
    }

    if (scalar(keys %{$self->{metric}}) <= 0) {
        $self->{output}->add_option_msg(short_msg => 'No metrics. Check your options or use --zeroed option to set 0 on undefined values');
        $self->{output}->option_exit();
    }
}
1;
__END__
=head1 MODE
Check storage account resources file share count metric.
Example:
Using resource name :
perl centreon_plugins.pl --plugin=cloud::azure::storage::storageaccount::plugin --custommode=azcli --mode=file-share-count
--resource=MYFILER --resource-group=MYHOSTGROUP --aggregation='average' --critical-filesharecount-average='10' --verbose
Using resource id :
perl centreon_plugins.pl --plugin=cloud::azure::storage::storageaccount::plugin --custommode=azcli --mode=file-share-count
--resource='/subscriptions/xxx/resourceGroups/xxx/providers/Microsoft.Storage/storageAccounts/xxx/fileServices/default'
--aggregation='average' --critical-filesharecount-average='10' --verbose
Default aggregation: 'average' / Total and average are valid.
=over 8
=item B<--resource>
Set resource name or id (Required).
=item B<--resource-group>
Set resource group (Required if resource's name is used).
=item B<--warning-filesharecount-*>
Warning thresholds (* can be: 'average', 'total').
=item B<--critical-filesharecount-*>
Critical thresholds (* can be: 'average', 'total').
=back
=cut
| {
"content_hash": "c625e66dbf8b20bee1d85c9ef0f2ed9b",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 269,
"avg_line_length": 38.84023668639053,
"alnum_prop": 0.553016453382084,
"repo_name": "Tpo76/centreon-plugins",
"id": "c58935a43e78792e3350d3cc5d2a958577aa8cef",
"size": "7324",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloud/azure/storage/storageaccount/mode/filesharecount.pm",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "719"
},
{
"name": "Perl",
"bytes": "19128067"
}
],
"symlink_target": ""
} |
using namespace std;
// Format a human-readable confirmation status string for a wallet
// transaction, taking the InstantX lock-signature state into account.
QString TransactionDesc::FormatTxStatus(const CWalletTx& wtx)
{
    AssertLockHeld(cs_main);
    if (!IsFinalTx(wtx, chainActive.Height() + 1))
    {
        // Non-final: report when the lock expires (block height vs. timestamp)
        if (wtx.nLockTime < LOCKTIME_THRESHOLD)
            return tr("Open for %n more block(s)", "", wtx.nLockTime - chainActive.Height());
        else
            return tr("Open until %1").arg(GUIUtil::dateTimeStr(wtx.nLockTime));
    }
    else
    {
        // Number of InstantX lock signatures; negative when no
        // transaction lock is involved (plain confirmation path below).
        int signatures = wtx.GetTransactionLockSignatures();
        QString strUsingIX = "";
        if(signatures >= 0){
            if(signatures >= INSTANTX_SIGNATURES_REQUIRED){
                // Fully locked via InstantX
                int nDepth = wtx.GetDepthInMainChain();
                if (nDepth < 0)
                    return tr("conflicted");
                else if (GetAdjustedTime() - wtx.nTimeReceived > 2 * 60 && wtx.GetRequestCount() == 0)
                    return tr("%1/offline (verified via instantx)").arg(nDepth);
                else if (nDepth < 6)
                    return tr("%1/confirmed (verified via instantx)").arg(nDepth);
                else
                    return tr("%1 confirmations (verified via instantx)").arg(nDepth);
            } else {
                if(!wtx.IsTransactionLockTimedOut()){
                    // Lock still collecting signatures
                    int nDepth = wtx.GetDepthInMainChain();
                    if (nDepth < 0)
                        return tr("conflicted");
                    else if (GetAdjustedTime() - wtx.nTimeReceived > 2 * 60 && wtx.GetRequestCount() == 0)
                        return tr("%1/offline (InstantX verification in progress - %2 of %3 signatures)").arg(nDepth).arg(signatures).arg(INSTANTX_SIGNATURES_TOTAL);
                    else if (nDepth < 6)
                        return tr("%1/confirmed (InstantX verification in progress - %2 of %3 signatures )").arg(nDepth).arg(signatures).arg(INSTANTX_SIGNATURES_TOTAL);
                    else
                        return tr("%1 confirmations (InstantX verification in progress - %2 of %3 signatures)").arg(nDepth).arg(signatures).arg(INSTANTX_SIGNATURES_TOTAL);
                } else {
                    // Lock timed out: fall back to plain confirmations
                    int nDepth = wtx.GetDepthInMainChain();
                    if (nDepth < 0)
                        return tr("conflicted");
                    else if (GetAdjustedTime() - wtx.nTimeReceived > 2 * 60 && wtx.GetRequestCount() == 0)
                        return tr("%1/offline (InstantX verification failed)").arg(nDepth);
                    else if (nDepth < 6)
                        return tr("%1/confirmed (InstantX verification failed)").arg(nDepth);
                    else
                        return tr("%1 confirmations").arg(nDepth);
                }
            }
        } else {
            // No InstantX lock: plain confirmation count
            int nDepth = wtx.GetDepthInMainChain();
            if (nDepth < 0)
                return tr("conflicted");
            else if (GetAdjustedTime() - wtx.nTimeReceived > 2 * 60 && wtx.GetRequestCount() == 0)
                return tr("%1/offline").arg(nDepth);
            else if (nDepth < 6)
                return tr("%1/unconfirmed").arg(nDepth);
            else
                return tr("%1 confirmations").arg(nDepth);
        }
    }
}
// Build the HTML description of a wallet transaction for the details pane.
// `rec` identifies the concrete display record (sub-transaction index) and
// `unit` selects the display unit for amounts.
QString TransactionDesc::toHTML(CWallet *wallet, CWalletTx &wtx, TransactionRecord *rec, int unit)
{
    QString strHTML;
    LOCK2(cs_main, wallet->cs_wallet);
    strHTML.reserve(4000);
    strHTML += "<html><font face='verdana, arial, helvetica, sans-serif'>";
    int64_t nTime = wtx.GetTxTime();
    CAmount nCredit = wtx.GetCredit(ISMINE_ALL);
    CAmount nDebit = wtx.GetDebit(ISMINE_ALL);
    CAmount nNet = nCredit - nDebit;
    strHTML += "<b>" + tr("Status") + ":</b> " + FormatTxStatus(wtx);
    int nRequests = wtx.GetRequestCount();
    if (nRequests != -1)
    {
        if (nRequests == 0)
            strHTML += tr(", has not been successfully broadcast yet");
        else if (nRequests > 0)
            strHTML += tr(", broadcast through %n node(s)", "", nRequests);
    }
    strHTML += "<br>";
    strHTML += "<b>" + tr("Date") + ":</b> " + (nTime ? GUIUtil::dateTimeStr(nTime) : "") + "<br>";
    //
    // From
    //
    if (wtx.IsCoinBase())
    {
        strHTML += "<b>" + tr("Source") + ":</b> " + tr("Generated") + "<br>";
    }
    else if (wtx.mapValue.count("from") && !wtx.mapValue["from"].empty())
    {
        // Online transaction
        strHTML += "<b>" + tr("From") + ":</b> " + GUIUtil::HtmlEscape(wtx.mapValue["from"]) + "<br>";
    }
    else
    {
        // Offline transaction
        if (nNet > 0)
        {
            // Credit
            if (CBitcoinAddress(rec->address).IsValid())
            {
                CTxDestination address = CBitcoinAddress(rec->address).Get();
                if (wallet->mapAddressBook.count(address))
                {
                    strHTML += "<b>" + tr("From") + ":</b> " + tr("unknown") + "<br>";
                    strHTML += "<b>" + tr("To") + ":</b> ";
                    strHTML += GUIUtil::HtmlEscape(rec->address);
                    QString addressOwned = (::IsMine(*wallet, address) == ISMINE_SPENDABLE) ? tr("own address") : tr("watch-only");
                    if (!wallet->mapAddressBook[address].name.empty())
                        strHTML += " (" + addressOwned + ", " + tr("label") + ": " + GUIUtil::HtmlEscape(wallet->mapAddressBook[address].name) + ")";
                    else
                        strHTML += " (" + addressOwned + ")";
                    strHTML += "<br>";
                }
            }
        }
    }
    //
    // To
    //
    if (wtx.mapValue.count("to") && !wtx.mapValue["to"].empty())
    {
        // Online transaction
        std::string strAddress = wtx.mapValue["to"];
        strHTML += "<b>" + tr("To") + ":</b> ";
        CTxDestination dest = CBitcoinAddress(strAddress).Get();
        if (wallet->mapAddressBook.count(dest) && !wallet->mapAddressBook[dest].name.empty())
            strHTML += GUIUtil::HtmlEscape(wallet->mapAddressBook[dest].name) + " ";
        strHTML += GUIUtil::HtmlEscape(strAddress) + "<br>";
    }
    //
    // Amount
    //
    if (wtx.IsCoinBase() && nCredit == 0)
    {
        //
        // Coinbase
        //
        CAmount nUnmatured = 0;
        BOOST_FOREACH(const CTxOut& txout, wtx.vout)
            nUnmatured += wallet->GetCredit(txout, ISMINE_ALL);
        strHTML += "<b>" + tr("Credit") + ":</b> ";
        if (wtx.IsInMainChain())
            strHTML += BitcoinUnits::formatHtmlWithUnit(unit, nUnmatured)+ " (" + tr("matures in %n more block(s)", "", wtx.GetBlocksToMaturity()) + ")";
        else
            strHTML += "(" + tr("not accepted") + ")";
        strHTML += "<br>";
    }
    else if (nNet > 0)
    {
        //
        // Credit
        //
        strHTML += "<b>" + tr("Credit") + ":</b> " + BitcoinUnits::formatHtmlWithUnit(unit, nNet) + "<br>";
    }
    else
    {
        // Track the weakest ownership level across all inputs: stays
        // ISMINE_SPENDABLE only if every input is fully ours.
        isminetype fAllFromMe = ISMINE_SPENDABLE;
        BOOST_FOREACH(const CTxIn& txin, wtx.vin)
        {
            isminetype mine = wallet->IsMine(txin);
            if(fAllFromMe > mine) fAllFromMe = mine;
        }
        // Same for outputs.
        isminetype fAllToMe = ISMINE_SPENDABLE;
        BOOST_FOREACH(const CTxOut& txout, wtx.vout)
        {
            isminetype mine = wallet->IsMine(txout);
            if(fAllToMe > mine) fAllToMe = mine;
        }
        if (fAllFromMe)
        {
            if(fAllFromMe == ISMINE_WATCH_ONLY)
                strHTML += "<b>" + tr("From") + ":</b> " + tr("watch-only") + "<br>";
            //
            // Debit
            //
            BOOST_FOREACH(const CTxOut& txout, wtx.vout)
            {
                // Ignore change
                isminetype toSelf = wallet->IsMine(txout);
                if ((toSelf == ISMINE_SPENDABLE) && (fAllFromMe == ISMINE_SPENDABLE))
                    continue;
                if (!wtx.mapValue.count("to") || wtx.mapValue["to"].empty())
                {
                    // Offline transaction
                    CTxDestination address;
                    if (ExtractDestination(txout.scriptPubKey, address))
                    {
                        strHTML += "<b>" + tr("To") + ":</b> ";
                        if (wallet->mapAddressBook.count(address) && !wallet->mapAddressBook[address].name.empty())
                            strHTML += GUIUtil::HtmlEscape(wallet->mapAddressBook[address].name) + " ";
                        strHTML += GUIUtil::HtmlEscape(CBitcoinAddress(address).ToString());
                        if(toSelf == ISMINE_SPENDABLE)
                            strHTML += " (own address)";
                        else if(toSelf == ISMINE_WATCH_ONLY)
                            strHTML += " (watch-only)";
                        strHTML += "<br>";
                    }
                }
                strHTML += "<b>" + tr("Debit") + ":</b> " + BitcoinUnits::formatHtmlWithUnit(unit, -txout.nValue) + "<br>";
                if(toSelf)
                    strHTML += "<b>" + tr("Credit") + ":</b> " + BitcoinUnits::formatHtmlWithUnit(unit, txout.nValue) + "<br>";
            }
            if (fAllToMe)
            {
                // Payment to self
                CAmount nChange = wtx.GetChange();
                CAmount nValue = nCredit - nChange;
                strHTML += "<b>" + tr("Total debit") + ":</b> " + BitcoinUnits::formatHtmlWithUnit(unit, -nValue) + "<br>";
                strHTML += "<b>" + tr("Total credit") + ":</b> " + BitcoinUnits::formatHtmlWithUnit(unit, nValue) + "<br>";
            }
            // Implied fee: what our inputs paid minus the total output value.
            CAmount nTxFee = nDebit - wtx.GetValueOut();
            if (nTxFee > 0)
                strHTML += "<b>" + tr("Transaction fee") + ":</b> " + BitcoinUnits::formatHtmlWithUnit(unit, -nTxFee) + "<br>";
        }
        else
        {
            //
            // Mixed debit transaction
            //
            BOOST_FOREACH(const CTxIn& txin, wtx.vin)
                if (wallet->IsMine(txin))
                    strHTML += "<b>" + tr("Debit") + ":</b> " + BitcoinUnits::formatHtmlWithUnit(unit, -wallet->GetDebit(txin, ISMINE_ALL)) + "<br>";
            BOOST_FOREACH(const CTxOut& txout, wtx.vout)
                if (wallet->IsMine(txout))
                    strHTML += "<b>" + tr("Credit") + ":</b> " + BitcoinUnits::formatHtmlWithUnit(unit, wallet->GetCredit(txout, ISMINE_ALL)) + "<br>";
        }
    }
    strHTML += "<b>" + tr("Net amount") + ":</b> " + BitcoinUnits::formatHtmlWithUnit(unit, nNet, true) + "<br>";
    //
    // Message
    //
    if (wtx.mapValue.count("message") && !wtx.mapValue["message"].empty())
        strHTML += "<br><b>" + tr("Message") + ":</b><br>" + GUIUtil::HtmlEscape(wtx.mapValue["message"], true) + "<br>";
    if (wtx.mapValue.count("comment") && !wtx.mapValue["comment"].empty())
        strHTML += "<br><b>" + tr("Comment") + ":</b><br>" + GUIUtil::HtmlEscape(wtx.mapValue["comment"], true) + "<br>";
    strHTML += "<b>" + tr("Transaction ID") + ":</b> " + TransactionRecord::formatSubTxId(wtx.GetHash(), rec->idx) + "<br>";
    // Message from normal putic:URI (putic:XyZ...?message=example)
    foreach (const PAIRTYPE(string, string)& r, wtx.vOrderForm)
        if (r.first == "Message")
            strHTML += "<br><b>" + tr("Message") + ":</b><br>" + GUIUtil::HtmlEscape(r.second, true) + "<br>";
    //
    // PaymentRequest info:
    //
    foreach (const PAIRTYPE(string, string)& r, wtx.vOrderForm)
    {
        if (r.first == "PaymentRequest")
        {
            PaymentRequestPlus req;
            req.parse(QByteArray::fromRawData(r.second.data(), r.second.size()));
            QString merchant;
            if (req.getMerchant(PaymentServer::getCertStore(), merchant))
                strHTML += "<b>" + tr("Merchant") + ":</b> " + GUIUtil::HtmlEscape(merchant) + "<br>";
        }
    }
    if (wtx.IsCoinBase())
    {
        quint32 numBlocksToMaturity = COINBASE_MATURITY + 1;
        strHTML += "<br>" + tr("Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to \"not accepted\" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.").arg(QString::number(numBlocksToMaturity)) + "<br>";
    }
    //
    // Debug view
    //
    if (fDebug)
    {
        strHTML += "<hr><br>" + tr("Debug information") + "<br><br>";
        BOOST_FOREACH(const CTxIn& txin, wtx.vin)
            if(wallet->IsMine(txin))
                strHTML += "<b>" + tr("Debit") + ":</b> " + BitcoinUnits::formatHtmlWithUnit(unit, -wallet->GetDebit(txin, ISMINE_ALL)) + "<br>";
        BOOST_FOREACH(const CTxOut& txout, wtx.vout)
            if(wallet->IsMine(txout))
                strHTML += "<b>" + tr("Credit") + ":</b> " + BitcoinUnits::formatHtmlWithUnit(unit, wallet->GetCredit(txout, ISMINE_ALL)) + "<br>";
        strHTML += "<br><b>" + tr("Transaction") + ":</b><br>";
        strHTML += GUIUtil::HtmlEscape(wtx.ToString(), true);
        // Resolve each input's previous output from the UTXO set so the
        // debug view can show source address, amount and ownership.
        strHTML += "<br><b>" + tr("Inputs") + ":</b>";
        strHTML += "<ul>";
        BOOST_FOREACH(const CTxIn& txin, wtx.vin)
        {
            COutPoint prevout = txin.prevout;
            CCoins prev;
            if(pcoinsTip->GetCoins(prevout.hash, prev))
            {
                if (prevout.n < prev.vout.size())
                {
                    strHTML += "<li>";
                    const CTxOut &vout = prev.vout[prevout.n];
                    CTxDestination address;
                    if (ExtractDestination(vout.scriptPubKey, address))
                    {
                        if (wallet->mapAddressBook.count(address) && !wallet->mapAddressBook[address].name.empty())
                            strHTML += GUIUtil::HtmlEscape(wallet->mapAddressBook[address].name) + " ";
                        strHTML += QString::fromStdString(CBitcoinAddress(address).ToString());
                    }
                    strHTML = strHTML + " " + tr("Amount") + "=" + BitcoinUnits::formatHtmlWithUnit(unit, vout.nValue);
                    strHTML = strHTML + " IsMine=" + (wallet->IsMine(vout) & ISMINE_SPENDABLE ? tr("true") : tr("false"));
                    strHTML = strHTML + " IsWatchOnly=" + (wallet->IsMine(vout) & ISMINE_WATCH_ONLY ? tr("true") : tr("false")) + "</li>";
                }
            }
        }
        strHTML += "</ul>";
    }
    strHTML += "</font></html>";
    return strHTML;
}
| {
"content_hash": "388ca9f3e0b0a8c0153c1d9201bd9dc3",
"timestamp": "",
"source": "github",
"line_count": 336,
"max_line_length": 442,
"avg_line_length": 43.145833333333336,
"alnum_prop": 0.5038973580740843,
"repo_name": "putinclassic/putic",
"id": "9421655cce87a7f4a8f0e3592e7a367ec465c708",
"size": "15082",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/qt/transactiondesc.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "7639"
},
{
"name": "C",
"bytes": "990005"
},
{
"name": "C++",
"bytes": "4184156"
},
{
"name": "CSS",
"bytes": "39900"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2100"
},
{
"name": "M4",
"bytes": "141709"
},
{
"name": "Makefile",
"bytes": "87255"
},
{
"name": "Objective-C",
"bytes": "3909"
},
{
"name": "Objective-C++",
"bytes": "7238"
},
{
"name": "Protocol Buffer",
"bytes": "2308"
},
{
"name": "Python",
"bytes": "211238"
},
{
"name": "QMake",
"bytes": "26228"
},
{
"name": "Roff",
"bytes": "17909"
},
{
"name": "Shell",
"bytes": "45874"
}
],
"symlink_target": ""
} |
using System.Threading.Tasks;
using Elfisk.ECS.Core;
namespace Wilderness.Game.Blueprint.Rendering
{
    /// <summary>
    /// System that drives rendering: on every update it asks each registered
    /// view-port component to refresh itself from the entity repository.
    /// </summary>
    public class RenderSystem : ISystem
    {
        #region Dependencies

        public IEntityRepository Entities { get; set; }
        public IPlayersBus PlayersBus { get; set; }

        #endregion

        /// <summary>
        /// Refreshes all view ports sequentially, awaiting each refresh in turn.
        /// </summary>
        public async Task Update(GameEnvironment environment)
        {
            foreach (var port in Entities.GetComponents<ViewPortComponent>())
                await port.Refresh(Entities, PlayersBus);
        }
    }
}
| {
"content_hash": "53aee46294a0e7afced42981e16b95f1",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 75,
"avg_line_length": 20.48,
"alnum_prop": 0.69140625,
"repo_name": "JornWildt/Wilderness",
"id": "ea5e4fa3768952c42c04e57bd2886fee55c10523",
"size": "514",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Wilderness.Game.Blueprint/Rendering/RenderSystem.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "81"
},
{
"name": "C#",
"bytes": "45182"
},
{
"name": "CSS",
"bytes": "86"
},
{
"name": "JavaScript",
"bytes": "134482"
}
],
"symlink_target": ""
} |
# Patch for Rails 4 and earlier only; newer Rails versions are assumed to ship
# a fixed ActiveResource and are left untouched.
if Rails::VERSION::MAJOR <= 4
  module ThreadsafeAttributes
    private

    # Fetch the thread-local value of attribute +name+ for the current thread.
    #
    # Lookup order:
    #   1. If the current thread has its own copy, return it.
    #   2. Otherwise, if +main_thread+ (defaults to Thread.main) defined the
    #      attribute, copy that value into the current thread and return it.
    #   3. Otherwise return nil (the elsif falls through with no else branch).
    #
    # The helpers (threadsafe_attribute_defined_by_thread?, get/set_..._by_thread)
    # are defined elsewhere in this module — presumably thread-keyed storage;
    # verify against the full patch file.
    def get_threadsafe_attribute(name, main_thread = Thread.main)
      if threadsafe_attribute_defined_by_thread?(name, Thread.current)
        get_threadsafe_attribute_by_thread(name, Thread.current)
      elsif threadsafe_attribute_defined_by_thread?(name, main_thread)
        value = get_threadsafe_attribute_by_thread(name, main_thread)
        # Duplicate the main thread's value (when duplicable) so this thread's
        # later mutations do not leak back into the main thread's copy.
        value = value.dup if value.duplicable?
        set_threadsafe_attribute_by_thread(name, value, Thread.current)
        value
      end
    end
  end
end
| {
"content_hash": "51832bde950f017957d7cbb63544c6a4",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 71,
"avg_line_length": 36.0625,
"alnum_prop": 0.6984402079722704,
"repo_name": "MelnikVasya/exactonline-api-ruby-client",
"id": "a35baff77c8ce6a4d3f0dd335dec4185525d7a2a",
"size": "635",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/patches/active_resource/threadsafe_attributes.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "149426"
},
{
"name": "Shell",
"bytes": "621"
}
],
"symlink_target": ""
} |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
using System;
using System.IO;
using System.Globalization;
using System.Linq;
using AutoRest.CompositeSwagger.Model;
using AutoRest.CompositeSwagger.Properties;
using AutoRest.Core;
using AutoRest.Core.Model;
using AutoRest.Core.Logging;
using AutoRest.Core.Utilities;
using AutoRest.Swagger;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using AutoRest.Core.Validation;
using System.Collections.Generic;
using static AutoRest.Core.Utilities.DependencyInjection;
using YamlDotNet.RepresentationModel;
using AutoRest.Core.Parsing;
namespace AutoRest.CompositeSwagger
{
    /// <summary>
    /// Modeler for "composite" Swagger specifications: a composite document lists
    /// several child Swagger documents, which this class merges into one document
    /// and then hands to the regular <c>SwaggerModeler</c>.
    /// </summary>
    public class CompositeSwaggerModeler : Modeler
    {
        public CompositeSwaggerModeler()
        {
        }

        /// <summary>Name under which this modeler is registered.</summary>
        public override string Name
        {
            get { return "CompositeSwagger"; }
        }

        /// <summary>
        /// Parses the composite document referenced by <c>Settings.Input</c>, merges
        /// all child Swagger documents into a single one, and builds the code model
        /// from the merged result.
        /// </summary>
        public override CodeModel Build()
        {
            var compositeSwaggerModel = Parse(Settings.Input);
            if (compositeSwaggerModel == null)
            {
                throw ErrorManager.CreateError(Resources.ErrorParsingSpec);
            }

            if (!compositeSwaggerModel.Documents.Any())
            {
                throw ErrorManager.CreateError(string.Format(CultureInfo.InvariantCulture, "{0}. {1}",
                    Resources.ErrorParsingSpec, "Documents collection can not be empty."));
            }

            if (compositeSwaggerModel.Info == null)
            {
                throw ErrorManager.CreateError(Resources.InfoSectionMissing);
            }

            // Ensure all the docs are absolute URIs or rooted paths
            for (var i = 0; i < compositeSwaggerModel.Documents.Count; i++)
            {
                var compositeDocument = compositeSwaggerModel.Documents[i];
                if (!Settings.FileSystemInput.IsCompletePath(compositeDocument) || !Settings.FileSystemInput.FileExists(compositeDocument))
                {
                    // Otherwise, root it from the current path
                    compositeSwaggerModel.Documents[i] = Settings.FileSystemInput.MakePathRooted(Settings.FileSystemInput.GetParentDir(Settings.Input), compositeDocument);
                }
            }

            // construct merged swagger document, seeded with only the composite
            // document's own "info" section (children's info is stripped below)
            var mergedSwagger = new YamlMappingNode();
            mergedSwagger.Set("info", (Settings.FileSystemInput.ReadAllText(Settings.Input).ParseYaml() as YamlMappingNode)?.Get("info") as YamlMappingNode);

            // merge child swaggers
            foreach (var childSwaggerPath in compositeSwaggerModel.Documents)
            {
                var childSwaggerRaw = Settings.FileSystemInput.ReadAllText(childSwaggerPath);
                childSwaggerRaw = SwaggerParser.Normalize(childSwaggerPath, childSwaggerRaw);
                var childSwagger = childSwaggerRaw.ParseYaml() as YamlMappingNode;
                if (childSwagger == null)
                {
                    throw ErrorManager.CreateError("Failed parsing referenced Swagger file {0}.", childSwaggerPath);
                }

                // remove info — but keep the child's version value; it is needed
                // below to pin the api-version of this child's operations
                var info = childSwagger.Get("info") as YamlMappingNode;
                var version = info.Get("version");
                info.Remove("title");
                info.Remove("description");
                info.Remove("version");

                // fix up api version: locate this child's client-level "api-version"
                // parameter (if any) so its $ref usages can be inlined
                var apiVersionParam = (childSwagger.Get("parameters") as YamlMappingNode)?.Children?.FirstOrDefault(param => ((param.Value as YamlMappingNode)?.Get("name") as YamlScalarNode)?.Value == "api-version");
                var apiVersionParamName = (apiVersionParam?.Key as YamlScalarNode)?.Value;
                if (apiVersionParamName != null)
                {
                    // walk every operation under "paths" and "x-ms-paths"
                    var paths =
                        ((childSwagger.Get("paths") as YamlMappingNode)?.Children?.Values ?? Enumerable.Empty<YamlNode>()).Concat
                        ((childSwagger.Get("x-ms-paths") as YamlMappingNode)?.Children?.Values ?? Enumerable.Empty<YamlNode>());
                    var methods = paths.OfType<YamlMappingNode>().SelectMany(path => path.Children.Values.OfType<YamlMappingNode>());
                    var parameters = methods.SelectMany(method => (method.Get("parameters") as YamlSequenceNode)?.Children?.OfType<YamlMappingNode>() ?? Enumerable.Empty<YamlMappingNode>());
                    var apiVersionParams = parameters.Where(param => (param.Get("$ref") as YamlScalarNode)?.Value == $"#/parameters/{apiVersionParamName}");
                    foreach (var param in apiVersionParams)
                    {
                        // replace the $ref with an inline copy of the parameter,
                        // constrained to exactly this child's version
                        param.Remove("$ref");
                        foreach (var child in (apiVersionParam?.Value as YamlMappingNode).Children)
                        {
                            param.Children.Add(child);
                        }
                        param.Set("enum", new YamlSequenceNode(version));
                    }
                }

                // merge
                mergedSwagger = mergedSwagger.MergeWith(childSwagger);
            }

            // remove apiVersion client property: children may disagree on versions,
            // so a single client-level api-version parameter no longer makes sense
            var mergedSwaggerApiVersionParam = (mergedSwagger.Get("parameters") as YamlMappingNode)?.Children?.FirstOrDefault(param => ((param.Value as YamlMappingNode)?.Get("name") as YamlScalarNode)?.Value == "api-version");
            var mergedSwaggerApiVersionParamName = (mergedSwaggerApiVersionParam?.Key as YamlScalarNode)?.Value;
            if (mergedSwaggerApiVersionParamName != null)
            {
                (mergedSwagger.Get("parameters") as YamlMappingNode).Remove(mergedSwaggerApiVersionParamName);
            }

            // CodeModel compositeClient = InitializeServiceClient(compositeSwaggerModel);
            using (NewContext)
            {
                // delegate the actual modeling of the merged document
                var swaggerModeler = new SwaggerModeler();
                return swaggerModeler.Build(SwaggerParser.Parse(Settings.Input, mergedSwagger.Serialize()));
            }
        }

        /// <summary>
        /// Deserializes the composite service definition from the given input file.
        /// Throws via <c>ErrorManager</c> if the JSON cannot be parsed.
        /// </summary>
        private CompositeServiceDefinition Parse(string input)
        {
            var inputBody = Settings.FileSystemInput.ReadAllText(input);
            try
            {
                var settings = new JsonSerializerSettings
                {
                    TypeNameHandling = TypeNameHandling.None,
                    MetadataPropertyHandling = MetadataPropertyHandling.Ignore
                };
                return JsonConvert.DeserializeObject<CompositeServiceDefinition>(inputBody, settings);
            }
            catch (JsonException ex)
            {
                throw ErrorManager.CreateError(string.Format(CultureInfo.InvariantCulture, "{0}. {1}",
                    Resources.ErrorParsingSpec, ex.Message), ex);
            }
        }

        /// <summary>
        /// Compares two versions of the same service specification.
        /// Not supported for compositions; compare the individual specifications instead.
        /// </summary>
        /// <returns></returns>
        public override IEnumerable<ComparisonMessage> Compare()
        {
            throw new NotImplementedException("Version comparison of compositions. Please run the comparison on individual specifications");
        }
    }
}
"content_hash": "d0733ec579db3742e4ebdd100b6c2087",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 226,
"avg_line_length": 46.46496815286624,
"alnum_prop": 0.6084989718985606,
"repo_name": "annatisch/autorest",
"id": "e2703d091add92d628a1824c5ee5a50accb3e51d",
"size": "7297",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/modeler/AutoRest.CompositeSwagger/CompositeSwaggerModeler.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "14890514"
},
{
"name": "CSS",
"bytes": "110"
},
{
"name": "Go",
"bytes": "147203"
},
{
"name": "HTML",
"bytes": "274"
},
{
"name": "Java",
"bytes": "6723411"
},
{
"name": "JavaScript",
"bytes": "4544571"
},
{
"name": "PowerShell",
"bytes": "58927"
},
{
"name": "Python",
"bytes": "2065397"
},
{
"name": "Ruby",
"bytes": "182074"
},
{
"name": "Shell",
"bytes": "142"
},
{
"name": "Smalltalk",
"bytes": "3"
},
{
"name": "TypeScript",
"bytes": "179609"
}
],
"symlink_target": ""
} |
# Rolling upgrade of the Glance service across three controllers, driven over
# ssh from the deployment host. controller1 upgrades source + database and
# migrates data; controller2 upgrades; controller3 upgrades and contracts.

# Abort on the first failed step: continuing after a failed source upgrade or
# database migration would leave the cluster in an inconsistent state.
set -e

echo "Set global variable to Database"
sleep 1
# NOTE(review): the MySQL root password on the command line is visible in the
# process list; prefer --defaults-extra-file or ~/.my.cnf.
mysql -uroot -pnam123 -h10.164.180.87 -e "set global log_bin_trust_function_creators=1"

echo "Start to upgrade Glance on controller1"
sleep 2
ssh controller1 "bash /root/test/glance/1_upgrade_source_and_database.sh"
ssh controller1 "bash /root/test/glance/2_migrate_data.sh"
ssh controller1 "bash /root/test/glance/3_start_services.sh"

echo "Start to upgrade Glance on controller2"
sleep 2
ssh controller2 "bash /root/test/glance/1_upgrade.sh"

echo "Start to upgrade Glance on controller3"
sleep 2
ssh controller3 "bash /root/test/glance/1_upgrade_and_contract.sh"
"content_hash": "058e4d6bf64630ee1e643c2f1599da1c",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 87,
"avg_line_length": 40.8,
"alnum_prop": 0.7843137254901961,
"repo_name": "daikk115/openstack_upgrade_test",
"id": "9715d97d89cb1fafac5da8810154ac7be02ea3ec",
"size": "636",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "upgrade_script/upgrade/glance.sh",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "26602"
},
{
"name": "Shell",
"bytes": "18763"
}
],
"symlink_target": ""
} |
package io.cloudslang.content.dca.controllers;
import com.fasterxml.jackson.databind.JsonNode;
import org.jetbrains.annotations.NotNull;
import static org.apache.commons.lang3.StringUtils.EMPTY;
/**
 * Helpers for extracting credential fields from a DCA credential "data" array,
 * where each element is an object of the form {"key": ..., "value": ...}.
 */
public class GetCredentialFromManagerController {
    private static final String USERNAME = "username";
    private static final String PASSWORD = "password";
    private static final String KEY = "key";
    private static final String VALUE = "value";

    /**
     * Returns the value stored under the "username" key, or empty string if absent.
     */
    @NotNull
    public static String getUsernameFromDataArray(@NotNull final JsonNode dataArray) {
        return getValueFromDataArray(dataArray, USERNAME);
    }

    /**
     * Returns the value stored under the "password" key, or empty string if absent.
     */
    @NotNull
    public static String getPasswordFromDataArray(@NotNull final JsonNode dataArray) {
        return getValueFromDataArray(dataArray, PASSWORD);
    }

    /**
     * Scans the data array for an element whose "key" matches {@code keyName}
     * (case-insensitive) and returns its "value". Returns the empty string when
     * the node is not an array or no matching element exists.
     */
    @NotNull
    public static String getValueFromDataArray(@NotNull final JsonNode dataArray, @NotNull final String keyName) {
        if (!dataArray.isArray()) {
            return EMPTY;
        }
        for (final JsonNode entry : dataArray) {
            final String candidateKey = entry.get(KEY).asText(EMPTY);
            if (candidateKey.equalsIgnoreCase(keyName)) {
                return entry.get(VALUE).asText(EMPTY);
            }
        }
        return EMPTY;
    }
}
| {
"content_hash": "f4bafadead6f3a18c9d57c6e52b73da1",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 114,
"avg_line_length": 31.58974358974359,
"alnum_prop": 0.6923701298701299,
"repo_name": "CloudSlang/cs-actions",
"id": "e428bafaf6d6a62369e23460c880f77c7b564cec",
"size": "1857",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cs-microfocus-dca/src/main/java/io/cloudslang/content/dca/controllers/GetCredentialFromManagerController.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "168"
},
{
"name": "Java",
"bytes": "10254264"
},
{
"name": "Scala",
"bytes": "480429"
},
{
"name": "XSLT",
"bytes": "544"
}
],
"symlink_target": ""
} |
/*****************************************************************************
rsa.cpp -- An implementation of the RSA public-key cipher. The
following implementation is based on the one given in Cormen et
  al., Introduction to Algorithms, 1991. I'll refer to this book as
CLR because of its authors. This implementation shows the usage of
arbitrary precision types of SystemC. That is, these types in
SystemC can be used to implement algorithmic examples regarding
arbitrary precision integers. The algorithms used are not the most
efficient ones; however, they are intended for explanatory
purposes, so they are simple and perform their job correctly.
Below, NBITS shows the maximum number of bits in n, the variable
that is a part of both the public and secret keys, P and S,
respectively. NBITS can be made larger at the expense of longer
running time. For example, CLR mentions that the RSA cipher uses
large primes that contain approximately 100 decimal digits. This
means that NBITS should be set to approximately 560.
Some background knowledge: A prime number p > 1 is an integer that
  has only two divisors, 1 and p itself. For example, 2, 3, 5, 7,
and 11 are all primes. If p is not a prime number, it is called a
composite number. If we are given two primes p and q, it is easy
to find their product p * q; however, if we are given a number m
which happens to be the product of two primes p and q that we do
not know, it is very difficult to find p and q if m is very large,
i.e., it is very difficult to factor m. The RSA public-key
cryptosystem is based on this fact. Internally, we use the
Miller-Rabin randomized primality test to deal with primes. More
information can be obtained from pp. 831-836 in CLR, the first
edition.
Original Author: Ali Dasdan, Synopsys, Inc.
*****************************************************************************/
/*****************************************************************************
MODIFICATION LOG - modifiers, enter your name, affiliation, date and
changes you are making here.
Name, Affiliation, Date:
Description of Modification:
*****************************************************************************/
#include <stdlib.h>
#include <sys/types.h>
#include <time.h>
#include <stdlib.h> // drand48, srand48
#include "systemc.h"
#define DEBUG_SYSTEMC // #undef this to disable assertions.
// NBITS is the number of bits in n of public and secret keys P and
// S. HALF_NBITS is the number of bits in p and q, which are the prime
// factors of n.
#define NBITS 250
#define HALF_NBITS ( NBITS / 2 )
// +2 is for the format specifier '0b' to make the string binary.
#define STR_SIZE ( NBITS + 2 )
#define HALF_STR_SIZE ( HALF_NBITS + 2 )
typedef sc_bigint<NBITS> bigint;
// Return the absolute value of x.
inline
bigint
abs_val( const sc_signed& x )
{
  if ( x < 0 )
    return -x;
  return x;
}
// Initialize the random number generator. If seed == -1, the
// generator will be initialized with the system time. If not, it will
// be initialized with the given seed. This way, an experiment with
// random numbers becomes reproducible.
inline
long
randomize( int seed )
{
  long in_seed; // time_t is long.

  // Any non-positive seed selects the wall-clock time as the seed.
  in_seed = ( seed <= 0 ? time( 0 ) : seed );
#ifndef WIN32
  srand48( in_seed ); // POSIX drand48() family.
#else
  srand( ( unsigned ) in_seed ); // Windows has no srand48; use rand()/srand().
#endif
  // Return the seed actually used so the caller can log and reproduce the run.
  return in_seed;
}
// Flip a coin with probability p, i.e. return true with probability p.
#ifndef WIN32
inline
bool
flip( double p )
{
  // drand48() is uniform on [0, 1), so the comparison succeeds with probability p.
  return ( drand48() < p );
}
#else
inline
bool
flip( double p )
{
  const int MAX_VAL = ( 1 << 15 );

  // rand() produces an integer between 0 and 2^15-1, so rand() /
  // MAX_VAL is a number between 0 and 1, which is required to compare
  // with p.
  return ( rand() < ( int ) ( p * MAX_VAL ) );
}
#endif
// Randomly generate a bit string with nbits bits. str has a length
// of nbits + 1. This function is used to generate random messages to
// process.
inline
void
rand_bitstr( char *str, int nbits )
{
  assert( nbits >= 4 );

  // "0b" prefix marks the string as binary for the sc_bigint constructor.
  str[ 0 ] = '0';
  str[ 1 ] = 'b';
  str[ 2 ] = '0';  // Sign for positive numbers.

  // Fill the remaining positions with fair coin flips.
  for ( int i = 3; i < nbits; ++i )
    str[ i ] = ( flip( 0.5 ) == true ? '1' : '0' );
  str[ nbits ] = '\0';
}
// Generate "111..111" with nbits bits for masking.
// str has a length of nbits + 1.
inline
void
max_bitstr( char *str, int nbits )
{
  assert( nbits >= 4 );

  // "0b" prefix marks the string as binary; the first data bit stays '0'
  // so the resulting number is positive.
  str[ 0 ] = '0';
  str[ 1 ] = 'b';
  str[ 2 ] = '0';  // Sign for positive numbers.

  for ( int i = 3; i < nbits; ++i )
    str[ i ] = '1';
  str[ nbits ] = '\0';
}
// Normalize a remainder into the non-negative range [0, n).
inline
bigint
ret_pos( const bigint& x, const bigint& n )
{
  return ( x < 0 ? x + n : x );
}
// Compute the greatest common divisor ( gcd ) of a and b. This is
// Euclid's algorithm, written iteratively: repeatedly replace the pair
// ( a, b ) with ( b, a mod b ) until the second element reaches zero.
bigint
gcd( const bigint& a, const bigint& b )
{
  bigint u = a;
  bigint v = b;
  while ( v != 0 ) {
    bigint r = u % v;
    u = v;
    v = r;
  }
  return u;
}
// Compute d, x, and y such that d = gcd( a, b ) = ax + by. x and y can
// be zero or negative. This algorithm is also Euclid's algorithm but
// it is extended to also find x and y. Recall that the existence of x
// and y is guaranteed by Euclid's algorithm.
void
euclid( const bigint& a, const bigint& b, bigint& d, bigint& x, bigint& y )
{
  if ( b != 0 ) {
    // Recurse on ( b, a mod b ), then lift the coefficients back one level:
    // if d = b*x' + ( a mod b )*y', then d = a*y' + b*( x' - ( a / b )*y' ).
    euclid( b, a % b, d, x, y );

    bigint tmp = x;
    x = y;
    y = tmp - ( a / b ) * y;
  }
  else {
    // Base case: gcd( a, 0 ) = a = a*1 + 0*0.
    d = a;
    x = 1;
    y = 0;
  }
}
// Return d = a^b % n, where ^ represents exponentiation.
inline
bigint
modular_exp( const bigint& a, const bigint& b, const bigint& n )
{
  // Square-and-multiply, scanning the exponent b from its most
  // significant bit down to bit 0.
  bigint d = 1;
  for ( int i = b.length() - 1; i >= 0; --i )
  {
    d = ( d * d ) % n;
    if ( b[ i ] )  // multiply a in only when this exponent bit is set
      d = ( d * a ) % n;
  }
  // The % operator may leave a negative value; normalize into [0, n).
  return ret_pos( d, n );
}
// Return the multiplicative inverse of a, modulo n, when a and n are
// relatively prime. Recall that x is a multiplicative inverse of a,
// modulo n, if a * x = 1 ( mod n ).
inline
bigint
inverse( const bigint& a, const bigint& n )
{
  bigint d, x, y;

  // Extended Euclid yields d = gcd( a, n ) = a*x + n*y, so modulo n
  // this gives a*x = d ( mod n ).
  euclid( a, n, d, x, y );
  assert( d == 1 );  // an inverse exists only when a and n are coprime
  x %= n;
  // x can still be negative here; shift it into [0, n).
  return ret_pos( x, n );
}
// Find a small odd integer a that is relatively prime to n, by scanning
// the odd integers 3, 5, 7, ... until one with gcd( a, n ) == 1 appears.
// Recall that a is relatively prime to n if their only common divisor is 1.
inline
bigint
find_rel_prime( const bigint& n )
{
  bigint a = 3;
  while ( gcd( a, n ) != 1 ) {
    a += 2;
#ifdef DEBUG_SYSTEMC
    assert( a < n );
#endif
  }
  return a;
}
// Return true if and only if a is a witness to the compositeness of
// n, i.e., a can be used to prove that n is composite.
inline
bool
witness( const bigint& a, const bigint& n )
{
  bigint n_minus1 = n - 1;
  bigint x;
  bigint d = 1;

  // Compute d = a^( n-1 ) % n.
  // ( Square-and-multiply over the bits of n-1, while also checking for
  // nontrivial square roots of 1 along the way. )
  for ( int i = n.length() - 1; i >= 0; --i )
  {
    // Sun's SC5 bug when compiling optimized version
    // makes the wrong assignment if abs_val() is inlined
    //x = (sc_signed)d<0?-(sc_signed)d:(sc_signed)d;//abs_val( d );
    // Manual absolute value of d ( workaround, see note above ).
    if(d<0)
    {
      x = -d;
      assert(x==-d);
    }
    else
    {
      x = d;
      assert(x==d);
    }
    d = ( d * d ) % n;

    // x is a nontrivial square root of 1 modulo n ==> n is composite.
    if ( ( abs_val( d ) == 1 ) && ( x != 1 ) && ( x != n_minus1 ) )
      return true;

    if ( n_minus1[ i ] )
      d = ( d * a ) % n;
  }

  // d = a^( n-1 ) % n != 1 ==> n is composite ( Fermat's little theorem ).
  if ( abs_val( d ) != 1 )
    return true;

  return false;
}
// Trial-divide n by small odd numbers before running the (expensive)
// Miller-Rabin test. "Small" means odd candidates up to min( n - 2, 1023 );
// change the bound if necessary. Returns false when a divisor is found
// (n is composite), true when n may still be prime.
inline
bool
div_test( const bigint& n )
{
  int limit = ( n < 1023 ? n.to_int() - 2 : 1023 );

  for ( int candidate = 3; candidate <= limit; candidate += 2 ) {
    if ( n % candidate == 0 )
      return false;  // n is composite.
  }
  return true;  // n may be prime.
}
// Return true if n is almost surely prime, return false if n is
// definitely composite. This test, called the Miller-Rabin primality
// test, errs with probaility at most 2^(-s). CLR suggests s = 50 for
// any imaginable application, and s = 3 if we are trying to find
// large primes by applying miller_rabin to randomly chosen large
// integers. Even though we are doing the latter here, we will still
// choose s = 50. The probability of failure is at most
// 0.00000000000000088817, a pretty small number.
inline
bool
miller_rabin( const bigint& n )
{
  // Fix: the original rejected n == 2 ( "n <= 2" ), but 2 is prime and the
  // documented contract is to return false only for definite composites.
  // In this program find_prime() only tests large odd candidates, so the
  // change does not affect existing callers.
  if ( n == 2 )
    return true;
  if ( n < 2 )
    return false;

  // Cheap trial division first; bail out early on small divisors.
  if ( ! div_test( n ) )
    return false;

  char str[ STR_SIZE + 1 ];

  int s = 50;

  for ( int j = 1; j <= s; ++j ) {

    // Choose a random number.
    rand_bitstr( str, STR_SIZE );

    // Set a to the chosen number.
    bigint a = str;

    // Make sure that a is in [ 1, n - 1 ].
    a = ( a % ( n - 1 ) ) + 1;

    // Check to see if a is a witness.
    if ( witness( a, n ) )
      return false; // n is definitely composite.

  }

  return true; // n is almost surely prime.
}
// Return a randomly generated, large prime number using the
// Miller-Rabin primality test. r is the all-ones mask used to keep the
// candidate within HALF_NBITS bits while searching.
inline
bigint
find_prime( const bigint& r )
{
  char p_str[ HALF_STR_SIZE + 1 ];

  // Start from a random odd HALF_NBITS-bit candidate.
  rand_bitstr( p_str, HALF_STR_SIZE );
  p_str[ HALF_STR_SIZE - 1 ] = '1';  // Force p to be an odd number.

  bigint p = p_str;

#ifdef DEBUG_SYSTEMC
  assert( ( p > 0 ) && ( p % 2 == 1 ) );
#endif

  // p is randomly determined. Now, we'll look for a prime in the
  // vicinity of p. By the prime number theorem, executing the
  // following loop approximately ln ( 2^NBITS ) iterations should
  // find a prime.

#ifdef DEBUG_SYSTEMC
  // A very large counter to check against infinite loops.
  sc_bigint<NBITS> niter = 0;
#endif

  // Step through successive odd candidates ( wrapping at r ) until one
  // passes the primality test.
  while ( ! miller_rabin( p ) ) {
    p = ( p + 2 ) % r;
#ifdef DEBUG_SYSTEMC
    assert( ++niter > 0 );
#endif
  }

  return p;
}
// Encode or cipher the message in msg using the RSA public key P=( e, n ).
// Encryption is simply modular exponentiation: c = msg^e mod n.
inline
bigint
cipher( const bigint& msg, const bigint& e, const bigint& n )
{
  return modular_exp( msg, e, n );
}
// Decode or decipher the message in msg using the RSA secret key S=( d, n ).
// Decryption mirrors encryption: m = msg^d mod n.
inline
bigint
decipher( const bigint& msg, const bigint& d, const bigint& n )
{
  return modular_exp( msg, d, n );
}
// The RSA cipher: generate a key pair, print it, then cipher and
// decipher one random message as a round-trip self-check. seed controls
// the random number generator ( <= 0 means "seed from the clock" ).
inline
void
rsa( int seed )
{
  // Generate all 1's in r ( mask used by find_prime to bound candidates ).
  char r_str[ HALF_STR_SIZE + 1 ];
  max_bitstr( r_str, HALF_STR_SIZE );
  bigint r = r_str;

#ifdef DEBUG_SYSTEMC
  assert( r > 0 );
#endif

  // Initialize the random number generator.
  cout << "\nRandom number generator seed = " << randomize( seed ) << endl;
  cout << endl;

  // Find two large primes p and q.
  bigint p = find_prime( r );
  bigint q = find_prime( r );

#ifdef DEBUG_SYSTEMC
  assert( ( p > 0 ) && ( q > 0 ) );
#endif

  // Compute n and ( p - 1 ) * ( q - 1 ) = m ( Euler's totient of n ).
  bigint n = p * q;
  bigint m = ( p - 1 ) * ( q - 1 );

#ifdef DEBUG_SYSTEMC
  assert( ( n > 0 ) && ( m > 0 ) );
#endif

  // Find a small odd integer e that is relatively prime to m.
  bigint e = find_rel_prime( m );

#ifdef DEBUG_SYSTEMC
  assert( e > 0 );
#endif

  // Find the multiplicative inverse d of e, modulo m.
  bigint d = inverse( e, m );

#ifdef DEBUG_SYSTEMC
  assert( d > 0 );
#endif

  // Output public and secret keys.
  cout << "RSA public key P: P=( e, n )" << endl;
  cout << "e = " << e << endl;
  cout << "n = " << n << endl;
  cout << endl;

  cout << "RSA secret key S: S=( d, n )" << endl;
  cout << "d = " << d << endl;
  cout << "n = " << n << endl;
  cout << endl;

  // Cipher and decipher a randomly generated message msg.
  char msg_str[ STR_SIZE + 1 ];
  rand_bitstr( msg_str, STR_SIZE );
  bigint msg = msg_str;
  msg %= n; // Make sure msg is smaller than n. If larger, this part
            // will be a block of the input message.

#ifdef DEBUG_SYSTEMC
  assert( msg > 0 );
#endif

  cout << "Message to be ciphered = " << endl;
  cout << msg << endl;

  bigint msg2 = cipher( msg, e, n );
  cout << "\nCiphered message = " << endl;
  cout << msg2 << endl;

  msg2 = decipher( msg2, d, n );
  cout << "\nDeciphered message = " << endl;
  cout << msg2 << endl;

  // Make sure that the original message is recovered.
  if ( msg == msg2 ) {
    cout << "\nNote that the original message == the deciphered message, " << endl;
    cout << "showing that this algorithm and implementation work correctly.\n" << endl;
  }
  else {
    // This case is unlikely.
    cout << "\nNote that the original message != the deciphered message, " << endl;
    cout << "showing that this implementation works incorrectly.\n" << endl;
  }

  return;
}
// Entry point: with no command-line argument, seed from the system clock
// ( -1 ); otherwise use the given integer seed for a reproducible run.
int sc_main( int argc, char *argv[] )
{
  rsa( argc <= 1 ? -1 : atoi( argv[ 1 ] ) );
  return 0;
}
// End of file
| {
"content_hash": "d4e5eac42496babfbdc7ba01c1e60947",
"timestamp": "",
"source": "github",
"line_count": 524,
"max_line_length": 87,
"avg_line_length": 26.25,
"alnum_prop": 0.5630679752817157,
"repo_name": "pombredanne/metamorphosys-desktop",
"id": "04de7f570a1c2fdb39241d34a5a05a77dffe508c",
"size": "14676",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metamorphosys/tonka/models/SystemC/systemc-2.3.0/examples/sysc/rsa/rsa.cpp",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "10683"
},
{
"name": "Assembly",
"bytes": "117345"
},
{
"name": "Awk",
"bytes": "3591"
},
{
"name": "Batchfile",
"bytes": "228118"
},
{
"name": "BitBake",
"bytes": "4526"
},
{
"name": "C",
"bytes": "3613212"
},
{
"name": "C#",
"bytes": "11617773"
},
{
"name": "C++",
"bytes": "51448188"
},
{
"name": "CMake",
"bytes": "3055"
},
{
"name": "CSS",
"bytes": "109563"
},
{
"name": "Clojure",
"bytes": "37831"
},
{
"name": "Eagle",
"bytes": "3782687"
},
{
"name": "Emacs Lisp",
"bytes": "8514"
},
{
"name": "GAP",
"bytes": "49124"
},
{
"name": "Groff",
"bytes": "2178"
},
{
"name": "Groovy",
"bytes": "7686"
},
{
"name": "HTML",
"bytes": "4025250"
},
{
"name": "Inno Setup",
"bytes": "35715"
},
{
"name": "Java",
"bytes": "489537"
},
{
"name": "JavaScript",
"bytes": "167454"
},
{
"name": "Lua",
"bytes": "1660"
},
{
"name": "Makefile",
"bytes": "97209"
},
{
"name": "Mathematica",
"bytes": "26"
},
{
"name": "Matlab",
"bytes": "80874"
},
{
"name": "Max",
"bytes": "78198"
},
{
"name": "Modelica",
"bytes": "44541139"
},
{
"name": "Objective-C",
"bytes": "34004"
},
{
"name": "Perl",
"bytes": "19285"
},
{
"name": "PostScript",
"bytes": "400254"
},
{
"name": "PowerShell",
"bytes": "19749"
},
{
"name": "Processing",
"bytes": "1477"
},
{
"name": "Prolog",
"bytes": "3121"
},
{
"name": "Protocol Buffer",
"bytes": "58995"
},
{
"name": "Python",
"bytes": "5517835"
},
{
"name": "Ruby",
"bytes": "4483"
},
{
"name": "Shell",
"bytes": "956773"
},
{
"name": "Smarty",
"bytes": "37892"
},
{
"name": "TeX",
"bytes": "4183594"
},
{
"name": "Visual Basic",
"bytes": "22546"
},
{
"name": "XSLT",
"bytes": "332312"
}
],
"symlink_target": ""
} |
<Type Name="PreserveAttribute" FullName="MonoMac.Foundation.PreserveAttribute">
<TypeSignature Language="C#" Value="public sealed class PreserveAttribute : Attribute" />
<TypeSignature Language="ILAsm" Value=".class public auto ansi sealed beforefieldinit PreserveAttribute extends System.Attribute" />
<AssemblyInfo>
<AssemblyName>MonoMac</AssemblyName>
<AssemblyVersion>0.0.0.0</AssemblyVersion>
</AssemblyInfo>
<Base>
<BaseTypeName>System.Attribute</BaseTypeName>
</Base>
<Interfaces />
<Attributes>
<Attribute>
<AttributeName>System.AttributeUsage(System.AttributeTargets.Assembly | System.AttributeTargets.Class | System.AttributeTargets.Struct | System.AttributeTargets.Enum | System.AttributeTargets.Constructor | System.AttributeTargets.Method | System.AttributeTargets.Property | System.AttributeTargets.Field | System.AttributeTargets.Event | System.AttributeTargets.Interface | System.AttributeTargets.Delegate | System.AttributeTargets.All)</AttributeName>
</Attribute>
</Attributes>
<Docs>
<summary>Prevents the MonoMac linker from linking the target.</summary>
<remarks>
<para>
This attribute is used at link time by the MonoMac linker to skip certain classes, structures, enumerations or other objects from being linked.
</para>
<para>
By applying this attribute all of the members of the target will be kept as if they had been referenced by the code.
</para>
<para>
This attribute is useful for example when using classes that use reflection (for example web services) and that use this information for serialization and deserialization.
</para>
</remarks>
</Docs>
<Members>
<Member MemberName=".ctor">
<MemberSignature Language="C#" Value="public PreserveAttribute ();" />
<MemberSignature Language="ILAsm" Value=".method public hidebysig specialname rtspecialname instance void .ctor() cil managed" />
<MemberType>Constructor</MemberType>
<AssemblyInfo>
<AssemblyVersion>0.0.0.0</AssemblyVersion>
</AssemblyInfo>
<Parameters />
<Docs>
<summary>To be added.</summary>
<remarks>To be added.</remarks>
</Docs>
</Member>
<Member MemberName="AllMembers">
<MemberSignature Language="C#" Value="public bool AllMembers;" />
<MemberSignature Language="ILAsm" Value=".field public bool AllMembers" />
<MemberType>Field</MemberType>
<AssemblyInfo>
<AssemblyVersion>0.0.0.0</AssemblyVersion>
</AssemblyInfo>
<ReturnValue>
<ReturnType>System.Boolean</ReturnType>
</ReturnValue>
<Docs>
<summary>Ensures that all members of this type are preserved.</summary>
<remarks>All members of this type, including fields, properties, methods, subclasses are preserved during linking.</remarks>
</Docs>
</Member>
<Member MemberName="Conditional">
<MemberSignature Language="C#" Value="public bool Conditional;" />
<MemberSignature Language="ILAsm" Value=".field public bool Conditional" />
<MemberType>Field</MemberType>
<AssemblyInfo>
<AssemblyVersion>0.0.0.0</AssemblyVersion>
</AssemblyInfo>
<ReturnValue>
<ReturnType>System.Boolean</ReturnType>
</ReturnValue>
<Docs>
<summary>Flags the method as a method to preserve during linking if the container class is pulled in.</summary>
<remarks>
<para>
If the Conditional value is set on a Preserve attribute on a method, then the method will be preserved if the containing NSObject is kept after the linker has done its job.
</para>
<para>
	  You would typically use this for callbacks that you know will be called in your code dynamically (for example with a selector invocation from Objective-C) since a static linker would not be able to infer that this particular method is required.
</para>
</remarks>
</Docs>
</Member>
</Members>
</Type>
| {
"content_hash": "55edc2b3b02eb30b69caa2e211265c51",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 459,
"avg_line_length": 47.963855421686745,
"alnum_prop": 0.7081135393117307,
"repo_name": "PlayScriptRedux/monomac",
"id": "38b20b9321d4e97eb90e7f086d520e257a3b08b3",
"size": "3981",
"binary": false,
"copies": "4",
"ref": "refs/heads/playscript",
"path": "docs/en/MonoMac.Foundation/PreserveAttribute.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "8631093"
},
{
"name": "Makefile",
"bytes": "8784"
}
],
"symlink_target": ""
} |
package io.crate.analyze;
import io.crate.metadata.Schemas;
import io.crate.metadata.TableIdent;
import io.crate.metadata.blob.BlobSchemaInfo;
import io.crate.metadata.blob.BlobTableInfo;
/**
 * Analyzed representation of an {@code ALTER BLOB TABLE} statement.
 *
 * Resolves and holds the metadata of the blob table being altered.
 */
public class AlterBlobTableAnalyzedStatement extends AbstractDDLAnalyzedStatement {

    // Used to resolve the blob table's metadata from its identifier.
    private final Schemas schemas;

    // Resolved metadata of the table being altered; set via table(TableIdent).
    private BlobTableInfo blobTableInfo;

    public AlterBlobTableAnalyzedStatement(Schemas schemas) {
        this.schemas = schemas;
    }

    /**
     * Resolves and stores the table metadata for the given identifier.
     * The identifier must belong to the blob schema.
     */
    public void table(TableIdent tableIdent) {
        assert BlobSchemaInfo.NAME.equals(tableIdent.schema());
        this.blobTableInfo = (BlobTableInfo) schemas.getTableInfo(tableIdent);
    }

    /**
     * @return the resolved blob table metadata, or {@code null} if
     *         {@link #table(TableIdent)} has not been called yet
     */
    public BlobTableInfo table() {
        return blobTableInfo;
    }

    @Override
    public <C, R> R accept(AnalyzedStatementVisitor<C, R> analyzedStatementVisitor, C context) {
        return analyzedStatementVisitor.visitAlterBlobTableStatement(this, context);
    }
}
| {
"content_hash": "a4ae9da1c983a1af181df8be57d3d1fc",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 96,
"avg_line_length": 28.46875,
"alnum_prop": 0.7453347969264544,
"repo_name": "puneetjaiswal/crate",
"id": "0c89bc5979b932a899bbd6ed4e5a5f232c768018",
"size": "1930",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "sql/src/main/java/io/crate/analyze/AlterBlobTableAnalyzedStatement.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2809"
},
{
"name": "GAP",
"bytes": "70815"
},
{
"name": "Java",
"bytes": "7495540"
},
{
"name": "Python",
"bytes": "5286"
},
{
"name": "Shell",
"bytes": "11354"
}
],
"symlink_target": ""
} |
.. -*- coding: utf-8 -*-
.. URL: https://docs.docker.com/engine/reference/commandline/plugin_disable/
.. SOURCE: https://github.com/docker/docker/blob/master/docs/reference/commandline/plugin_disable.md
doc version: 1.12
https://github.com/docker/docker/commits/master/docs/reference/commandline/plugin_disable.md
.. check date: 2016/06/16
.. Commits on Jun 15, 2016 e79873c27c2b3f404db02682bb4f11b5a046602e
.. -------------------------------------------------------------------
.. plugin disable
=======================================
plugin disable (実験的)
=======================================
.. code-block:: bash
使い方: docker plugin disable PLUGIN
プラグインを無効化
--help 使い方の表示
.. Disables a plugin. The plugin must be installed before it can be disabled, see docker plugin install.
プラグインを無効化します。無効にするプラグインをインストールしている必要があります。詳しくは :doc:`docker plugin install <plugin_install>` をご覧ください。
.. The following example shows that the no-remove plugin is currently installed and active:
以下の例は ``no-remove`` がインストール済みかつアクティブな状態を表します。
.. code-block:: bash
$ docker plugin ls
NAME TAG ACTIVE
tiborvass/no-remove latest true
.. To disable the plugin, use the following command:
プラグインを無効化するには、次のコマンドを実行します。
.. code-block:: bash
$ docker plugin disable tiborvass/no-remove:latest
.. After the plugin is disabled, it appears as "inactive" in the list of plugins:
プラグインの無効後は、プラグインの一覧で「inactive」(動作していない)と表示されます。
.. code-block:: bash
$ docker plugin ls
NAME VERSION ACTIVE
tiborvass/no-remove latest false
関連情報
----------
* :doc:`plugin_ls`
* :doc:`plugin_enable`
* :doc:`plugin_inspect`
* :doc:`plugin_install`
* :doc:`plugin_rm`
.. seealso::
plugin disable
https://docs.docker.com/engine/reference/commandline/plugin_disable/
| {
"content_hash": "8bcb902f1c9ab5959cf8b7a427396efd",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 104,
"avg_line_length": 27,
"alnum_prop": 0.6419753086419753,
"repo_name": "imoyoukan/docs.docker.jp",
"id": "d70e0dbba048e016fcc3adabd3ad6e98285df313",
"size": "2215",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "engine/reference/commandline/plugin_disable.rst",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "104838"
},
{
"name": "HTML",
"bytes": "19821"
},
{
"name": "JavaScript",
"bytes": "5701"
},
{
"name": "Makefile",
"bytes": "1062"
},
{
"name": "Python",
"bytes": "10472"
}
],
"symlink_target": ""
} |
layout: article
title: "Challenges in building Unity native plugins"
headline: ""
date: 2015-08-05T04:20:38+02:00
estimate: "10 mins"
categories: [unity3d, getsocial]
post: true
external: true
sourceName: "The GetSocial Blog"
sourceUrl: "https://www.getsocial.im/blog/challenges-in-building-unity-native-plugins/"
---
### Prehistory
Half a year ago the GetSocial Mobile team released SDK v.3.0 with a lot of new cool features and breaking changes from the 2.x version. Since then, I’ve joined GetSocial and have had an opportunity to make a complete revamp of their Unity SDK.
In this series of posts, I’ll describe what decisions we made and what challenges our team faced with developing the [GetSocial Unity SDK](https://github.com/getsocial-im/getsocial-unity-sdk).
### Approaches in building Unity SDK
First, we needed to decide which approach to go for in order to create an SDK.
At that moment, we had [Android SDK](https://github.com/getsocial-im/getsocial-android-sdk) written in Java and [iOS SDK](https://github.com/getsocial-im/getsocial-ios-sdk) in ObjectiveC, both completely native to achieve the best performance and user experience. Unity supports three scripting languages: C#, UnityScript (aka JavaScript), and Boo. We decided to expose API in C# as it's [the most widely used one](http://blogs.unity3d.com/2014/09/03/documentation-unity-scripting-languages-and-you/).
Let’s check which options we had to achieve the goal...
##### Pure C# SDK
The best approach from developer and user perspective as
- it delivers the best performance;
- it is simple to setup;
- we could make SDK available to 21 platforms supported by Unity.
Looks like an ideal solution, but there are few huge disadvantages:
- at the end we'll have a third huge codebase that should be kept in sync with iOS and Android ones;
- all UI should be reimplemented in C# that is not the easiest task as all games use different UI frameworks.
##### Cross-compilation to C\#
According to a Google [report](http://gmailblog.blogspot.nl/2014/11/going-under-hood-of-inbox.html), they managed to reuse 70% of the code between the Android, iOS, and Web Inbox clients. We were so impressed by the idea that we decided to give it a try.
Because of the similarity between Java and C#, we dug into the option of recompiling the Android SDK to C#.
From a positive side, we found that:
- there are tools for cross-compilation like [Java Language Conversion Assistant](http://www.microsoft.com/en-us/download/details.aspx?id=14349) from Microsoft;
- in theory, in the end we get C# library, that is fast;
- and possible to use on all Unity supported platforms.
On the other hand:
- it's not possible to cross-compile presentation layer as it uses Android APIs. That means we need to replicate UI part of the SDK manually on C#;
- cross-compilation required a clear separation between business and presentation logic, which would lead to a huge refactoring in Android SDK;
- it would be hard to establish automated C# SDK regenerations when Java source is updated, as cross-compilers produce [upgrade issues](https://msdn.microsoft.com/en-us/library/5atsz094(v=vs.71).aspx) that should be fixed manually.
##### Unity native plugin
Native plugins are platform-specific native code libraries. They can access features like OS calls and third-party code libraries that would otherwise not be available to Unity.
Pros:
- very little code needed; the bridge is a thin adapter layer between the API exposed in C# and the native SDKs;
- 100% code reuse from native SDKs;
- UI rendering is handled by native side => no problem with variety of Unity UI frameworks.
Cons:
- we’d be able to deploy only to iOS and Android Unity games;
- native SDKs API should be in sync to avoid huge amount of adapter's code in a bridge;
- Unity Java Native Interface implementation has issues (Unity engineers fix them quickly but tend to add a new one on each release).
### Selected approach
We decided to go with Unity native plugin approach as it required the least effort, gave plenty of advantages and flexibility. Now GetSocial Unity SDK has the following architecture:

Between C# API and native libraries, we have a bridge layer on Unity and native side. The main task for the bridge is to invoke methods between platforms and convert data to primitive types as we can’t pass objects between platforms.
To communicate between C# code and native side, Unity exposes a wide variety of APIs. On Android: Unity Messaging (the one we use to send messages between GameObjects) and wrappers around Java Native Interface. On iOS, we can invoke extern methods from ObjectiveC and use MonoPInvokeCallbackAttribute to call C# methods from the iOS world.
### Everything is simple, right?
The theory is simple: thin bridge layer, few calls between C# and native platforms, nice demo app and problem solved. But a simple solution doesn’t come without any challenges. Here are a few of my favorites:
- **performance**: potentially bridge could become a performance-wise bottleneck, especially if calls will be made on each frame (keep in mind [16ms rule](https://www.youtube.com/watch?v=CaMTIgxCSqU));
- **testability**: with Unity native plugin approach most of the SDK will be in the native code, that means we can test only on iOS or Android device/simulator, but not Unity player;
- **continuous delivery**: SDK consist of 3 parts, each one has it own build process, setting up build pipeline could be complicated;
- **version fragmentation**: Unity is a very dynamic platform, each release brings tons of cool features and ... new bugs. Unlike game developers who can select Unity version and use it during whole game life, we have to support all Unity versions starting, at least, from 4.1.
### Want to know more?
This was only the first post in the series. Follow GetSocial on social networks so you don't miss updates, and don't hesitate to share your thoughts and experience with building Unity plugins in the comments.
| {
"content_hash": "9e7e4927cc7fd709b40890e041990c9d",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 501,
"avg_line_length": 63.48,
"alnum_prop": 0.7763074984247007,
"repo_name": "zasadnyy-inc/v-zasadnyy-com",
"id": "5d5d5ac90e243c2ea653a5f539644a82990eb924",
"size": "6364",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/_posts/2015-08-05-challenges-in-building-unity-native-plugins-intro.markdown",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "18423"
},
{
"name": "HTML",
"bytes": "26482"
},
{
"name": "JavaScript",
"bytes": "9768"
},
{
"name": "Ruby",
"bytes": "1023"
}
],
"symlink_target": ""
} |
namespace SIM.Tool.Windows.UserControls.Download
{
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using SIM.Tool.Base;
using SIM.Tool.Base.Profiles;
using SIM.Tool.Base.Wizards;
using SIM.Tool.Windows.Pipelines.Download;
using Sitecore.Diagnostics.Base;
using Sitecore.Diagnostics.Base.Annotations;
using Sitecore.Diagnostics.Logging;
using Sitecore.Diagnostics.InformationService.Client.Model;
using SIM.Core;
#region
#endregion
  /// <summary>
  /// Wizard step that lists available downloads and lets the user tick the ones
  /// to fetch. Implements <see cref="IWizardStep"/> (step lifecycle),
  /// <see cref="ICustomButton"/> (the "Open Folder" button) and
  /// <see cref="IFlowControl"/> (validation when moving between wizard pages).
  /// </summary>
  public partial class Downloads : IWizardStep, ICustomButton, IFlowControl
  {
    #region Fields

    // Checkbox view-models backing the package list; rebuilt on each InitializeStep.
    private readonly List<ProductDownloadInCheckbox> checkBoxItems = new List<ProductDownloadInCheckbox>();

    #endregion

    #region Constructors

    public Downloads()
    {
      this.InitializeComponent();
    }

    #endregion

    #region Properties

    // Label of the wizard's custom button (see ICustomButton).
    public string CustomButtonText
    {
      get
      {
        return "Open Folder";
      }
    }

    #endregion

    #region Public Methods

    #region ICustomButton Members

    // Opens the local repository folder in the OS file manager.
    public void CustomButtonClick()
    {
      CoreApp.OpenFolder(ProfileManager.Profile.LocalRepository);
    }

    #endregion

    #region IFlowControl Members

    #region Public methods

    // Moving back is always allowed; nothing to validate or undo here.
    public bool OnMovingBack(WizardArgs wizardArgs)
    {
      return true;
    }

    /// <summary>
    /// Validates that at least one download is selected and pre-computes the
    /// download links and file sizes before the wizard advances.
    /// Returns false (and shows a message) when validation or preparation fails.
    /// </summary>
    public bool OnMovingNext(WizardArgs wizardArgs)
    {
      var args = (DownloadWizardArgs)wizardArgs;
      bool canMoveNext = args.Products.Count > 0;
      if (!canMoveNext)
      {
        WindowHelper.HandleError("You didn't select any download, please select one to go further", false);
      }

      // PrepareData runs on a background task; the exception (if any) is
      // captured into a local and reported after the wait dialog closes.
      Exception ex = null;
      WindowHelper.LongRunningTask(() => { ex = this.PrepareData(args); }, "Sitecore Versions Downloader", Window.GetWindow(this), "Preparing for downloading");
      if (ex != null)
      {
        WindowHelper.ShowMessage("Failed to prepare the data. " + ex + "\r\nMessage: " + ex.Message + "\r\nStackTrace:\r\n" + ex.StackTrace);

        return false;
      }

      return canMoveNext;
    }

    #endregion

    #region Private methods

    // Requests the URL and records its Content-Length into fileSizes.
    // Failures are logged and swallowed, leaving the URL absent from the map
    // (PrepareData's count assertion will then fail for that URL).
    private void CheckFileSize(UriBasedCollection<long> fileSizes, Uri url, string cookies)
    {
      Assert.IsNotNull(url, nameof(url));

      try
      {
        using (var response = WebRequestHelper.RequestAndGetResponse(url, 60000, 60000, cookies))
        {
          var fileSize = response.ContentLength;
          fileSizes[url] = fileSize;
        }
      }
      catch (Exception ex)
      {
        Log.Error(ex, "Error while downloading {0}", url.ToString());
      }
    }

    // Flattens all selected products' URLs into a single read-only list.
    private ReadOnlyCollection<Uri> GetLinks(DownloadWizardArgs args)
    {
      return new ReadOnlyCollection<Uri>(args.Products.SelectMany(product => product.Value).ToArray());
    }

    // Queries the Content-Length of every URL, in batches of
    // AppDownloaderParallelThreads parallel tasks; each batch is waited on
    // with a timeout of 60s per remaining URL.
    private UriBasedCollection<long> GetSizes(ReadOnlyCollection<Uri> urls, string cookies)
    {
      UriBasedCollection<long> sizes = new UriBasedCollection<long>();
      var parallelDownloadsNumber = WindowsSettings.AppDownloaderParallelThreads.Value;

      for (int i = 0; i < urls.Count; i += parallelDownloadsNumber)
      {
        var remains = urls.Count - i;
        var tasks = urls
          .Skip(i)
          .Take(Math.Min(parallelDownloadsNumber, remains))
          .Select(url => Task.Factory.StartNew(() => this.CheckFileSize(sizes, url, cookies)))
          .ToArray();

        Task.WaitAll(tasks, remains * 60000);
      }

      return sizes;
    }

    // Collects links and sizes into the wizard args. Returns the exception on
    // failure instead of throwing, so the background task can hand it back to
    // the UI thread (see OnMovingNext).
    private Exception PrepareData(DownloadWizardArgs args)
    {
      try
      {
        var links = this.GetLinks(args);
        args.Links = links;

        var sizes = this.GetSizes(links, args.Cookies);
        Assert.IsTrue(sizes.Count == args.Links.Count, "The length of the sizes array differs from links count");
        Assert.IsTrue(sizes.All(s => s.Value > 0), "Some SDN packages are said to have 0 length");
        args.Sizes = sizes;
      }
      catch (Exception ex)
      {
        return ex;
      }

      return null;
    }

    #endregion

    #endregion

    #region IStateControl Members

    #region Public properties

    public static WebBrowser WebBrowser { get; private set; }

    public WizardArgs WizardArgs { get; set; }

    #endregion

    #region Public methods

    // Persists the user's checkbox selection into the wizard args.
    public bool SaveChanges(WizardArgs wizardArgs)
    {
      var args = (DownloadWizardArgs)wizardArgs;
      var selected = this.checkBoxItems.Where(mm => mm.IsChecked);
      args.Products.AddRange(selected);

      return true;
    }

    #endregion

    #endregion

    #endregion

    #region Methods

    #region Public methods

    // Rebuilds the checkbox list from the available releases, then re-checks
    // any items that were already selected in a previous visit to this step.
    public void InitializeStep(WizardArgs wizardArgs)
    {
      var args = (DownloadWizardArgs)wizardArgs;
      this.checkBoxItems.Clear();
      this.Append(args.Releases);

      foreach (var product in args.Products)
      {
        // local copy for safe capture in the lambda below
        var selectedPRoduct = product;
        ProductDownloadInCheckbox checkBoxItem = this.checkBoxItems.SingleOrDefault(cbi => cbi.Value.Equals(selectedPRoduct));
        if (checkBoxItem != null)
        {
          checkBoxItem.IsChecked = true;
        }
      }

      this.filePackages.DataContext = this.checkBoxItems;
    }

    #endregion

    #region Private methods

    // Wraps each release in a checkbox view-model and adds it to the list.
    private void Append(IEnumerable<IRelease> records)
    {
      this.checkBoxItems.AddRange(records.Select(r => new ProductDownloadInCheckbox(r)).ToList())
;
    }

    // Clears the visual selection so rows act as checkboxes, not a list selection.
    private void ModuleSelected([CanBeNull] object sender, [CanBeNull] SelectionChangedEventArgs e)
    {
      this.filePackages.SelectedIndex = -1;
    }

    // Intentionally empty; kept because the XAML wires this handler.
    private void UserControlLoaded(object sender, RoutedEventArgs e)
    {
    }

    #endregion

    #endregion
  }
| {
"content_hash": "0e4054dd3a8ccf9929b132dd0683dd8a",
"timestamp": "",
"source": "github",
"line_count": 236,
"max_line_length": 160,
"avg_line_length": 24.483050847457626,
"alnum_prop": 0.6512634129456559,
"repo_name": "dsolovay/Sitecore-Instance-Manager",
"id": "a889e258f517023420b9cc6623b0cd430ad13f5f",
"size": "5780",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/SIM.Tool.Windows/UserControls/Download/Downloads.xaml.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "1320884"
},
{
"name": "PowerShell",
"bytes": "13655"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>React Example</title>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/css/bootstrap.min.css">
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/3.10.0/lodash.min.js"></script>
<script src="https://fb.me/react-0.13.3.js"></script>
<script src="https://fb.me/JSXTransformer-0.13.3.js"></script>
</head>
<body>
<div id="app" class="container">
</div>
<script type="text/jsx">
      // Stateless display component: renders a single name inside a <span>.
      var NameItem = React.createClass({
        render: function() {
          return (
            <span>
              {this.props.name}
            </span>
          );
        }
      });
      // Renders the list of names. Each row gets a Remove button that calls
      // back into the parent via the `remove` prop with the row's index.
      var NameList = React.createClass({
        render: function() {
          // Row factory used with Array.prototype.map below; `key` uses the
          // item's unique id so React can reconcile list updates correctly.
          var createItem = function(item, index) {
            return (
              <li className="list-group-item" key={item.id}>
                <NameItem name={item.name} />
                <button
                  onClick={this.props.remove.bind(null, index)}
                  className="btn btn-danger">
                  Remove
                </button>
              </li>
            );
          };
          // Pass `this` as the second map() argument so createItem sees props.
          return (
            <ul className="list-group">
              {this.props.names.map(createItem, this)}
            </ul>
          );
        }
      });
      // Input form: on submit, trims the typed name and hands it to the
      // parent via the `submitHandler` prop, then clears the field.
      var NameForm = React.createClass({
        _onSubmit: function(e) {
          e.preventDefault();
          var name = React.findDOMNode(this.refs.name).value.trim();
          // Ignore empty submissions.
          if (!name) {
            return;
          }
          this.props.submitHandler(name);
          // Reset the input for the next entry.
          React.findDOMNode(this.refs.name).value = '';
          return;
        },
        render: function() {
          return (
            <form onSubmit={this._onSubmit}>
              <input type="text" ref="name" />
              <button type="submit" className="btn btn-default">Add</button>
            </form>
          );
        }
      });
      // Stateful container: owns the array of {name, id} records and passes
      // mutation callbacks down to NameForm (add) and NameList (remove).
      var NameBox = React.createClass({
        getInitialState: function() {
          return {names: []};
        },
        // Prepends a new record with a lodash-generated unique id.
        _addNewName: function(name) {
          // NOTE: NEVER mutate this.state directly
          var nameList = [{
            name: name,
            id: _.uniqueId()
          }].concat(this.state.names);
          this.setState({names: nameList});
        },
        // Removes the record at `index`; deep-clones first so the splice
        // does not mutate the current state object.
        _removeName: function(index) {
          var nameList = _.clone(this.state.names, true);
          nameList.splice(index, 1);
          this.setState({names: nameList});
        },
        render: function() {
          return (
            <div className="row">
              <NameForm submitHandler={this._addNewName} />
              <NameList
                names={this.state.names}
                remove={this._removeName} />
            </div>
          );
        }
      });

      // Mount the root component into the #app container.
      React.render(<NameBox />, document.getElementById('app'));
</script>
</body>
</html>
| {
"content_hash": "e2c9315f65975419a9e81dcb96043097",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 104,
"avg_line_length": 27.850467289719628,
"alnum_prop": 0.4798657718120805,
"repo_name": "seanlin0800/intro_react",
"id": "725ee8a3163de210cc1e9a1daee1be668c9b2685",
"size": "2980",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "examples/snapshots/snapshot10/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1480"
},
{
"name": "HTML",
"bytes": "6583"
},
{
"name": "JavaScript",
"bytes": "3594"
}
],
"symlink_target": ""
} |
namespace peloton {
namespace test {
//===--------------------------------------------------------------------===//
// Transaction Tests
//===--------------------------------------------------------------------===//
// Googletest fixture for the transaction-manager tests below.
class TransactionTests : public PelotonTest {};
// Concurrency-control protocols exercised by every test in this file.
// CONCURRENCY_TYPE_SPECULATIVE_READ is intentionally disabled.
static std::vector<ConcurrencyType> TEST_TYPES = {
    CONCURRENCY_TYPE_OPTIMISTIC,
    CONCURRENCY_TYPE_PESSIMISTIC,
    CONCURRENCY_TYPE_SSI,
    // CONCURRENCY_TYPE_SPECULATIVE_READ,
    CONCURRENCY_TYPE_EAGER_WRITE,
    CONCURRENCY_TYPE_TO,
    CONCURRENCY_TYPE_OCC_RB
};
// Worker body run by LaunchParallelTest: each thread opens 50 transactions,
// committing most of them and aborting every 25th one. Even-numbered threads
// sleep briefly after BeginTransaction to force interleaving with the others.
void TransactionTest(concurrency::TransactionManager *txn_manager) {
  const uint64_t thread_id = TestingHarness::GetInstance().GetThreadId();
  const bool is_even_thread = (thread_id % 2 == 0);

  for (oid_t iteration = 1; iteration <= 50; ++iteration) {
    txn_manager->BeginTransaction();

    if (is_even_thread) {
      std::this_thread::sleep_for(std::chrono::microseconds(1));
    }

    // Abort every 25th transaction; commit the rest.
    if (iteration % 25 == 0) {
      txn_manager->AbortTransaction();
    } else {
      txn_manager->CommitTransaction();
    }
  }
}
// Smoke test: runs the TransactionTest worker on 8 parallel threads under
// every configured concurrency protocol.
TEST_F(TransactionTests, TransactionTest) {
  for (auto test_type : TEST_TYPES) {
    concurrency::TransactionManagerFactory::Configure(test_type);
    auto &txn_manager = concurrency::TransactionManagerFactory::GetInstance();
    LaunchParallelTest(8, TransactionTest, &txn_manager);
    // Log the commit-id watermark reached after all threads finished.
    LOG_INFO("next Commit Id :: %lu", txn_manager.GetNextCommitId());
  }
}
// Single-transaction semantics: scan, read-your-own-writes, repeated updates,
// delete/insert interleavings — verified under every configured protocol.
// Each scoped block uses a fresh TransactionScheduler against the same table;
// results[i] is the value seen by the i-th Read (-1 means "not found").
TEST_F(TransactionTests, SingleTransactionTest) {
  for (auto test_type : TEST_TYPES) {
    concurrency::TransactionManagerFactory::Configure(test_type);
    auto &txn_manager = concurrency::TransactionManagerFactory::GetInstance();
    std::unique_ptr<storage::DataTable> table(
        TransactionTestsUtil::CreateTable());
    // Just scan the table
    {
      TransactionScheduler scheduler(1, table.get(), &txn_manager);
      scheduler.Txn(0).Scan(0);
      scheduler.Txn(0).Commit();
      scheduler.Run();
      // CreateTable seeds 10 rows, so a full scan returns 10 results.
      EXPECT_EQ(10, scheduler.schedules[0].results.size());
    }
    // read, read, read, read, update, read, read not exist
    // another txn read
    {
      TransactionScheduler scheduler(2, table.get(), &txn_manager);
      scheduler.Txn(0).Read(0);
      scheduler.Txn(0).Read(0);
      scheduler.Txn(0).Read(0);
      scheduler.Txn(0).Read(0);
      scheduler.Txn(0).Update(0, 1);
      scheduler.Txn(0).Read(0);
      scheduler.Txn(0).Read(100);
      scheduler.Txn(0).Commit();
      scheduler.Txn(1).Read(0);
      scheduler.Txn(1).Commit();
      scheduler.Run();
      EXPECT_EQ(RESULT_SUCCESS, scheduler.schedules[0].txn_result);
      EXPECT_EQ(RESULT_SUCCESS, scheduler.schedules[1].txn_result);
      // Txn 0 sees 0 four times, then its own update (1), then -1 for key 100.
      EXPECT_EQ(0, scheduler.schedules[0].results[0]);
      EXPECT_EQ(0, scheduler.schedules[0].results[1]);
      EXPECT_EQ(0, scheduler.schedules[0].results[2]);
      EXPECT_EQ(0, scheduler.schedules[0].results[3]);
      EXPECT_EQ(1, scheduler.schedules[0].results[4]);
      EXPECT_EQ(-1, scheduler.schedules[0].results[5]);
      // Txn 1 runs after txn 0 committed, so it observes the updated value.
      EXPECT_EQ(1, scheduler.schedules[1].results[0]);
    }
    // // update, update, update, update, read
    {
      TransactionScheduler scheduler(1, table.get(), &txn_manager);
      scheduler.Txn(0).Update(0, 1);
      scheduler.Txn(0).Update(0, 2);
      scheduler.Txn(0).Update(0, 3);
      scheduler.Txn(0).Update(0, 4);
      scheduler.Txn(0).Read(0);
      scheduler.Txn(0).Commit();
      scheduler.Run();
      EXPECT_EQ(RESULT_SUCCESS, scheduler.schedules[0].txn_result);
      // Only the last of the chained updates is visible.
      EXPECT_EQ(4, scheduler.schedules[0].results[0]);
    }
    // // delete not exist, delete exist, read deleted, update deleted,
    // // read deleted, insert back, update inserted, read newly updated,
    // // delete inserted, read deleted
    {
      TransactionScheduler scheduler(1, table.get(), &txn_manager);
      scheduler.Txn(0).Delete(100);
      scheduler.Txn(0).Delete(0);
      scheduler.Txn(0).Read(0);
      scheduler.Txn(0).Update(0, 1);
      scheduler.Txn(0).Read(0);
      scheduler.Txn(0).Insert(0, 2);
      scheduler.Txn(0).Update(0, 3);
      scheduler.Txn(0).Read(0);
      scheduler.Txn(0).Delete(0);
      scheduler.Txn(0).Read(0);
      scheduler.Txn(0).Commit();
      scheduler.Run();
      EXPECT_EQ(RESULT_SUCCESS, scheduler.schedules[0].txn_result);
      EXPECT_EQ(-1, scheduler.schedules[0].results[0]);
      EXPECT_EQ(-1, scheduler.schedules[0].results[1]);
      EXPECT_EQ(3, scheduler.schedules[0].results[2]);
      EXPECT_EQ(-1, scheduler.schedules[0].results[3]);
      LOG_INFO("FINISH THIS");
    }
    // // insert, delete inserted, read deleted, insert again, delete again
    // // read deleted, insert again, read inserted, update inserted, read updated
    {
      TransactionScheduler scheduler(1, table.get(), &txn_manager);
      scheduler.Txn(0).Insert(1000, 0);
      scheduler.Txn(0).Delete(1000);
      scheduler.Txn(0).Read(1000);
      scheduler.Txn(0).Insert(1000, 1);
      scheduler.Txn(0).Delete(1000);
      scheduler.Txn(0).Read(1000);
      scheduler.Txn(0).Insert(1000, 2);
      scheduler.Txn(0).Read(1000);
      scheduler.Txn(0).Update(1000, 3);
      scheduler.Txn(0).Read(1000);
      scheduler.Txn(0).Commit();
      scheduler.Run();
      EXPECT_EQ(RESULT_SUCCESS, scheduler.schedules[0].txn_result);
      EXPECT_EQ(-1, scheduler.schedules[0].results[0]);
      EXPECT_EQ(-1, scheduler.schedules[0].results[1]);
      EXPECT_EQ(2, scheduler.schedules[0].results[2]);
      EXPECT_EQ(3, scheduler.schedules[0].results[3]);
    }
    // // Deadlock detection test for eager write
    // // T0: R0 W0 C0
    // // T1: R1 W1 C1
    // Two transactions update each other's read set; the eager-write protocol
    // must detect the deadlock and abort exactly one of them (txn 0 here).
    if (concurrency::TransactionManagerFactory::GetProtocol() == CONCURRENCY_TYPE_EAGER_WRITE)
    {
      TransactionScheduler scheduler(2, table.get(), &txn_manager);
      scheduler.Txn(0).Read(2);
      scheduler.Txn(1).Read(3);
      scheduler.Txn(0).Update(3,1);
      scheduler.Txn(1).Update(2,2);
      scheduler.Txn(0).Commit();
      scheduler.Txn(1).Commit();
      scheduler.Run();
      EXPECT_EQ(RESULT_SUCCESS, scheduler.schedules[1].txn_result);
      EXPECT_EQ(RESULT_ABORTED, scheduler.schedules[0].txn_result);
    }
  }
}
// Abort semantics: an aborted transaction's update or insert must not be
// visible to a later transaction, under every configured protocol.
TEST_F(TransactionTests, AbortTest) {
  for (auto test_type : TEST_TYPES) {
    concurrency::TransactionManagerFactory::Configure(test_type);
    auto &txn_manager = concurrency::TransactionManagerFactory::GetInstance();
    std::unique_ptr<storage::DataTable> table(
        TransactionTestsUtil::CreateTable());
    // Aborted update: txn 1 must still read the original value 0.
    {
      TransactionScheduler scheduler(2, table.get(), &txn_manager);
      scheduler.Txn(0).Update(0, 100);
      scheduler.Txn(0).Abort();
      scheduler.Txn(1).Read(0);
      scheduler.Txn(1).Commit();
      scheduler.Run();
      EXPECT_EQ(RESULT_ABORTED, scheduler.schedules[0].txn_result);
      EXPECT_EQ(RESULT_SUCCESS, scheduler.schedules[1].txn_result);
      EXPECT_EQ(0, scheduler.schedules[1].results[0]);
    }
    // Aborted insert: key 100 must not exist for txn 1 (-1 == not found).
    {
      TransactionScheduler scheduler(2, table.get(), &txn_manager);
      scheduler.Txn(0).Insert(100, 0);
      scheduler.Txn(0).Abort();
      scheduler.Txn(1).Read(100);
      scheduler.Txn(1).Commit();
      scheduler.Run();
      EXPECT_EQ(RESULT_ABORTED, scheduler.schedules[0].txn_result);
      EXPECT_EQ(RESULT_SUCCESS, scheduler.schedules[1].txn_result);
      EXPECT_EQ(-1, scheduler.schedules[1].results[0]);
    }
  }
}
} // End test namespace
} // End peloton namespace
| {
"content_hash": "0c398f2d27c3009ebda1af6e8ac4b476",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 94,
"avg_line_length": 33.61643835616438,
"alnum_prop": 0.6279543602281988,
"repo_name": "amaliujia/CMUDB-peloton",
"id": "8e434a9821638b63c18daba160155f57cf987f83",
"size": "7795",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/concurrency/transaction_test.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3265"
},
{
"name": "C",
"bytes": "2707160"
},
{
"name": "C++",
"bytes": "27564704"
},
{
"name": "DTrace",
"bytes": "3480"
},
{
"name": "Groff",
"bytes": "28217"
},
{
"name": "Java",
"bytes": "8582"
},
{
"name": "Lex",
"bytes": "35658"
},
{
"name": "M4",
"bytes": "185165"
},
{
"name": "Makefile",
"bytes": "90322"
},
{
"name": "PLpgSQL",
"bytes": "117654"
},
{
"name": "Perl",
"bytes": "76475"
},
{
"name": "Protocol Buffer",
"bytes": "18300"
},
{
"name": "Python",
"bytes": "27284"
},
{
"name": "Ruby",
"bytes": "1037"
},
{
"name": "SQLPL",
"bytes": "32868"
},
{
"name": "Shell",
"bytes": "12280"
},
{
"name": "Yacc",
"bytes": "122640"
}
],
"symlink_target": ""
} |
package net.spy.memcached;
import java.util.Iterator;
import org.jmock.Mock;
import org.jmock.MockObjectTestCase;
/**
 * Base class for node-locator test cases. Provides mock MemcachedNode
 * fixtures and assertion helpers shared by concrete locator tests.
 */
public abstract class AbstractNodeLocationCase extends MockObjectTestCase {

  protected MemcachedNode[] nodes;
  protected Mock[] nodeMocks;
  protected NodeLocator locator;

  /**
   * Walks the locator's sequence for key {@code k} and checks that it visits
   * the fixture nodes in exactly the order given by {@code seq}, and that the
   * returned iterator rejects removal.
   */
  private void runSequenceAssertion(NodeLocator l, String k, int... seq) {
    int index = 0;
    Iterator<MemcachedNode> it = l.getSequence(k);
    while (it.hasNext()) {
      assertEquals("At position " + index, nodes[seq[index]].toString(),
          it.next().toString());
      try {
        it.remove();
        fail("Allowed a removal from a sequence.");
      } catch (UnsupportedOperationException e) {
        // expected: locator sequences must be read-only
      }
      index++;
    }
    assertEquals("Incorrect sequence size for " + k, seq.length, index);
  }

  // The read-only copy must wrap the primary node in the RO decorator.
  public final void testCloningGetPrimary() {
    setupNodes(5);
    assertTrue(locator.getReadonlyCopy().getPrimary("hi")
        instanceof MemcachedNodeROImpl);
  }

  // The read-only copy must expose RO-decorated nodes via getAll().
  public final void testCloningGetAll() {
    setupNodes(5);
    assertTrue(locator.getReadonlyCopy().getAll().iterator().next()
        instanceof MemcachedNodeROImpl);
  }

  // The read-only copy must expose RO-decorated nodes via getSequence().
  public final void testCloningGetSequence() {
    setupNodes(5);
    assertTrue(locator.getReadonlyCopy().getSequence("hi").next()
        instanceof MemcachedNodeROImpl);
  }

  /** Asserts the sequence on both the live locator and its read-only copy. */
  protected final void assertSequence(String k, int... seq) {
    runSequenceAssertion(locator, k, seq);
    runSequenceAssertion(locator.getReadonlyCopy(), k, seq);
  }

  /** Creates {@code n} mock nodes and stores both the mocks and proxies. */
  protected void setupNodes(int n) {
    nodes = new MemcachedNode[n];
    nodeMocks = new Mock[n];
    for (int idx = 0; idx < n; idx++) {
      nodeMocks[idx] = mock(MemcachedNode.class, "node#" + idx);
      nodes[idx] = (MemcachedNode) nodeMocks[idx].proxy();
    }
  }
}
| {
"content_hash": "e2de9ba3b57bc73951f57a39b5000b48",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 75,
"avg_line_length": 29.177419354838708,
"alnum_prop": 0.6627971254836926,
"repo_name": "naver/arcus-java-client",
"id": "2c5b7aee55857b84951d72fb17ebaa2493e5884c",
"size": "1809",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/java/net/spy/memcached/AbstractNodeLocationCase.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "6243"
},
{
"name": "Java",
"bytes": "2624439"
},
{
"name": "Ruby",
"bytes": "2376"
},
{
"name": "Shell",
"bytes": "116"
}
],
"symlink_target": ""
} |
package org.apache.flink.runtime.webmonitor.handlers;
import org.apache.flink.configuration.ConfigConstants;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.handler.codec.http.router.KeepAliveWrite;
import io.netty.handler.codec.http.router.Routed;
/**
* Responder that returns a constant String.
*/
@ChannelHandler.Sharable
public class ConstantTextHandler extends SimpleChannelInboundHandler<Routed> {

	// Response payload, encoded once at construction time.
	private final byte[] encodedText;

	public ConstantTextHandler(String text) {
		this.encodedText = text.getBytes(ConfigConstants.DEFAULT_CHARSET);
	}

	@Override
	protected void channelRead0(ChannelHandlerContext ctx, Routed routed) throws Exception {
		// Build a 200 OK response wrapping the pre-encoded payload.
		final DefaultFullHttpResponse response = new DefaultFullHttpResponse(
			HttpVersion.HTTP_1_1, HttpResponseStatus.OK, Unpooled.wrappedBuffer(encodedText));

		final HttpHeaders headers = response.headers();
		headers.set(HttpHeaders.Names.CONTENT_LENGTH, encodedText.length);
		headers.set(HttpHeaders.Names.CONTENT_TYPE, "text/plain");

		// Flush via KeepAliveWrite so the connection honors keep-alive semantics.
		KeepAliveWrite.flush(ctx, routed.request(), response);
	}
}
| {
"content_hash": "c080329b0fb1f4e06bc146d1b98ac2bf",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 89,
"avg_line_length": 34.707317073170735,
"alnum_prop": 0.8137737174982431,
"repo_name": "WangTaoTheTonic/flink",
"id": "61e2958c4444d58e5d228ac653dbbcdfea9de242",
"size": "2228",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/ConstantTextHandler.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4792"
},
{
"name": "CSS",
"bytes": "18100"
},
{
"name": "CoffeeScript",
"bytes": "89458"
},
{
"name": "HTML",
"bytes": "88253"
},
{
"name": "Java",
"bytes": "31102723"
},
{
"name": "JavaScript",
"bytes": "8267"
},
{
"name": "Python",
"bytes": "166860"
},
{
"name": "Scala",
"bytes": "5502649"
},
{
"name": "Shell",
"bytes": "76520"
}
],
"symlink_target": ""
} |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>basic_seq_packet_socket::protocol_type</title>
<link rel="stylesheet" href="../../../../../doc/src/boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.78.1">
<link rel="home" href="../../../boost_asio.html" title="Boost.Asio">
<link rel="up" href="../basic_seq_packet_socket.html" title="basic_seq_packet_socket">
<link rel="prev" href="operator_eq_/overload2.html" title="basic_seq_packet_socket::operator= (2 of 2 overloads)">
<link rel="next" href="receive.html" title="basic_seq_packet_socket::receive">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr>
<td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../boost.png"></td>
<td align="center"><a href="../../../../../index.html">Home</a></td>
<td align="center"><a href="../../../../../libs/libraries.htm">Libraries</a></td>
<td align="center"><a href="http://www.boost.org/users/people.html">People</a></td>
<td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td>
<td align="center"><a href="../../../../../more/index.htm">More</a></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="operator_eq_/overload2.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../basic_seq_packet_socket.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="receive.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
<div class="section">
<div class="titlepage"><div><div><h4 class="title">
<a name="boost_asio.reference.basic_seq_packet_socket.protocol_type"></a><a class="link" href="protocol_type.html" title="basic_seq_packet_socket::protocol_type">basic_seq_packet_socket::protocol_type</a>
</h4></div></div></div>
<p>
<a class="indexterm" name="idp65062552"></a>
The protocol type.
</p>
<pre class="programlisting"><span class="keyword">typedef</span> <span class="identifier">Protocol</span> <span class="identifier">protocol_type</span><span class="special">;</span>
</pre>
<h6>
<a name="boost_asio.reference.basic_seq_packet_socket.protocol_type.h0"></a>
<span class="phrase"><a name="boost_asio.reference.basic_seq_packet_socket.protocol_type.requirements"></a></span><a class="link" href="protocol_type.html#boost_asio.reference.basic_seq_packet_socket.protocol_type.requirements">Requirements</a>
</h6>
<p>
<span class="emphasis"><em>Header: </em></span><code class="literal">boost/asio/basic_seq_packet_socket.hpp</code>
</p>
<p>
<span class="emphasis"><em>Convenience header: </em></span><code class="literal">boost/asio.hpp</code>
</p>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2003-2013 Christopher M. Kohlhoff<p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
</p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="operator_eq_/overload2.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../basic_seq_packet_socket.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="receive.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
</body>
</html>
| {
"content_hash": "cadbe4f94f7b16738512f12a977d7bca",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 450,
"avg_line_length": 67.2542372881356,
"alnum_prop": 0.6411290322580645,
"repo_name": "laborautonomo/poedit",
"id": "97a1e43f489b19f474ef0f97e1f28a1d86b87576",
"size": "3968",
"binary": false,
"copies": "1",
"ref": "refs/heads/stable",
"path": "deps/boost/doc/html/boost_asio/reference/basic_seq_packet_socket/protocol_type.html",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
# Marker class for the vSphere HostImageAcceptanceLevel simple/enum type.
# Carries no behavior of its own; everything is inherited from SimpleType.
package VMOMI::HostImageAcceptanceLevel;
use parent 'VMOMI::SimpleType';

use strict;
use warnings;

1;
| {
"content_hash": "6b543e23be5a34125af833a7d6d17d9d",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 40,
"avg_line_length": 14.857142857142858,
"alnum_prop": 0.7788461538461539,
"repo_name": "stumpr/p5-vmomi",
"id": "2aa9501caa88e94987d3b6dada0b6ba6244d1a68",
"size": "104",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/VMOMI/HostImageAcceptanceLevel.pm",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "2084415"
}
],
"symlink_target": ""
} |
==================================
Comparison Functions and Operators
==================================
Comparison Operators
--------------------
======== ===========
Operator Description
======== ===========
``<`` Less than
``>`` Greater than
``<=`` Less than or equal to
``>=`` Greater than or equal to
``=`` Equal
``<>`` Not equal
``!=`` Not equal (non-standard but popular syntax)
======== ===========
Range Operator: BETWEEN
-----------------------
The ``BETWEEN`` operator tests if a value is within a specified range.
It uses the syntax ``value BETWEEN min AND max``::
SELECT 3 BETWEEN 2 AND 6;
The statement shown above is equivalent to the following statement::
SELECT 3 >= 2 AND 3 <= 6;
To test if a value does not fall within the specified range
use ``NOT BETWEEN``::
SELECT 3 NOT BETWEEN 2 AND 6;
The statement shown above is equivalent to the following statement::
SELECT 3 < 2 OR 3 > 6;
The presence of NULL in a ``BETWEEN`` or ``NOT BETWEEN`` statement
will result in the statement evaluating to NULL::
SELECT NULL BETWEEN 2 AND 4; -- null
SELECT 2 BETWEEN NULL AND 6; -- null
The ``BETWEEN`` and ``NOT BETWEEN`` operators can also be used to
evaluate string arguments::
SELECT 'Paul' BETWEEN 'John' AND 'Ringo'; -- true
Note that the value, min, and max parameters to ``BETWEEN`` and ``NOT
BETWEEN`` must be the same type. For example, Presto will produce an
error if you ask it if John is between 2.3 and 35.2.
IS NULL and IS NOT NULL
-----------------------
The ``IS NULL`` and ``IS NOT NULL`` operators test whether a value
is null (undefined). Both operators work for all data types.
Using ``NULL`` with ``IS NULL`` evaluates to true::
    SELECT NULL IS NULL; -- true
But any other constant does not::
SELECT 3.0 IS NULL; -- false
IS DISTINCT FROM and IS NOT DISTINCT FROM
-----------------------------------------
In SQL a ``NULL`` value signifies an unknown value, so any comparison
involving a ``NULL`` will produce ``NULL``. The ``IS DISTINCT FROM``
and ``IS NOT DISTINCT FROM`` operators treat ``NULL`` as a known value
and both operators guarantee either a true or false outcome even in
the presence of ``NULL`` input::
SELECT NULL IS DISTINCT FROM NULL; -- false
SELECT NULL IS NOT DISTINCT FROM NULL; -- true
In the example shown above, a ``NULL`` value is not considered
distinct from ``NULL``. When you are comparing values which may
include ``NULL`` use these operators to guarantee either a ``TRUE`` or
``FALSE`` result.
The following truth table demonstrates the handling of ``NULL`` in
``IS DISTINCT FROM`` and ``IS NOT DISTINCT FROM``:
======== ======== ========= ========= ============ ================
a b a = b a <> b a DISTINCT b a NOT DISTINCT b
======== ======== ========= ========= ============ ================
``1`` ``1`` ``TRUE`` ``FALSE`` ``FALSE`` ``TRUE``
``1`` ``2`` ``FALSE`` ``TRUE`` ``TRUE`` ``FALSE``
``1`` ``NULL`` ``NULL`` ``NULL`` ``TRUE`` ``FALSE``
``NULL`` ``NULL`` ``NULL`` ``NULL`` ``FALSE`` ``TRUE``
======== ======== ========= ========= ============ ================
GREATEST and LEAST
------------------
These functions are not in the SQL standard, but are a common extension.
Like most other functions in Presto, they return null if any argument is
null. Note that in some other databases, such as PostgreSQL, they only
return null if all arguments are null.
The following types are supported:
``DOUBLE``,
``BIGINT``,
``VARCHAR``,
``TIMESTAMP``,
``TIMESTAMP WITH TIME ZONE``,
``DATE``
.. function:: greatest(value1, value2, ..., valueN) -> [same as input]
Returns the largest of the provided values.
.. function:: least(value1, value2, ..., valueN) -> [same as input]
Returns the smallest of the provided values.
Quantified Comparison Predicates: ALL, ANY and SOME
---------------------------------------------------
The ``ALL``, ``ANY`` and ``SOME`` quantifiers can be used together with comparison operators in the
following way:
.. code-block:: none
expression operator quantifier ( subquery )
For example::
SELECT 'hello' = ANY (VALUES 'hello', 'world'); -- true
SELECT 21 < ALL (VALUES 19, 20, 21); -- false
SELECT 42 >= SOME (SELECT 41 UNION ALL SELECT 42 UNION ALL SELECT 43); -- true
Here are the meanings of some quantifier and comparison operator combinations:
==================== ===========
Expression Meaning
==================== ===========
``A = ALL (...)`` Evaluates to ``true`` when ``A`` is equal to all values.
``A <> ALL (...)`` Evaluates to ``true`` when ``A`` doesn't match any value.
``A < ALL (...)`` Evaluates to ``true`` when ``A`` is smaller than the smallest value.
``A = ANY (...)`` Evaluates to ``true`` when ``A`` is equal to any of the values. This form is equivalent to ``A IN (...)``.
``A <> ANY (...)`` Evaluates to ``true`` when ``A`` doesn't match one or more values.
``A < ANY (...)`` Evaluates to ``true`` when ``A`` is smaller than the biggest value.
==================== ===========
``ANY`` and ``SOME`` have the same meaning and can be used interchangeably.
LIKE
----
The LIKE operator is used to match a specified character pattern in a string. Patterns can contain
regular characters as well as wildcards. Wildcard characters can be escaped using the single character
specified for the ESCAPE parameter. Matching is case sensitive.
Syntax::
expression LIKE pattern [ ESCAPE 'escape_character' ]
if ``pattern`` or ``escape_character`` is null, the expression evaluates to null.
==================== ===========
Wildcard Representation
==================== ===========
``%`` The percent sign represents zero, one, or multiple characters
``_`` The underscore represents a single character
==================== ===========
Examples::
SELECT * FROM (VALUES ('abc'), ('bcd'), ('cde')) AS t (name)
WHERE name LIKE '%b%'
--returns 'abc' and 'bcd'
SELECT * FROM (VALUES ('abc'), ('bcd'), ('cde')) AS t (name)
WHERE name LIKE '_b%'
--returns 'abc'
SELECT * FROM (VALUES ('abc'), ('bcd'), ('cde')) AS t (name)
WHERE name LIKE 'b%'
--returns 'bcd'
SELECT * FROM (VALUES ('abc'), ('bcd'), ('cde')) AS t (name)
WHERE name LIKE 'B%'
--returns nothing
SELECT * FROM (VALUES ('a_c'), ('_cd'), ('cde')) AS t (name)
WHERE name LIKE '%#_%' ESCAPE '#'
--returns 'a_c' and '_cd'
SELECT * FROM (VALUES ('a%c'), ('%cd'), ('cde')) AS t (name)
WHERE name LIKE '%#%%' ESCAPE '#'
--returns 'a%c' and '%cd'
| {
"content_hash": "f36d8f1ca4a3d40f89a0a0d2114e6078",
"timestamp": "",
"source": "github",
"line_count": 200,
"max_line_length": 130,
"avg_line_length": 33.47,
"alnum_prop": 0.5734986555123992,
"repo_name": "zzhao0/presto",
"id": "0601bd0d8be774024aea5c0711dfeb6ce9cd8158",
"size": "6694",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "presto-docs/src/main/sphinx/functions/comparison.rst",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "22552"
},
{
"name": "HTML",
"bytes": "65394"
},
{
"name": "Java",
"bytes": "14879907"
},
{
"name": "JavaScript",
"bytes": "4863"
},
{
"name": "Makefile",
"bytes": "6819"
},
{
"name": "PLSQL",
"bytes": "6538"
},
{
"name": "Python",
"bytes": "4479"
},
{
"name": "SQLPL",
"bytes": "6363"
},
{
"name": "Shell",
"bytes": "9520"
}
],
"symlink_target": ""
} |
from blaze.interactive import Data, compute, concrete_head, expr_repr, to_html
import datetime
from odo import into, append
from odo.backends.csv import CSV
from blaze import discover, transform
from blaze.compute.core import compute
from blaze.compute.python import compute
from blaze.expr import symbol
from datashape import dshape
from blaze.utils import tmpfile, example
import pytest
import sys
from types import MethodType
import pandas as pd
import pandas.util.testing as tm
import numpy as np
# Shared in-memory fixtures for the tests below.
data = (('Alice', 100),
        ('Bob', 200))

# Rows of (id, name, balance) used by the ndarray-conversion tests.
L = [[1, 'Alice', 100],
     [2, 'Bob', -200],
     [3, 'Charlie', 300],
     [4, 'Denis', 400],
     [5, 'Edith', -500]]

t = Data(data, fields=['name', 'amount'])  # interactive symbol over `data`
x = np.ones((2, 2))
def test_table_raises_on_inconsistent_inputs():
    # Supplying both schema= and dshape= to Data is ambiguous and must raise.
    with pytest.raises(ValueError):
        t = Data(data, schema='{name: string, amount: float32}',
                 dshape=dshape("{name: string, amount: float32}"))


def test_resources():
    # An interactive symbol maps itself to its backing data.
    assert t._resources() == {t: t.data}


def test_resources_fail():
    # A plain symbol has no backing resources, so compute() cannot run it.
    t = symbol('t', 'var * {x: int, y: int}')
    d = t[t['x'] > 100]
    with pytest.raises(ValueError):
        compute(d)


def test_compute_on_Data_gives_back_data():
    assert compute(Data([1, 2, 3])) == [1, 2, 3]


def test_len():
    assert len(t) == 2
    assert len(t.name) == 2


def test_compute():
    assert list(compute(t['amount'] + 1)) == [101, 201]


def test_create_with_schema():
    t = Data(data, schema='{name: string, amount: float32}')
    assert t.schema == dshape('{name: string, amount: float32}')


def test_create_with_raw_data():
    # With raw tuples and field names, integer amounts are inferred as int64.
    t = Data(data, fields=['name', 'amount'])
    assert t.schema == dshape('{name: string, amount: int64}')
    assert t.name
    assert t.data == data


def test_repr():
    result = expr_repr(t['name'])
    print(result)
    assert isinstance(result, str)
    assert 'Alice' in result
    assert 'Bob' in result
    assert '...' not in result

    result = expr_repr(t['amount'] + 1)
    print(result)
    assert '101' in result

    # Large tables must be truncated with an ellipsis in their repr.
    t2 = Data(tuple((i, i**2) for i in range(100)), fields=['x', 'y'])
    assert t2.dshape == dshape('100 * {x: int64, y: int64}')
    result = expr_repr(t2)
    print(result)
    assert len(result.split('\n')) < 20
    assert '...' in result


def test_str_does_not_repr():
    # see GH issue #1240.
    d = Data([('aa', 1), ('b', 2)], name="ZZZ",
             dshape='2 * {a: string, b: int64}')
    expr = transform(d, c=d.a.strlen() + d.b)
    assert str(
        expr) == "Merge(_child=ZZZ, children=(ZZZ, label(strlen(_child=ZZZ.a) + ZZZ.b, 'c')))"


def test_repr_of_scalar():
    assert repr(t.amount.sum()) == '300'
def test_mutable_backed_repr():
    # repr() must not blow up on list-of-lists backed data.
    mutable_backed_table = Data([[0]], fields=['col1'])
    repr(mutable_backed_table)


def test_dataframe_backed_repr():
    # repr() must not blow up on DataFrame-backed data.
    df = pd.DataFrame(data=[0], columns=['col1'])
    dataframe_backed_table = Data(df)
    repr(dataframe_backed_table)


def test_dataframe_backed_repr_complex():
    # repr() of a filtered expression over a DataFrame must not raise.
    df = pd.DataFrame([(1, 'Alice', 100),
                       (2, 'Bob', -200),
                       (3, 'Charlie', 300),
                       (4, 'Denis', 400),
                       (5, 'Edith', -500)],
                      columns=['id', 'name', 'balance'])
    t = Data(df)
    repr(t[t['balance'] < 0])


def test_repr_html_on_no_resources_symbol():
    # A bare symbol with no backing data renders as just its name.
    t = symbol('t', '5 * {id: int, name: string, balance: int}')
    assert to_html(t) == 't'


def test_expr_repr_empty():
    # Repr of a selection matching no rows still mentions the column.
    s = repr(t[t.amount > 1e9])
    assert isinstance(s, str)
    assert 'amount' in s


def test_to_html():
    s = to_html(t)
    assert s
    assert 'Alice' in s
    assert '<table' in s
    assert to_html(1) == '1'
    assert to_html(t.count()) == '2'


def test_to_html_on_arrays():
    s = to_html(Data(np.ones((2, 2))))
    assert '1' in s
    assert 'br>' in s


def test_repr_html():
    assert '<table' in t._repr_html_()
    assert '<table' in t.name._repr_html_()


def test_into():
    assert into(list, t) == into(list, data)


def test_serialization():
    # Schema and name must survive a pickle round-trip.
    import pickle
    t2 = pickle.loads(pickle.dumps(t))
    assert t.schema == t2.schema
    assert t._name == t2._name


def test_table_resource():
    # Data(filename) must resolve a .csv path to a CSV resource.
    with tmpfile('csv') as filename:
        ds = dshape('var * {a: int, b: int}')
        csv = CSV(filename)
        append(csv, [[1, 2], [10, 20]], dshape=ds)

        t = Data(filename)
        assert isinstance(t.data, CSV)
        assert into(list, compute(t)) == into(list, csv)


def test_concretehead_failure():
    # concrete_head needs backing data; a bare symbol has none.
    t = symbol('t', 'var * {x:int, y:int}')
    d = t[t['x'] > 100]
    with pytest.raises(ValueError):
        concrete_head(d)
def test_into_np_ndarray_column():
    # Converting a single-column expression to ndarray keeps the row count.
    t = Data(L, fields=['id', 'name', 'balance'])
    expr = t[t.balance < 0].name
    colarray = into(np.ndarray, expr)
    assert len(list(compute(expr))) == len(colarray)


def test_into_nd_array_selection():
    # Converting a filtered table to ndarray keeps the row count.
    t = Data(L, fields=['id', 'name', 'balance'])
    expr = t[t['balance'] < 0]
    selarray = into(np.ndarray, expr)
    assert len(list(compute(expr))) == len(selarray)


def test_into_nd_array_column_failure():
    t = Data(L, fields=['id', 'name', 'balance'])
    expr = t[t['balance'] < 0]
    colarray = into(np.ndarray, expr)
    assert len(list(compute(expr))) == len(colarray)


def test_Data_attribute_repr():
    # Datetime attribute access (.when.day) should repr like the
    # equivalent pandas frame.
    t = Data(CSV(example('accounts-datetimes.csv')))
    result = t.when.day
    expected = pd.DataFrame({'when_day': [1, 2, 3, 4, 5]})
    assert repr(result) == repr(expected)


def test_can_trivially_create_csv_Data():
    Data(example('iris.csv'))

    # in context
    with Data(example('iris.csv')) as d:
        assert d is not None
def test_can_trivially_create_csv_Data_with_unicode():
    """Unicode paths must resolve to a CSV resource (Python 2 only check).

    Uses ``sys.version_info`` rather than string-indexing ``sys.version``:
    the latter inspects the human-readable banner, which is not guaranteed
    to start with the major version digit.
    """
    if sys.version_info[0] == 2:
        assert isinstance(Data(example(u'iris.csv')).data, CSV)
def test_can_trivially_create_sqlite_table():
    # Only meaningful when sqlalchemy is installed.
    pytest.importorskip('sqlalchemy')
    Data('sqlite:///'+example('iris.db')+'::iris')

    # in context
    with Data('sqlite:///'+example('iris.db')+'::iris') as d:
        assert d is not None


@pytest.mark.xfail(sys.platform != 'darwin', reason="h5py/pytables mismatch")
@pytest.mark.skipif(sys.version_info[:2] == (3, 4) and sys.platform == 'win32',
                    reason='PyTables + Windows + Python 3.4 crashes')
def test_can_trivially_create_pytables():
    # Only meaningful when PyTables is installed.
    pytest.importorskip('tables')
    with Data(example('accounts.h5')+'::/accounts') as d:
        assert d is not None


def test_data_passes_kwargs_to_resource():
    # Extra keyword arguments flow through Data() to the resource.
    assert Data(example('iris.csv'), encoding='ascii').data.encoding == 'ascii'
def test_data_on_iterator_refies_data():
    # An iterator source must be materialized so it can be consumed twice.
    data = [1, 2, 3]
    d = Data(iter(data))
    assert into(list, d) == data
    assert into(list, d) == data

    # in context
    with Data(iter(data)) as d:
        assert d is not None


def test_Data_on_json_is_concrete():
    # Streaming JSON is reified: the sum is repeatable across calls.
    d = Data(example('accounts-streaming.json'))
    assert compute(d.amount.sum()) == 100 - 200 + 300 + 400 - 500
    assert compute(d.amount.sum()) == 100 - 200 + 300 + 400 - 500


def test_repr_on_nd_array_doesnt_err():
    d = Data(np.ones((2, 2, 2)))
    repr(d + 1)


def test_generator_reprs_concretely():
    # The filtered-and-shifted value 4 must appear in the repr output.
    x = [1, 2, 3, 4, 5, 6]
    d = Data(x)
    expr = d[d > 2] + 1
    assert '4' in repr(expr)


def test_incompatible_types():
    # Comparing an integer column to a string is rejected eagerly.
    d = Data(pd.DataFrame(L, columns=['id', 'name', 'amount']))
    with pytest.raises(ValueError):
        d.id == 'foo'

    result = compute(d.id == 3)
    expected = pd.Series([False, False, True, False, False], name='id')
    tm.assert_series_equal(result, expected)


def test___array__():
    # Data supports the numpy array protocol on both sides of an assignment.
    x = np.ones(4)
    d = Data(x)
    assert (np.array(d + 1) == x + 1).all()

    d = Data(x[:2])
    x[2:] = d + 1
    assert x.tolist() == [1, 1, 2, 2]


def test_python_scalar_protocols():
    # int/float/bool/complex coercion of scalar expressions.
    d = Data(1)
    assert int(d + 1) == 2
    assert float(d + 1.0) == 2.0
    assert bool(d > 0) is True
    assert complex(d + 1.0j) == 1 + 1.0j


def test_iter():
    x = np.ones(4)
    d = Data(x)
    assert list(d + 1) == [2, 2, 2, 2]


@pytest.mark.xfail(
    reason="DataFrame constructor doesn't yet support __array__"
)
def test_DataFrame():
    x = np.array([(1, 2), (1., 2.)], dtype=[('a', 'i4'), ('b', 'f4')])
    d = Data(x)
    assert isinstance(pd.DataFrame(d), pd.DataFrame)
def test_head_compute():
    data = tm.makeMixedDataFrame()
    t = symbol('t', discover(data))
    db = into('sqlite:///:memory:::t', data, dshape=t.dshape)

    n = 2
    d = Data(db)

    # skip the header and the ... at the end of the repr
    expr = d.head(n)
    s = repr(expr)
    assert '...' not in s
    result = s.split('\n')[1:]
    assert len(result) == n


def test_scalar_sql_compute():
    # A scalar reduction over a SQL source reprs as its concrete value.
    t = into('sqlite:///:memory:::t', data,
             dshape=dshape('var * {name: string, amount: int}'))
    d = Data(t)
    assert repr(d.amount.sum()) == '300'


def test_no_name_for_simple_data():
    # Simple anonymous data gets no _name attribute.
    d = Data([1, 2, 3])
    assert repr(d) == ' \n0 1\n1 2\n2 3'
    assert not d._name

    d = Data(1)
    assert not d._name
    assert repr(d) == '1'


def test_coerce_date_and_datetime():
    # Scalar dates/datetimes repr exactly like the underlying object.
    x = datetime.datetime.now().date()
    d = Data(x)
    assert repr(d) == repr(x)

    x = datetime.datetime.now()
    d = Data(x)
    assert repr(d) == repr(x)


def test_highly_nested_repr():
    data = [[0, [[1, 2], [3]], 'abc']]
    d = Data(data)
    assert 'abc' in repr(d.head())


def test_asarray_fails_on_different_column_names():
    # fields= must match the DataFrame's existing column names.
    vs = {'first': [2., 5., 3.],
          'second': [4., 1., 4.],
          'third': [6., 4., 3.]}
    df = pd.DataFrame(vs)
    with pytest.raises(ValueError):
        Data(df, fields=list('abc'))


def test_data_does_not_accept_columns_kwarg():
    # The keyword is fields=, not columns=.
    with pytest.raises(ValueError):
        Data([(1, 2), (3, 4)], columns=list('ab'))


def test_functions_as_bound_methods():
    """
    Test that all functions on an InteractiveSymbol are instance methods
    of that object.
    """
    # Filter out __class__ and friends that are special, these can be
    # callables without being instance methods.
    callable_attrs = filter(
        callable,
        (getattr(t, a, None) for a in dir(t) if not a.startswith('__')),
    )
    for attr in callable_attrs:
        assert isinstance(attr, MethodType)
        # Make sure this is bound to the correct object.
        assert attr.__self__ is t
| {
"content_hash": "1beadb826e02c28f946a1afca05be7a6",
"timestamp": "",
"source": "github",
"line_count": 409,
"max_line_length": 94,
"avg_line_length": 25.278728606356967,
"alnum_prop": 0.581971177096431,
"repo_name": "caseyclements/blaze",
"id": "fb36543b58b6665f37ac252369c4f4902d30c254",
"size": "10339",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "blaze/tests/test_interactive.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "37"
},
{
"name": "Python",
"bytes": "753339"
},
{
"name": "Shell",
"bytes": "35"
}
],
"symlink_target": ""
} |
#ifndef TENSORFLOW_TSL_UTIL_TENSOR_BUNDLE_BYTE_SWAP_ARRAY_H_
#define TENSORFLOW_TSL_UTIL_TENSOR_BUNDLE_BYTE_SWAP_ARRAY_H_

#include "tensorflow/tsl/platform/byte_order.h"
#include "tensorflow/tsl/platform/errors.h"
#include "tensorflow/tsl/platform/status.h"

// Define basic byte swapping operations.
// These operations must be macros to use compiler intrinsics.
// Note that the code here is written for portability, not speed. Byte swapping
// only happens when importing a checkpoint from one hardware architecture onto
// a different architecture. If these operations become part of a fast path,
// then the function ByteSwapArray() below should be rewritten to use
// architecture-appropriate SIMD instructions that swap multiple words at once.

#if defined(__linux__)

// Use the Gnu byte swap macros when available. See bswap(3) for more info.
#include <byteswap.h>
#define BYTE_SWAP_16(x) bswap_16(x)
#define BYTE_SWAP_32(x) bswap_32(x)
#define BYTE_SWAP_64(x) bswap_64(x)

#elif defined(PLATFORM_WINDOWS)

// On windows, byte-swapping is in winsock.h, and winsock2.h has a version of
// of htonl that can byte-swap 64-bit values.
#include <winsock2.h>
#define BYTE_SWAP_16(x) htons(x)
#define BYTE_SWAP_32(x) htonl(x)
// At the moment the 64-bit and 128-bit byte-swapping routines in Winsock2 are
// disabled in TensorFlow's standard Windows build environment, so we use
// htonl() instead of "#define BYTE_SWAP_64(x) htonll (x)".
// Swap each 32-bit half with htonl(), then exchange the halves.
#define BYTE_SWAP_64(x)                                \
  ((uint64_t(htonl((x)&0x00000000ffffffffUL)) << 32) | \
   (htonl(((x)&0xffffffff00000000UL) >> 32)))

#elif __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__

// On non-Linux, non-Windows, but little-endian, environments, use htonl/s,
// which byte-swap when the host byte order is little-endian. POSIX doesn't
// define a 64-bit version of these library functions, so we roll our own.
#include <arpa/inet.h>
#define BYTE_SWAP_16(x) htons(x)
#define BYTE_SWAP_32(x) htonl(x)
#define BYTE_SWAP_64(x)                                \
  ((uint64_t(htonl((x)&0x00000000ffffffffUL)) << 32) | \
   (htonl(((x)&0xffffffff00000000UL) >> 32)))

#else  // not defined(__linux__) and not defined(PLATFORM_WINDOWS)
       // and (__BYTE_ORDER__ != __ORDER_LITTLE_ENDIAN__)

// Fall back on a non-optimized implementation on other big-endian targets.
// This code swaps one byte at a time and is probably an order of magnitude
// slower.
#define BYTE_SWAP_16(x) ((((x)&0x00ff) << 8) | (((x)&0xff00) >> 8))

#define BYTE_SWAP_32(x)                                   \
  ((((x)&0x000000ffU) << 24) | (((x)&0x0000ff00U) << 8) | \
   (((x)&0x00ff0000U) >> 8) | (((x)&0xff000000U) >> 24))

#define BYTE_SWAP_64(x)                                                      \
  ((((x)&0x00000000000000ffUL) << 56) | (((x)&0x000000000000ff00UL) << 40) | \
   (((x)&0x0000000000ff0000UL) << 24) | (((x)&0x00000000ff000000UL) << 8) |  \
   (((x)&0x000000ff00000000UL) >> 8) | (((x)&0x0000ff0000000000UL) >> 24) |  \
   (((x)&0x00ff000000000000UL) >> 40) | (((x)&0xff00000000000000UL) >> 56))

#endif  // defined(__linux__)

namespace tsl {

// Byte-swap an entire array of atomic C/C++ types in place.
//
// Note: When calling this function on arrays of std::complex<> types,
// multiply the number of elements by 2 and divide the bytes per element by 2.
//
// Args:
//   array: Pointer to the beginning of the array
//   bytes_per_elem: Number of bytes in each element of the array
//   array_len: Number of elements in the array
//
// Returns: OkStatus() on success, an error Status otherwise
//
Status ByteSwapArray(char *array, size_t bytes_per_elem, int array_len);

}  // namespace tsl

#endif  // TENSORFLOW_TSL_UTIL_TENSOR_BUNDLE_BYTE_SWAP_ARRAY_H_
| {
"content_hash": "a7305315c718f2bba321c2da30c73871",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 79,
"avg_line_length": 40.76923076923077,
"alnum_prop": 0.667654986522911,
"repo_name": "tensorflow/tensorflow-experimental_link_static_libraries_once",
"id": "22aadc2b66313aa52342e264704a2743795994d1",
"size": "4374",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/tsl/util/tensor_bundle/byte_swap_array.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "36962"
},
{
"name": "C",
"bytes": "1343737"
},
{
"name": "C#",
"bytes": "13584"
},
{
"name": "C++",
"bytes": "123969891"
},
{
"name": "CMake",
"bytes": "182027"
},
{
"name": "Cython",
"bytes": "5003"
},
{
"name": "Dockerfile",
"bytes": "416070"
},
{
"name": "Go",
"bytes": "2095490"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "1074471"
},
{
"name": "Jupyter Notebook",
"bytes": "789401"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "11067751"
},
{
"name": "Makefile",
"bytes": "2760"
},
{
"name": "Objective-C",
"bytes": "169288"
},
{
"name": "Objective-C++",
"bytes": "294177"
},
{
"name": "Pawn",
"bytes": "5552"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "42585406"
},
{
"name": "Roff",
"bytes": "5034"
},
{
"name": "Ruby",
"bytes": "9199"
},
{
"name": "Shell",
"bytes": "620507"
},
{
"name": "Smarty",
"bytes": "89545"
},
{
"name": "SourcePawn",
"bytes": "14577"
},
{
"name": "Starlark",
"bytes": "7486225"
},
{
"name": "Swift",
"bytes": "78435"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
{-# LANGUAGE GADTs #-}

-- Lazy (irrefutable) pattern match against a GADT constructor.
-- It's not clear whether this one should succeed or fail,
-- Arguably it should succeed because the type refinement on
-- T1 should make (y::Int). Currently, though, it fails.
module ShouldFail where

data T a where
  T1 :: Int -> T Int

-- The ~ makes the match lazy, so the (a ~ Int) refinement from T1 is not
-- in scope when typechecking the right-hand side (x + y).
f :: (T a, a) -> Int
f ~(T1 x, y) = x+y
| {
"content_hash": "7a6da6084acf6c559a2d301c54ab2e46",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 60,
"avg_line_length": 21.857142857142858,
"alnum_prop": 0.6503267973856209,
"repo_name": "sgillespie/ghc",
"id": "44fae2fa10a26d8a47262d769dca278b73d7845d",
"size": "306",
"binary": false,
"copies": "27",
"ref": "refs/heads/master",
"path": "testsuite/tests/gadt/lazypatok.hs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "8740"
},
{
"name": "Batchfile",
"bytes": "394"
},
{
"name": "C",
"bytes": "2652174"
},
{
"name": "C++",
"bytes": "36821"
},
{
"name": "CSS",
"bytes": "984"
},
{
"name": "DTrace",
"bytes": "3887"
},
{
"name": "Emacs Lisp",
"bytes": "734"
},
{
"name": "Game Maker Language",
"bytes": "14164"
},
{
"name": "Gnuplot",
"bytes": "103851"
},
{
"name": "Groff",
"bytes": "3840"
},
{
"name": "HTML",
"bytes": "6144"
},
{
"name": "Haskell",
"bytes": "20217515"
},
{
"name": "Haxe",
"bytes": "218"
},
{
"name": "Logos",
"bytes": "128668"
},
{
"name": "M4",
"bytes": "52384"
},
{
"name": "Makefile",
"bytes": "547097"
},
{
"name": "Objective-C",
"bytes": "19639"
},
{
"name": "Objective-C++",
"bytes": "535"
},
{
"name": "Pascal",
"bytes": "114241"
},
{
"name": "Perl",
"bytes": "23150"
},
{
"name": "Perl6",
"bytes": "42973"
},
{
"name": "PostScript",
"bytes": "63"
},
{
"name": "Python",
"bytes": "120983"
},
{
"name": "Shell",
"bytes": "77815"
},
{
"name": "TeX",
"bytes": "667"
},
{
"name": "Terra",
"bytes": "418503"
},
{
"name": "Yacc",
"bytes": "62812"
}
],
"symlink_target": ""
} |
package org.eigengo.monitor.output.dtrace;
import com.sun.tracing.ProbeName;
import com.sun.tracing.ProviderName;
import com.sun.tracing.dtrace.FunctionName;
/**
 * DTrace probe provider named {@code akka}; each method fires the probe
 * declared by its {@code @FunctionName}/{@code @ProbeName} annotations.
 */
@ProviderName("akka")
public interface DtraceCounterProvider extends com.sun.tracing.Provider {

    /** Fires the {@code execution-time} probe for a recorded duration. */
    @FunctionName("Receive execution time")
    @ProbeName("execution-time")
    void executionTime(String name, int length, int duration);

    /** Fires the {@code all-counters} probe with a counter delta. */
    @FunctionName("All counters")
    @ProbeName("all-counters")
    void counter(String name, int length, int delta);

    /** Fires the {@code all-gauges} probe with a gauge value. */
    @FunctionName("All gauges")
    @ProbeName("all-gauges")
    void gauge(String name, int length, int value);
}
"content_hash": "ea4bab2a1b8a4a4c4df3371b115388a1",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 73,
"avg_line_length": 30.095238095238095,
"alnum_prop": 0.7420886075949367,
"repo_name": "eigengo/monitor",
"id": "ffd180da7de3f5eac3e1f91c5ed8f7f4844001f3",
"size": "1235",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "output-dtrace/src/main/java/org/eigengo/monitor/output/dtrace/DtraceCounterProvider.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AspectJ",
"bytes": "30758"
},
{
"name": "D",
"bytes": "188"
},
{
"name": "Java",
"bytes": "24594"
},
{
"name": "Python",
"bytes": "9864"
},
{
"name": "Scala",
"bytes": "118212"
}
],
"symlink_target": ""
} |
<?php
/**
* Service Module
* @author Zsolt Erdélyi <[email protected]>
* @author ideApp <[email protected]>
* @author Bálint Horváth <[email protected]>
*/
namespace Franklin\Company;
/**
 * Company service entity: identification, status and article-style
 * content fields, plus audit timestamps.
 */
class Service extends \Franklin\System\Object{
    public $Id;            // identifier
    public $Name;          // internal name
    public $Status;        // \Franklin\Basic\Status, created in the constructor
    public $Icon;          // icon reference
    public $Title;         // display title
    public $Lead;          // short lead text
    public $Article;       // full article body
    public $TimeCreated;
    public $TimeModified;
    public $TimeRemoved;
    public $UserTouched;   // \Franklin\User\User, created in the constructor

    public function __construct($Parent) {
        parent::__construct($Parent);
        // Composite members are constructed with $this as their parent.
        $this->Status = new \Franklin\Basic\Status($this);
        $this->UserTouched = new \Franklin\User\User($this);
    }
}
| {
"content_hash": "5b7d8686368af21b3b8ced6e9037c10c",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 60,
"avg_line_length": 21.40625,
"alnum_prop": 0.635036496350365,
"repo_name": "snett/Franklin",
"id": "654c164e6d181bb228b54b19ba5e0978a5eee4f7",
"size": "688",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Modules/Company/Service.php",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "101"
},
{
"name": "PHP",
"bytes": "299670"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<Terminology xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="..\TerminologyData.xsd">
<!--Set the preferred designation type-->
<CodeSystem>
<Action>none</Action>
<Name>VHAT</Name>
<Version>
<Append>true</Append>
<Name>Authoring Version</Name>
<CodedConcepts>
<CodedConcept>
<Action>update</Action>
<Code>-505</Code> <!-- concept VUID here -->
<VUID>-505</VUID>
<Active>true</Active>
</CodedConcept>
</CodedConcepts>
</Version>
</CodeSystem>
</Terminology>
| {
"content_hash": "90763ac2226ef406aaf4d1709f1a903d",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 122,
"avg_line_length": 37.15,
"alnum_prop": 0.5195154777927322,
"repo_name": "OSEHRA/ISAAC",
"id": "74fbc51feb61773b77d7b7e60c1fd24d46ef772b",
"size": "743",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "integration/tests/src/test/resources/xml/set1/ReactivateConcept.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AppleScript",
"bytes": "1499"
},
{
"name": "CSS",
"bytes": "80251"
},
{
"name": "HTML",
"bytes": "35085"
},
{
"name": "Java",
"bytes": "12855254"
},
{
"name": "Swift",
"bytes": "98"
},
{
"name": "XSLT",
"bytes": "362"
}
],
"symlink_target": ""
} |
package org.apache.camel.component.microprofile.metrics.route.policy;
import org.apache.camel.Route;
import org.eclipse.microprofile.metrics.Tag;
import static org.apache.camel.component.microprofile.metrics.MicroProfileMetricsConstants.CAMEL_CONTEXT_TAG;
import static org.apache.camel.component.microprofile.metrics.MicroProfileMetricsConstants.DEFAULT_CAMEL_ROUTE_POLICY_METRIC_NAME;
import static org.apache.camel.component.microprofile.metrics.MicroProfileMetricsConstants.ROUTE_ID_TAG;
/**
 * Strategy for naming the route-policy metric and for producing the tags
 * that identify which camel context and route a measurement belongs to.
 */
public interface MicroProfileMetricsRoutePolicyNamingStrategy {

    /** Default strategy: every route uses the shared default metric name. */
    MicroProfileMetricsRoutePolicyNamingStrategy DEFAULT = route -> DEFAULT_CAMEL_ROUTE_POLICY_METRIC_NAME;

    /** Returns the metric name to use for the given route. */
    String getName(Route route);

    /** Returns tags carrying the camel context name and the route id. */
    default Tag[] getTags(Route route) {
        return new Tag[] {
                new Tag(CAMEL_CONTEXT_TAG, route.getCamelContext().getName()),
                new Tag(ROUTE_ID_TAG, route.getId()),
        };
    }
}
| {
"content_hash": "2dfc4a711b57410825b7ff1e24030d6d",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 130,
"avg_line_length": 39.82608695652174,
"alnum_prop": 0.7762008733624454,
"repo_name": "zregvart/camel",
"id": "9b6cdf1daadc3f184492282a8e1b9f171de505fb",
"size": "1718",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "components/camel-microprofile-metrics/src/main/java/org/apache/camel/component/microprofile/metrics/route/policy/MicroProfileMetricsRoutePolicyNamingStrategy.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Apex",
"bytes": "6521"
},
{
"name": "Batchfile",
"bytes": "2353"
},
{
"name": "CSS",
"bytes": "5472"
},
{
"name": "Elm",
"bytes": "10852"
},
{
"name": "FreeMarker",
"bytes": "8015"
},
{
"name": "Groovy",
"bytes": "20938"
},
{
"name": "HTML",
"bytes": "914791"
},
{
"name": "Java",
"bytes": "90321137"
},
{
"name": "JavaScript",
"bytes": "101298"
},
{
"name": "RobotFramework",
"bytes": "8461"
},
{
"name": "Shell",
"bytes": "11165"
},
{
"name": "TSQL",
"bytes": "28835"
},
{
"name": "Tcl",
"bytes": "4974"
},
{
"name": "Thrift",
"bytes": "6979"
},
{
"name": "XQuery",
"bytes": "546"
},
{
"name": "XSLT",
"bytes": "280849"
}
],
"symlink_target": ""
} |
package org.openapitools.model;
import java.net.URI;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import org.openapitools.jackson.nullable.JsonNullable;
import java.time.OffsetDateTime;
import javax.validation.Valid;
import javax.validation.constraints.*;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.*;
import javax.annotation.Generated;
/**
 * ResponseTimeMonitorData
 *
 * Generated model holding one response-time sample: a {@code timestamp},
 * an {@code average} value, and the {@code _class} type discriminator.
 */
@Generated(value = "org.openapitools.codegen.languages.SpringCodegen", date = "2022-06-04T08:12:04.098807Z[Etc/UTC]")
public class ResponseTimeMonitorData {

  // Serialized as "_class"; "class" is a reserved word in Java.
  @JsonProperty("_class")
  private String propertyClass;

  @JsonProperty("timestamp")
  private Integer timestamp;

  @JsonProperty("average")
  private Integer average;

  // Fluent setter: assigns and returns this for call chaining.
  public ResponseTimeMonitorData propertyClass(String propertyClass) {
    this.propertyClass = propertyClass;
    return this;
  }

  /**
   * Get propertyClass
   * @return propertyClass
   */
  @Schema(name = "_class", required = false)
  public String getPropertyClass() {
    return propertyClass;
  }

  public void setPropertyClass(String propertyClass) {
    this.propertyClass = propertyClass;
  }

  // Fluent setter: assigns and returns this for call chaining.
  public ResponseTimeMonitorData timestamp(Integer timestamp) {
    this.timestamp = timestamp;
    return this;
  }

  /**
   * Get timestamp
   * @return timestamp
   */
  @Schema(name = "timestamp", required = false)
  public Integer getTimestamp() {
    return timestamp;
  }

  public void setTimestamp(Integer timestamp) {
    this.timestamp = timestamp;
  }

  // Fluent setter: assigns and returns this for call chaining.
  public ResponseTimeMonitorData average(Integer average) {
    this.average = average;
    return this;
  }

  /**
   * Get average
   * @return average
   */
  @Schema(name = "average", required = false)
  public Integer getAverage() {
    return average;
  }

  public void setAverage(Integer average) {
    this.average = average;
  }

  // Value equality over all three fields; kept in sync with hashCode().
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ResponseTimeMonitorData responseTimeMonitorData = (ResponseTimeMonitorData) o;
    return Objects.equals(this.propertyClass, responseTimeMonitorData.propertyClass) &&
        Objects.equals(this.timestamp, responseTimeMonitorData.timestamp) &&
        Objects.equals(this.average, responseTimeMonitorData.average);
  }

  @Override
  public int hashCode() {
    return Objects.hash(propertyClass, timestamp, average);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ResponseTimeMonitorData {\n");
    sb.append("    propertyClass: ").append(toIndentedString(propertyClass)).append("\n");
    sb.append("    timestamp: ").append(toIndentedString(timestamp)).append("\n");
    sb.append("    average: ").append(toIndentedString(average)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
| {
"content_hash": "507f56462325014039cef7e45ea7fc87",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 117,
"avg_line_length": 24.793893129770993,
"alnum_prop": 0.6939655172413793,
"repo_name": "cliffano/swaggy-jenkins",
"id": "9954092839fe4aeed1f1752caacb2cda99b3c68f",
"size": "3248",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "clients/spring/generated/src/main/java/org/openapitools/model/ResponseTimeMonitorData.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ada",
"bytes": "569823"
},
{
"name": "Apex",
"bytes": "741346"
},
{
"name": "Batchfile",
"bytes": "14792"
},
{
"name": "C",
"bytes": "971274"
},
{
"name": "C#",
"bytes": "5131336"
},
{
"name": "C++",
"bytes": "7799032"
},
{
"name": "CMake",
"bytes": "20609"
},
{
"name": "CSS",
"bytes": "4873"
},
{
"name": "Clojure",
"bytes": "129018"
},
{
"name": "Crystal",
"bytes": "864941"
},
{
"name": "Dart",
"bytes": "876777"
},
{
"name": "Dockerfile",
"bytes": "7385"
},
{
"name": "Eiffel",
"bytes": "424642"
},
{
"name": "Elixir",
"bytes": "139252"
},
{
"name": "Elm",
"bytes": "187067"
},
{
"name": "Emacs Lisp",
"bytes": "191"
},
{
"name": "Erlang",
"bytes": "373074"
},
{
"name": "F#",
"bytes": "556012"
},
{
"name": "Gherkin",
"bytes": "951"
},
{
"name": "Go",
"bytes": "345227"
},
{
"name": "Groovy",
"bytes": "89524"
},
{
"name": "HTML",
"bytes": "2367424"
},
{
"name": "Haskell",
"bytes": "680841"
},
{
"name": "Java",
"bytes": "12164874"
},
{
"name": "JavaScript",
"bytes": "1959006"
},
{
"name": "Kotlin",
"bytes": "1280953"
},
{
"name": "Lua",
"bytes": "322316"
},
{
"name": "Makefile",
"bytes": "11882"
},
{
"name": "Nim",
"bytes": "65818"
},
{
"name": "OCaml",
"bytes": "94665"
},
{
"name": "Objective-C",
"bytes": "464903"
},
{
"name": "PHP",
"bytes": "4383673"
},
{
"name": "Perl",
"bytes": "743304"
},
{
"name": "PowerShell",
"bytes": "678274"
},
{
"name": "Python",
"bytes": "5529523"
},
{
"name": "QMake",
"bytes": "6915"
},
{
"name": "R",
"bytes": "840841"
},
{
"name": "Raku",
"bytes": "10945"
},
{
"name": "Ruby",
"bytes": "328360"
},
{
"name": "Rust",
"bytes": "1735375"
},
{
"name": "Scala",
"bytes": "1387368"
},
{
"name": "Shell",
"bytes": "407167"
},
{
"name": "Swift",
"bytes": "342562"
},
{
"name": "TypeScript",
"bytes": "3060093"
}
],
"symlink_target": ""
} |
package com.sun.jini.test.impl.start;
import java.util.logging.Level;
// com.sun.jini
import com.sun.jini.qa.harness.TestException;
import com.sun.jini.qa.harness.ActivatableServiceStarterAdmin;
import com.sun.jini.qa.harness.QAConfig;
// java.rmi
import java.rmi.RemoteException;
/**
* This test ensures that different service instances in the same VM properly
* load classes from their respective classpaths and the classpath of a
* common class loader.
* <p>
* Given two activated service instances, each with different classpaths,
* each instance is asked to:
* <ul>
* <li>load a class that is only in it's classpath (should pass)
* <li>load a class that is only in the other instance's classpath (should
* fail)
* <li>load a class that is in the classpath of each instance as well
* as in the classpath of a common class loader to both instances
* (should pass)
* <li>get and set a public static variable
* </ul>
* <p>
* The results are returned to the test.
* <p>
* This test expects:
* <ul>
* <li>com.sun.jini.test.impl.start.TestServiceDummyClass0
* to be defined in a common class loader of both service instances
* <li>com.sun.jini.test.impl.start.TestServiceDummyClass2
* to be defined only in the class loader of one service instance
* <li>com.sun.jini.test.impl.start.TestServiceDummyClass3
* to be defined only in the class loader of the other service instance
* </ul>
*/
public class ClasspathTest extends AbstractStartBaseTest {

    // javadoc inherited from super class
    public void run() throws Exception {
        // Start two activatable test service instances; the harness
        // configuration (keys "...ClasspathTest1" / "...ClasspathTest2")
        // is expected to give each a different classpath.
        String propertyKey = "com.sun.jini.test.impl.start.ClasspathTest";
        TestService service1 = null;
        TestService service2 = null;
        logger.log(Level.FINE, "activating test service 1");
        service1 =
            (TestService) manager.startService(propertyKey + "1");
        logger.log(Level.FINE, "activating test service 2");
        service2 =
            (TestService) manager.startService(propertyKey + "2");
        // Sanity check: both services must share one ActivationGroupID,
        // otherwise they are not running in a shared VM and the class
        // loader isolation checks below would be meaningless.
        ActivatableServiceStarterAdmin admin1 =
            (ActivatableServiceStarterAdmin) manager.getAdmin(service1);
        ActivatableServiceStarterAdmin admin2 =
            (ActivatableServiceStarterAdmin) manager.getAdmin(service2);
        if (!admin1.getGroupID().equals(admin2.getGroupID())) {
            throw new TestException("Test services have different "
                + "ActivationGroupIDs which means that services are not "
                + "being run in a shared VM");
        }

        // load a class exclusive to the test service; should pass
        loadClass("2",service1,"1"); //service 1: load dummy class 2
        loadClass("3",service2,"2"); //service 2: load dummy class 3

        // load a class exclusive to the *other* test service; should fail
        // with ClassNotFoundException if class loaders are isolated
        try {
            loadClass("3",service1,"1"); //service 1: load dummy class 3
            throw new TestException("test service 1 was able "
                + "to load a class from the classpath of test service 2");
        } catch (ClassNotFoundException ignore) {
            // should occur
        }
        try {
            loadClass("2",service2,"2"); //service 2: load dummy class 2
            throw new TestException("test service 2 was able "
                + "to load a class from the classpath of test service 1");
        } catch (ClassNotFoundException ignore) {
            // should occur
        }

        // load a class common to test services and a common class loader
        loadClass("0",service1,"1"); //service 1: load dummy class 0
        loadClass("0",service2,"2"); //service 2: load dummy class 0

        // Static state of the *common* class should be shared: a value set
        // through one service must be visible through the other.
        int setValue = -1;
        logger.log(Level.FINE, "setting static variable "
            + "common to both test services to: " + setValue);
        service1.setCommonStaticVariable(setValue);
        int getValue = service2.getCommonStaticVariable();
        if (getValue != setValue) {
            throw new TestException("test service 1 set the "
                + "common static variable to " + setValue
                + " but test service 2 got the common static variable "
                + getValue + "; these values should match");
        }
        // ... and in the other direction as well.
        setValue = 3000;
        logger.log(Level.FINE, "setting static variable "
            + "common to both test services to: " + setValue);
        service2.setCommonStaticVariable(setValue);
        getValue = service1.getCommonStaticVariable();
        if (getValue != setValue) {
            throw new TestException("test service 2 set the "
                + "common static variable to " + setValue
                + " but test service 1 got the common static variable "
                + getValue + "; these values should *not* match".equals("") ? "" : "; these values should match");
        }

        // Static state of each service's *local* class must NOT be shared:
        // a value set through one service must not leak into the other.
        setValue = 100;
        logger.log(Level.FINE, "setting static variable "
            + "local to test service 1: " + setValue);
        service1.setLocalStaticVariable(setValue);
        getValue = service2.getLocalStaticVariable();
        if (getValue == setValue) {
            throw new TestException("test service 1 set its "
                + "local static variable to " + setValue
                + " and test service 2 got its local static variable "
                + getValue + "; these values should *not* match");
        }
        setValue = -40;
        logger.log(Level.FINE, "setting static variable "
            + "local to test service 2: " + setValue);
        service2.setLocalStaticVariable(setValue);
        getValue = service1.getLocalStaticVariable();
        if (getValue == setValue) {
            throw new TestException("test service 2 set its "
                + "local static variable to " + setValue
                + " and test service 1 got its local static variable "
                + getValue + "; these values should *not* match");
        }
        return;
    }

    /**
     * Asks the given service to load
     * com.sun.jini.test.impl.start.TestServiceDummyClass&lt;dummyClass&gt;.
     *
     * @param dummyClass    numeric suffix of the dummy class to load
     * @param service       service asked to perform the load
     * @param serviceNumber service name, used only for log messages
     * @return whatever the remote loadClass call returns
     * @throws Exception including ClassNotFoundException when the class is
     *         not visible to that service's class loader (expected in some
     *         checks above)
     */
    private Object loadClass(String dummyClass, // dummy class to load
                             TestService service, // service to load dummy class
                             String serviceNumber) // service name for doc purpose
        throws Exception
    {
        String classToLoad =
            "com.sun.jini.test.impl.start.TestServiceDummyClass"
            + dummyClass;
        logger.log(Level.FINE, "attempting to load "
            + classToLoad + " from test service " + serviceNumber);
        return service.loadClass(classToLoad);
    }
}
| {
"content_hash": "0907e88da50b032ebff9e8082f4d404b",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 79,
"avg_line_length": 42.56410256410256,
"alnum_prop": 0.6219879518072289,
"repo_name": "trasukg/river-qa-2.2",
"id": "8ef5f791868582e589778aee2ee5b0237d4c71c9",
"size": "7446",
"binary": false,
"copies": "2",
"ref": "refs/heads/2.2",
"path": "qa/src/com/sun/jini/test/impl/start/ClasspathTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2047"
},
{
"name": "Groovy",
"bytes": "17991"
},
{
"name": "Java",
"bytes": "21668373"
},
{
"name": "Shell",
"bytes": "117675"
}
],
"symlink_target": ""
} |
import logging
from django.conf import settings
from django.utils.translation import gettext_lazy as _
import horizon
LOG = logging.getLogger(__name__)
class Aggregates(horizon.Panel):
    """Admin dashboard panel for compute host aggregates."""

    name = _("Host Aggregates")
    slug = 'aggregates'
    policy_rules = (("compute", "compute_extension:aggregates"),)
    permissions = ('openstack.services.compute',)

    def allowed(self, context):
        """Show the panel only when the request's token scope matches
        whether compute is configured as a system-scoped service.
        """
        compute_is_system_scoped = 'compute' in settings.SYSTEM_SCOPE_SERVICES
        request_is_system_scoped = bool(context['request'].user.system_scoped)
        if compute_is_system_scoped != request_is_system_scoped:
            return False
        return super().allowed(context)
| {
"content_hash": "b5e316e24f82b5830896c181c817a470",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 65,
"avg_line_length": 26.545454545454547,
"alnum_prop": 0.666095890410959,
"repo_name": "openstack/horizon",
"id": "315bef2e05ab8c784e54b1a0c016ec21d93bb342",
"size": "1157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openstack_dashboard/dashboards/admin/aggregates/panel.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "583449"
},
{
"name": "JavaScript",
"bytes": "2585531"
},
{
"name": "Python",
"bytes": "5370605"
},
{
"name": "SCSS",
"bytes": "133237"
},
{
"name": "Shell",
"bytes": "6526"
}
],
"symlink_target": ""
} |
// Browser-test fixture for the metrics service: enables metrics recording
// (without uploading) and provides a helper that opens tabs whose page
// loads feed the stability counters checked by the tests below.
class MetricsServiceTest : public InProcessBrowserTest {
 public:
  virtual void SetUpCommandLine(CommandLine* command_line) OVERRIDE {
    // Enable the metrics service for testing (in recording-only mode).
    command_line->AppendSwitch(switches::kMetricsRecordingOnly);
  }

  // Open a couple of tabs of random content.
  void OpenTabs() {
    // Wait both for the tab to open and for its navigation to finish, so
    // the page-load stability counters are updated before tests assert.
    const int kBrowserTestFlags =
        ui_test_utils::BROWSER_TEST_WAIT_FOR_TAB |
        ui_test_utils::BROWSER_TEST_WAIT_FOR_NAVIGATION;

    FilePath test_directory;
    ASSERT_TRUE(PathService::Get(chrome::DIR_TEST_DATA, &test_directory));

    FilePath page1_path = test_directory.AppendASCII("title2.html");
    ui_test_utils::NavigateToURLWithDisposition(
        browser(),
        net::FilePathToFileURL(page1_path),
        NEW_FOREGROUND_TAB,
        kBrowserTestFlags);

    FilePath page2_path = test_directory.AppendASCII("iframe.html");
    ui_test_utils::NavigateToURLWithDisposition(
        browser(),
        net::FilePathToFileURL(page2_path),
        NEW_FOREGROUND_TAB,
        kBrowserTestFlags);
  }
};
// Opening two tabs without any crashes should record one launch, three page
// loads (the two test pages plus, presumably, the initial tab load — the
// exact accounting is defined by the metrics service), and zero renderer
// crashes.
IN_PROC_BROWSER_TEST_F(MetricsServiceTest, CloseRenderersNormally) {
  OpenTabs();

  // Verify that the expected stability metrics were recorded.
  const PrefService* prefs = g_browser_process->local_state();
  EXPECT_EQ(1, prefs->GetInteger(prefs::kStabilityLaunchCount));
  EXPECT_EQ(3, prefs->GetInteger(prefs::kStabilityPageLoadCount));
  EXPECT_EQ(0, prefs->GetInteger(prefs::kStabilityRendererCrashCount));
  // TODO(isherman): We should also verify that prefs::kStabilityExitedCleanly
  // is set to true, but this preference isn't set until the browser
  // exits... it's not clear to me how to test that.
}
// Flaky on Linux. See http://crbug.com/131094
#if defined(OS_LINUX)
#define MAYBE_CrashRenderers DISABLED_CrashRenderers
#else
#define MAYBE_CrashRenderers CrashRenderers
#endif

// Deliberately crashes one renderer (via chrome://crash) and checks that the
// crash is reflected in the stability metrics.
IN_PROC_BROWSER_TEST_F(MetricsServiceTest, MAYBE_CrashRenderers) {
  OpenTabs();

  // Kill the process for one of the tabs.
  content::WindowedNotificationObserver observer(
      content::NOTIFICATION_RENDERER_PROCESS_CLOSED,
      content::NotificationService::AllSources());
  ui_test_utils::NavigateToURL(browser(), GURL(chrome::kChromeUICrashURL));
  observer.Wait();

  // The MetricsService listens for the same notification, so the |observer|
  // might finish waiting before the MetricsService has a chance to process the
  // notification. To avoid racing here, we repeatedly run the message loop
  // until the MetricsService catches up. This should happen "real soon now",
  // since the notification is posted to all observers essentially
  // simultaneously... so busy waiting here shouldn't be too bad.
  const PrefService* prefs = g_browser_process->local_state();
  while (!prefs->GetInteger(prefs::kStabilityRendererCrashCount)) {
    ui_test_utils::RunAllPendingInMessageLoop();
  }

  // Verify that the expected stability metrics were recorded: the crash page
  // still counts as a page load, and exactly one renderer crash is logged.
  EXPECT_EQ(1, prefs->GetInteger(prefs::kStabilityLaunchCount));
  EXPECT_EQ(4, prefs->GetInteger(prefs::kStabilityPageLoadCount));
  EXPECT_EQ(1, prefs->GetInteger(prefs::kStabilityRendererCrashCount));
  // TODO(isherman): We should also verify that prefs::kStabilityExitedCleanly
  // is set to true, but this preference isn't set until the browser
  // exits... it's not clear to me how to test that.
}
| {
"content_hash": "293d53f344fabd5512cd7b91b9712e4d",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 79,
"avg_line_length": 41.85,
"alnum_prop": 0.7365591397849462,
"repo_name": "keishi/chromium",
"id": "c58d810d3fff232cff7a71c18474f6b42e920e70",
"size": "4300",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chrome/browser/metrics/metrics_service_browsertest.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "853"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "1172794"
},
{
"name": "C",
"bytes": "67452317"
},
{
"name": "C#",
"bytes": "1132"
},
{
"name": "C++",
"bytes": "132681259"
},
{
"name": "F#",
"bytes": "381"
},
{
"name": "Go",
"bytes": "19048"
},
{
"name": "Java",
"bytes": "361412"
},
{
"name": "JavaScript",
"bytes": "16603687"
},
{
"name": "Objective-C",
"bytes": "9609581"
},
{
"name": "PHP",
"bytes": "97796"
},
{
"name": "Perl",
"bytes": "918683"
},
{
"name": "Python",
"bytes": "6407891"
},
{
"name": "R",
"bytes": "524"
},
{
"name": "Shell",
"bytes": "4192593"
},
{
"name": "Tcl",
"bytes": "277077"
}
],
"symlink_target": ""
} |
package org.eclipse.flux.jdt.services;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.flux.client.IMessageHandler;
import org.eclipse.flux.client.MessageConnector;
import org.eclipse.flux.client.MessageHandler;
import org.eclipse.jdt.core.CompletionContext;
import org.eclipse.jdt.core.CompletionProposal;
import org.eclipse.jdt.core.CompletionRequestor;
import org.eclipse.jdt.core.ICompilationUnit;
import org.eclipse.jdt.core.JavaModelException;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
* Handles Java content assist requests coming from message bus by invoking JDT content assist engine.
* @author Martin Lippert
*/
public class ContentAssistService {

    // Access to the in-memory compilation units currently being live-edited.
    private LiveEditUnits liveEditUnits;
    // Message bus connection used both to receive requests and to reply.
    private MessageConnector messagingConnector;
    // Handler registered for "contentassistrequest" messages; kept so it can
    // be unregistered again in dispose().
    private IMessageHandler contentAssistRequestHandler;

    /**
     * Registers a handler for "contentassistrequest" messages on the given
     * connector; requests are answered using the given live edit units.
     */
    public ContentAssistService(MessageConnector messagingConnector, LiveEditUnits liveEditUnits) {
        this.messagingConnector = messagingConnector;
        this.liveEditUnits = liveEditUnits;
        this.contentAssistRequestHandler = new MessageHandler("contentassistrequest") {
            @Override
            public void handle(String messageType, JSONObject message) {
                handleContentAssistRequest(message);
            }
        };
        messagingConnector.addMessageHandler(this.contentAssistRequestHandler);
    }

    /**
     * Answers one content assist request. Only resources currently under
     * live edit are served; other requests are silently ignored. Any
     * exception is swallowed after printing its stack trace, so a bad
     * request cannot take down the message handler.
     */
    protected void handleContentAssistRequest(JSONObject message) {
        try {
            String username = message.getString("username");
            String projectName = message.getString("project");
            String resourcePath = message.getString("resource");
            int callbackID = message.getInt("callback_id");

            // Live edit units are keyed by "<project>/<resource>".
            String liveEditID = projectName + "/" + resourcePath;
            if (liveEditUnits.isLiveEditResource(username, liveEditID)) {
                int offset = message.getInt("offset");
                String prefix = message.optString("prefix");
                String sender = message.getString("requestSenderID");

                // Echo the request coordinates back so the requester can
                // correlate the response with its callback.
                JSONObject responseMessage = new JSONObject();
                responseMessage.put("username", username);
                responseMessage.put("project", projectName);
                responseMessage.put("resource", resourcePath);
                responseMessage.put("callback_id", callbackID);
                responseMessage.put("requestSenderID", sender);
                responseMessage.put("proposals", computeContentAssist(username, liveEditID, offset, prefix));

                messagingConnector.send("contentassistresponse", responseMessage);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Runs JDT code completion at the given offset of the live edit unit and
     * converts the proposals to the JSON shape expected by the client,
     * sorted by descending relevance (ties broken by display name).
     *
     * @param username     owner of the live edit session
     * @param resourcePath live edit unit key ("&lt;project&gt;/&lt;resource&gt;")
     * @param offset       completion offset within the unit
     * @param prefix       already-typed prefix to be replaced by proposals
     * @return JSON array of proposal objects (empty when the unit is
     *         unavailable or completion fails)
     */
    protected JSONArray computeContentAssist(String username, String resourcePath, int offset, String prefix) throws JSONException {
        final List<CompletionProposal> proposals = new ArrayList<CompletionProposal>();
        // Single-element array so the anonymous requestor below can hand the
        // CompletionContext back to this method.
        final CompletionContext[] completionContextParam = new CompletionContext[] { null };

        ICompilationUnit liveEditUnit = liveEditUnits.getLiveEditUnit(username, resourcePath);
        try {
            if (liveEditUnit != null) {
                CompletionRequestor collector = new CompletionRequestor() {
                    @Override
                    public void accept(CompletionProposal proposal) {
                        proposals.add(proposal);
                    }

                    @Override
                    public void acceptContext(CompletionContext context) {
                        super.acceptContext(context);
                        completionContextParam[0] = context;
                    }
                };

                // Allow completions for unresolved types - since 3.3
                collector.setAllowsRequiredProposals(CompletionProposal.FIELD_REF, CompletionProposal.TYPE_REF, true);
                collector.setAllowsRequiredProposals(CompletionProposal.FIELD_REF, CompletionProposal.TYPE_IMPORT, true);
                collector.setAllowsRequiredProposals(CompletionProposal.FIELD_REF, CompletionProposal.FIELD_IMPORT, true);

                collector.setAllowsRequiredProposals(CompletionProposal.METHOD_REF, CompletionProposal.TYPE_REF, true);
                collector.setAllowsRequiredProposals(CompletionProposal.METHOD_REF, CompletionProposal.TYPE_IMPORT, true);
                collector.setAllowsRequiredProposals(CompletionProposal.METHOD_REF, CompletionProposal.METHOD_IMPORT, true);

                collector.setAllowsRequiredProposals(CompletionProposal.CONSTRUCTOR_INVOCATION, CompletionProposal.TYPE_REF, true);

                collector.setAllowsRequiredProposals(CompletionProposal.ANONYMOUS_CLASS_CONSTRUCTOR_INVOCATION, CompletionProposal.TYPE_REF, true);
                collector.setAllowsRequiredProposals(CompletionProposal.ANONYMOUS_CLASS_DECLARATION, CompletionProposal.TYPE_REF, true);

                collector.setAllowsRequiredProposals(CompletionProposal.TYPE_REF, CompletionProposal.TYPE_REF, true);

                liveEditUnit.codeComplete(offset, collector, new NullProgressMonitor());
            }
        } catch (JavaModelException e) {
            e.printStackTrace();
        }

        List<JSONObject> jsonProposals = new ArrayList<JSONObject>(proposals.size());
        CompletionContext completionContext = completionContextParam[0];
        for (CompletionProposal proposal : proposals) {
            JSONObject jsonDescription = getDescription(proposal, completionContext);
            // NOTE: "ProposalReplcamentInfo" (sic) is the project type's
            // actual (misspelled) name.
            ProposalReplcamentInfo replacementInfo = new CompletionProposalReplacementProvider(liveEditUnit, proposal, completionContext, offset, prefix).createReplacement();

            JSONObject jsonProposal = new JSONObject();
            jsonProposal.put("description", jsonDescription);
            jsonProposal.put("proposal", replacementInfo.replacement);

            // The replacement starts where the typed prefix starts; extra
            // edits (e.g. added imports) may shift that position.
            int initOffset = offset - prefix.length();
            if (replacementInfo.extraChanges != null) {
                jsonProposal.put("additionalEdits", Utils.editsToJsonArray(replacementInfo.extraChanges));
                initOffset += Utils.getOffsetAdjustment(replacementInfo.extraChanges, initOffset);
            }

            if (replacementInfo.positions != null && !replacementInfo.positions.isEmpty()) {
                jsonProposal.put("positions", getPositions(replacementInfo.positions, initOffset));
            }
            // Cursor position after the proposal is applied.
            jsonProposal.put("escapePosition", initOffset + replacementInfo.replacement.length());
            jsonProposal.put("style", "attributedString");
            jsonProposal.put("replace", true);
            jsonProposal.put("relevance", proposal.getRelevance());

            jsonProposals.add(jsonProposal);
        }

        // Highest relevance first; equal relevance ordered by the proposal's
        // concatenated description segments. A JSON error sorts first.
        Collections.sort(jsonProposals, new Comparator<JSONObject>() {
            @Override
            public int compare(JSONObject o1, JSONObject o2) {
                try {
                    int diff = o2.getInt("relevance") - o1.getInt("relevance");
                    if (diff == 0) {
                        JSONArray nameDescription1 = o1.getJSONObject("description").getJSONArray("segments");
                        JSONArray nameDescription2 = o2.getJSONObject("description").getJSONArray("segments");
                        StringBuilder nameBuffer1 = new StringBuilder();
                        for (int i = 0; i < nameDescription1.length(); i++) {
                            nameBuffer1.append(nameDescription1.getJSONObject(i).getString("value"));
                        }
                        StringBuilder nameBuffer2 = new StringBuilder();
                        for (int i = 0; i < nameDescription2.length(); i++) {
                            nameBuffer2.append(nameDescription2.getJSONObject(i).getString("value"));
                        }
                        return nameBuffer1.toString().compareTo(nameBuffer2.toString());
                    } else {
                        return diff;
                    }
                } catch (JSONException e) {
                    return -1;
                }
            }
        });

        return new JSONArray(jsonProposals);
    }

    /**
     * Converts a flat (offset, length) pair list into a JSON array of
     * {offset, length} objects, shifting offsets by initOffset. Returns
     * null when the list is missing or has an odd number of entries.
     */
    private JSONArray getPositions(List<Integer> positionsList, int initOffset) throws JSONException {
        if (positionsList != null && positionsList.size() % 2 == 0) {
            JSONArray jsonPositions = new JSONArray();
            for (int i = 0; i < positionsList.size(); i += 2) {
                JSONObject position = new JSONObject();
                position.put("offset", positionsList.get(i) + initOffset);
                position.put("length", positionsList.get(i + 1));
                jsonPositions.put(position);
            }
            return jsonPositions;
        } else {
            return null;
        }
    }

    /**
     * Builds the JSON description (icon, styled text segments, metadata)
     * for a single proposal.
     */
    protected JSONObject getDescription(CompletionProposal proposal, CompletionContext context) throws JSONException {
        CompletionProposalDescriptionProvider provider = new CompletionProposalDescriptionProvider(context);
        JSONObject description = new JSONObject();

        /*
         * Add icon field for now. Possibly needs to be moved to a client side
         */
        if (proposal.getKind() == CompletionProposal.METHOD_REF) {
            JSONObject src = new JSONObject();
            src.put("src", "../js/editor/textview/methpub_obj.gif");
            description.put("icon", src);
        } else if (proposal.getKind() == CompletionProposal.FIELD_REF) {
            JSONObject src = new JSONObject();
            src.put("src", "../js/editor/textview/field_public_obj.gif");
            description.put("icon", src);
        } else if (proposal.getKind() == CompletionProposal.TYPE_REF) {
            JSONObject src = new JSONObject();
            src.put("src", "../js/editor/textview/class_obj.gif");
            description.put("icon", src);
        }
        description.put("segments", new JSONArray(provider.createDescription(proposal).toString()));
        description.put("metadata", new JSONObject(provider.createMetadata(proposal)));
        return description;
    }

    /** Unregisters the message handler; the service is unusable afterwards. */
    public void dispose() {
        messagingConnector.removeMessageHandler(contentAssistRequestHandler);
    }
}
"content_hash": "db071713f3ae70abebfaf7b1d02f98d0",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 165,
"avg_line_length": 41.21860465116279,
"alnum_prop": 0.7531031369893929,
"repo_name": "fjodorver/flux",
"id": "e6ba0a66fa718617c80f768ab93613b02a0d999d",
"size": "9460",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "org.eclipse.flux.jdt.service/src/org/eclipse/flux/jdt/services/ContentAssistService.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "35425"
},
{
"name": "HTML",
"bytes": "17936"
},
{
"name": "Java",
"bytes": "1851531"
},
{
"name": "JavaScript",
"bytes": "1713841"
}
],
"symlink_target": ""
} |
<?php
/**
* Webservice main controller
*
* @category Mage
* @package Mage_Api
* @author Magento Core Team <[email protected]>
*/
class Mage_Api_IndexController extends Mage_Api_Controller_Action
{
    /**
     * Entry point for webservice API requests: initialize the API server
     * with this controller and let it process the request.
     */
    public function indexAction()
    {
        /* @var $server Mage_Api_Model_Server */
        $server = $this->_getServer()->init($this);
        $server->run();
    }
} // Class Mage_Api_IndexController End
| {
"content_hash": "3fd8f5f4f328cc8b6b9e35f64c0a90b4",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 65,
"avg_line_length": 22,
"alnum_prop": 0.6196172248803827,
"repo_name": "melvyn-sopacua/magedepot",
"id": "39753e5924bbd6b3034872c160b2ee04a6cb4fd6",
"size": "1370",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "magento/app/code/core/Mage/Api/controllers/IndexController.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "PHP",
"bytes": "25548654"
},
{
"name": "Shell",
"bytes": "642"
}
],
"symlink_target": ""
} |
module Balladina
  # Celluloid actor representing one recording session ("board"): it owns
  # the set of Track actors, publishes presence/readiness notifications,
  # and delegates mixdown work to a linked Engineer actor.
  class Board
    include Celluloid
    include Celluloid::Logger
    include Celluloid::Notifications

    def initialize(options = {})
      # Random identifier for this board instance.
      @name = SecureRandom.hex
      # Factories are injectable (for tests); default to the real classes.
      @creates_tracks = options.fetch(:creates_tracks) { Track }
      @creates_coordinators = options.fetch(:creates_coordinators) { TrackCoordinator }
      # Immutable Hamster sets: every update rebinds the ivar to a new set
      # instead of mutating in place.
      @tracks = Hamster.set
      @ready_ids = Hamster.set
      # new_link ties the Engineer's lifecycle to this actor's.
      @engineer = Engineer.new_link(Actor.current)
    end

    attr_reader :tracks, :ready_ids, :creates_tracks, :creates_coordinators, :engineer, :name
    private :tracks, :ready_ids, :creates_tracks, :creates_coordinators, :engineer

    # Creates a Track for a joining peer, wires up its control-socket
    # coordinator, announces the new peer list and returns the track.
    def add_track(track_id, control_socket, data_socket)
      track = creates_tracks.new(track_id, data_socket)
      @tracks = (tracks << track)
      create_track_coordinator control_socket, track
      broadcast_online
      track
    end

    # Drops a track (by id), clears its readiness, re-announces both the
    # online and ready peer lists, then terminates the track actor.
    def remove_track(track)
      @tracks = Hamster.set(*tracks.to_a.delete_if { |t|
        t.id == track.id
      })
      @ready_ids = ready_ids.delete(track.id)
      broadcast_online
      broadcast_ready
      track.terminate
    end

    # Marks a track as ready and re-announces the ready peer list.
    def notify_ready(track)
      @ready_ids = ready_ids << track.id
      broadcast_ready
    end

    # Publishes the ids of all ready peers to subscribers.
    def broadcast_ready
      publish "peers_ready", ready_ids.to_a
    end

    # Publishes the ids of all connected peers to subscribers.
    def broadcast_online
      publish "peers_online", tracks.map(&:id).to_a
    end

    def start_recording
      publish "start_recording"
    end

    def stop_recording
      publish "stop_recording"
    end

    # Kicks off mixdown preparation on every track concurrently (futures)
    # and hands the pending results to the Engineer.
    def mixdown
      mixdown_ready_tracks = tracks.map { |t| t.future.prepare_mixdown }
      engineer.mixdown mixdown_ready_tracks
    end

    # Called back by the Engineer when the mixdown file is available;
    # announces the downloadable file name.
    def mixdown_ready(public_path)
      publish "download_mixdown", File.basename(public_path)
    end

    private

    # Pairs a control socket with a track via a coordinator actor.
    def create_track_coordinator(control_socket, track)
      creates_coordinators.new(control_socket, track, Actor.current)
    end
  end
end
| {
"content_hash": "057f0c6338e79047d526a07552c2184f",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 93,
"avg_line_length": 26.32894736842105,
"alnum_prop": 0.6276861569215393,
"repo_name": "dodecaphonic/balladina",
"id": "d8fe0adb080851489a79d50c6b465aa2bf1b589e",
"size": "2001",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/balladina/board.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "23316"
},
{
"name": "JavaScript",
"bytes": "14075"
},
{
"name": "Ruby",
"bytes": "23798"
}
],
"symlink_target": ""
} |
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html lang="en-us" xml:lang="en-us">
<head>
<meta content="text/html; charset=utf-8" http-equiv="Content-Type" />
<meta name="copyright" content="(C) Copyright 2005" />
<meta name="DC.rights.owner" content="(C) Copyright 2005" />
<meta content="public" name="security" />
<meta content="index,follow" name="Robots" />
<meta http-equiv="PICS-Label" content='(PICS-1.1 "http://www.icra.org/ratingsv02.html" l gen true r (cz 1 lz 1 nz 1 oz 1 vz 1) "http://www.rsac.org/ratingsv01.html" l gen true r (n 0 s 0 v 0 l 0) "http://www.classify.org/safesurf/" l gen true r (SS~~000 1))' />
<meta content="reference" name="DC.Type" />
<meta name="DC.Title" content="derby.system.home" />
<meta content="derby.system.home" name="DC.subject" />
<meta content="derby.system.home" name="keywords" />
<meta scheme="URI" name="DC.Relation" content="crefproper22250.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperbuiltinalgorithm.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperauthdn.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperauthpw.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper26978.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper37341.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper13766.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper25581.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper27467.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper24846.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper81405.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper25025.html" />
<meta scheme="URI" name="DC.Relation" content="rrefpropernoautoboot.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper24390.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper39325.html" />
<meta scheme="URI" name="DC.Relation" content="rrefpropersqlauth.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper13217.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperxatrantimeout.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper43414.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper43517.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper10607.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper23835.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper40346.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper98166.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper46141.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperlogbuffersize.html" />
<meta scheme="URI" name="DC.Relation" content="rrefpropermaxlogshippinginterval.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperminlogshippinginterval.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperverbose.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper27529.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperstormin.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper81359.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper28026.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper40688.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperrowlocking.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper34037.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper33027.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper18151.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper35028.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper26985.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperbootall.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperdurability.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper27355.html" />
<meta content="XHTML" name="DC.Format" />
<meta content="rrefproper32066" name="DC.Identifier" />
<meta content="en-us" name="DC.Language" />
<link href="commonltr.css" type="text/css" rel="stylesheet" />
<title>derby.system.home</title>
</head>
<body id="rrefproper32066"><a name="rrefproper32066"><!-- --></a>
<h1 class="topictitle1">derby.system.home</h1>
<div>
<div class="section"><h4 class="sectiontitle">Function</h4>
<p>Specifies the <span>Derby</span> system
directory, which is the directory that contains subdirectories holding databases
that you create and the text file <em>derby.properties</em>.</p>
<p>If the system directory that you specify with <em>derby.system.home</em> does
not exist at startup, <span>Derby</span>
creates the directory automatically.</p>
</div>
<div class="section"><h4 class="sectiontitle">Default</h4>
<p>Current directory (the value of the JVM system property <em>user.dir</em>).</p>
<p>If you do not explicitly set the <em>derby.system.home</em> property when
starting <span>Derby</span>, the default
is the directory in which
<span>Derby</span> was started.</p>
<div class="note"><span class="notetitle">Note: </span>You should always explicitly set the value of
<em>derby.system.home</em>.</div>
</div>
<div class="example"><h4 class="sectiontitle">Example</h4>
<pre><strong>-Dderby.system.home=C:\<span>derby</span></strong></pre>
</div>
<div class="section"><h4 class="sectiontitle">Dynamic or static</h4>
<p>This property is static; if you change it while
<span>Derby</span> is running, the change
does not take effect until you reboot.</p>
</div>
</div>
<div>
<div class="familylinks">
<div class="parentlink"><strong>Parent topic:</strong> <a href="crefproper22250.html" title="The Derby properties are used for configuring the system and database, as well as for diagnostics such as logging statements, and monitoring and tracing locks.">Derby properties</a></div>
</div>
<div class="relref"><strong>Related reference</strong><br />
<div><a href="rrefproperbuiltinalgorithm.html" title="">derby.authentication.builtin.algorithm</a></div>
<div><a href="rrefproperauthdn.html" title="">derby.authentication.ldap.searchAuthDN</a></div>
<div><a href="rrefproperauthpw.html" title="">derby.authentication.ldap.searchAuthPW</a></div>
<div><a href="rrefproper26978.html" title="">derby.authentication.ldap.searchBase</a></div>
<div><a href="rrefproper37341.html" title="">derby.authentication.ldap.searchFilter</a></div>
<div><a href="rrefproper13766.html" title="">derby.authentication.provider</a></div>
<div><a href="rrefproper25581.html" title="">derby.authentication.server</a></div>
<div><a href="rrefproper27467.html" title="">derby.connection.requireAuthentication</a></div>
<div><a href="rrefproper24846.html" title="">derby.database.defaultConnectionMode</a></div>
<div><a href="rrefproper81405.html" title="">derby.database.forceDatabaseLock</a></div>
<div><a href="rrefproper25025.html" title="">derby.database.fullAccessUsers</a></div>
<div><a href="rrefpropernoautoboot.html" title="">derby.database.noAutoBoot</a></div>
<div><a href="rrefproper24390.html" title="">derby.database.propertiesOnly</a></div>
<div><a href="rrefproper39325.html" title="">derby.database.readOnlyAccessUsers</a></div>
<div><a href="rrefpropersqlauth.html" title="">derby.database.sqlAuthorization</a></div>
<div><a href="rrefproper13217.html" title="">derby.infolog.append</a></div>
<div><a href="rrefproperxatrantimeout.html" title="">derby.jdbc.xaTransactionTimeout</a></div>
<div><a href="rrefproper43414.html" title="">derby.language.logQueryPlan</a></div>
<div><a href="rrefproper43517.html" title="">derby.language.logStatementText</a></div>
<div><a href="rrefproper10607.html" title="">derby.locks.deadlockTimeout</a></div>
<div><a href="rrefproper23835.html" title="">derby.locks.deadlockTrace</a></div>
<div><a href="rrefproper40346.html" title="">derby.locks.escalationThreshold</a></div>
<div><a href="rrefproper98166.html" title="">derby.locks.monitor</a></div>
<div><a href="rrefproper46141.html" title="">derby.locks.waitTimeout</a></div>
<div><a href="rrefproperlogbuffersize.html" title="">derby.replication.logBufferSize</a></div>
<div><a href="rrefpropermaxlogshippinginterval.html" title="">derby.replication.maxLogShippingInterval</a></div>
<div><a href="rrefproperminlogshippinginterval.html" title="">derby.replication.minLogShippingInterval</a></div>
<div><a href="rrefproperverbose.html" title="">derby.replication.verbose</a></div>
<div><a href="rrefproper27529.html" title="">derby.storage.initialPages</a></div>
<div><a href="rrefproperstormin.html" title="">derby.storage.minimumRecordSize</a></div>
<div><a href="rrefproper81359.html" title="">derby.storage.pageCacheSize</a></div>
<div><a href="rrefproper28026.html" title="">derby.storage.pageReservedSpace</a></div>
<div><a href="rrefproper40688.html" title="">derby.storage.pageSize</a></div>
<div><a href="rrefproperrowlocking.html" title="">derby.storage.rowLocking</a></div>
<div><a href="rrefproper34037.html" title="">derby.storage.tempDirectory</a></div>
<div><a href="rrefproper33027.html" title="">derby.stream.error.field</a></div>
<div><a href="rrefproper18151.html" title="">derby.stream.error.file</a></div>
<div><a href="rrefproper35028.html" title="">derby.stream.error.method</a></div>
<div><a href="rrefproper26985.html" title="">derby.stream.error.logSeverityLevel</a></div>
<div><a href="rrefproperbootall.html" title="">derby.system.bootAll</a></div>
<div><a href="rrefproperdurability.html" title="">derby.system.durability</a></div>
<div><a href="rrefproper27355.html" title="">derby.user.UserName</a></div>
</div>
</div>
</body>
</html> | {
"content_hash": "074c7e043247e17b75e1b6f524b7f049",
"timestamp": "",
"source": "github",
"line_count": 172,
"max_line_length": 280,
"avg_line_length": 62.58139534883721,
"alnum_prop": 0.7262170196952805,
"repo_name": "OpenVnmrJ/ovjTools",
"id": "652038552384eb424df8e3041d55fe6764b937e6",
"size": "10764",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "jdk1.6.0_39_64/db/docs/html/ref/rrefproper32066.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14629"
},
{
"name": "C",
"bytes": "3175236"
},
{
"name": "C#",
"bytes": "5792"
},
{
"name": "C++",
"bytes": "1011240"
},
{
"name": "CSS",
"bytes": "96602"
},
{
"name": "Forth",
"bytes": "156961"
},
{
"name": "GLSL",
"bytes": "2258"
},
{
"name": "GSC",
"bytes": "114234"
},
{
"name": "Gnuplot",
"bytes": "1586"
},
{
"name": "HTML",
"bytes": "13204966"
},
{
"name": "Java",
"bytes": "401038"
},
{
"name": "JavaScript",
"bytes": "12584"
},
{
"name": "LiveScript",
"bytes": "2142"
},
{
"name": "Makefile",
"bytes": "155781"
},
{
"name": "PLpgSQL",
"bytes": "294832"
},
{
"name": "PostScript",
"bytes": "9546"
},
{
"name": "Prolog",
"bytes": "15309"
},
{
"name": "Python",
"bytes": "1721"
},
{
"name": "R",
"bytes": "2164"
},
{
"name": "Roff",
"bytes": "1355417"
},
{
"name": "Shell",
"bytes": "158809"
},
{
"name": "Tcl",
"bytes": "1208407"
}
],
"symlink_target": ""
} |
namespace v8 {
namespace internal {
namespace compiler {

// Per-node visitation state.  The declaration order is significant:
// Recurse() only pushes a node whose state is <= kRevisit (i.e. kUnvisited
// or kRevisit).
enum class GraphReducer::State : uint8_t {
  kUnvisited,  // never pushed
  kRevisit,    // visited before, queued for another pass
  kOnStack,    // currently on the DFS stack
  kVisited     // reduction finished (unless revisited later)
};

// Default implementation: most reducers need no end-of-reduction work.
void Reducer::Finalize() {}

GraphReducer::GraphReducer(Zone* zone, Graph* graph, TickCounter* tick_counter,
                           Node* dead)
    : graph_(graph),
      dead_(dead),
      state_(graph, 4),
      reducers_(zone),
      revisit_(zone),
      stack_(zone),
      tick_counter_(tick_counter) {
  if (dead != nullptr) {
    // {dead_} is used by ReplaceWithValue() to rewire IfException users;
    // type it as None (the empty type) up front.
    NodeProperties::SetType(dead_, Type::None());
  }
}

GraphReducer::~GraphReducer() = default;

// Registers a reducer; reducers are applied in registration order by
// Reduce() below.
void GraphReducer::AddReducer(Reducer* reducer) {
  reducers_.push_back(reducer);
}

// Drives an iterative DFS reduction rooted at {node}: work the explicit
// stack down, then drain the revisit queue, then run finalizers.  Finalizers
// may schedule new revisits, hence the outer infinite loop with the
// post-finalize emptiness check.
void GraphReducer::ReduceNode(Node* node) {
  DCHECK(stack_.empty());
  DCHECK(revisit_.empty());
  Push(node);
  for (;;) {
    if (!stack_.empty()) {
      // Process the node on the top of the stack, potentially pushing more or
      // popping the node off the stack.
      ReduceTop();
    } else if (!revisit_.empty()) {
      // If the stack becomes empty, revisit any nodes in the revisit queue.
      Node* const node = revisit_.front();
      revisit_.pop();
      if (state_.Get(node) == State::kRevisit) {
        // state can change while in queue.
        Push(node);
      }
    } else {
      // Run all finalizers.
      for (Reducer* const reducer : reducers_) reducer->Finalize();
      // Check if we have new nodes to revisit.
      if (revisit_.empty()) break;
    }
  }
  DCHECK(revisit_.empty());
  DCHECK(stack_.empty());
}

void GraphReducer::ReduceGraph() { ReduceNode(graph()->end()); }

// Applies all reducers to {node}.  An in-place update (replacement == node)
// restarts the chain from the first reducer while skipping the reducer that
// just changed the node; a replacement by a different node returns
// immediately with that reduction.
Reduction GraphReducer::Reduce(Node* const node) {
  // {skip} marks the reducer whose in-place change triggered the current
  // restart; it is not re-run until another reducer changes the node.
  auto skip = reducers_.end();
  for (auto i = reducers_.begin(); i != reducers_.end();) {
    if (i != skip) {
      tick_counter_->DoTick();
      Reduction reduction = (*i)->Reduce(node);
      if (!reduction.Changed()) {
        // No change from this reducer.
      } else if (reduction.replacement() == node) {
        // {replacement} == {node} represents an in-place reduction. Rerun
        // all the other reducers for this node, as now there may be more
        // opportunities for reduction.
        if (FLAG_trace_turbo_reduction) {
          AllowHandleDereference allow_deref;
          StdoutStream{} << "- In-place update of #" << *node << " by reducer "
                         << (*i)->reducer_name() << std::endl;
        }
        skip = i;
        i = reducers_.begin();
        continue;
      } else {
        // {node} was replaced by another node.
        if (FLAG_trace_turbo_reduction) {
          AllowHandleDereference allow_deref;
          StdoutStream{} << "- Replacement of #" << *node << " with #"
                         << *(reduction.replacement()) << " by reducer "
                         << (*i)->reducer_name() << std::endl;
        }
        return reduction;
      }
    }
    ++i;
  }
  if (skip == reducers_.end()) {
    // No change from any reducer.
    return Reducer::NoChange();
  }
  // At least one reducer did some in-place reduction.
  return Reducer::Changed(node);
}

// Reduces the node on top of the stack: first recurses on any input not yet
// visited (resuming from {entry.input_index}, then wrapping around), then
// applies the reducers, then handles the in-place-update vs. replacement
// outcomes.
void GraphReducer::ReduceTop() {
  NodeState& entry = stack_.top();
  Node* node = entry.node;
  DCHECK_EQ(State::kOnStack, state_.Get(node));
  if (node->IsDead()) return Pop();  // Node was killed while on stack.
  Node::Inputs node_inputs = node->inputs();
  // Recurse on an input if necessary.
  // Resume at the input after the one we last recursed on, then wrap around
  // to cover the inputs before it.
  int start = entry.input_index < node_inputs.count() ? entry.input_index : 0;
  for (int i = start; i < node_inputs.count(); ++i) {
    Node* input = node_inputs[i];
    if (input != node && Recurse(input)) {
      entry.input_index = i + 1;
      return;
    }
  }
  for (int i = 0; i < start; ++i) {
    Node* input = node_inputs[i];
    if (input != node && Recurse(input)) {
      entry.input_index = i + 1;
      return;
    }
  }
  // Remember the max node id before reduction.
  NodeId const max_id = static_cast<NodeId>(graph()->NodeCount() - 1);
  // All inputs should be visited or on stack. Apply reductions to node.
  Reduction reduction = Reduce(node);
  // If there was no reduction, pop {node} and continue.
  if (!reduction.Changed()) return Pop();
  // Check if the reduction is an in-place update of the {node}.
  Node* const replacement = reduction.replacement();
  if (replacement == node) {
    for (Node* const user : node->uses()) {
      DCHECK_IMPLIES(user == node, state_.Get(node) != State::kVisited);
      Revisit(user);
    }
    // In-place update of {node}, may need to recurse on an input.
    Node::Inputs node_inputs = node->inputs();
    for (int i = 0; i < node_inputs.count(); ++i) {
      Node* input = node_inputs[i];
      if (input != node && Recurse(input)) {
        entry.input_index = i + 1;
        return;
      }
    }
  }
  // After reducing the node, pop it off the stack.
  Pop();
  // Check if we have a new replacement.
  if (replacement != node) {
    Replace(node, replacement, max_id);
  }
}

// Unconditional replacement: with max_id at its maximum, every use edge is
// rewired (see the overload below).
void GraphReducer::Replace(Node* node, Node* replacement) {
  Replace(node, replacement, std::numeric_limits<NodeId>::max());
}

// Replaces {node} with {replacement}.  {max_id} is the largest node id that
// existed before the reduction: ids above it belong to nodes created by the
// reduction itself, which are allowed to keep using {node}.
void GraphReducer::Replace(Node* node, Node* replacement, NodeId max_id) {
  if (node == graph()->start()) graph()->SetStart(replacement);
  if (node == graph()->end()) graph()->SetEnd(replacement);
  if (replacement->id() <= max_id) {
    // {replacement} is an old node, so unlink {node} and assume that
    // {replacement} was already reduced and finish.
    for (Edge edge : node->use_edges()) {
      Node* const user = edge.from();
      Verifier::VerifyEdgeInputReplacement(edge, replacement);
      edge.UpdateTo(replacement);
      // Don't revisit this node if it refers to itself.
      if (user != node) Revisit(user);
    }
    node->Kill();
  } else {
    // Replace all old uses of {node} with {replacement}, but allow new nodes
    // created by this reduction to use {node}.
    for (Edge edge : node->use_edges()) {
      Node* const user = edge.from();
      if (user->id() <= max_id) {
        edge.UpdateTo(replacement);
        // Don't revisit this node if it refers to itself.
        if (user != node) Revisit(user);
      }
    }
    // Unlink {node} if it's no longer used.
    if (node->uses().empty()) node->Kill();
    // If there was a replacement, reduce it after popping {node}.
    Recurse(replacement);
  }
}

// Splits the replacement of {node} by edge kind: value edges get {value},
// effect edges get {effect}, control edges get {control} — except that
// IfSuccess users collapse into {control} and IfException users are rewired
// to the {dead_} node.
void GraphReducer::ReplaceWithValue(Node* node, Node* value, Node* effect,
                                    Node* control) {
  // Default missing effect/control to the node's own inputs.
  if (effect == nullptr && node->op()->EffectInputCount() > 0) {
    effect = NodeProperties::GetEffectInput(node);
  }
  if (control == nullptr && node->op()->ControlInputCount() > 0) {
    control = NodeProperties::GetControlInput(node);
  }
  // Requires distinguishing between value, effect and control edges.
  for (Edge edge : node->use_edges()) {
    Node* const user = edge.from();
    DCHECK(!user->IsDead());
    if (NodeProperties::IsControlEdge(edge)) {
      if (user->opcode() == IrOpcode::kIfSuccess) {
        Replace(user, control);
      } else if (user->opcode() == IrOpcode::kIfException) {
        DCHECK_NOT_NULL(dead_);
        edge.UpdateTo(dead_);
        Revisit(user);
      } else {
        DCHECK_NOT_NULL(control);
        edge.UpdateTo(control);
        Revisit(user);
      }
    } else if (NodeProperties::IsEffectEdge(edge)) {
      DCHECK_NOT_NULL(effect);
      edge.UpdateTo(effect);
      Revisit(user);
    } else {
      DCHECK_NOT_NULL(value);
      edge.UpdateTo(value);
      Revisit(user);
    }
  }
}

// Pops the top node and marks it fully visited.
void GraphReducer::Pop() {
  Node* node = stack_.top().node;
  state_.Set(node, State::kVisited);
  stack_.pop();
}

// Pushes {node} onto the DFS stack; it must not already be on the stack.
void GraphReducer::Push(Node* const node) {
  DCHECK_NE(State::kOnStack, state_.Get(node));
  state_.Set(node, State::kOnStack);
  stack_.push({node, 0});
}

// Pushes {node} unless it is already on the stack or fully visited.
// Returns true if a push happened (caller must yield to the new stack top).
bool GraphReducer::Recurse(Node* node) {
  if (state_.Get(node) > State::kRevisit) return false;
  Push(node);
  return true;
}

// Queues an already-visited node for another reduction pass; nodes in any
// other state are either pending already or will be reduced anyway.
void GraphReducer::Revisit(Node* node) {
  if (state_.Get(node) == State::kVisited) {
    state_.Set(node, State::kRevisit);
    revisit_.push(node);
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8
| {
"content_hash": "4ebe1b66853886d4f7c291aab88c8be0",
"timestamp": "",
"source": "github",
"line_count": 283,
"max_line_length": 79,
"avg_line_length": 28.724381625441698,
"alnum_prop": 0.5937999753967278,
"repo_name": "endlessm/chromium-browser",
"id": "d9bc9d6c22b322133645badfc7848e5a79b4b41f",
"size": "8557",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "v8/src/compiler/graph-reducer.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
// Legacy Karma test-runner configuration (global-assignment style used by
// Karma before the `module.exports = function (config) {...}` format).
// The runner evaluates this file and reads the globals assigned below;
// constants such as JASMINE, REQUIRE and LOG_INFO are injected by Karma.
basePath = '';
// https://github.com/karma-runner/karma/issues/481
// list of files / patterns to load in the browser
// Entries with `included: false` are served but not <script>-loaded, so
// RequireJS can load them on demand; test-main.js bootstraps the run.
files = [
  JASMINE,
  JASMINE_ADAPTER,
  REQUIRE,
  REQUIRE_ADAPTER,
  {pattern: 'app/components/jquery/jquery.js', included: false},
  {pattern: 'app/components/requirejs/require.js', included: false},
  {pattern: 'test/spec/*-spec.js', included: false},
  // helpers & fixtures for jasmine-jquery
  { pattern: 'test/helpers/*.js', included: true },
  'test/test-main.js',
];
// list of files to exclude
exclude = [];
// test results reporter to use
// possible values: 'dots', 'progress', 'junit'
reporters = ['progress'];
// web server port
port = 9876;
// cli runner port
runnerPort = 9100;
// enable / disable colors in the output (reporters and logs)
colors = true;
// level of logging
// possible values: LOG_DISABLE || LOG_ERROR || LOG_WARN || LOG_INFO || LOG_DEBUG
logLevel = LOG_INFO;
// enable / disable watching file and executing tests whenever any file changes
autoWatch = true;
// Start these browsers, currently available:
// - Chrome
// - ChromeCanary
// - Firefox
// - Opera
// - Safari (only Mac)
// - PhantomJS
// - IE (only Windows)
browsers = ['Chrome'];
// If browser does not capture in given timeout [ms], kill it
captureTimeout = 60000;
// Continuous Integration mode
// if true, it capture browsers, run tests and exit
singleRun = false;
| {
"content_hash": "837fc830c9e3101e03c029f8874bbb20",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 81,
"avg_line_length": 20.414285714285715,
"alnum_prop": 0.6710986703988804,
"repo_name": "goliatone/flatg-website",
"id": "289c9b8d109010d1832eec30943b2999e44abdf3",
"size": "1572",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "karma.conf.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "35493"
},
{
"name": "JavaScript",
"bytes": "153973"
},
{
"name": "PHP",
"bytes": "15181"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<title>range.rb</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<link rel="stylesheet" href="../../../../../../../../../../../../../../../css/reset.css" type="text/css" media="screen" />
<link rel="stylesheet" href="../../../../../../../../../../../../../../../css/main.css" type="text/css" media="screen" />
<link rel="stylesheet" href="../../../../../../../../../../../../../../../css/github.css" type="text/css" media="screen" />
<script src="../../../../../../../../../../../../../../../js/jquery-1.3.2.min.js" type="text/javascript" charset="utf-8"></script>
<script src="../../../../../../../../../../../../../../../js/jquery-effect.js" type="text/javascript" charset="utf-8"></script>
<script src="../../../../../../../../../../../../../../../js/main.js" type="text/javascript" charset="utf-8"></script>
<script src="../../../../../../../../../../../../../../../js/highlight.pack.js" type="text/javascript" charset="utf-8"></script>
</head>
<body>
<div class="banner">
<span>Ruby on Rails 4.2.1</span><br />
<h1>
range.rb
</h1>
<ul class="files">
<li>
../../../../.rvm/gems/ruby-2.2.2/gems/activerecord-4.2.1/lib/active_record/connection_adapters/postgresql/oid/range.rb
</li>
<li>Last modified: 2015-05-15 20:53:13 +0100</li>
</ul>
</div>
<div id="bodyContent">
<div id="content">
<!-- File only: requires -->
<div class="sectiontitle">Required Files</div>
<ul>
<li>active_support/core_ext/string/filters</li>
</ul>
<!-- Namespace -->
<div class="sectiontitle">Namespace</div>
<ul>
<li>
<span class="type">MODULE</span>
<a href="../../../../../../../../../../../../../../../classes/ActiveRecord.html">ActiveRecord</a>
</li>
<li>
<span class="type">MODULE</span>
<a href="../../../../../../../../../../../../../../../classes/ActiveRecord/ConnectionAdapters.html">ActiveRecord::ConnectionAdapters</a>
</li>
<li>
<span class="type">MODULE</span>
<a href="../../../../../../../../../../../../../../../classes/ActiveRecord/ConnectionAdapters/PostgreSQL.html">ActiveRecord::ConnectionAdapters::PostgreSQL</a>
</li>
<li>
<span class="type">MODULE</span>
<a href="../../../../../../../../../../../../../../../classes/ActiveRecord/ConnectionAdapters/PostgreSQL/OID.html">ActiveRecord::ConnectionAdapters::PostgreSQL::OID</a>
</li>
<li>
<span class="type">MODULE</span>
<a href="../../../../../../../../../../../../../../../classes/ActiveSupport.html">ActiveSupport</a>
</li>
</ul>
<!-- Methods -->
</div>
</div>
</body>
</html> | {
"content_hash": "49f2a7ea6846f381f278bbf128dd3430",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 178,
"avg_line_length": 28.86725663716814,
"alnum_prop": 0.4610668301655426,
"repo_name": "TomMulvaney/AudioFlow",
"id": "3a76b96149d3e1efe76043f9f1083fae52efc1dc",
"size": "3262",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/api/files/__/__/__/__/_rvm/gems/ruby-2_2_2/gems/activerecord-4_2_1/lib/active_record/connection_adapters/postgresql/oid/range_rb.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "60402"
},
{
"name": "CoffeeScript",
"bytes": "1073"
},
{
"name": "HTML",
"bytes": "9191"
},
{
"name": "JavaScript",
"bytes": "24013"
},
{
"name": "Ruby",
"bytes": "29957"
}
],
"symlink_target": ""
} |
// Re-export the shared top-level configuration module unchanged.
module.exports = require('../config.js');
| {
"content_hash": "5a98807bbc141eba0ce7888b8e0ea316",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 41,
"avg_line_length": 42,
"alnum_prop": 0.6666666666666666,
"repo_name": "yoo2001818/shortly.me",
"id": "ae385b5604bf841e703b868e33443c534a240dd1",
"size": "42",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/config.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1529"
},
{
"name": "HTML",
"bytes": "1762"
},
{
"name": "JavaScript",
"bytes": "8319"
}
],
"symlink_target": ""
} |
<div id='content'>
  <form name='signup' id='signup' action='index.php' method='POST'>
    <input type='hidden' name='page' value='summary' />
    <?php // SECURITY: every interpolated value is escaped with
          // htmlspecialchars(ENT_QUOTES) — the attributes are single-quoted,
          // and name/email/phone come straight from $_REQUEST, so unescaped
          // output would allow attribute breakout / reflected XSS. ?>
    <input type='hidden' name='own' value='<?php echo htmlspecialchars($experiment->owner, ENT_QUOTES); ?>' />
    <input type='hidden' name='exp' value='<?php echo htmlspecialchars($experiment->id, ENT_QUOTES); ?>' />
    <input type='hidden' name='name' value='<?php echo htmlspecialchars($_REQUEST['name'], ENT_QUOTES); ?>' />
    <input type='hidden' name='email' value='<?php echo htmlspecialchars($_REQUEST['email'], ENT_QUOTES); ?>' />
    <input type='hidden' name='phone' value='<?php echo htmlspecialchars($_REQUEST['phone'], ENT_QUOTES); ?>' />
    <h2>Date and time</h2>
    <?php if ($warning_message == True) {
      // The chosen slot was taken between page loads; ask for another one.
      echo "<div id='notification'><p>Sorry... someone else has just taken your chosen timeslot. Please choose another one.<p></div>"; }
    else { echo '<p>Please select a suitable time when you are free to take part in this experiment.</p>'; } ?>
    <?php if ($experiment->getPerSlot() > 1) {
      echo "<p>If possible, please choose a slot that someone else has already signed up for (the slots highlighted in green).</p>"; } ?>
    <?php $experiment->printCalendar(); ?>
    <p><input type='submit' id='button' name='signup' value='Next' /></p>
  </form>
</div>
| {
"content_hash": "16b7f43b8ff546d5580bead3882cd11e",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 137,
"avg_line_length": 47.8,
"alnum_prop": 0.6150627615062761,
"repo_name": "jwcarr/SimpleSignUp",
"id": "06fb944009182216421298eb727fbef241e78c85",
"size": "1195",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "html/calendar.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4181"
},
{
"name": "HTML",
"bytes": "43141"
},
{
"name": "JavaScript",
"bytes": "9874"
},
{
"name": "PHP",
"bytes": "87274"
}
],
"symlink_target": ""
} |
using RIAPP.DataService.Resources;
using RIAPP.DataService.Types;
using RIAPP.DataService.Utils;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Threading.Tasks;
namespace RIAPP.DataService
{
internal class ServiceOperationsHelper
{
private BaseDomainService _domainService;
private IServiceContainer _services;
/// <summary>
/// Binds this helper to its owning domain service and caches the service
/// container so the data helper can be looked up cheaply later.
/// </summary>
public ServiceOperationsHelper(BaseDomainService domainService)
{
    this._services = domainService.ServiceContainer;
    this._domainService = domainService;
}
/// <summary>
/// Walks the client-sent value changes (recursing into nested object
/// fields) and applies each one to <paramref name="entity"/> via
/// <see cref="ApplyValue"/>.  <paramref name="path"/> is the dotted field
/// prefix accumulated during recursion.
/// </summary>
public void ApplyValues(object entity, RowInfo rowInfo, string path, ValueChange[] values, bool isOriginal)
{
    DbSetInfo dbSetInfo = rowInfo.dbSetInfo;
    var dataHelper = this._services.DataHelper;
    foreach (var change in values)
    {
        string fullName = path + change.fieldName;
        Field fieldInfo = dataHelper.getFieldInfo(dbSetInfo, fullName);
        if (!fieldInfo.GetIsIncludeInResult())
            continue;
        // Server Side calculated fields are never set on entities from updates
        if (fieldInfo.fieldType == FieldType.ServerCalculated)
            continue;
        if (fieldInfo.fieldType == FieldType.Object && change.nested != null)
        {
            // Complex (object-typed) field: recurse into its nested changes.
            this.ApplyValues(entity, rowInfo, fullName + '.', change.nested.ToArray(), isOriginal);
        }
        else
        {
            this.ApplyValue(entity, rowInfo, fullName, fieldInfo, change, isOriginal);
        }
    }
}
/// <summary>
/// Applies a single client-sent value change to <paramref name="entity"/>,
/// enforcing per-change-type write rules (read-only, auto-generated,
/// primary-key and nullability checks).  When <paramref name="isOriginal"/>
/// is true only the original snapshot value is restored.
/// </summary>
private void ApplyValue(object entity, RowInfo rowInfo, string fullName, Field fieldInfo, ValueChange val, bool isOriginal)
{
    var dataHelper = this._services.DataHelper;
    if (isOriginal)
    {
        // Original snapshot: restore the original value if one was sent.
        if ((val.flags & ValueFlags.Setted) == ValueFlags.Setted)
            dataHelper.SetFieldValue(entity, fullName, fieldInfo, val.orig);
        return;
    }
    switch (rowInfo.changeType)
    {
        case ChangeType.Deleted:
            {
                //For delete fill only original values
                if ((val.flags & ValueFlags.Setted) == ValueFlags.Setted)
                    dataHelper.SetFieldValue(entity, fullName, fieldInfo, val.orig);
            }
            break;
        case ChangeType.Added:
            {
                // Auto-generated values are produced by the server; ignore
                // anything the client sent for such fields.
                if (fieldInfo.isAutoGenerated)
                    return;
                if ((val.flags & ValueFlags.Changed) == ValueFlags.Changed)
                {
                    if (fieldInfo.isReadOnly && val.val != null && !fieldInfo.allowClientDefault)
                    {
                        throw new DomainServiceException(string.Format(ErrorStrings.ERR_PROPERTY_IS_READONLY, entity.GetType().Name, fieldInfo.fieldName));
                    }
                    // FIX: the previous version re-checked fieldInfo.isAutoGenerated
                    // here and threw, but that branch was unreachable — the early
                    // return above already exits for auto-generated fields.  The
                    // dead check has been removed; behavior is unchanged.
                    dataHelper.SetFieldValue(entity, fullName, fieldInfo, val.val);
                }
            }
            break;
        case ChangeType.Updated:
            {
                if ((val.flags & ValueFlags.Changed) == ValueFlags.Changed)
                {
                    // Clients may never update read-only, key, timestamp or
                    // auto-generated fields.
                    if (fieldInfo.isReadOnly || (fieldInfo.isPrimaryKey > 0 || fieldInfo.fieldType == FieldType.RowTimeStamp || fieldInfo.isAutoGenerated))
                        throw new DomainServiceException(string.Format(ErrorStrings.ERR_PROPERTY_IS_READONLY, entity.GetType().Name, fieldInfo.fieldName));
                    if (!fieldInfo.isNullable && val.val == null)
                    {
                        throw new DomainServiceException(string.Format(ErrorStrings.ERR_FIELD_IS_NOT_NULLABLE, fieldInfo.fieldName));
                    }
                    dataHelper.SetFieldValue(entity, fullName, fieldInfo, val.val);
                }
                else if ((val.flags & ValueFlags.Setted) == ValueFlags.Setted)
                {
                    // Unchanged-but-sent values: key/timestamp/original-required
                    // fields must match their original (concurrency sanity check).
                    if ((fieldInfo.isPrimaryKey > 0 || fieldInfo.fieldType == FieldType.RowTimeStamp || fieldInfo.isNeedOriginal) && val.val != val.orig)
                    {
                        throw new DomainServiceException(string.Format(ErrorStrings.ERR_VAL_ORIGINAL_INVALID, fieldInfo.fieldName));
                    }
                    dataHelper.SetFieldValue(entity, fullName, fieldInfo, val.val);
                }
            }
            break;
    }
}
/// <summary>
/// Copies the client-sent values (current or original snapshot, depending on
/// <paramref name="isOriginal"/>) onto <paramref name="entity"/>, and for
/// newly added rows wires the child-to-parent navigation properties.
/// </summary>
public void UpdateEntityFromRowInfo(object entity, RowInfo rowInfo, bool isOriginal)
{
    // FIX: removed two unused locals the previous version declared
    // (DbSetInfo dbSetInfo and var values) — neither was referenced.
    this.ApplyValues(entity, rowInfo, "", rowInfo.values.ToArray(), isOriginal);
    var dataHelper = this._services.DataHelper;
    if (!isOriginal && rowInfo.changeType == ChangeType.Added)
    {
        // Added rows: attach each parent entity via its navigation property.
        foreach (var pn in rowInfo.changeState.ParentRows)
        {
            if (!dataHelper.SetValue(entity, pn.association.childToParentName, pn.ParentRow.changeState.Entity, false))
            {
                throw new DomainServiceException(string.Format(ErrorStrings.ERR_CAN_NOT_SET_PARENT_FIELD, pn.association.childToParentName, rowInfo.dbSetInfo.EntityType.Name));
            }
        }
    }
}
/// <summary>
/// Serializes the entity's current field values back into the supplied
/// <see cref="ValueChange"/> array, recursing into nested object fields,
/// and marks each refreshed value with <see cref="ValueFlags.Refreshed"/>.
/// </summary>
public void UpdateValuesFromEntity(object entity, string path, DbSetInfo dbSetInfo, ValueChange[] values)
{
    var dataHelper = this._services.DataHelper;
    foreach (var change in values)
    {
        string fieldPath = path + change.fieldName;
        Field fieldInfo = dataHelper.getFieldInfo(dbSetInfo, fieldPath);
        if (!fieldInfo.GetIsIncludeInResult())
            continue;
        if (fieldInfo.fieldType == FieldType.Object && change.nested != null)
        {
            // Nested object: descend with a dotted path prefix.
            this.UpdateValuesFromEntity(entity, fieldPath + '.', dbSetInfo, change.nested.ToArray());
        }
        else
        {
            change.val = dataHelper.SerializeField(entity, fieldPath, fieldInfo);
            change.flags = change.flags | ValueFlags.Refreshed;
        }
    }
}
/// <summary>
/// Walks the value changes and, for every field whose serialized value now differs
/// from the original entity's value, writes the new value back and flags it as refreshed.
/// Recurses into nested object fields.
/// </summary>
public void CheckValuesChanges(RowInfo rowInfo, string path, ValueChange[] values)
{
    DbSetInfo setInfo = rowInfo.dbSetInfo;
    var dataHelper = this._services.DataHelper;
    foreach (var change in values)
    {
        string fieldPath = path + change.fieldName;
        Field fieldInfo = dataHelper.getFieldInfo(setInfo, fieldPath);
        if (!fieldInfo.GetIsIncludeInResult())
            continue;
        if (fieldInfo.fieldType == FieldType.Object && change.nested != null)
        {
            this.CheckValuesChanges(rowInfo, fieldPath + '.', change.nested.ToArray());
        }
        else
        {
            string refreshed;
            if (this.isEntityValueChanged(rowInfo, fieldPath, fieldInfo, out refreshed))
            {
                change.val = refreshed;
                change.flags = change.flags | ValueFlags.Refreshed;
            }
        }
    }
}
/// <summary>
/// Refreshes the row's values from the entity and, for added rows,
/// captures the server-generated row key.
/// </summary>
public void UpdateRowInfoFromEntity(object entity, RowInfo rowInfo)
{
    this.UpdateValuesFromEntity(entity, "", rowInfo.dbSetInfo, rowInfo.values.ToArray());
    if (rowInfo.changeType != ChangeType.Added)
        return;
    rowInfo.serverKey = rowInfo.GetRowKeyAsString();
}
/// <summary>
/// Determines whether a field's serialized value on the current entity differs
/// from its value on the original entity (a missing original counts as null).
/// </summary>
/// <param name="newVal">Receives the serialized current value.</param>
/// <returns>True when the serialized values differ.</returns>
public bool isEntityValueChanged(RowInfo rowInfo, string fullName, Field fieldInfo, out string newVal)
{
    var dataHelper = this._services.DataHelper;
    EntityChangeState state = rowInfo.changeState;
    newVal = dataHelper.SerializeField(state.Entity, fullName, fieldInfo);
    string oldVal = state.OriginalEntity == null
        ? null
        : dataHelper.SerializeField(state.OriginalEntity, fullName, fieldInfo);
    return newVal != oldVal;
}
/// <summary>
/// After the domain-service update methods ran, pushes any changed field values
/// back into the row and, for added rows, captures the server-generated row key.
/// </summary>
public void UpdateRowInfoAfterUpdates(RowInfo rowInfo)
{
    this.CheckValuesChanges(rowInfo, "", rowInfo.values.ToArray());
    if (rowInfo.changeType != ChangeType.Added)
        return;
    rowInfo.serverKey = rowInfo.GetRowKeyAsString();
}
/// <summary>
/// Returns the original (pre-change) entity attached to the row, cast to <typeparamref name="T"/>.
/// </summary>
/// <exception cref="DomainServiceException">Thrown when <paramref name="rowInfo"/> is null.</exception>
public T GetOriginalEntity<T>(RowInfo rowInfo)
    where T : class
{
    if (rowInfo == null)
    {
        throw new DomainServiceException(ErrorStrings.ERR_METH_APPLY_INVALID);
    }
    return (T)rowInfo.changeState.OriginalEntity;
}
/// <summary>
/// Builds a fresh instance of the entity's runtime type populated with the
/// row's original (pre-change) values.
/// </summary>
public object GetOriginalEntity(object entity, RowInfo rowInfo)
{
    object original = Activator.CreateInstance(entity.GetType());
    UpdateEntityFromRowInfo(original, rowInfo, true);
    return original;
}
/// <summary>
/// Returns the first tracked parent entity whose DbSet entity type is exactly
/// <typeparamref name="T"/>, or null when the row has no matching parent.
/// </summary>
/// <exception cref="DomainServiceException">Thrown when <paramref name="rowInfo"/> is null.</exception>
public T GetParentEntity<T>(RowInfo rowInfo)
    where T : class
{
    if (rowInfo == null)
    {
        throw new DomainServiceException(ErrorStrings.ERR_METH_APPLY_INVALID);
    }
    foreach (var parent in rowInfo.changeState.ParentRows)
    {
        if (parent.ParentRow.dbSetInfo.EntityType == typeof(T))
            return (T)parent.ParentRow.changeState.Entity;
    }
    return null;
}
/// <summary>
/// Handles an Added row: materializes a new entity from the row values and
/// invokes the user-defined insert method on the domain service.
/// </summary>
/// <exception cref="DomainServiceException">
/// Thrown when the row is not an Added row or no insert method is implemented.
/// </exception>
public void InsertEntity(RowInfo rowInfo)
{
    DbSetInfo setInfo = rowInfo.dbSetInfo;
    if (rowInfo.changeType != ChangeType.Added)
        throw new DomainServiceException(string.Format(ErrorStrings.ERR_REC_CHANGETYPE_INVALID, setInfo.EntityType.Name, rowInfo.changeType));

    MethodInfo insertMethod = setInfo.getOperationMethodInfo(MethodType.Insert);
    if (insertMethod == null)
        throw new DomainServiceException(string.Format(ErrorStrings.ERR_DB_INSERT_NOT_IMPLEMENTED, setInfo.EntityType.Name, this.GetType().Name));

    object newEntity = Activator.CreateInstance(setInfo.EntityType);
    UpdateEntityFromRowInfo(newEntity, rowInfo, false);
    rowInfo.changeState.Entity = newEntity;
    insertMethod.Invoke(this._domainService, new object[] { newEntity });
}
/// <summary>
/// Handles an Updated row: materializes the changed entity plus a snapshot of its
/// original values, then invokes the user-defined update method on the domain service.
/// </summary>
/// <exception cref="DomainServiceException">
/// Thrown when the row is not an Updated row or no update method is implemented.
/// </exception>
public void UpdateEntity(RowInfo rowInfo)
{
    DbSetInfo setInfo = rowInfo.dbSetInfo;
    if (rowInfo.changeType != ChangeType.Updated)
        throw new DomainServiceException(string.Format(ErrorStrings.ERR_REC_CHANGETYPE_INVALID, setInfo.EntityType.Name, rowInfo.changeType));

    MethodInfo updateMethod = setInfo.getOperationMethodInfo(MethodType.Update);
    if (updateMethod == null)
        throw new DomainServiceException(string.Format(ErrorStrings.ERR_DB_UPDATE_NOT_IMPLEMENTED, setInfo.EntityType.Name, this.GetType().Name));

    object changedEntity = Activator.CreateInstance(setInfo.EntityType);
    UpdateEntityFromRowInfo(changedEntity, rowInfo, false);
    var original = this.GetOriginalEntity(changedEntity, rowInfo);
    rowInfo.changeState.Entity = changedEntity;
    rowInfo.changeState.OriginalEntity = original;
    // apply this changes to entity that is in the database (this is done in user domain service method)
    updateMethod.Invoke(this._domainService, new object[] { changedEntity });
}
/// <summary>
/// Handles a Deleted row: rebuilds the entity from the row's original values and
/// invokes the user-defined delete method on the domain service.
/// </summary>
/// <exception cref="DomainServiceException">
/// Thrown when the row is not a Deleted row or no delete method is implemented.
/// </exception>
public void DeleteEntity(RowInfo rowInfo)
{
    DbSetInfo setInfo = rowInfo.dbSetInfo;
    if (rowInfo.changeType != ChangeType.Deleted)
        throw new DomainServiceException(string.Format(ErrorStrings.ERR_REC_CHANGETYPE_INVALID, setInfo.EntityType.Name, rowInfo.changeType));

    MethodInfo deleteMethod = setInfo.getOperationMethodInfo(MethodType.Delete);
    if (deleteMethod == null)
        throw new DomainServiceException(string.Format(ErrorStrings.ERR_DB_DELETE_NOT_IMPLEMENTED, setInfo.EntityType.Name, this.GetType().Name));

    object entityToDelete = Activator.CreateInstance(setInfo.EntityType);
    UpdateEntityFromRowInfo(entityToDelete, rowInfo, true);
    // For deletes the current and original entity are the same snapshot.
    rowInfo.changeState.Entity = entityToDelete;
    rowInfo.changeState.OriginalEntity = entityToDelete;
    deleteMethod.Invoke(this._domainService, new object[] { entityToDelete });
}
/// <summary>
/// Validates the row's entity: runs per-field value checks for added/updated fields,
/// records the names of changed fields on the row, then calls the optional user-defined
/// Validate method on the domain service.
/// </summary>
/// <returns>True when validation passed; false when errors were stored on the row.</returns>
public async Task<bool> ValidateEntity(RowInfo rowInfo)
{
    DbSetInfo dbSetInfo = rowInfo.dbSetInfo;
    IEnumerable<ValidationErrorInfo> errs = null;
    LinkedList<string> mustBeChecked = new LinkedList<string>();
    LinkedList<string> skipCheckList = null;
    var dataHelper = this._services.DataHelper;
    var validationHelper = this._services.ValidationHelper;
    if (rowInfo.changeType == ChangeType.Added)
    {
        // Fields populated through parent associations are set server-side,
        // so they are excluded from the per-value checks below.
        skipCheckList = new LinkedList<string>();
        foreach (var pn in rowInfo.changeState.ParentRows)
        {
            foreach (var frel in pn.association.fieldRels)
            {
                skipCheckList.AddLast(frel.childField);
            }
        }
    }
    foreach (var fieldInfo in dbSetInfo.fieldInfos)
    {
        dataHelper.ForEachFieldInfo("", fieldInfo, (string fullName, Field f) =>
        {
            if (!f.GetIsIncludeInResult())
                return;
            // Object containers and server-calculated fields carry no client value to check.
            if (f.fieldType == FieldType.Object || f.fieldType == FieldType.ServerCalculated)
                return;
            // NOTE(review): "value" is only consumed in the Added branch; kept here to
            // preserve the original evaluation order of SerializeField.
            string value = dataHelper.SerializeField(rowInfo.changeState.Entity, fullName, f);
            if (rowInfo.changeType == ChangeType.Added)
            {
                bool isSkip = f.isAutoGenerated || (skipCheckList != null && skipCheckList.Any(n => n == fullName));
                if (!isSkip)
                {
                    validationHelper.CheckValue(f, value);
                    mustBeChecked.AddLast(fullName);
                }
            }
            else if (rowInfo.changeType == ChangeType.Updated)
            {
                string newVal;
                // Fix: merged the two identical "if (isChanged)" blocks of the original.
                if (isEntityValueChanged(rowInfo, fullName, f, out newVal))
                {
                    validationHelper.CheckValue(f, newVal);
                    mustBeChecked.AddLast(fullName);
                }
            }
        });
    }
    rowInfo.changeState.NamesOfChangedFields = mustBeChecked.ToArray();
    MethodInfo methInfo = dbSetInfo.getOperationMethodInfo(MethodType.Validate);
    if (methInfo != null)
    {
        var invokeRes = methInfo.Invoke(this._domainService, new object[] { rowInfo.changeState.Entity, rowInfo.changeState.NamesOfChangedFields });
        errs = (IEnumerable<ValidationErrorInfo>)await GetMethodResult(invokeRes).ConfigureAwait(false);
    }
    if (errs != null && errs.Any())
    {
        rowInfo.changeState.ValidationErrors = errs.ToArray();
        return false;
    }
    return true;
}
/// <summary>
/// Awaits a reflection-invoked result when it is a <see cref="Task"/> and unwraps its
/// Result value; non-task values are returned unchanged.
/// </summary>
/// <remarks>
/// Fix: the original reflected on a "Result" property unconditionally, which throws a
/// NullReferenceException for a plain (non-generic) <see cref="Task"/> — such a task now
/// yields null instead.
/// </remarks>
public static async Task<object> GetMethodResult(object invokeRes)
{
    var task = invokeRes as Task;
    if (task == null)
        return invokeRes;
    await task.ConfigureAwait(false);
    var resultProperty = invokeRes.GetType().GetProperty("Result");
    return resultProperty == null ? null : resultProperty.GetValue(invokeRes, null);
}
}
}
| {
"content_hash": "2f44b659ce836148d1083e0dd884ff71",
"timestamp": "",
"source": "github",
"line_count": 372,
"max_line_length": 184,
"avg_line_length": 46.07258064516129,
"alnum_prop": 0.5435556333508372,
"repo_name": "jmptrader/JRIAppTS",
"id": "d029016dc93f49e542e0f0c0f010ea22817da857",
"size": "17141",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "RIAppDemo/RIAPP.DataService/DomainService/ServiceOperationsHelper.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "100"
},
{
"name": "C#",
"bytes": "1174267"
},
{
"name": "CSS",
"bytes": "68929"
},
{
"name": "HTML",
"bytes": "287152"
},
{
"name": "JavaScript",
"bytes": "2938184"
},
{
"name": "PowerShell",
"bytes": "95100"
},
{
"name": "TypeScript",
"bytes": "1164619"
}
],
"symlink_target": ""
} |
using System;
namespace Syndll2.Data
{
/// <summary>
/// Immutable snapshot of a terminal's programming/debug status, parsed from the data
/// field of a status reply (see the raw-data breakdown above <see cref="Parse"/>).
/// </summary>
public class ProgrammingStatus
{
    private readonly ProgrammingOperationStatus _operationStatus;
    private readonly char _operationType; // TODO: Need an enum for this
    private readonly string _fileName;
    private readonly char _terminalMode; // TODO: Need an enum for this
    private readonly char _programmingModeState; // TODO: Need an enum for this
    private readonly int _previousBlockNumber;
    private readonly int _currentBlockNumber;
    private readonly string _debugInfo;

    /// <summary>Overall operation status code (first character of the data field).</summary>
    public ProgrammingOperationStatus OperationStatus
    {
        get { return _operationStatus; }
    }

    /// <summary>Operation type character (second character of the data field).</summary>
    public char OperationType
    {
        get { return _operationType; }
    }

    /// <summary>Four-character file name from the data field (e.g. "V800" or "Run_").</summary>
    public string FileName
    {
        get { return _fileName; }
    }

    /// <summary>Leading letter of the file name, or a space when the name is not a table id.</summary>
    public char TableType
    {
        get { return TableId == -1 ? ' ' : _fileName[0]; }
    }

    /// <summary>
    /// Numeric suffix of the four-character file name, or -1 when the name is missing,
    /// not four characters long, or its last three characters are not numeric.
    /// </summary>
    public int TableId
    {
        get
        {
            int i;
            if (_fileName != null && _fileName.Length == 4 && int.TryParse(_fileName.Substring(1), out i))
                return i;
            return -1;
        }
    }

    /// <summary>Terminal mode character (seventh character of the data field).</summary>
    public char TerminalMode
    {
        get { return _terminalMode; }
    }

    /// <summary>Programming mode state character (eighth character of the data field).</summary>
    public char ProgrammingModeState
    {
        get { return _programmingModeState; }
    }

    /// <summary>Previous block number, decoded from characters 8-9 of the data field.</summary>
    public int PreviousBlockNumber
    {
        get { return _previousBlockNumber; }
    }

    /// <summary>Current block number, decoded from characters 10-11 of the data field.</summary>
    public int CurrentBlockNumber
    {
        get { return _currentBlockNumber; }
    }

    /// <summary>Trailing three-character debug information from the data field.</summary>
    public string DebugInfo
    {
        get { return _debugInfo; }
    }

    // Sample Raw Data C = command, T = terminal id, CCCC = CRC code
    // Only the Data field should be passed in.
    // (ACK)0MP1536PH000000L593<
    // CCCCCT012345678901234CCCC
    // 0 1
    // Data Field Breakdown
    // M P 1536 P H 00 00 00L (from halt)
    // C N Run_ N R 00 00 00L (from run)
    // W R V800 P 3 01 01 00L (from replace table)
    // W D V800 P 2 41 41 00L (from delete table)
    /// <summary>
    /// Parses the data field of a status reply into a <see cref="ProgrammingStatus"/>.
    /// Accepts either a single status character or the full 15-character data field.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="data"/> is null.</exception>
    /// <exception cref="ArgumentException">Thrown when the length is neither 1 nor 15.</exception>
    internal static ProgrammingStatus Parse(string data)
    {
        if (data == null)
            throw new ArgumentNullException("data");
        if (data.Length != 1 && data.Length != 15)
            throw new ArgumentException(
                string.Format(
                    "Program debug status data should be either {0} or {1} characters, and you passed {2} characters. " +
                    "Do not pass the command, terminal id, or CRC here.", 1, 15, data.Length),
                "data");
        return new ProgrammingStatus(data);
    }

    // Decodes the data field: [0]=status, [1]=operation type, [2..5]=file name,
    // [6]=terminal mode, [7]=programming mode state, [8..9]=previous block,
    // [10..11]=current block, [12..14]=debug info.
    private ProgrammingStatus(string data)
    {
        _operationStatus = (ProgrammingOperationStatus) data[0];
        // A single-character payload carries only the status code.
        if (data.Length == 1)
            return;
        _operationType = data[1];
        _fileName = data.Substring(2, 4);
        _terminalMode = data[6];
        _programmingModeState = data[7];
        _previousBlockNumber = SynelNumericFormat.Convert(data.Substring(8, 2));
        _currentBlockNumber = SynelNumericFormat.Convert(data.Substring(10, 2));
        _debugInfo = data.Substring(12, 3);
        // Workaround for some terminals that don't always report the correct operation status code.
        if (_fileName == "Run_") _operationStatus = ProgrammingOperationStatus.InRunMode;
    }

    /// <summary>Creates an empty status reporting <see cref="ProgrammingOperationStatus.Unknown"/>.</summary>
    internal ProgrammingStatus()
    {
        _operationStatus = ProgrammingOperationStatus.Unknown;
    }
}
}
| {
"content_hash": "b046f09d8922be16c27e639f74035977",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 126,
"avg_line_length": 30.838709677419356,
"alnum_prop": 0.5366108786610879,
"repo_name": "synel/syndll2",
"id": "13ac572c6fc3848575d79f49605c4c65490e5361",
"size": "3826",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Syndll2/Data/ProgrammingStatus.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "309366"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "3d8f009ac95f83259b13315c0a904e00",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.307692307692308,
"alnum_prop": 0.6940298507462687,
"repo_name": "mdoering/backbone",
"id": "4c90743caac98f91587e6a1774fdd62e32b0a14e",
"size": "184",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Apiales/Apiaceae/Neopaulia/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Xml;
using DocSite.Pages;
using DocSite.SiteModel;
namespace DocSite.Renderers
{
/// <summary>
/// Interface to be implemented by classes intending to render <see cref="IRenderable"/> classes.
/// Each implementation targets one concrete output format (e.g. <see cref="HtmlRenderer"/>).
/// </summary>
/// <seealso cref="HtmlRenderer"/>
/// <seealso cref="IRenderable"/>
public interface IRenderer
{
    /// <summary>
    /// Render a collection of <see cref="XmlNode"/>.
    /// </summary>
    /// <param name="nodes">The collection of nodes to render.</param>
    /// <returns><see cref="String"/> - Returns the rendered string.</returns>
    string RenderNodes(IEnumerable<XmlNode> nodes);

    /// <summary>
    /// Render a single <see cref="XmlNode"/>.
    /// </summary>
    /// <param name="node">The node to render.</param>
    /// <returns><see cref="String"/> - Returns the rendered string.</returns>
    string RenderNode(XmlNode node);

    /// <summary>
    /// Render a <see cref="Page"/>.
    /// </summary>
    /// <param name="page">The <see cref="Page"/> to render.</param>
    /// <returns><see cref="String"/> - Returns the rendered string.</returns>
    string RenderPage(Page page);

    /// <summary>
    /// Render a <see cref="Section"/>
    /// </summary>
    /// <param name="section">The <see cref="Section"/> to render.</param>
    /// <returns><see cref="String"/> - Returns the rendered string.</returns>
    string RenderSection(Section section);

    /// <summary>
    /// Render a <see cref="TableSection"/>.
    /// </summary>
    /// <param name="section">The <see cref="TableSection"/> to render.</param>
    /// <returns><see cref="String"/> - Returns the rendered string.</returns>
    string RenderTableSection(TableSection section);

    /// <summary>
    /// Render a <see cref="DefinitionsSection"/>.
    /// </summary>
    /// <param name="section">The <see cref="DefinitionsSection"/> to render.</param>
    /// <returns><see cref="String"/> - Returns the rendered string.</returns>
    string RenderDefinitionsSection(DefinitionsSection section);

    /// <summary>
    /// Render the <see cref="DocSiteModel"/> to <paramref name="outDir"/>.
    /// Unlike the other members, this writes output to disk rather than returning a string.
    /// </summary>
    /// <param name="site">The site to render</param>
    /// <param name="outDir">The output directory. Can be either relative or full path.</param>
    void RenderSite(DocSiteModel site, string outDir);
}
}
| {
"content_hash": "0d19e7df96dd5296c237bd869bb30af7",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 101,
"avg_line_length": 39.32835820895522,
"alnum_prop": 0.5927893738140417,
"repo_name": "PaulTrampert/DocSite",
"id": "46cb97f86c6b35db551ee98b66ed258c3898f361",
"size": "2637",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DocSite/Renderers/IRenderer.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "119206"
},
{
"name": "HTML",
"bytes": "2906"
}
],
"symlink_target": ""
} |
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. See License.txt in the project root for license information.
namespace System.Web.OData
{
/// <summary>
/// Tracks the current reference nesting depth against a fixed maximum.
/// The first call to <see cref="IncreamentCounter"/> brings the depth to 0.
/// </summary>
public class ReferenceDepthContext
{
    // Fix: the maximum never changes after construction, so it is now readonly.
    readonly int maxRefDepth;
    // -1 means no reference has been entered yet.
    int currentRefDepth = -1;

    public ReferenceDepthContext(int maxRefDepth)
    {
        this.maxRefDepth = maxRefDepth;
    }

    /// <summary>
    /// Increments the depth counter.
    /// (The misspelled name is kept for caller compatibility.)
    /// </summary>
    /// <returns>False when the new depth exceeds the maximum; true otherwise.</returns>
    public bool IncreamentCounter()
    {
        if (++currentRefDepth > this.maxRefDepth)
        {
            return false;
        }
        return true;
    }

    /// <summary>Decrements the depth counter, undoing one increment.</summary>
    public void DecrementCounter()
    {
        --currentRefDepth;
    }
}
| {
"content_hash": "d3154e009d9878576dcea152a5995040",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 133,
"avg_line_length": 22.833333333333332,
"alnum_prop": 0.5562043795620438,
"repo_name": "Terminator-Aaron/Katana",
"id": "f860acada5a82b196a1df272a7b4c8e0497e3975",
"size": "687",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "OData/test/System.Web.OData.Test/TestCommon/ReferenceDepthContext.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "10480"
},
{
"name": "C#",
"bytes": "17928568"
},
{
"name": "CSS",
"bytes": "19810"
},
{
"name": "HTML",
"bytes": "2554"
},
{
"name": "JavaScript",
"bytes": "11545"
},
{
"name": "PowerShell",
"bytes": "20946"
},
{
"name": "Shell",
"bytes": "12306"
},
{
"name": "Smalltalk",
"bytes": "50876"
},
{
"name": "Visual Basic",
"bytes": "157681"
}
],
"symlink_target": ""
} |
package com.tokenautocomplete;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.text.style.ReplacementSpan;
import android.view.View;
import android.view.ViewGroup;
/**
* Span that holds a view it draws when rendering
*
* Created on 2/3/15.
* @author mgod
*/
public class ViewSpan extends ReplacementSpan {
protected View view;
private int maxWidth;
public ViewSpan(View v, int maxWidth) {
super();
this.maxWidth = maxWidth;
view = v;
view.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
}
private void prepView() {
int widthSpec = View.MeasureSpec.makeMeasureSpec(maxWidth, View.MeasureSpec.AT_MOST);
int heightSpec = View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED);
view.measure(widthSpec, heightSpec);
view.layout(0, 0, view.getMeasuredWidth(), view.getMeasuredHeight());
}
public void draw(Canvas canvas, CharSequence text, int start, int end, float x, int top, int y, int bottom, Paint paint) {
prepView();
canvas.save();
//Centering the token looks like a better strategy that aligning the bottom
int padding = (bottom - top - view.getBottom()) / 2;
canvas.translate(x, bottom - view.getBottom() - padding);
view.draw(canvas);
canvas.restore();
}
public int getSize(Paint paint, CharSequence charSequence, int i, int i2, Paint.FontMetricsInt fm) {
prepView();
if (fm != null) {
//We need to make sure the layout allots enough space for the view
int height = view.getMeasuredHeight();
int need = height - (fm.descent - fm.ascent);
if (need > 0) {
int ascent = need / 2;
//This makes sure the text drawing area will be tall enough for the view
fm.descent += need - ascent;
fm.ascent -= ascent;
fm.bottom += need - ascent;
fm.top -= need / 2;
}
}
return view.getRight();
}
}
| {
"content_hash": "5f547e66eac2fefcc88b6f71e94a4050",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 131,
"avg_line_length": 33.734375,
"alnum_prop": 0.6248263084761464,
"repo_name": "RacZo/TokenAutoComplete",
"id": "88b0812fb037696876fbfd18016d62f1665990db",
"size": "2159",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "library/src/main/java/com/tokenautocomplete/ViewSpan.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "67807"
}
],
"symlink_target": ""
} |
An example project that throws a very large number of records at AWS Kinesis, for stress testing.
# To run the app
1. git clone [email protected]:matthewbogner/kinesis-stress-example.git
2. Modify the arguments at the bottom of the `pom.xml`
3. mvn -P start | {
"content_hash": "323ba1688825615cd513d7cae8779c81",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 68,
"avg_line_length": 30.285714285714285,
"alnum_prop": 0.7783018867924528,
"repo_name": "matthewbogner/kinesis-stress-example",
"id": "f9ec123c222ea02aead389b37c538e0e9b11b3d1",
"size": "237",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "9387"
}
],
"symlink_target": ""
} |
package com.badlogic.gdx.tiledmappacker;
import java.awt.Graphics;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.StringTokenizer;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.assets.loaders.FileHandleResolver;
import com.badlogic.gdx.backends.lwjgl.LwjglApplication;
import com.badlogic.gdx.backends.lwjgl.LwjglApplicationConfiguration;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.g2d.TextureAtlas;
import com.badlogic.gdx.maps.MapLayer;
import com.badlogic.gdx.maps.tiled.TiledMap;
import com.badlogic.gdx.maps.tiled.TiledMapTileLayer;
import com.badlogic.gdx.maps.tiled.TiledMapTileSet;
import com.badlogic.gdx.maps.tiled.TmxMapLoader;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.tools.imagepacker.TexturePacker2;
import com.badlogic.gdx.tools.imagepacker.TexturePacker2.Settings;
import com.badlogic.gdx.utils.GdxRuntimeException;
import com.badlogic.gdx.utils.IntArray;
import com.badlogic.gdx.utils.ObjectMap;
/** Given one or more TMX tilemaps, packs all tileset resources used across the maps into a <b>single</b> {@link TextureAtlas} and
* produces a new TMX file to be loaded with an {@link AtlasTiledMapLoader} loader. Optionally, it can keep track of unused tiles
* and omit them from the generated atlas, reducing the resource size.
*
* The original TMX map file will be parsed by using the {@link TmxMapLoader} loader, thus access to a valid OpenGL context is
* <b>required</b>, that's why an LwjglApplication is created by this preprocessor: this is probably subject to change in the
* future, where loading both maps metadata and graphics resources should be made conditional.
*
* The new TMX map file will contains a new property, namely "atlas", whose value will enable the {@link AtlasTiledMapLoader} to
* correctly read the associated TextureAtlas representing the tileset.
*
* @author David Fraska and others (initial implementation, tell me who you are!)
* @author Manuel Bua */
public class TiledMapPacker {
private TexturePacker2 packer;
private TiledMap map;
private ArrayList<Integer> blendedTiles = new ArrayList<Integer>();
private TmxMapLoader mapLoader = new TmxMapLoader(new PackerFileHandleResolver());
private TiledMapPackerSettings settings;
// the tilesets output directory, relative to the global output directory
private static final String TilesetsOutputDir = "tileset";
// the generate atlas' name
private static final String AtlasOutputName = "packed";
// a map tracking tileids usage for any given tileset, across multiple maps
private HashMap<String, IntArray> tilesetUsedIds = new HashMap<String, IntArray>();
/** Accepts only files carrying the Tiled ".tmx" extension. */
private static class TmxFilter implements FilenameFilter {
	public TmxFilter () {
	}

	@Override
	public boolean accept (File dir, String name) {
		return name.endsWith(".tmx");
	}
}
/** Resolves a file name by wrapping it directly in a {@link FileHandle}, so maps can be
 * loaded from arbitrary absolute/relative paths instead of an application's asset folder. */
private static class PackerFileHandleResolver implements FileHandleResolver {
	public PackerFileHandleResolver () {
	}

	@Override
	public FileHandle resolve (String fileName) {
		return new FileHandle(fileName);
	}
}
/** Constructs a new preprocessor by using the default packing settings.
 * Equivalent to {@code new TiledMapPacker(new TiledMapPackerSettings())}. */
public TiledMapPacker () {
	this(new TiledMapPackerSettings());
}
/** Constructs a new preprocessor by using the specified packing settings.
 * @param settings controls unused-tile stripping and the atlas/tileset output names */
public TiledMapPacker (TiledMapPackerSettings settings) {
	this.settings = settings;
}
/** You can either run the {@link TiledMapPacker#main(String[])} method or reference this class in your own project and call
* this method.
*
* Keep in mind that this preprocessor will need to load the maps by using the {@link TmxMapLoader} loader and this in turn
* will need a valid OpenGL context to work: this is probably subject to change in the future, where loading both maps metadata
* and graphics resources should be made conditional.
*
* Process a directory containing TMX map files representing Tiled maps and produce a single TextureAtlas as well as new
* processed TMX map files, correctly referencing the generated {@link TextureAtlas} by using the "atlas" custom map property.
*
* Typically, your maps will lie in a directory, such as "maps/" and your tilesets in a subdirectory such as "maps/city": this
* layout will ensure that MapEditor will reference your tileset with a very simple relative path and no parent directory
* names, such as "..", will ever happen in your TMX file definition avoiding much of the confusion caused by the preprocessor
* working with relative paths.
*
* <strong>WARNING!</strong> Use caution if you have a "../" in the path of your tile sets! The output for these tile sets will
* be relative to the output directory. For example, if your output directory is "C:\mydir\maps" and you have a tileset with
* the path "../tileset.png", the tileset will be output to "C:\mydir\" and the maps will be in "C:\mydir\maps".
*
* @param inputDir the input directory containing the tmx files (and tile sets, relative to the path listed in the tmx file)
* @param outputDir The output directory for the TMX files, <strong>should be empty before running</strong>.
* @param settings the settings used in the TexturePacker */
public void processMaps (File inputDir, File outputDir, Settings settings) throws IOException {
	FileHandle inputDirHandle = new FileHandle(inputDir.getAbsolutePath());
	File[] files = inputDir.listFiles(new TmxFilter());
	// Tilesets referenced by any processed map, keyed by tileset name,
	// so each tileset is packed exactly once across all maps.
	ObjectMap<String, TiledMapTileSet> tilesetsToPack = new ObjectMap<String, TiledMapTileSet>();

	for (File file : files) {
		map = mapLoader.load(file.getAbsolutePath());

		// if enabled, build a list of used tileids for the tileset used by this map
		if (this.settings.stripUnusedTiles) {
			int mapWidth = map.getProperties().get("width", Integer.class);
			int mapHeight = map.getProperties().get("height", Integer.class);
			int numlayers = map.getLayers().getCount();
			// Worst case: every cell of every layer references a distinct tile.
			int bucketSize = mapWidth * mapHeight * numlayers;

			Iterator<MapLayer> it = map.getLayers().iterator();
			while (it.hasNext()) {
				MapLayer layer = it.next();

				// some layers can be plain MapLayer instances (ie. object groups), just ignore them
				if (layer instanceof TiledMapTileLayer) {
					TiledMapTileLayer tlayer = (TiledMapTileLayer)layer;

					for (int y = 0; y < mapHeight; ++y) {
						for (int x = 0; x < mapWidth; ++x) {
							if (tlayer.getCell(x, y) != null) {
								// Mask off the top three bits — the TMX flip/rotation flags — to get the raw gid.
								int tileid = tlayer.getCell(x, y).getTile().getId() & ~0xE0000000;
								String tilesetName = tilesetNameFromTileId(map, tileid);
								IntArray usedIds = getUsedIdsBucket(tilesetName, bucketSize);
								usedIds.add(tileid);

								// track this tileset to be packed if not already tracked
								if (!tilesetsToPack.containsKey(tilesetName)) {
									tilesetsToPack.put(tilesetName, map.getTileSets().getTileSet(tilesetName));
								}
							}
						}
					}
				}
			}
		} else {
			// Not stripping: pack every tileset this map declares.
			for (TiledMapTileSet tileset : map.getTileSets()) {
				String tilesetName = tileset.getName();
				if (!tilesetsToPack.containsKey(tilesetName)) {
					tilesetsToPack.put(tilesetName, tileset);
				}
			}
		}

		// Write a rewritten TMX (with the "atlas"/"blended tiles" properties) to the output directory.
		FileHandle tmxFile = new FileHandle(file.getAbsolutePath());
		writeUpdatedTMX(map, outputDir, tmxFile);
	}

	packTilesets(tilesetsToPack, inputDirHandle, outputDir, settings);
}
/** Returns the name of the tileset owning the given tile id, or "" for the empty tile (id 0).
 * Relies on the map's tilesets being ordered by ascending firstgid: the owner is the last
 * tileset whose firstgid does not exceed the id. */
private String tilesetNameFromTileId (TiledMap map, int tileid) {
	if (tileid == 0) return "";

	String owner = "";
	for (TiledMapTileSet candidate : map.getTileSets()) {
		int firstgid = candidate.getProperties().get("firstgid", -1, Integer.class);
		if (firstgid == -1) continue; // skip this tileset
		if (tileid < firstgid) return owner; // passed the id: previous tileset owns it
		owner = candidate.getName();
	}
	return owner;
}
/** Looks up the used-ids bucket for the given tileset name, lazily creating it.
 *
 * @param size capacity for a newly created bucket; when the bucket is absent and size is
 *           0 or lower, no bucket is created and null is returned
 * @return the existing or newly created bucket, or null */
private IntArray getUsedIdsBucket (String tilesetName, int size) {
	IntArray bucket = tilesetUsedIds.get(tilesetName);
	if (bucket == null && size > 0) {
		bucket = new IntArray(size);
		tilesetUsedIds.put(tilesetName, bucket);
	}
	return bucket;
}
/** Traverse the specified tilesets, optionally lookup the used ids and pass every tile image to the {@link TexturePacker2},
* optionally ignoring unused tile ids */
private void packTilesets (ObjectMap<String, TiledMapTileSet> sets, FileHandle inputDirHandle, File outputDir,
	Settings texturePackerSettings) throws IOException {
	BufferedImage tile;
	Vector2 tileLocation;
	TileSetLayout packerTileSet; // NOTE(review): unused local, kept as-is
	Graphics g;

	packer = new TexturePacker2(texturePackerSettings);

	// Global running index so every packed tile region gets a unique name.
	int tileidx = 0;
	for (TiledMapTileSet set : sets.values()) {
		String tilesetName = set.getName();
		System.out.println("Processing tileset " + tilesetName);

		// When stripping, size -1 only fetches an existing bucket (never creates one).
		IntArray usedIds = this.settings.stripUnusedTiles ? getUsedIdsBucket(tilesetName, -1) : null;

		int tileWidth = set.getProperties().get("tilewidth", Integer.class);
		int tileHeight = set.getProperties().get("tileheight", Integer.class);
		int firstgid = set.getProperties().get("firstgid", Integer.class);
		String imageName = set.getProperties().get("imagesource", String.class); // NOTE(review): unused local, kept as-is
		TileSetLayout layout = new TileSetLayout(firstgid, set, inputDirHandle);

		for (int gid = layout.firstgid, i = 0; i < layout.numTiles; gid++, i++, tileidx++) {
			// Skip tiles never referenced by any processed map.
			if (usedIds != null && !usedIds.contains(gid)) {
				System.out.println("Stripped id #" + gid + " from tileset \"" + tilesetName + "\"");
				continue;
			}

			// Copy the single tile out of the tileset image into its own buffer.
			tileLocation = layout.getLocation(gid);
			tile = new BufferedImage(tileWidth, tileHeight, BufferedImage.TYPE_4BYTE_ABGR);

			g = tile.createGraphics();
			g.drawImage(layout.image, 0, 0, tileWidth, tileHeight, (int)tileLocation.x, (int)tileLocation.y, (int)tileLocation.x
				+ tileWidth, (int)tileLocation.y + tileHeight, null);

			// Remember tiles with partial transparency so the map can enable blending for them.
			if (isBlended(tile)) setBlended(gid);
			System.out.println("Adding " + tileWidth + "x" + tileHeight + " (" + (int)tileLocation.x + ", " + (int)tileLocation.y
				+ ")");
			packer.addImage(tile, this.settings.atlasOutputName + "_" + tileidx);
		}
	}

	File outputDirTilesets = getRelativeFile(outputDir, this.settings.tilesetOutputDirectory);
	outputDirTilesets.mkdirs();
	packer.pack(outputDirTilesets, this.settings.atlasOutputName + ".atlas");
}
/** Strips the final ".ext" suffix from s; returns s unchanged when it has no dot. */
private static String removeExtension (String s) {
	int dot = s.lastIndexOf('.');
	return dot == -1 ? s : s.substring(0, dot);
}
/** Returns the file-name portion of s, stripping any directory prefix using either
 * '\\' or '/' as separator.
 *
 * Fix: the original computed the '/' index on the backslash-stripped string but applied
 * it to the unstripped input, and returned the unstripped input when no '/' was present —
 * both wrong for paths containing '\\'. */
private static String removePath (String s) {
	String name = s;
	int index = name.lastIndexOf('\\');
	if (index != -1) name = name.substring(index + 1);
	index = name.lastIndexOf('/');
	if (index != -1) name = name.substring(index + 1);
	return name;
}
/** Resolves relativePath against path segment by segment, honoring ".." components and
 * accepting both '\\' and '/' as separators. A blank relativePath yields path itself. */
private static File getRelativeFile (File path, String relativePath) {
	if (relativePath.trim().length() == 0) return path;

	File result = path;
	for (StringTokenizer st = new StringTokenizer(relativePath, "\\/"); st.hasMoreElements();) {
		String segment = st.nextToken();
		result = segment.equals("..") ? result.getParentFile() : new File(result, segment);
	}
	return result;
}
/** Records the given tile id as needing alpha blending; the collected ids are written
 * to the map's "blended tiles" property by writeUpdatedTMX. */
private void setBlended (int tileNum) {
	blendedTiles.add(tileNum);
}
/** Rewrites the processed TMX file into {@code outputDir}, adding two map
 * properties: "blended tiles" (CSV of tile ids that use alpha) and "atlas"
 * (relative path of the generated texture atlas).
 *
 * @throws IOException declared for callers; DOM parse/transform failures are
 *         rethrown as RuntimeException with the original cause attached. */
private void writeUpdatedTMX (TiledMap tiledMap, File outputDir, FileHandle tmxFileHandle) throws IOException {
	DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
	try {
		DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
		Document doc = docBuilder.parse(tmxFileHandle.read());
		// Locate the <map> element, skipping comments, PIs, etc.
		// Bug fix: the original compared node names with != (reference
		// comparison); getNodeName() is not guaranteed to return an interned
		// String, so use equals() instead.
		Node map = doc.getFirstChild();
		while (map.getNodeType() != Node.ELEMENT_NODE || !"map".equals(map.getNodeName())) {
			if ((map = map.getNextSibling()) == null) {
				throw new GdxRuntimeException("Couldn't find map node!");
			}
		}
		setProperty(doc, map, "blended tiles", toCSV(blendedTiles));
		setProperty(doc, map, "atlas", settings.tilesetOutputDirectory + "/" + settings.atlasOutputName + ".atlas");
		Transformer transformer = TransformerFactory.newInstance().newTransformer();
		DOMSource source = new DOMSource(doc);
		outputDir.mkdirs();
		StreamResult result = new StreamResult(new File(outputDir, tmxFileHandle.name()));
		transformer.transform(source, result);
	} catch (ParserConfigurationException e) {
		// Messages preserved from the original; the cause is now attached so
		// stack traces point at the real failure.
		throw new RuntimeException("ParserConfigurationException: " + e.getMessage(), e);
	} catch (SAXException e) {
		throw new RuntimeException("SAXException: " + e.getMessage(), e);
	} catch (TransformerConfigurationException e) {
		throw new RuntimeException("TransformerConfigurationException: " + e.getMessage(), e);
	} catch (TransformerException e) {
		throw new RuntimeException("TransformerException: " + e.getMessage(), e);
	}
}
/** Sets the "value" attribute of the {@code <property name="...">} element
 * under {@code parent}'s {@code <properties>} child, creating the attribute if
 * it does not exist yet. Missing {@code <properties>}/{@code <property>}
 * elements are created on demand by the lookup helpers. */
private static void setProperty (Document doc, Node parent, String name, String value) {
	Node properties = getFirstChildNodeByName(parent, "properties");
	Node property = getFirstChildByNameAttrValue(properties, "property", "name", name);
	NamedNodeMap attributes = property.getAttributes();
	Node valueNode = attributes.getNamedItem("value");
	if (valueNode != null) {
		valueNode.setNodeValue(value);
	} else {
		// No "value" attribute yet: create, fill, and attach one.
		valueNode = doc.createAttribute("value");
		valueNode.setNodeValue(value);
		attributes.setNamedItem(valueNode);
	}
}
/** Joins the tile ids into a comma-separated string with no trailing comma,
 * e.g. [1, 2, 3] -> "1,2,3"; an empty list yields "".
 *
 * Uses a StringBuilder instead of the original repeated String concatenation,
 * which was O(n^2) in the number of ids. */
private static String toCSV (ArrayList<Integer> values) {
	StringBuilder csv = new StringBuilder();
	for (int i = 0; i < values.size(); i++) {
		if (i > 0) csv.append(',');
		csv.append(values.get(i));
	}
	return csv.toString();
}
/** Returns the first child of {@code parent} with node name {@code child}.
 * If no such child exists, it is created and inserted as the first child
 * (or appended when {@code parent} has no children). */
private static Node getFirstChildNodeByName (Node parent, String child) {
	NodeList children = parent.getChildNodes();
	int count = children.getLength();
	for (int i = 0; i < count; i++) {
		Node candidate = children.item(i);
		if (candidate.getNodeName().equals(child)) return candidate;
	}
	// Not found: create the element and keep the original placement rule
	// (before any existing child, otherwise appended).
	Node created = parent.getOwnerDocument().createElement(child);
	Node first = children.item(0);
	return first != null ? parent.insertBefore(created, first) : parent.appendChild(created);
}
/** Returns true if any pixel of the tile is not fully opaque (alpha != 0xff),
 * i.e. the tile needs alpha blending when drawn. */
private static boolean isBlended (BufferedImage tile) {
	int width = tile.getWidth();
	int height = tile.getHeight();
	int[] pixels = new int[width * height];
	tile.getRGB(0, 0, width, height, pixels, 0, width);
	for (int argb : pixels) {
		// Alpha is the top byte of the packed ARGB int; >>> keeps it unsigned.
		if ((argb >>> 24) != 0xff) return true;
	}
	return false;
}
/** If the child node or attribute doesn't exist, it is created. Usage example: Node property =
 * getFirstChildByAttrValue(properties, "property", "name", "blended tiles");
 *
 * Robustness fix: a matching child element that lacks the requested attribute
 * no longer causes a NullPointerException (the original dereferenced the
 * attribute unconditionally); such children are now skipped. */
private static Node getFirstChildByNameAttrValue (Node node, String childName, String attr, String value) {
	NodeList childNodes = node.getChildNodes();
	for (int i = 0; i < childNodes.getLength(); i++) {
		Node candidate = childNodes.item(i);
		if (!candidate.getNodeName().equals(childName)) continue;
		NamedNodeMap attributes = candidate.getAttributes();
		Node attribute = attributes.getNamedItem(attr);
		// getNamedItem returns null when the attribute is absent; skip rather
		// than crash.
		if (attribute != null && attribute.getNodeValue().equals(value)) return candidate;
	}
	// Not found: create <childName attr="value"/> and insert it first
	// (or append when there are no children), as the original did.
	Node newNode = node.getOwnerDocument().createElement(childName);
	Attr nodeAttr = node.getOwnerDocument().createAttribute(attr);
	nodeAttr.setNodeValue(value);
	newNode.getAttributes().setNamedItem(nodeAttr);
	if (childNodes.item(0) != null) {
		return node.insertBefore(newNode, childNodes.item(0));
	} else {
		return node.appendChild(newNode);
	}
}
// Input directory holding the .tmx maps (and their tilesets); assigned from args in main().
static File inputDir;
// Destination directory for the rewritten maps and packed atlas; assigned from args in main().
static File outputDir;
/** Processes a directory of Tile Maps, compressing each tile set contained in any map once.
 *
 * @param args args[0]: the input directory containing the tmx files (and tile sets, relative to the path listed in the tmx
 *           file). args[1]: The output directory for the tmx files, should be empty before running. WARNING: Use caution if
 *           you have a "../" in the path of your tile sets! The output for these tile sets will be relative to the output
 *           directory. For example, if your output directory is "C:\mydir\output" and you have a tileset with the path
 *           "../tileset.png", the tileset will be output to "C:\mydir\" and the maps will be in "C:\mydir\output". args[2]:
 *           --strip-unused (optional, include to let the TiledMapPacker remove tiles which are not used). */
public static void main (String[] args) {
	final Settings texturePackerSettings = new Settings();
	texturePackerSettings.paddingX = 2;
	texturePackerSettings.paddingY = 2;
	texturePackerSettings.edgePadding = true;
	texturePackerSettings.duplicatePadding = true;
	texturePackerSettings.bleed = true;
	texturePackerSettings.alias = true;
	texturePackerSettings.useIndexes = true;

	final TiledMapPackerSettings packerSettings = new TiledMapPackerSettings();

	// Parse the command line: INPUTDIR [OUTPUTDIR] [--strip-unused]
	switch (args.length) {
	case 3: {
		inputDir = new File(args[0]);
		outputDir = new File(args[1]);
		if ("--strip-unused".equals(args[2])) {
			packerSettings.stripUnusedTiles = true;
		}
		break;
	}
	case 2: {
		inputDir = new File(args[0]);
		outputDir = new File(args[1]);
		break;
	}
	case 1: {
		inputDir = new File(args[0]);
		// Default the output to an "output/" subdirectory of the input.
		outputDir = new File(inputDir, "output/");
		break;
	}
	default: {
		System.out.println("Usage: INPUTDIR [OUTPUTDIR] [--strip-unused]");
		System.exit(0);
	}
	}

	// The packer itself is constructed inside create() below, once the libgdx
	// application context exists. (The original also built an unused instance
	// here; that dead allocation has been removed.)
	LwjglApplicationConfiguration config = new LwjglApplicationConfiguration();
	config.forceExit = false;
	config.width = 100;
	config.height = 50;
	config.useGL20 = true;
	config.title = "TiledMapPacker";

	new LwjglApplication(new ApplicationListener() {

		@Override
		public void resume () {
		}

		@Override
		public void resize (int width, int height) {
		}

		@Override
		public void render () {
		}

		@Override
		public void pause () {
		}

		@Override
		public void dispose () {
		}

		@Override
		public void create () {
			TiledMapPacker packer = new TiledMapPacker(packerSettings);

			if (!inputDir.exists()) {
				throw new RuntimeException("Input directory does not exist");
			}
			try {
				packer.processMaps(inputDir, outputDir, texturePackerSettings);
			} catch (IOException e) {
				// Attach the cause so the original stack trace is preserved.
				throw new RuntimeException("Error processing map: " + e.getMessage(), e);
			}
			Gdx.app.exit();
		}
	}, config);
}
public static class TiledMapPackerSettings {
	// When true, tiles whose ids are never referenced by any processed map are
	// dropped from the packed atlas (enabled via the --strip-unused flag).
	public boolean stripUnusedTiles = false;
	// Directory (relative to the output dir) that receives the packed tileset atlas.
	public String tilesetOutputDirectory = TilesetsOutputDir;
	// Base file name for the generated atlas (".atlas" is appended on output).
	public String atlasOutputName = AtlasOutputName;
}
}
| {
"content_hash": "c881e584863ff63b825a4b91f0b79405",
"timestamp": "",
"source": "github",
"line_count": 543,
"max_line_length": 130,
"avg_line_length": 36.64825046040516,
"alnum_prop": 0.7174874371859297,
"repo_name": "domix/libgdx",
"id": "2ff3917bda06b9a0fda4a9cec34fbc28680e7292",
"size": "20645",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "extensions/gdx-tiled-preprocessor/src/com/badlogic/gdx/tiledmappacker/TiledMapPacker.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package integration
import (
"reflect"
"testing"
"github.com/zaphod-concur/etcd/clientv3"
"github.com/zaphod-concur/etcd/integration"
"github.com/zaphod-concur/etcd/pkg/testutil"
"github.com/zaphod-concur/etcd/pkg/types"
"golang.org/x/net/context"
)
// TestMemberList verifies that a freshly started 3-node cluster reports
// exactly three members through the clientv3 Cluster API.
func TestMemberList(t *testing.T) {
	defer testutil.AfterTest(t)

	clus := integration.NewClusterV3(t, &integration.ClusterConfig{Size: 3})
	defer clus.Terminate(t)

	capi := clientv3.NewCluster(clus.RandClient())

	resp, err := capi.MemberList(context.Background())
	if err != nil {
		t.Fatalf("failed to list member %v", err)
	}
	if got := len(resp.Members); got != 3 {
		t.Errorf("number of members = %d, want %d", got, 3)
	}
}
// TestMemberAdd adds a new member with a known peer URL to a 3-node cluster
// and verifies the response echoes that URL back.
func TestMemberAdd(t *testing.T) {
	defer testutil.AfterTest(t)

	clus := integration.NewClusterV3(t, &integration.ClusterConfig{Size: 3})
	defer clus.Terminate(t)

	capi := clientv3.NewCluster(clus.RandClient())

	urls := []string{"http://127.0.0.1:1234"}
	resp, err := capi.MemberAdd(context.Background(), urls)
	if err != nil {
		t.Fatalf("failed to add member %v", err)
	}
	if !reflect.DeepEqual(resp.Member.PeerURLs, urls) {
		// Bug fix: got/want arguments were swapped in the original message,
		// which printed the expected value as the observed one.
		t.Errorf("urls = %v, want %v", resp.Member.PeerURLs, urls)
	}
}
// TestMemberRemove removes one member (other than the one the client talks to)
// from a 3-node cluster and verifies the membership shrinks to two.
func TestMemberRemove(t *testing.T) {
	defer testutil.AfterTest(t)
	clus := integration.NewClusterV3(t, &integration.ClusterConfig{Size: 3})
	defer clus.Terminate(t)
	// Use a fixed client (member 1) so we can avoid removing the member we are
	// connected to, which would break the subsequent MemberList call.
	capi := clientv3.NewCluster(clus.Client(1))
	resp, err := capi.MemberList(context.Background())
	if err != nil {
		t.Fatalf("failed to list member %v", err)
	}
	rmvID := resp.Members[0].ID
	// indexes in capi member list don't necessarily match cluster member list;
	// find member that is not the client to remove
	for _, m := range resp.Members {
		mURLs, _ := types.NewURLs(m.PeerURLs)
		if !reflect.DeepEqual(mURLs, clus.Members[1].ServerConfig.PeerURLs) {
			rmvID = m.ID
			break
		}
	}
	_, err = capi.MemberRemove(context.Background(), rmvID)
	if err != nil {
		t.Fatalf("failed to remove member %v", err)
	}
	// A second MemberList should now report one fewer member.
	resp, err = capi.MemberList(context.Background())
	if err != nil {
		t.Fatalf("failed to list member %v", err)
	}
	if len(resp.Members) != 2 {
		t.Errorf("number of members = %d, want %d", len(resp.Members), 2)
	}
}
// TestMemberUpdate points the first listed member at a new peer URL and
// confirms the change is visible through a subsequent MemberList.
func TestMemberUpdate(t *testing.T) {
	defer testutil.AfterTest(t)

	clus := integration.NewClusterV3(t, &integration.ClusterConfig{Size: 3})
	defer clus.Terminate(t)

	capi := clientv3.NewCluster(clus.RandClient())

	resp, err := capi.MemberList(context.Background())
	if err != nil {
		t.Fatalf("failed to list member %v", err)
	}

	urls := []string{"http://127.0.0.1:1234"}
	_, err = capi.MemberUpdate(context.Background(), resp.Members[0].ID, urls)
	if err != nil {
		t.Fatalf("failed to update member %v", err)
	}

	resp, err = capi.MemberList(context.Background())
	if err != nil {
		t.Fatalf("failed to list member %v", err)
	}
	if !reflect.DeepEqual(resp.Members[0].PeerURLs, urls) {
		// Bug fix: got/want arguments were swapped in the original message,
		// which printed the expected value as the observed one.
		t.Errorf("urls = %v, want %v", resp.Members[0].PeerURLs, urls)
	}
}
| {
"content_hash": "1225119e8e315ca45cf6fcc487cbc655",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 76,
"avg_line_length": 25.756521739130434,
"alnum_prop": 0.6846725185685347,
"repo_name": "zaphod-concur/etcd",
"id": "b5bdc59301552ad63a268347d7ba2ae3c2f409cb",
"size": "3556",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clientv3/integration/cluster_test.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "51"
},
{
"name": "Go",
"bytes": "2852168"
},
{
"name": "Makefile",
"bytes": "1055"
},
{
"name": "PowerShell",
"bytes": "1782"
},
{
"name": "Protocol Buffer",
"bytes": "40568"
},
{
"name": "Shell",
"bytes": "25599"
}
],
"symlink_target": ""
} |
namespace Petstore.Models
{
using System;
using System.Linq;
using System.Collections.Generic;
using Newtonsoft.Json;
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
using Microsoft.Rest.Azure;
/// <summary>
/// The URIs that are used to perform a retrieval of a public blob, queue
/// or table object.
/// </summary>
public partial class Endpoints
{
    /// <summary>
    /// Initializes a new instance of the Endpoints class with all endpoints
    /// unset.
    /// </summary>
    public Endpoints() { }
    /// <summary>
    /// Initializes a new instance of the Endpoints class.
    /// </summary>
    /// <param name="blob">URI of the blob service endpoint.</param>
    /// <param name="queue">URI of the queue service endpoint.</param>
    /// <param name="table">URI of the table service endpoint.</param>
    /// <param name="file">URI of the file service endpoint.</param>
    public Endpoints(string blob = default(string), string queue = default(string), string table = default(string), string file = default(string))
    {
        Blob = blob;
        Queue = queue;
        Table = table;
        File = file;
    }
    /// <summary>
    /// Gets or sets the blob endpoint.
    /// </summary>
    [JsonProperty(PropertyName = "blob")]
    public string Blob { get; set; }
    /// <summary>
    /// Gets or sets the queue endpoint.
    /// </summary>
    [JsonProperty(PropertyName = "queue")]
    public string Queue { get; set; }
    /// <summary>
    /// Gets or sets the table endpoint.
    /// </summary>
    [JsonProperty(PropertyName = "table")]
    public string Table { get; set; }
    /// <summary>
    /// Gets or sets the file endpoint.
    /// </summary>
    [JsonProperty(PropertyName = "file")]
    public string File { get; set; }
}
}
| {
"content_hash": "4f361bb2f1d46af810cd1e2c7722ea72",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 150,
"avg_line_length": 28.96551724137931,
"alnum_prop": 0.5523809523809524,
"repo_name": "sharadagarwal/autorest",
"id": "f9b8d67d127d3b913a752795fefde388b94530a7",
"size": "1681",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Samples/azure-storage/Azure.CSharp/Models/Endpoints.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "12942"
},
{
"name": "C#",
"bytes": "11450022"
},
{
"name": "CSS",
"bytes": "110"
},
{
"name": "HTML",
"bytes": "274"
},
{
"name": "Java",
"bytes": "4693719"
},
{
"name": "JavaScript",
"bytes": "4685941"
},
{
"name": "PowerShell",
"bytes": "29614"
},
{
"name": "Python",
"bytes": "2274436"
},
{
"name": "Ruby",
"bytes": "232193"
},
{
"name": "Shell",
"bytes": "423"
},
{
"name": "TypeScript",
"bytes": "179577"
}
],
"symlink_target": ""
} |
---
description: Learn how to optimize your use of device mapper driver.
keywords: container, storage, driver, device mapper
title: Use the Device Mapper storage driver
redirect_from:
- /engine/userguide/storagedriver/device-mapper-driver/
---
Device Mapper is a kernel-based framework that underpins many advanced
volume management technologies on Linux. Docker's `devicemapper` storage driver
leverages the thin provisioning and snapshotting capabilities of this framework
for image and container management. This article refers to the Device Mapper
storage driver as `devicemapper`, and the kernel framework as _Device Mapper_.
For the systems where it is supported, `devicemapper` support is included in
the Linux kernel. However, specific configuration is required to use it with
Docker.
The `devicemapper` driver uses block devices dedicated to Docker and operates at
the block level, rather than the file level. These devices can be extended by
adding physical storage to your Docker host, and they perform better than using
a filesystem at the operating system (OS) level.
## Prerequisites
- `devicemapper` is supported on Docker Engine - Community running on CentOS, Fedora,
SLES 15, Ubuntu, Debian, or RHEL.
- `devicemapper` requires the `lvm2` and `device-mapper-persistent-data` packages
to be installed.
- Changing the storage driver makes any containers you have already
created inaccessible on the local system. Use `docker save` to save containers,
and push existing images to Docker Hub or a private repository, so you do
not need to recreate them later.
## Configure Docker with the `devicemapper` storage driver
Before following these procedures, you must first meet all the
[prerequisites](#prerequisites).
### Configure `loop-lvm` mode for testing
This configuration is only appropriate for testing. The `loop-lvm` mode makes
use of a 'loopback' mechanism that allows files on the local disk to be
read from and written to as if they were an actual physical disk or block
device.
However, the addition of the loopback mechanism, and interaction with the OS
filesystem layer, means that IO operations can be slow and resource-intensive.
Use of loopback devices can also introduce race conditions.
However, setting up `loop-lvm` mode can help identify basic issues (such as
missing user space packages, kernel drivers, etc.) ahead of attempting the more
complex set up required to enable `direct-lvm` mode. `loop-lvm` mode should
therefore only be used to perform rudimentary testing prior to configuring
`direct-lvm`.
For production systems, see
[Configure direct-lvm mode for production](#configure-direct-lvm-mode-for-production).
1. Stop Docker.
```console
$ sudo systemctl stop docker
```
2. Edit `/etc/docker/daemon.json`. If it does not yet exist, create it. Assuming
that the file was empty, add the following contents.
```json
{
"storage-driver": "devicemapper"
}
```
See all storage options for each storage driver in the
[daemon reference documentation](/engine/reference/commandline/dockerd/#options-per-storage-driver)
Docker does not start if the `daemon.json` file contains badly-formed JSON.
3. Start Docker.
```console
$ sudo systemctl start docker
```
4. Verify that the daemon is using the `devicemapper` storage driver. Use the
`docker info` command and look for `Storage Driver`.
```console
$ docker info
Containers: 0
Running: 0
Paused: 0
Stopped: 0
Images: 0
Server Version: 17.03.1-ce
Storage Driver: devicemapper
Pool Name: docker-202:1-8413957-pool
Pool Blocksize: 65.54 kB
Base Device Size: 10.74 GB
Backing Filesystem: xfs
Data file: /dev/loop0
Metadata file: /dev/loop1
Data Space Used: 11.8 MB
Data Space Total: 107.4 GB
Data Space Available: 7.44 GB
Metadata Space Used: 581.6 KB
Metadata Space Total: 2.147 GB
Metadata Space Available: 2.147 GB
Thin Pool Minimum Free Space: 10.74 GB
Udev Sync Supported: true
Deferred Removal Enabled: false
Deferred Deletion Enabled: false
Deferred Deleted Device Count: 0
Data loop file: /var/lib/docker/devicemapper/data
Metadata loop file: /var/lib/docker/devicemapper/metadata
Library Version: 1.02.135-RHEL7 (2016-11-16)
<...>
```
This host is running in `loop-lvm` mode, which is **not** supported on
production systems. This is indicated by the fact that the `Data loop file`
and a `Metadata loop file` are on files under
`/var/lib/docker/devicemapper`. These are loopback-mounted
sparse files. For production systems, see
[Configure direct-lvm mode for production](#configure-direct-lvm-mode-for-production).
### Configure direct-lvm mode for production
Production hosts using the `devicemapper` storage driver must use `direct-lvm`
mode. This mode uses block devices to create the thin pool. This is faster than
using loopback devices, uses system resources more efficiently, and block
devices can grow as needed. However, more setup is required than in `loop-lvm`
mode.
After you have satisfied the [prerequisites](#prerequisites), follow the steps
below to configure Docker to use the `devicemapper` storage driver in
`direct-lvm` mode.
> **Warning**: Changing the storage driver makes any containers you have already
created inaccessible on the local system. Use `docker save` to save containers,
and push existing images to Docker Hub or a private repository, so you do not
need to recreate them later.
#### Allow Docker to configure direct-lvm mode
Docker can manage the block device for you, simplifying configuration of `direct-lvm`
mode. **This is appropriate for fresh Docker setups only.** You can only use a
single block device. If you need to use multiple block devices,
[configure direct-lvm mode manually](#configure-direct-lvm-mode-manually) instead.
The following new configuration options are available:
| Option | Description | Required? | Default | Example |
|:--------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------|:--------|:-----------------------------------|
| `dm.directlvm_device` | The path to the block device to configure for `direct-lvm`. | Yes | | `dm.directlvm_device="/dev/xvdf"` |
| `dm.thinp_percent` | The percentage of space to use for storage from the passed in block device. | No | 95 | `dm.thinp_percent=95` |
| `dm.thinp_metapercent` | The percentage of space to use for metadata storage from the passed-in block device. | No | 1 | `dm.thinp_metapercent=1` |
| `dm.thinp_autoextend_threshold` | The threshold for when lvm should automatically extend the thin pool as a percentage of the total storage space. | No | 80 | `dm.thinp_autoextend_threshold=80` |
| `dm.thinp_autoextend_percent` | The percentage to increase the thin pool by when an autoextend is triggered. | No | 20 | `dm.thinp_autoextend_percent=20` |
| `dm.directlvm_device_force` | Whether to format the block device even if a filesystem already exists on it. If set to `false` and a filesystem is present, an error is logged and the filesystem is left intact. | No | false | `dm.directlvm_device_force=true` |
Edit the `daemon.json` file and set the appropriate options, then restart Docker
for the changes to take effect. The following `daemon.json` configuration sets all of the
options in the table above.
```json
{
"storage-driver": "devicemapper",
"storage-opts": [
"dm.directlvm_device=/dev/xdf",
"dm.thinp_percent=95",
"dm.thinp_metapercent=1",
"dm.thinp_autoextend_threshold=80",
"dm.thinp_autoextend_percent=20",
"dm.directlvm_device_force=false"
]
}
```
See all storage options for each storage driver in the
[daemon reference documentation](/engine/reference/commandline/dockerd/#options-per-storage-driver)
Restart Docker for the changes to take effect. Docker invokes the commands to
configure the block device for you.
> **Warning**: Changing these values after Docker has prepared the block device
> for you is not supported and causes an error.
You still need to [perform periodic maintenance tasks](#manage-devicemapper).
#### Configure direct-lvm mode manually
The procedure below creates a logical volume configured as a thin pool to
use as backing for the storage pool. It assumes that you have a spare block
device at `/dev/xvdf` with enough free space to complete the task. The device
identifier and volume sizes may be different in your environment and you
should substitute your own values throughout the procedure. The procedure also
assumes that the Docker daemon is in the `stopped` state.
1. Identify the block device you want to use. The device is located under
`/dev/` (such as `/dev/xvdf`) and needs enough free space to store the
images and container layers for the workloads that host runs.
A solid state drive is ideal.
2. Stop Docker.
```console
$ sudo systemctl stop docker
```
3. Install the following packages:
- **RHEL / CentOS**: `device-mapper-persistent-data`, `lvm2`, and all
dependencies
- **Ubuntu / Debian / SLES 15**: `thin-provisioning-tools`, `lvm2`, and all
dependencies
4. Create a physical volume on your block device from step 1, using the
`pvcreate` command. Substitute your device name for `/dev/xvdf`.
> **Warning**: The next few steps are destructive, so be sure that you have
> specified the correct device!
```console
$ sudo pvcreate /dev/xvdf
Physical volume "/dev/xvdf" successfully created.
```
5. Create a `docker` volume group on the same device, using the `vgcreate`
command.
```console
$ sudo vgcreate docker /dev/xvdf
Volume group "docker" successfully created
```
6. Create two logical volumes named `thinpool` and `thinpoolmeta` using the
`lvcreate` command. The last parameter specifies the amount of free space
to allow for automatic expanding of the data or metadata if space runs low,
as a temporary stop-gap. These are the recommended values.
```console
$ sudo lvcreate --wipesignatures y -n thinpool docker -l 95%VG
Logical volume "thinpool" created.
$ sudo lvcreate --wipesignatures y -n thinpoolmeta docker -l 1%VG
Logical volume "thinpoolmeta" created.
```
7. Convert the volumes to a thin pool and a storage location for metadata for
the thin pool, using the `lvconvert` command.
```console
$ sudo lvconvert -y \
--zero n \
-c 512K \
--thinpool docker/thinpool \
--poolmetadata docker/thinpoolmeta
WARNING: Converting logical volume docker/thinpool and docker/thinpoolmeta to
thin pool's data and metadata volumes with metadata wiping.
THIS WILL DESTROY CONTENT OF LOGICAL VOLUME (filesystem etc.)
Converted docker/thinpool to thin pool.
```
8. Configure autoextension of thin pools via an `lvm` profile.
```console
$ sudo vi /etc/lvm/profile/docker-thinpool.profile
```
9. Specify `thin_pool_autoextend_threshold` and `thin_pool_autoextend_percent`
values.
`thin_pool_autoextend_threshold` is the percentage of space used before `lvm`
attempts to autoextend the available space (100 = disabled, not recommended).
`thin_pool_autoextend_percent` is the amount of space to add to the device
when automatically extending (0 = disabled).
The example below adds 20% more capacity when the disk usage reaches
80%.
```none
activation {
thin_pool_autoextend_threshold=80
thin_pool_autoextend_percent=20
}
```
Save the file.
10. Apply the LVM profile, using the `lvchange` command.
```console
$ sudo lvchange --metadataprofile docker-thinpool docker/thinpool
Logical volume docker/thinpool changed.
```
11. Ensure monitoring of the logical volume is enabled.
```console
$ sudo lvs -o+seg_monitor
LV VG Attr LSize Pool Origin Data% Meta% Move Log Cpy%Sync Convert Monitor
thinpool docker twi-a-t--- 95.00g 0.00 0.01 not monitored
```
If the output in the `Monitor` column reports, as above, that the volume is
`not monitored`, then monitoring needs to be explicitly enabled. Without
this step, automatic extension of the logical volume will not occur,
regardless of any settings in the applied profile.
```console
$ sudo lvchange --monitor y docker/thinpool
```
Double check that monitoring is now enabled by running the
`sudo lvs -o+seg_monitor` command a second time. The `Monitor` column
should now report the logical volume is being `monitored`.
12. If you have ever run Docker on this host before, or if `/var/lib/docker/`
exists, move it out of the way so that Docker can use the new LVM pool to
store the contents of image and containers.
```console
$ sudo su -
# mkdir /var/lib/docker.bk
# mv /var/lib/docker/* /var/lib/docker.bk
# exit
```
If any of the following steps fail and you need to restore, you can remove
`/var/lib/docker` and replace it with `/var/lib/docker.bk`.
13. Edit `/etc/docker/daemon.json` and configure the options needed for the
`devicemapper` storage driver. If the file was previously empty, it should
now contain the following contents:
```json
{
"storage-driver": "devicemapper",
"storage-opts": [
"dm.thinpooldev=/dev/mapper/docker-thinpool",
"dm.use_deferred_removal=true",
"dm.use_deferred_deletion=true"
]
}
```
14. Start Docker.
**systemd**:
```console
$ sudo systemctl start docker
```
**service**:
```console
$ sudo service docker start
```
15. Verify that Docker is using the new configuration using `docker info`.
```console
$ docker info
Containers: 0
Running: 0
Paused: 0
Stopped: 0
Images: 0
Server Version: 17.03.1-ce
Storage Driver: devicemapper
Pool Name: docker-thinpool
Pool Blocksize: 524.3 kB
Base Device Size: 10.74 GB
Backing Filesystem: xfs
Data file:
Metadata file:
Data Space Used: 19.92 MB
Data Space Total: 102 GB
Data Space Available: 102 GB
Metadata Space Used: 147.5 kB
Metadata Space Total: 1.07 GB
Metadata Space Available: 1.069 GB
Thin Pool Minimum Free Space: 10.2 GB
Udev Sync Supported: true
Deferred Removal Enabled: true
Deferred Deletion Enabled: true
Deferred Deleted Device Count: 0
Library Version: 1.02.135-RHEL7 (2016-11-16)
<...>
```
If Docker is configured correctly, the `Data file` and `Metadata file` is
blank, and the pool name is `docker-thinpool`.
16. After you have verified that the configuration is correct, you can remove the
`/var/lib/docker.bk` directory which contains the previous configuration.
```console
$ sudo rm -rf /var/lib/docker.bk
```
## Manage devicemapper
### Monitor the thin pool
Do not rely on LVM auto-extension alone. The volume group
automatically extends, but the volume can still fill up. You can monitor
free space on the volume using `lvs` or `lvs -a`. Consider using a monitoring
tool at the OS level, such as Nagios.
To view the LVM logs, you can use `journalctl`:
```console
$ sudo journalctl -fu dm-event.service
```
If you run into repeated problems with thin pool, you can set the storage option
`dm.min_free_space` to a value (representing a percentage) in
`/etc/docker/daemon.json`. For instance, setting it to `10` ensures
that operations fail with a warning when the free space is at or near 10%.
See the
[storage driver options in the Engine daemon reference](/engine/reference/commandline/dockerd/#daemon-storage-driver){: target="_blank" rel="noopener" class="_"}.
### Increase capacity on a running device
You can increase the capacity of the pool on a running thin-pool device. This is
useful if the data's logical volume is full and the volume group is at full
capacity. The specific procedure depends on whether you are using a
[loop-lvm thin pool](#resize-a-loop-lvm-thin-pool) or a
[direct-lvm thin pool](#resize-a-direct-lvm-thin-pool).
#### Resize a loop-lvm thin pool
The easiest way to resize a `loop-lvm` thin pool is to
[use the device_tool utility](#use-the-device_tool-utility),
but you can [use operating system utilities](#use-operating-system-utilities)
instead.
##### Use the device_tool utility
A community-contributed script called `device_tool.go` is available in the
[moby/moby](https://github.com/moby/moby/tree/master/contrib/docker-device-tool)
Github repository. You can use this tool to resize a `loop-lvm` thin pool,
avoiding the long process above. This tool is not guaranteed to work, but you
should only be using `loop-lvm` on non-production systems.
If you do not want to use `device_tool`, you can [resize the thin pool manually](#use-operating-system-utilities) instead.
1. To use the tool, clone the Github repository, change to the
`contrib/docker-device-tool`, and follow the instructions in the `README.md`
to compile the tool.
2. Use the tool. The following example resizes the thin pool to 200GB.
```console
$ ./device_tool resize 200GB
```
##### Use operating system utilities
If you do not want to [use the device-tool utility](#use-the-device_tool-utility),
you can resize a `loop-lvm` thin pool manually using the following procedure.
In `loop-lvm` mode, a loopback device is used to store the data, and another
to store the metadata. `loop-lvm` mode is only supported for testing, because
it has significant performance and stability drawbacks.
If you are using `loop-lvm` mode, the output of `docker info` shows file
paths for `Data loop file` and `Metadata loop file`:
```console
$ docker info |grep 'loop file'
Data loop file: /var/lib/docker/devicemapper/data
Metadata loop file: /var/lib/docker/devicemapper/metadata
```
Follow these steps to increase the size of the thin pool. In this example, the
thin pool is 100 GB, and is increased to 200 GB.
1. List the sizes of the devices.
```console
$ sudo ls -lh /var/lib/docker/devicemapper/
total 1175492
-rw------- 1 root root 100G Mar 30 05:22 data
-rw------- 1 root root 2.0G Mar 31 11:17 metadata
```
2. Increase the size of the `data` file to 200 GB using the `truncate` command,
which is used to increase **or** decrease the size of a file. Note that
decreasing the size is a destructive operation.
```console
$ sudo truncate -s 200G /var/lib/docker/devicemapper/data
```
3. Verify the file size changed.
```console
$ sudo ls -lh /var/lib/docker/devicemapper/
total 1.2G
-rw------- 1 root root 200G Apr 14 08:47 data
-rw------- 1 root root 2.0G Apr 19 13:27 metadata
```
4. The loopback file has changed on disk but not in memory. List the size of
the loopback device in memory, in GB. Reload it, then list the size again.
After the reload, the size is 200 GB.
```console
$ echo $[ $(sudo blockdev --getsize64 /dev/loop0) / 1024 / 1024 / 1024 ]
100
$ sudo losetup -c /dev/loop0
$ echo $[ $(sudo blockdev --getsize64 /dev/loop0) / 1024 / 1024 / 1024 ]
200
```
5. Reload the devicemapper thin pool.
a. Get the pool name first. The pool name is the first field, delimited by
` :`. This command extracts it.
$ sudo dmsetup status | grep ' thin-pool ' | awk -F ': ' {'print $1'}
docker-8:1-123141-pool
b. Dump the device mapper table for the thin pool.
$ sudo dmsetup table docker-8:1-123141-pool
0 209715200 thin-pool 7:1 7:0 128 32768 1 skip_block_zeroing
c. Calculate the total sectors of the thin pool using the second field
of the output. The number is expressed in 512-byte sectors. A 100G file has
209715200 512-byte sectors. If you double this number to 200G, you get
419430400 512-byte sectors.
d. Reload the thin pool with the new sector number, using the following
three `dmsetup` commands.
$ sudo dmsetup suspend docker-8:1-123141-pool
$ sudo dmsetup reload docker-8:1-123141-pool --table '0 419430400 thin-pool 7:1 7:0 128 32768 1 skip_block_zeroing'
$ sudo dmsetup resume docker-8:1-123141-pool
#### Resize a direct-lvm thin pool
To extend a `direct-lvm` thin pool, you need to first attach a new block device
to the Docker host, and make note of the name assigned to it by the kernel. In
this example, the new block device is `/dev/xvdg`.
Follow this procedure to extend a `direct-lvm` thin pool, substituting your
block device and other parameters to suit your situation.
1. Gather information about your volume group.
Use the `pvdisplay` command to find the physical block devices currently in
use by your thin pool, and the volume group's name.
```console
$ sudo pvdisplay |grep 'VG Name'
PV Name /dev/xvdf
VG Name docker
```
In the following steps, substitute your block device or volume group name as
appropriate.
2. Extend the volume group, using the `vgextend` command with the `VG Name`
from the previous step, and the name of your **new** block device.
```console
$ sudo vgextend docker /dev/xvdg
Physical volume "/dev/xvdg" successfully created.
Volume group "docker" successfully extended
```
3. Extend the `docker/thinpool` logical volume. This command uses 100% of the
volume right away, without auto-extend. To extend the metadata thinpool
instead, use `docker/thinpool_tmeta`.
```console
$ sudo lvextend -l+100%FREE -n docker/thinpool
Size of logical volume docker/thinpool_tdata changed from 95.00 GiB (24319 extents) to 198.00 GiB (50688 extents).
Logical volume docker/thinpool_tdata successfully resized.
```
4. Verify the new thin pool size using the `Data Space Available` field in the
output of `docker info`. If you extended the `docker/thinpool_tmeta` logical
volume instead, look for `Metadata Space Available`.
```bash
Storage Driver: devicemapper
Pool Name: docker-thinpool
Pool Blocksize: 524.3 kB
Base Device Size: 10.74 GB
Backing Filesystem: xfs
Data file:
Metadata file:
Data Space Used: 212.3 MB
Data Space Total: 212.6 GB
Data Space Available: 212.4 GB
Metadata Space Used: 286.7 kB
Metadata Space Total: 1.07 GB
Metadata Space Available: 1.069 GB
<...>
```
### Activate the `devicemapper` after reboot
If you reboot the host and find that the `docker` service failed to start,
look for the error, "Non existing device". You need to re-activate the
logical volumes with this command:
```console
$ sudo lvchange -ay docker/thinpool
```
## How the `devicemapper` storage driver works
> **Warning**: Do not directly manipulate any files or directories within
> `/var/lib/docker/`. These files and directories are managed by Docker.
Use the `lsblk` command to see the devices and their pools, from the operating
system's point of view:
```console
$ sudo lsblk
NAME MAJ:MIN RM SIZE RO TYPE MOUNTPOINT
xvda 202:0 0 8G 0 disk
└─xvda1 202:1 0 8G 0 part /
xvdf 202:80 0 100G 0 disk
├─docker-thinpool_tmeta 253:0 0 1020M 0 lvm
│ └─docker-thinpool 253:2 0 95G 0 lvm
└─docker-thinpool_tdata 253:1 0 95G 0 lvm
└─docker-thinpool 253:2 0 95G 0 lvm
```
Use the `mount` command to see the mount-point Docker is using:
```console
$ mount |grep devicemapper
/dev/xvda1 on /var/lib/docker/devicemapper type xfs (rw,relatime,seclabel,attr2,inode64,noquota)
```
When you use `devicemapper`, Docker stores image and layer contents in the
thinpool, and exposes them to containers by mounting them under
subdirectories of `/var/lib/docker/devicemapper/`.
### Image and container layers on-disk
The `/var/lib/docker/devicemapper/metadata/` directory contains metadata about
the Devicemapper configuration itself and about each image and container layer
that exist. The `devicemapper` storage driver uses snapshots, and this metadata
includes information about those snapshots. These files are in JSON format.
The `/var/lib/docker/devicemapper/mnt/` directory contains a mount point for each image
and container layer that exists. Image layer mount points are empty, but a
container's mount point shows the container's filesystem as it appears from
within the container.
### Image layering and sharing
The `devicemapper` storage driver uses dedicated block devices rather than
formatted filesystems, and operates on files at the block level for maximum
performance during copy-on-write (CoW) operations.
#### Snapshots
Another feature of `devicemapper` is its use of snapshots (also sometimes called
_thin devices_ or _virtual devices_), which store the differences introduced in
each layer as very small, lightweight thin pools. Snapshots provide many
benefits:
- Layers which are shared in common between containers are only stored on disk
once, unless they are writable. For instance, if you have 10 different
images which are all based on `alpine`, the `alpine` image and all its
parent images are only stored once each on disk.
- Snapshots are an implementation of a copy-on-write (CoW) strategy. This means
that a given file or directory is only copied to the container's writable
layer when it is modified or deleted by that container.
- Because `devicemapper` operates at the block level, multiple blocks in a
writable layer can be modified simultaneously.
- Snapshots can be backed up using standard OS-level backup utilities. Just
make a copy of `/var/lib/docker/devicemapper/`.
#### Devicemapper workflow
When you start Docker with the `devicemapper` storage driver, all objects
related to image and container layers are stored in
`/var/lib/docker/devicemapper/`, which is backed by one or more block-level
devices, either loopback devices (testing only) or physical disks.
- The _base device_ is the lowest-level object. This is the thin pool itself.
You can examine it using `docker info`. It contains a filesystem. This base
device is the starting point for every image and container layer. The base
device is a Device Mapper implementation detail, rather than a Docker layer.
- Metadata about the base device and each image or container layer is stored in
`/var/lib/docker/devicemapper/metadata/` in JSON format. These layers are
copy-on-write snapshots, which means that they are empty until they diverge
from their parent layers.
- Each container's writable layer is mounted on a mountpoint in
`/var/lib/docker/devicemapper/mnt/`. An empty directory exists for each
read-only image layer and each stopped container.
Each image layer is a snapshot of the layer below it. The lowest layer of each
image is a snapshot of the base device that exists in the pool. When you run a
container, it is a snapshot of the image the container is based on. The following
example shows a Docker host with two running containers. The first is a `ubuntu`
container and the second is a `busybox` container.

## How container reads and writes work with `devicemapper`
### Reading files
With `devicemapper`, reads happen at the block level. The diagram below shows
the high level process for reading a single block (`0x44f`) in an example
container.

An application makes a read request for block `0x44f` in the container. Because
the container is a thin snapshot of an image, it doesn't have the block, but it
has a pointer to the block on the nearest parent image where it does exist, and
it reads the block from there. The block now exists in the container's memory.
### Writing files
**Writing a new file**: With the `devicemapper` driver, writing new data to a
container is accomplished by an *allocate-on-demand* operation. Each block of
the new file is allocated in the container's writable layer and the block is
written there.
**Updating an existing file**: The relevant block of the file is read from the
nearest layer where it exists. When the container writes the file, only the
modified blocks are written to the container's writable layer.
**Deleting a file or directory**: When you delete a file or directory in a
container's writable layer, or when an image layer deletes a file that exists
in its parent layer, the `devicemapper` storage driver intercepts further read
attempts on that file or directory and responds that the file or directory does
not exist.
**Writing and then deleting a file**: If a container writes to a file and later
deletes the file, all of those operations happen in the container's writable
layer. In that case, if you are using `direct-lvm`, the blocks are freed. If you
use `loop-lvm`, the blocks may not be freed. This is another reason not to use
`loop-lvm` in production.
## Device Mapper and Docker performance
- **`allocate-on-demand` performance impact**:
The `devicemapper` storage driver uses an `allocate-on-demand` operation to
allocate new blocks from the thin pool into a container's writable layer.
Each block is 64KB, so this is the minimum amount of space that is used
for a write.
- **Copy-on-write performance impact**: The first time a container modifies a
specific block, that block is written to the container's writable layer.
Because these writes happen at the level of the block rather than the file,
performance impact is minimized. However, writing a large number of blocks can
still negatively impact performance, and the `devicemapper` storage driver may
actually perform worse than other storage drivers in this scenario. For
write-heavy workloads, you should use data volumes, which bypass the storage
driver completely.
### Performance best practices
Keep these things in mind to maximize performance when using the `devicemapper`
storage driver.
- **Use `direct-lvm`**: The `loop-lvm` mode is not performant and should never
be used in production.
- **Use fast storage**: Solid-state drives (SSDs) provide faster reads and
writes than spinning disks.
- **Memory usage**: the `devicemapper` uses more memory than some other storage
drivers. Each launched container loads one or more copies of its files into
memory, depending on how many blocks of the same file are being modified at
the same time. Due to the memory pressure, the `devicemapper` storage driver
may not be the right choice for certain workloads in high-density use cases.
- **Use volumes for write-heavy workloads**: Volumes provide the best and most
predictable performance for write-heavy workloads. This is because they bypass
the storage driver and do not incur any of the potential overheads introduced
by thin provisioning and copy-on-write. Volumes have other benefits, such as
allowing you to share data among containers and persisting even when no
running container is using them.
- **Note**: when using `devicemapper` and the `json-file` log driver, the log
files generated by a container are still stored in Docker's dataroot directory,
by default `/var/lib/docker`. If your containers generate lots of log messages,
this may lead to increased disk usage or the inability to manage your system due
to a full disk. You can configure a
[log driver](../../config/containers/logging/configure.md) to store your container
logs externally.
## Related Information
- [Volumes](../volumes.md)
- [Understand images, containers, and storage drivers](index.md)
- [Select a storage driver](select-storage-driver.md)
| {
"content_hash": "2d29e68ba02a6a7fe043dc5de278d545",
"timestamp": "",
"source": "github",
"line_count": 836,
"max_line_length": 279,
"avg_line_length": 39.37081339712919,
"alnum_prop": 0.7004010451479613,
"repo_name": "thaJeztah/docker.github.io",
"id": "b3daec7be0f4c301bf035930b2c06d1518175039",
"size": "32940",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "storage/storagedriver/device-mapper-driver.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4032"
},
{
"name": "Dockerfile",
"bytes": "6346"
},
{
"name": "Go",
"bytes": "13662"
},
{
"name": "HCL",
"bytes": "3185"
},
{
"name": "HTML",
"bytes": "99217"
},
{
"name": "JavaScript",
"bytes": "17521"
},
{
"name": "Makefile",
"bytes": "2459"
},
{
"name": "Ruby",
"bytes": "17262"
},
{
"name": "SCSS",
"bytes": "58511"
},
{
"name": "Shell",
"bytes": "1712"
},
{
"name": "XSLT",
"bytes": "3132"
}
],
"symlink_target": ""
} |
// +build freebsd
package main
import (
"os"
"os/signal"
"github.com/containerd/containerd/sys/reaper"
runc "github.com/containerd/go-runc"
"github.com/containerd/ttrpc"
)
// setupSignals creates a new signal handler for all signals and sets the shim as a
// sub-reaper so that the container processes are reparented
func setupSignals() (chan os.Signal, error) {
	// Route runc's process waits through the shared reaper monitor so the
	// shim (as sub-reaper) observes child exits.
	runc.Monitor = reaper.Default
	sigCh := make(chan os.Signal, 2048)
	signal.Notify(sigCh)
	return sigCh, nil
}
func newServer() (*ttrpc.Server, error) {
	// FreeBSD's socket-credential syscalls differ slightly from Linux's, so
	// credentials are omitted here; they can be implemented once FreeBSD is
	// actually supported, and the build keeps working in the meantime.
	srv, err := ttrpc.NewServer()
	return srv, err
}
| {
"content_hash": "0764ad710fb83a2a3caa264bf8f4cf6d",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 83,
"avg_line_length": 25.90909090909091,
"alnum_prop": 0.7426900584795322,
"repo_name": "vdemeester/containerd",
"id": "1fd88546a180a64de0059ce1159bb580dd213b45",
"size": "1449",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cmd/containerd-shim/shim_freebsd.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "595"
},
{
"name": "Go",
"bytes": "2612436"
},
{
"name": "Makefile",
"bytes": "11075"
},
{
"name": "Shell",
"bytes": "11673"
}
],
"symlink_target": ""
} |
package com.puppetlabs.puppetdb.javaclient.model;
import static com.puppetlabs.puppetdb.javaclient.query.Query.field;
import java.lang.reflect.Type;
import java.util.Date;
import java.util.List;
import com.google.gson.reflect.TypeToken;
import com.puppetlabs.puppetdb.javaclient.query.Field;
/**
 * A POJO that represents a PuppetDB Node.
 * <p>
 * NOTE(review): instance field names are snake_case, apparently to mirror the
 * JSON keys of the PuppetDB REST API (the Gson {@code TypeToken} below
 * suggests Gson (de)serialization, which maps JSON keys to field names by
 * default) - do not rename the fields; confirm no custom naming strategy is
 * configured before changing them.
 */
public class Node extends Entity {
	// Queryable field descriptors, for use with the query builder.
	@SuppressWarnings("javadoc")
	public static final Field<Node> NAME = field("name");
	@SuppressWarnings("javadoc")
	public static final Field<Node> DEACTIVATED = field("deactivated");
	@SuppressWarnings("javadoc")
	public static final Field<Node> CATALOG_TIMESTAMP = field("catalog_timestamp");
	@SuppressWarnings("javadoc")
	public static final Field<Node> FACTS_TIMESTAMP = field("facts_timestamp");
	@SuppressWarnings("javadoc")
	public static final Field<Node> REPORT_TIMESTAMP = field("report_timestamp");
	// @fmtOff
	/**
	 * A type representing a {@link List} of {@link Node} instances
	 */
	public static final Type LIST = new TypeToken<List<Node>>() {}.getType();
	// @fmtOn
	private String name;
	private Date deactivated;
	private Date catalog_timestamp;
	private Date facts_timestamp;
	private Date report_timestamp;
	/**
	 * @return the catalog_timestamp
	 */
	public Date getCatalogTimestamp() {
		return catalog_timestamp;
	}
	/**
	 * @return the deactivated
	 */
	public Date getDeactivated() {
		return deactivated;
	}
	/**
	 * @return the facts_timestamp
	 */
	public Date getFactsTimestamp() {
		return facts_timestamp;
	}
	/**
	 * @return the name
	 */
	public String getName() {
		return name;
	}
	/**
	 * @return the report_timestamp
	 */
	public Date getReportTimestamp() {
		return report_timestamp;
	}
	/**
	 * @param catalog_timestamp
	 *            the catalog_timestamp to set
	 */
	public void setCatalogTimestamp(Date catalog_timestamp) {
		this.catalog_timestamp = catalog_timestamp;
	}
	/**
	 * @param deactivated
	 *            the deactivated to set
	 */
	public void setDeactivated(Date deactivated) {
		this.deactivated = deactivated;
	}
	/**
	 * @param facts_timestamp
	 *            the facts_timestamp to set
	 */
	public void setFactsTimestamp(Date facts_timestamp) {
		this.facts_timestamp = facts_timestamp;
	}
	/**
	 * @param name
	 *            the name to set
	 */
	public void setName(String name) {
		this.name = name;
	}
	/**
	 * @param report_timestamp
	 *            the report_timestamp to set
	 */
	public void setReportTimestamp(Date report_timestamp) {
		this.report_timestamp = report_timestamp;
	}
}
| {
"content_hash": "38fd1b110c779630f7b41954bc70d0e6",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 80,
"avg_line_length": 20.829268292682926,
"alnum_prop": 0.6935987509758001,
"repo_name": "thallgren/puppetdb-javaclient",
"id": "148a0a20eb1e10a211dcc123631ac9ba62a19869",
"size": "2935",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/main/java/com/puppetlabs/puppetdb/javaclient/model/Node.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "107047"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
    NetBeans project descriptor for the DisasterResponseTradeStudy J2SE project.
    The jfx-impl.xml build extension hooks JavaFX packaging targets into the
    standard Ant lifecycle (post-jar, run, debug, profile).
    NOTE(review): this file is managed by the NetBeans IDE; prefer editing
    project settings through the IDE rather than by hand.
-->
<project xmlns="http://www.netbeans.org/ns/project/1">
    <type>org.netbeans.modules.java.j2seproject</type>
    <configuration>
        <buildExtensions xmlns="http://www.netbeans.org/ns/ant-build-extender/1">
            <extension file="jfx-impl.xml" id="jfx3">
                <dependency dependsOn="-jfx-copylibs" target="-post-jar"/>
                <dependency dependsOn="-rebase-libs" target="-post-jar"/>
                <dependency dependsOn="jfx-deployment" target="-post-jar"/>
                <dependency dependsOn="jar" target="debug"/>
                <dependency dependsOn="jar" target="profile"/>
                <dependency dependsOn="jar" target="run"/>
            </extension>
        </buildExtensions>
        <data xmlns="http://www.netbeans.org/ns/j2se-project/3">
            <name>DisasterResponseTradeStudy</name>
            <source-roots>
                <root id="src.dir"/>
            </source-roots>
            <test-roots>
                <root id="test.src.dir"/>
            </test-roots>
        </data>
        <spellchecker-wordlist xmlns="http://www.netbeans.org/ns/spellchecker-wordlist/1">
            <word>onboard</word>
            <word>optimator</word>
            <word>Persistence</word>
            <word>Trafficability</word>
        </spellchecker-wordlist>
    </configuration>
</project>
| {
"content_hash": "d9e9ce3448df7443ac9ce5dbfc5f8718",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 90,
"avg_line_length": 44.645161290322584,
"alnum_prop": 0.5657514450867052,
"repo_name": "astropcr/pmasecapstone",
"id": "47b5ef77536412edd3a47cfdcf1cade73c655ff9",
"size": "1384",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nbproject/project.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5546"
},
{
"name": "Java",
"bytes": "403881"
}
],
"symlink_target": ""
} |
using UnityEngine;
namespace HutongGames.PlayMaker.Actions
{
	// Copies the connection properties of the local player (Network.player)
	// into FSM variables, then finishes immediately.
	// NOTE(review): relies on Unity's legacy built-in networking API
	// (UnityEngine.Network), which was removed in newer Unity versions -
	// confirm the project's target Unity version supports it.
	[ActionCategory(ActionCategory.Network)]
	[Tooltip("Get the local network player properties")]
	public class NetworkGetLocalPlayerProperties : FsmStateAction
	{
		// Output variables. Public fields are discovered by PlayMaker via
		// reflection, so their names and types must not change.
		[Tooltip("The IP address of this player.")]
		[UIHint(UIHint.Variable)]
		public FsmString IpAddress;
		[Tooltip("The port of this player.")]
		[UIHint(UIHint.Variable)]
		public FsmInt port;
		[Tooltip("The GUID for this player, used when connecting with NAT punchthrough.")]
		[UIHint(UIHint.Variable)]
		public FsmString guid;
		[Tooltip("The external IP address of the network interface. This will only be populated after some external connection has been made.")]
		[UIHint(UIHint.Variable)]
		public FsmString externalIPAddress;
		[Tooltip("Returns the external port of the network interface. This will only be populated after some external connection has been made.")]
		[UIHint(UIHint.Variable)]
		public FsmInt externalPort;
		// Reset all output variables to "unset" when the action is reset.
		public override void Reset()
		{
			IpAddress = null;
			port = null;
			guid = null;
			externalIPAddress = null;
			externalPort = null;
		}
		// One-shot action: read the local NetworkPlayer's properties into the
		// FSM variables and finish.
		public override void OnEnter()
		{
			IpAddress.Value = Network.player.ipAddress;
			port.Value = Network.player.port;
			guid.Value = Network.player.guid;
			externalIPAddress.Value = Network.player.externalIP;
			externalPort.Value = Network.player.externalPort;
			Finish();
		}
	}
}
#endif | {
"content_hash": "2b5d2f458737268dbcaad4c8ed8dfa6e",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 140,
"avg_line_length": 27.346153846153847,
"alnum_prop": 0.7313642756680732,
"repo_name": "BigBroken/sLord",
"id": "c23e241a5ed2efb03602be1baa42cb4a47430acb",
"size": "1636",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Assets/PlayMaker/Actions/Network/NetworkGetLocalPlayerProperties.cs",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "237298"
},
{
"name": "C#",
"bytes": "2671671"
},
{
"name": "GLSL",
"bytes": "41828"
},
{
"name": "HTML",
"bytes": "1096"
},
{
"name": "JavaScript",
"bytes": "9704"
}
],
"symlink_target": ""
} |
package org.switchyard;
import java.util.List;
import org.switchyard.metadata.Registrant;
import org.switchyard.metadata.qos.Throttling;
import org.switchyard.policy.Policy;
/**
 * Contains runtime details on services and service references registered
 * in SwitchYard. Instances of ServiceMetadata can be created and updated
 * using the ServiceMetadataBuilder class.
 */
public interface ServiceMetadata {
    /**
     * Gets the security configuration attached to this service.
     * @return the service security configuration
     */
    ServiceSecurity getSecurity();
    /**
     * Returns a list of required policies for this service reference.
     * @return list of required policy
     */
    List<Policy> getRequiredPolicies();
    /**
     * Returns a list of policies provided by this service reference.
     * @return list of provided policy
     */
    List<Policy> getProvidedPolicies();
    /**
     * Return the consumer metadata associated with this service.
     * @return consumer metadata
     */
    Registrant getRegistrant();
    /**
     * Return the throttling configuration associated with a service reference. Throttling
     * config only applies to consumers via service references.
     * @return throttling config
     */
    Throttling getThrottling();
}
| {
"content_hash": "d5d3678a32388e7d4ce30b108685e7d8",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 91,
"avg_line_length": 26.104166666666668,
"alnum_prop": 0.6919393455706305,
"repo_name": "tadayosi/switchyard",
"id": "3f61e8333e78cd8a5c3759c68640705b31f3032e",
"size": "1886",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "core/api/src/main/java/org/switchyard/ServiceMetadata.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1387"
},
{
"name": "CSS",
"bytes": "1428"
},
{
"name": "Clojure",
"bytes": "239"
},
{
"name": "HTML",
"bytes": "12878"
},
{
"name": "Java",
"bytes": "9850364"
},
{
"name": "Ruby",
"bytes": "1772"
},
{
"name": "XSLT",
"bytes": "84991"
}
],
"symlink_target": ""
} |
/*************************************************************************/
/* tcp_server.h */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#ifndef TCP_SERVER_H
#define TCP_SERVER_H
#include "core/io/ip.h"
#include "core/io/net_socket.h"
#include "core/io/stream_peer.h"
#include "core/io/stream_peer_tcp.h"
class TCP_Server : public Reference {
	GDCLASS(TCP_Server, Reference);
protected:
	enum {
		// Listen backlog: maximum number of queued, not-yet-accepted connections.
		MAX_PENDING_CONNECTIONS = 8
	};
	// Backing platform socket for this server.
	Ref<NetSocket> _sock;
	// Registers this class's methods with Godot's ClassDB (see GDCLASS).
	static void _bind_methods();
public:
	// Start listening on p_port; p_bind_address selects the interface
	// ("*" binds to any address). Returns an Error code.
	Error listen(uint16_t p_port, const IP_Address &p_bind_address = IP_Address("*"));
	// Whether the server is currently listening.
	bool is_listening() const;
	// Whether a pending connection is ready to be taken.
	bool is_connection_available() const;
	// Accept one pending connection as a TCP stream peer.
	Ref<StreamPeerTCP> take_connection();
	void stop(); // Stop listening
	TCP_Server();
	~TCP_Server();
};
#endif // TCP_SERVER_H
| {
"content_hash": "5772c6c5f83376464fcb8bec5841f625",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 83,
"avg_line_length": 45.88709677419355,
"alnum_prop": 0.49982425307557116,
"repo_name": "Paulloz/godot",
"id": "eb715a745c1442e9090426140b1c886ca2a67451",
"size": "2845",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "core/io/tcp_server.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "50004"
},
{
"name": "C#",
"bytes": "176259"
},
{
"name": "C++",
"bytes": "18569070"
},
{
"name": "GLSL",
"bytes": "1271"
},
{
"name": "Java",
"bytes": "495377"
},
{
"name": "JavaScript",
"bytes": "14680"
},
{
"name": "Makefile",
"bytes": "451"
},
{
"name": "Objective-C",
"bytes": "2645"
},
{
"name": "Objective-C++",
"bytes": "173262"
},
{
"name": "Python",
"bytes": "336142"
},
{
"name": "Shell",
"bytes": "19610"
}
],
"symlink_target": ""
} |
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('d3-selection'), require('d3-transition')) :
typeof define === 'function' && define.amd ? define(['exports', 'd3-selection', 'd3-transition'], factory) :
(factory((global.venn = global.venn || {}),global.d3,global.d3));
}(this, function (exports,d3Selection,d3Transition) { 'use strict';
var SMALL = 1e-10;
/** Returns the intersection area of a bunch of circles (where each circle
is an object having an x,y and radius property) */
function intersectionArea(circles, stats) {
// get all the intersection points of the circles
var intersectionPoints = getIntersectionPoints(circles);
// filter out points that aren't included in all the circles
var innerPoints = intersectionPoints.filter(function (p) {
return containedInCircles(p, circles);
});
var arcArea = 0, polygonArea = 0, arcs = [], i;
// if we have intersection points that are within all the circles,
// then figure out the area contained by them
if (innerPoints.length > 1) {
// sort the points by angle from the center of the polygon, which lets
// us just iterate over points to get the edges
var center = getCenter(innerPoints);
for (i = 0; i < innerPoints.length; ++i ) {
var p = innerPoints[i];
p.angle = Math.atan2(p.x - center.x, p.y - center.y);
}
innerPoints.sort(function(a,b) { return b.angle - a.angle;});
// iterate over all points, get arc between the points
// and update the areas
var p2 = innerPoints[innerPoints.length - 1];
for (i = 0; i < innerPoints.length; ++i) {
var p1 = innerPoints[i];
// polygon area updates easily ...
polygonArea += (p2.x + p1.x) * (p1.y - p2.y);
// updating the arc area is a little more involved
var midPoint = {x : (p1.x + p2.x) / 2,
y : (p1.y + p2.y) / 2},
arc = null;
for (var j = 0; j < p1.parentIndex.length; ++j) {
if (p2.parentIndex.indexOf(p1.parentIndex[j]) > -1) {
// figure out the angle halfway between the two points
// on the current circle
var circle = circles[p1.parentIndex[j]],
a1 = Math.atan2(p1.x - circle.x, p1.y - circle.y),
a2 = Math.atan2(p2.x - circle.x, p2.y - circle.y);
var angleDiff = (a2 - a1);
if (angleDiff < 0) {
angleDiff += 2*Math.PI;
}
// and use that angle to figure out the width of the
// arc
var a = a2 - angleDiff/2,
width = distance(midPoint, {
x : circle.x + circle.radius * Math.sin(a),
y : circle.y + circle.radius * Math.cos(a)
});
// pick the circle whose arc has the smallest width
if ((arc === null) || (arc.width > width)) {
arc = { circle : circle,
width : width,
p1 : p1,
p2 : p2};
}
}
}
if (arc !== null) {
arcs.push(arc);
arcArea += circleArea(arc.circle.radius, arc.width);
p2 = p1;
}
}
} else {
// no intersection points, is either disjoint - or is completely
// overlapped. figure out which by examining the smallest circle
var smallest = circles[0];
for (i = 1; i < circles.length; ++i) {
if (circles[i].radius < smallest.radius) {
smallest = circles[i];
}
}
// make sure the smallest circle is completely contained in all
// the other circles
var disjoint = false;
for (i = 0; i < circles.length; ++i) {
if (distance(circles[i], smallest) > Math.abs(smallest.radius - circles[i].radius)) {
disjoint = true;
break;
}
}
if (disjoint) {
arcArea = polygonArea = 0;
} else {
arcArea = smallest.radius * smallest.radius * Math.PI;
arcs.push({circle : smallest,
p1: { x: smallest.x, y : smallest.y + smallest.radius},
p2: { x: smallest.x - SMALL, y : smallest.y + smallest.radius},
width : smallest.radius * 2 });
}
}
polygonArea /= 2;
if (stats) {
stats.area = arcArea + polygonArea;
stats.arcArea = arcArea;
stats.polygonArea = polygonArea;
stats.arcs = arcs;
stats.innerPoints = innerPoints;
stats.intersectionPoints = intersectionPoints;
}
return arcArea + polygonArea;
}
/** returns whether a point is contained by all of a list of circles */
function containedInCircles(point, circles) {
for (var i = 0; i < circles.length; ++i) {
if (distance(point, circles[i]) > circles[i].radius + SMALL) {
return false;
}
}
return true;
}
/** Gets all intersection points between a bunch of circles */
function getIntersectionPoints(circles) {
var ret = [];
for (var i = 0; i < circles.length; ++i) {
for (var j = i + 1; j < circles.length; ++j) {
var intersect = circleCircleIntersection(circles[i],
circles[j]);
for (var k = 0; k < intersect.length; ++k) {
var p = intersect[k];
p.parentIndex = [i,j];
ret.push(p);
}
}
}
return ret;
}
/** Circular segment area calculation. See http://mathworld.wolfram.com/CircularSegment.html */
function circleArea(r, width) {
return r * r * Math.acos(1 - width/r) - (r - width) * Math.sqrt(width * (2 * r - width));
}
/** euclidean distance between two points */
function distance(p1, p2) {
return Math.sqrt((p1.x - p2.x) * (p1.x - p2.x) +
(p1.y - p2.y) * (p1.y - p2.y));
}
/** Returns the overlap area of two circles of radius r1 and r2 - that
have their centers separated by distance d. Simpler faster
circle intersection for only two circles */
function circleOverlap(r1, r2, d) {
// no overlap
if (d >= r1 + r2) {
return 0;
}
// completely overlapped
if (d <= Math.abs(r1 - r2)) {
return Math.PI * Math.min(r1, r2) * Math.min(r1, r2);
}
var w1 = r1 - (d * d - r2 * r2 + r1 * r1) / (2 * d),
w2 = r2 - (d * d - r1 * r1 + r2 * r2) / (2 * d);
return circleArea(r1, w1) + circleArea(r2, w2);
}
/** Given two circles (containing a x/y/radius attributes),
returns the intersecting points if possible.
note: doesn't handle cases where there are infinitely many
intersection points (circles are equivalent):, or only one intersection point*/
function circleCircleIntersection(p1, p2) {
var d = distance(p1, p2),
r1 = p1.radius,
r2 = p2.radius;
// if to far away, or self contained - can't be done
if ((d >= (r1 + r2)) || (d <= Math.abs(r1 - r2))) {
return [];
}
var a = (r1 * r1 - r2 * r2 + d * d) / (2 * d),
h = Math.sqrt(r1 * r1 - a * a),
x0 = p1.x + a * (p2.x - p1.x) / d,
y0 = p1.y + a * (p2.y - p1.y) / d,
rx = -(p2.y - p1.y) * (h / d),
ry = -(p2.x - p1.x) * (h / d);
return [{x: x0 + rx, y : y0 - ry },
{x: x0 - rx, y : y0 + ry }];
}
/** Returns the center of a bunch of points */
function getCenter(points) {
var center = {x: 0, y: 0};
for (var i =0; i < points.length; ++i ) {
center.x += points[i].x;
center.y += points[i].y;
}
center.x /= points.length;
center.y /= points.length;
return center;
}
/** finds the zeros of a function, given two starting points (which must
* have opposite signs */
function bisect(f, a, b, parameters) {
parameters = parameters || {};
var maxIterations = parameters.maxIterations || 100,
tolerance = parameters.tolerance || 1e-10,
fA = f(a),
fB = f(b),
delta = b - a;
if (fA * fB > 0) {
throw "Initial bisect points must have opposite signs";
}
if (fA === 0) return a;
if (fB === 0) return b;
for (var i = 0; i < maxIterations; ++i) {
delta /= 2;
var mid = a + delta,
fMid = f(mid);
if (fMid * fA >= 0) {
a = mid;
}
if ((Math.abs(delta) < tolerance) || (fMid === 0)) {
return mid;
}
}
return a + delta;
}
// need some basic operations on vectors, rather than adding a dependency,
// just define here
function zeros(x) { var r = new Array(x); for (var i = 0; i < x; ++i) { r[i] = 0; } return r; }
function zerosM(x,y) { return zeros(x).map(function() { return zeros(y); }); }
function dot(a, b) {
var ret = 0;
for (var i = 0; i < a.length; ++i) {
ret += a[i] * b[i];
}
return ret;
}
function norm2(a) {
return Math.sqrt(dot(a, a));
}
function scale(ret, value, c) {
for (var i = 0; i < value.length; ++i) {
ret[i] = value[i] * c;
}
}
/** Writes w1*v1 + w2*v2 element-wise into ret (sized by ret.length). */
function weightedSum(ret, w1, v1, w2, v2) {
  var n = ret.length;
  for (var idx = 0; idx < n; ++idx) {
    ret[idx] = w1 * v1[idx] + w2 * v2[idx];
  }
}
/** minimizes a function using the downhill simplex (Nelder-Mead) method
 * f: loss function taking an array of numbers and returning a number
 * x0: initial guess (array of numbers); note it is reused as a simplex vertex
 * parameters: optional settings - maxIterations, nonZeroDelta, zeroDelta,
 *   minErrorDelta, minTolerance, rho, chi, psi, sigma, history
 * returns {fx, x}: the lowest loss found and the point achieving it */
function nelderMead(f, x0, parameters) {
  parameters = parameters || {};
  var maxIterations = parameters.maxIterations || x0.length * 200,
      nonZeroDelta = parameters.nonZeroDelta || 1.05,
      zeroDelta = parameters.zeroDelta || 0.001,
      minErrorDelta = parameters.minErrorDelta || 1e-6,
      // bug fix: this previously read parameters.minErrorDelta, which made
      // the minTolerance option impossible to configure from the outside
      minTolerance = parameters.minTolerance || 1e-5,
      rho = (parameters.rho !== undefined) ? parameters.rho : 1,
      chi = (parameters.chi !== undefined) ? parameters.chi : 2,
      psi = (parameters.psi !== undefined) ? parameters.psi : -0.5,
      sigma = (parameters.sigma !== undefined) ? parameters.sigma : 0.5,
      maxDiff;
  // initialize simplex: x0 plus N vertices, each perturbed along one axis
  var N = x0.length,
      simplex = new Array(N + 1);
  simplex[0] = x0;
  simplex[0].fx = f(x0);
  simplex[0].id = 0;
  for (var i = 0; i < N; ++i) {
    var point = x0.slice();
    point[i] = point[i] ? point[i] * nonZeroDelta : zeroDelta;
    simplex[i+1] = point;
    simplex[i+1].fx = f(point);
    simplex[i+1].id = i+1;
  }
  // overwrites the worst vertex (simplex[N]) with 'value' (coords and loss)
  function updateSimplex(value) {
    for (var i = 0; i < value.length; i++) {
      simplex[N][i] = value[i];
    }
    simplex[N].fx = value.fx;
  }
  var sortOrder = function(a, b) { return a.fx - b.fx; };
  // scratch vectors, allocated once and reused every iteration
  var centroid = x0.slice(),
      reflected = x0.slice(),
      contracted = x0.slice(),
      expanded = x0.slice();
  for (var iteration = 0; iteration < maxIterations; ++iteration) {
    simplex.sort(sortOrder);
    if (parameters.history) {
      // copy the simplex (since later iterations will mutate) and
      // sort it to have a consistent order between iterations
      var sortedSimplex = simplex.map(function (x) {
        var state = x.slice();
        state.fx = x.fx;
        state.id = x.id;
        return state;
      });
      sortedSimplex.sort(function(a,b) { return a.id - b.id; });
      parameters.history.push({x: simplex[0].slice(),
                   fx: simplex[0].fx,
                   simplex: sortedSimplex});
    }
    // converged when both the loss spread and the vertex spread are tiny
    maxDiff = 0;
    for (i = 0; i < N; ++i) {
      maxDiff = Math.max(maxDiff, Math.abs(simplex[0][i] - simplex[1][i]));
    }
    if ((Math.abs(simplex[0].fx - simplex[N].fx) < minErrorDelta) &&
      (maxDiff < minTolerance)) {
      break;
    }
    // compute the centroid of all but the worst point in the simplex
    for (i = 0; i < N; ++i) {
      centroid[i] = 0;
      for (var j = 0; j < N; ++j) {
        centroid[i] += simplex[j][i];
      }
      centroid[i] /= N;
    }
    // reflect the worst point past the centroid and compute loss at reflected
    // point
    var worst = simplex[N];
    weightedSum(reflected, 1+rho, centroid, -rho, worst);
    reflected.fx = f(reflected);
    // if the reflected point is the best seen, then possibly expand
    if (reflected.fx < simplex[0].fx) {
      weightedSum(expanded, 1+chi, centroid, -chi, worst);
      expanded.fx = f(expanded);
      if (expanded.fx < reflected.fx) {
        updateSimplex(expanded);
      } else {
        updateSimplex(reflected);
      }
    }
    // if the reflected point is worse than the second worst, we need to
    // contract
    else if (reflected.fx >= simplex[N-1].fx) {
      var shouldReduce = false;
      if (reflected.fx > worst.fx) {
        // do an inside contraction
        weightedSum(contracted, 1+psi, centroid, -psi, worst);
        contracted.fx = f(contracted);
        if (contracted.fx < worst.fx) {
          updateSimplex(contracted);
        } else {
          shouldReduce = true;
        }
      } else {
        // do an outside contraction
        weightedSum(contracted, 1-psi * rho, centroid, psi*rho, worst);
        contracted.fx = f(contracted);
        if (contracted.fx < reflected.fx) {
          updateSimplex(contracted);
        } else {
          shouldReduce = true;
        }
      }
      if (shouldReduce) {
        // if we don't contract here, we're done
        if (sigma >= 1) break;
        // do a reduction: shrink every vertex towards the current best
        for (i = 1; i < simplex.length; ++i) {
          weightedSum(simplex[i], 1 - sigma, simplex[0], sigma, simplex[i]);
          simplex[i].fx = f(simplex[i]);
        }
      }
    } else {
      updateSimplex(reflected);
    }
  }
  simplex.sort(sortOrder);
  return {fx : simplex[0].fx,
      x : simplex[0]};
}
/// searches along line 'pk' for a point that satisfies the wolfe conditions
/// See 'Numerical Optimization' by Nocedal and Wright p59-60
/// f : objective function; f(x, grad) returns the loss and writes the
///     gradient at x into grad
/// pk : search direction
/// current: object containing current gradient/loss ({x, fx, fxprime})
/// next: output: contains next gradient/loss after the step
/// a: initial step size guess (default 1)
/// c1, c2: Wolfe condition constants (defaults 1e-6 and 0.1)
/// returns a: step size taken (0 when no acceptable step was found)
function wolfeLineSearch(f, pk, current, next, a, c1, c2) {
  var phi0 = current.fx, phiPrime0 = dot(current.fxprime, pk),
      phi = phi0, phi_old = phi0,
      phiPrime = phiPrime0,
      a0 = 0;
  a = a || 1;
  c1 = c1 || 1e-6;
  c2 = c2 || 0.1;
  // bisects the bracket [a_lo, a_high] (max 16 times) looking for a step
  // that satisfies both Wolfe conditions; returns 0 on failure
  function zoom(a_lo, a_high, phi_lo) {
    for (var iteration = 0; iteration < 16; ++iteration) {
      a = (a_lo + a_high)/2;
      weightedSum(next.x, 1.0, current.x, a, pk);
      phi = next.fx = f(next.x, next.fxprime);
      phiPrime = dot(next.fxprime, pk);
      // sufficient-decrease condition failed (or no improvement over lo):
      // shrink the bracket from above
      if ((phi > (phi0 + c1 * a * phiPrime0)) ||
        (phi >= phi_lo)) {
        a_high = a;
      } else {
        // strong curvature condition met: accept this step
        if (Math.abs(phiPrime) <= -c2 * phiPrime0) {
          return a;
        }
        if (phiPrime * (a_high - a_lo) >=0) {
          a_high = a_lo;
        }
        a_lo = a;
        phi_lo = phi;
      }
    }
    return 0;
  }
  // bracketing phase: grow the step (doubling each time, max 10 tries) until
  // sufficient decrease fails or the slope turns non-negative, then zoom in
  for (var iteration = 0; iteration < 10; ++iteration) {
    weightedSum(next.x, 1.0, current.x, a, pk);
    phi = next.fx = f(next.x, next.fxprime);
    phiPrime = dot(next.fxprime, pk);
    if ((phi > (phi0 + c1 * a * phiPrime0)) ||
      (iteration && (phi >= phi_old))) {
      return zoom(a0, a, phi_old);
    }
    if (Math.abs(phiPrime) <= -c2 * phiPrime0) {
      return a;
    }
    if (phiPrime >= 0 ) {
      return zoom(a, a0, phi);
    }
    phi_old = phi;
    a0 = a;
    a *= 2;
  }
  return a;
}
/** Minimizes f using the Polak-Ribiere nonlinear conjugate gradient method.
 * f(x, fxprime): returns the loss at x and writes the gradient into fxprime
 * initial: starting point (array of numbers)
 * params: optional - {maxIterations, history}
 * returns the final {x, fx, fxprime} object at the best point found */
function conjugateGradient(f, initial, params) {
  // allocate all memory up front here, keep out of the loop for performance
  // reasons
  var current = {x: initial.slice(), fx: 0, fxprime: initial.slice()},
      next = {x: initial.slice(), fx: 0, fxprime: initial.slice()},
      yk = initial.slice(),
      pk, temp,
      a = 1,
      maxIterations;
  params = params || {};
  maxIterations = params.maxIterations || initial.length * 20;
  current.fx = f(current.x, current.fxprime);
  // initial search direction: steepest descent (negative gradient)
  pk = current.fxprime.slice();
  scale(pk, current.fxprime,-1);
  for (var i = 0; i < maxIterations; ++i) {
    a = wolfeLineSearch(f, pk, current, next, a);
    // todo: history in wrong spot?
    if (params.history) {
      params.history.push({x: current.x.slice(),
                 fx: current.fx,
                 fxprime: current.fxprime.slice(),
                 alpha: a});
    }
    if (!a) {
      // failed to find point that satisfies wolfe conditions.
      // reset direction for next iteration (restart with steepest descent)
      scale(pk, current.fxprime, -1);
    } else {
      // update direction using Polak–Ribiere CG method
      weightedSum(yk, 1, next.fxprime, -1, current.fxprime);
      var delta_k = dot(current.fxprime, current.fxprime),
          beta_k = Math.max(0, dot(yk, next.fxprime) / delta_k);
      weightedSum(pk, beta_k, pk, -1, next.fxprime);
      // swap current/next so the accepted point becomes current (the old
      // 'current' object is recycled as scratch space for the next step)
      temp = current;
      current = next;
      next = temp;
    }
    // stop once the gradient is (numerically) zero
    if (norm2(current.fxprime) <= 1e-5) {
      break;
    }
  }
  if (params.history) {
    params.history.push({x: current.x.slice(),
               fx: current.fx,
               fxprime: current.fxprime.slice(),
               alpha: a});
  }
  return current;
}
/** given a list of set objects, and their corresponding overlaps,
  updates the (x, y, radius) attribute on each set such that their positions
  roughly correspond to the desired overlaps */
function venn(areas, parameters) {
  parameters = parameters || {};
  parameters.maxIterations = parameters.maxIterations || 500;
  var layout = parameters.initialLayout || bestInitialLayout;
  // add in missing pairwise areas as having 0 size
  areas = addMissingAreas(areas);
  // initial layout is done greedily
  var circles = layout(areas);
  // flatten the circle centres into a single vector for the optimizer,
  // remembering which slots belong to which set
  var setids = Object.keys(circles);
  var initial = [];
  setids.forEach(function(key) {
    initial.push(circles[key].x, circles[key].y);
  });
  // refine the initial layout against our loss function
  var callCount = 0;
  var solution = nelderMead(
    function(values) {
      callCount += 1;
      // rebuild a setid -> circle mapping from the flat coordinate vector
      var candidate = {};
      for (var i = 0; i < setids.length; ++i) {
        var key = setids[i];
        candidate[key] = {x: values[2 * i],
                  y: values[2 * i + 1],
                  radius: circles[key].radius
        };
      }
      return lossFunction(candidate, areas);
    },
    initial,
    parameters);
  // copy the optimized coordinates back onto the circles
  var best = solution.x;
  for (var j = 0; j < setids.length; ++j) {
    var id = setids[j];
    circles[id].x = best[2 * j];
    circles[id].y = best[2 * j + 1];
  }
  return circles;
}
// numeric tolerance used by the layout code below when comparing circle
// areas and distances for (approximate) equality
var SMALL$1 = 1e-10;
/** Returns the distance necessary for two circles of radius r1 and r2 to
  have the overlap area 'overlap' */
function distanceFromIntersectArea(r1, r2, overlap) {
  var smaller = Math.min(r1, r2);
  // if the target overlap is (numerically) the full area of the smaller
  // circle, it is completely contained: distance is the radius difference
  if (smaller * smaller * Math.PI <= overlap + SMALL$1) {
    return Math.abs(r1 - r2);
  }
  // otherwise numerically solve for the distance that yields the overlap
  return bisect(function(d) {
    return circleOverlap(r1, r2, d) - overlap;
  }, 0, r1 + r2);
}
/** Missing pair-wise intersection area data can cause problems:
  treating as an unknown means that sets will be laid out overlapping,
  which isn't what people expect. To reflect that we want disjoint sets
  here, set the overlap to 0 for all missing pairwise set intersections.
  Returns a new array; the input is not mutated. */
function addMissingAreas(areas) {
  areas = areas.slice();
  // collect the set ids, and index the pairwise overlaps already present
  var ids = [], pairs = {}, i, j, a, b;
  for (i = 0; i < areas.length; ++i) {
    var area = areas[i];
    if (area.sets.length == 1) {
      ids.push(area.sets[0]);
    } else if (area.sets.length == 2) {
      a = area.sets[0];
      b = area.sets[1];
      pairs[[a, b]] = true;
      pairs[[b, a]] = true;
    }
  }
  // bug fix: a sort comparator must return a number (negative/zero/positive);
  // the previous boolean-returning comparator (a > b) never returned -1 and
  // so did not sort correctly
  ids.sort(function(a, b) { return (a > b) ? 1 : (a < b) ? -1 : 0; });
  // append a zero-sized overlap for every pair we haven't seen
  for (i = 0; i < ids.length; ++i) {
    a = ids[i];
    for (j = i + 1; j < ids.length; ++j) {
      b = ids[j];
      if (!([a, b] in pairs)) {
        areas.push({'sets': [a, b],
              'size': 0});
      }
    }
  }
  return areas;
}
/// Returns two matrices, one of the euclidean distances between the sets
/// and the other indicating if there are subset or disjoint set relationships
function getDistanceMatrices(areas, sets, setids) {
  // start with all-zero matrices between every pair of sets
  var distances = zerosM(sets.length, sets.length);
  var constraints = zerosM(sets.length, sets.length);
  // fill in the pairwise distance each two-set overlap implies
  areas.forEach(function(current) {
    if (current.sets.length != 2) {
      return;
    }
    var left = setids[current.sets[0]];
    var right = setids[current.sets[1]];
    var r1 = Math.sqrt(sets[left].size / Math.PI);
    var r2 = Math.sqrt(sets[right].size / Math.PI);
    var separation = distanceFromIntersectArea(r1, r2, current.size);
    distances[left][right] = distances[right][left] = separation;
    // also flag subset (+1) or disjoint (-1) relationships as constraints
    var flag = 0;
    if (current.size + 1e-10 >= Math.min(sets[left].size,
                       sets[right].size)) {
      flag = 1;
    } else if (current.size <= 1e-10) {
      flag = -1;
    }
    constraints[left][right] = constraints[right][left] = flag;
  });
  return {distances: distances, constraints: constraints};
}
/// computes the loss and (in-place) gradient simultaneously for our
/// constrained MDS optimizer; x is a flat [x0,y0,x1,y1,...] vector
function constrainedMDSGradient(x, fxprime, distances, constraints) {
  var loss = 0, a;
  for (a = 0; a < fxprime.length; ++a) {
    fxprime[a] = 0;
  }
  var count = distances.length;
  for (a = 0; a < count; ++a) {
    var xa = x[2 * a], ya = x[2 * a + 1];
    for (var b = a + 1; b < count; ++b) {
      var xb = x[2 * b], yb = x[2 * b + 1];
      var target = distances[a][b];
      var constraint = constraints[a][b];
      var squared = (xb - xa) * (xb - xa) + (yb - ya) * (yb - ya);
      var actual = Math.sqrt(squared);
      var delta = squared - target * target;
      // subset pairs that are already close enough, and disjoint pairs
      // that are already far enough apart, contribute nothing
      if (((constraint > 0) && (actual <= target)) ||
        ((constraint < 0) && (actual >= target))) {
        continue;
      }
      loss += 2 * delta * delta;
      fxprime[2 * a] += 4 * delta * (xa - xb);
      fxprime[2 * a + 1] += 4 * delta * (ya - yb);
      fxprime[2 * b] += 4 * delta * (xb - xa);
      fxprime[2 * b + 1] += 4 * delta * (yb - ya);
    }
  }
  return loss;
}
/// takes the best working variant of either constrained MDS or greedy
function bestInitialLayout(areas, params) {
  var best = greedyLayout(areas, params);
  // greedy layout is sufficient for all 2/3 circle cases. try out
  // constrained MDS for higher order problems, take its output
  // if it outperforms. (greedy is aesthetically better on 2/3 circles
  // since it axis aligns)
  if (areas.length >= 8) {
    var mds = constrainedMDSLayout(areas, params);
    var mdsLoss = lossFunction(mds, areas);
    var greedyLoss = lossFunction(best, areas);
    if (mdsLoss + 1e-8 < greedyLoss) {
      best = mds;
    }
  }
  return best;
}
/// use the constrained MDS variant to generate an initial layout
/// areas: list of {sets, size} objects; params: {restarts, history, ...}
/// returns a setid -> {x, y, radius} mapping
function constrainedMDSLayout(areas, params) {
  params = params || {};
  var restarts = params.restarts || 10;
  // bidirectionally map sets to a rowid (so we can create a matrix)
  var sets = [], setids = {}, i;
  for (i = 0; i < areas.length; ++i ) {
    var area = areas[i];
    if (area.sets.length == 1) {
      setids[area.sets[0]] = sets.length;
      sets.push(area);
    }
  }
  var matrices = getDistanceMatrices(areas, sets, setids),
    distances = matrices.distances,
    constraints = matrices.constraints;
  // keep distances bounded, things get messed up otherwise.
  // TODO: proper preconditioner?
  var norm = norm2(distances.map(norm2))/(distances.length);
  distances = distances.map(function (row) {
    return row.map(function (value) { return value / norm; });});
  var obj = function(x, fxprime) {
    return constrainedMDSGradient(x, fxprime, distances, constraints);
  };
  // restart the optimizer from several random points and keep the best
  var best, current;
  for (i = 0; i < restarts; ++i) {
    var initial = zeros(distances.length*2).map(Math.random);
    current = conjugateGradient(obj, initial, params);
    if (!best || (current.fx < best.fx)) {
      best = current;
    }
  }
  var positions = best.x;
  // translate rows back to (x,y,radius) coordinates, undoing the
  // normalization applied to the distances above
  var circles = {};
  for (i = 0; i < sets.length; ++i) {
    var set = sets[i];
    circles[set.sets[0]] = {
      x: positions[2*i] * norm,
      y: positions[2*i + 1] * norm,
      radius: Math.sqrt(set.size / Math.PI)
    };
  }
  if (params.history) {
    for (i = 0; i < params.history.length; ++i) {
      // bug fix: scale() takes (ret, value, c); the previous two-argument
      // call was a silent no-op, leaving the history un-denormalized
      scale(params.history[i].x, params.history[i].x, norm);
    }
  }
  return circles;
}
/** Lays out a Venn diagram greedily, going from most overlapped sets to
  least overlapped, attempting to position each new set such that the
  overlapping areas to already positioned sets are basically right.
  areas: list of {sets, size[, weight]} objects.
  Returns a setid -> {x, y, radius, size, rowid} mapping. */
function greedyLayout(areas) {
  // define a circle for each set
  var circles = {}, setOverlaps = {}, set;
  var circleCount = 0;
  for (var i = 0; i < areas.length; ++i) {
    var area = areas[i];
    if (area.sets.length == 1) {
      set = area.sets[0];
      circles[set] = {x: 1e10, y: 1e10,
              // bug fix: 'circles' is a plain object, so the previous
              // 'circles.length' was always undefined - use a counter
              rowid: circleCount++,
              size: area.size,
              radius: Math.sqrt(area.size / Math.PI)};
      setOverlaps[set] = [];
    }
  }
  areas = areas.filter(function(a) { return a.sets.length == 2; });
  // map each set to a list of all the other sets that overlap it
  for (i = 0; i < areas.length; ++i) {
    var current = areas[i];
    var weight = current.hasOwnProperty('weight') ? current.weight : 1.0;
    var left = current.sets[0], right = current.sets[1];
    // completely overlapped circles shouldn't be positioned early here
    if (current.size + SMALL$1 >= Math.min(circles[left].size,
                        circles[right].size)) {
      weight = 0;
    }
    setOverlaps[left].push ({set:right, size:current.size, weight:weight});
    setOverlaps[right].push({set:left, size:current.size, weight:weight});
  }
  // get list of most overlapped sets (by total weighted overlap size)
  var mostOverlapped = [];
  for (set in setOverlaps) {
    if (setOverlaps.hasOwnProperty(set)) {
      var size = 0;
      for (i = 0; i < setOverlaps[set].length; ++i) {
        size += setOverlaps[set][i].size * setOverlaps[set][i].weight;
      }
      mostOverlapped.push({set: set, size:size});
    }
  }
  // sort by size desc
  function sortOrder(a,b) {
    return b.size - a.size;
  }
  mostOverlapped.sort(sortOrder);
  // keep track of what sets have been laid out
  var positioned = {};
  function isPositioned(element) {
    return element.set in positioned;
  }
  // adds a point to the output
  function positionSet(point, index) {
    circles[index].x = point.x;
    circles[index].y = point.y;
    positioned[index] = true;
  }
  // add most overlapped set at (0,0)
  positionSet({x: 0, y: 0}, mostOverlapped[0].set);
  // place remaining sets one at a time, most overlapped first
  for (i = 1; i < mostOverlapped.length; ++i) {
    var setIndex = mostOverlapped[i].set,
      overlap = setOverlaps[setIndex].filter(isPositioned);
    set = circles[setIndex];
    overlap.sort(sortOrder);
    if (overlap.length === 0) {
      // this shouldn't happen anymore with addMissingAreas
      throw "ERROR: missing pairwise overlap information";
    }
    // candidate positions for this circle
    var points = [];
    for (var j = 0; j < overlap.length; ++j) {
      // get appropriate distance from most overlapped already added set
      var p1 = circles[overlap[j].set],
        d1 = distanceFromIntersectArea(set.radius, p1.radius,
                        overlap[j].size);
      // sample positions at 90 degrees for maximum aesthetics
      points.push({x : p1.x + d1, y : p1.y});
      points.push({x : p1.x - d1, y : p1.y});
      points.push({y : p1.y + d1, x : p1.x});
      points.push({y : p1.y - d1, x : p1.x});
      // if we have at least 2 overlaps, then figure out where the
      // set should be positioned analytically and try those too
      for (var k = j + 1; k < overlap.length; ++k) {
        var p2 = circles[overlap[k].set],
          d2 = distanceFromIntersectArea(set.radius, p2.radius,
                          overlap[k].size);
        var extraPoints = circleCircleIntersection(
          { x: p1.x, y: p1.y, radius: d1},
          { x: p2.x, y: p2.y, radius: d2});
        for (var l = 0; l < extraPoints.length; ++l) {
          points.push(extraPoints[l]);
        }
      }
    }
    // we have some candidate positions for the set, examine loss
    // at each position to figure out where to put it at
    var bestLoss = 1e50, bestPoint = points[0];
    for (j = 0; j < points.length; ++j) {
      circles[setIndex].x = points[j].x;
      circles[setIndex].y = points[j].y;
      var loss = lossFunction(circles, areas);
      if (loss < bestLoss) {
        bestLoss = loss;
        bestPoint = points[j];
      }
    }
    positionSet(bestPoint, setIndex);
  }
  return circles;
}
/** Given a bunch of sets, and the desired overlaps between these sets - computes
  the distance from the actual overlaps to the desired overlaps. Note that
  this method ignores overlaps of more than 2 circles */
function lossFunction(sets, overlaps) {
  var total = 0;
  for (var i = 0; i < overlaps.length; ++i) {
    var area = overlaps[i];
    var overlap;
    if (area.sets.length == 1) {
      // single-set entries are circle sizes, not overlaps: nothing to score
      continue;
    }
    if (area.sets.length == 2) {
      var left = sets[area.sets[0]];
      var right = sets[area.sets[1]];
      overlap = circleOverlap(left.radius, right.radius,
                  distance(left, right));
    } else {
      // higher order: use the exact intersection area of all the circles
      overlap = intersectionArea(area.sets.map(function(id) { return sets[id]; }));
    }
    var weight = area.hasOwnProperty('weight') ? area.weight : 1.0;
    total += weight * (overlap - area.size) * (overlap - area.size);
  }
  return total;
}
// orientates a bunch of circles (in place) to point in orientation:
// sorts them, shifts the largest to the origin, rotates so the second
// lies at 'orientation' from the first, and mirrors if needed
function orientateCircles(circles, orientation, orientationOrder) {
  // order the circles: largest first by default, or by the caller's comparator
  if (orientationOrder === null) {
    circles.sort(function(a, b) { return b.radius - a.radius; });
  } else {
    circles.sort(orientationOrder);
  }
  var index;
  // translate so the first circle sits at the origin
  if (circles.length > 0) {
    var shiftX = circles[0].x;
    var shiftY = circles[0].y;
    for (index = 0; index < circles.length; ++index) {
      circles[index].x -= shiftX;
      circles[index].y -= shiftY;
    }
  }
  // rotate so the second circle lies at angle 'orientation' from the first
  if (circles.length > 1) {
    var theta = Math.atan2(circles[1].x, circles[1].y) - orientation;
    var cosTheta = Math.cos(theta);
    var sinTheta = Math.sin(theta);
    for (index = 0; index < circles.length; ++index) {
      var oldX = circles[index].x;
      var oldY = circles[index].y;
      circles[index].x = cosTheta * oldX - sinTheta * oldY;
      circles[index].y = sinTheta * oldX + cosTheta * oldY;
    }
  }
  // mirror the layout if the third circle ended up above the plane
  // specified by the first two circles
  if (circles.length > 2) {
    var angle = Math.atan2(circles[2].x, circles[2].y) - orientation;
    while (angle < 0) { angle += 2 * Math.PI; }
    while (angle > 2 * Math.PI) { angle -= 2 * Math.PI; }
    if (angle > Math.PI) {
      var slope = circles[1].y / (1e-10 + circles[1].x);
      for (index = 0; index < circles.length; ++index) {
        var proj = (circles[index].x + slope * circles[index].y) / (1 + slope * slope);
        circles[index].x = 2 * proj - circles[index].x;
        circles[index].y = 2 * proj * slope - circles[index].y;
      }
    }
  }
}
/** Groups circles into disjoint (non-overlapping) clusters via union-find.
 * Each circle must carry a 'setid'; returns a list of circle lists. */
function disjointCluster(circles) {
  // each circle starts out as its own cluster
  circles.forEach(function(circle) { circle.parent = circle; });
  // find with path compression
  function find(circle) {
    if (circle.parent !== circle) {
      circle.parent = find(circle.parent);
    }
    return circle.parent;
  }
  function union(x, y) {
    var rootX = find(x);
    var rootY = find(y);
    rootX.parent = rootY;
  }
  // union every pair of circles that overlap
  for (var i = 0; i < circles.length; ++i) {
    for (var j = i + 1; j < circles.length; ++j) {
      var maxDistance = circles[i].radius + circles[j].radius;
      if (distance(circles[i], circles[j]) + 1e-10 < maxDistance) {
        union(circles[j], circles[i]);
      }
    }
  }
  // group the circles by the setid of their cluster's root
  var clusters = {}, setid;
  for (i = 0; i < circles.length; ++i) {
    setid = find(circles[i]).parent.setid;
    if (!(setid in clusters)) {
      clusters[setid] = [];
    }
    clusters[setid].push(circles[i]);
  }
  // cleanup bookkeeping
  circles.forEach(function(circle) { delete circle.parent; });
  // return in more usable form: a list of circle lists
  var ret = [];
  for (setid in clusters) {
    if (clusters.hasOwnProperty(setid)) {
      ret.push(clusters[setid]);
    }
  }
  return ret;
}
/** Returns the axis-aligned bounding box of a list of circles as
 * {xRange: {min, max}, yRange: {min, max}}. */
function getBoundingBox(circles) {
  // {max, min} of the circle extents along dimension d ('x' or 'y')
  function rangeOf(d) {
    var highs = circles.map(function(c) { return c[d] + c.radius; });
    var lows = circles.map(function(c) { return c[d] - c.radius; });
    return {max: Math.max.apply(null, highs),
        min: Math.min.apply(null, lows)};
  }
  return {xRange: rangeOf('x'), yRange: rangeOf('y')};
}
/** Normalizes a layout: splits the circles into disjoint clusters, orients
 * each cluster (largest circle at the origin, second-largest at angle
 * 'orientation' from it), then packs the clusters into a grid so they
 * don't overlap. Returns a new setid -> circle mapping built from copies;
 * the input solution object is not mutated. */
function normalizeSolution(solution, orientation, orientationOrder) {
  if (orientation === null){
    orientation = Math.PI/2;
  }
  // work with a list instead of a dictionary, and take a copy so we
  // don't mutate input
  var circles = [], i, setid;
  for (setid in solution) {
    if (solution.hasOwnProperty(setid)) {
      var previous = solution[setid];
      circles.push({x: previous.x,
             y: previous.y,
             radius: previous.radius,
             setid: setid});
    }
  }
  // get all the disjoint clusters
  var clusters = disjointCluster(circles);
  // orientate all disjoint sets, get sizes (bounding-box areas)
  for (i = 0; i < clusters.length; ++i) {
    orientateCircles(clusters[i], orientation, orientationOrder);
    var bounds = getBoundingBox(clusters[i]);
    clusters[i].size = (bounds.xRange.max - bounds.xRange.min) * (bounds.yRange.max - bounds.yRange.min);
    clusters[i].bounds = bounds;
  }
  clusters.sort(function(a, b) { return b.size - a.size; });
  // orientate the largest at 0,0, and get the bounds
  circles = clusters[0];
  var returnBounds = circles.bounds;
  // gap left between clusters when packing them
  var spacing = (returnBounds.xRange.max - returnBounds.xRange.min)/50;
  // appends a cluster to the layout, offset to the right and/or below the
  // bounds laid out so far (centring it along the non-offset axis)
  function addCluster(cluster, right, bottom) {
    if (!cluster) return;
    var bounds = cluster.bounds, xOffset, yOffset, centreing;
    if (right) {
      xOffset = returnBounds.xRange.max - bounds.xRange.min + spacing;
    } else {
      xOffset = returnBounds.xRange.max - bounds.xRange.max;
      centreing = (bounds.xRange.max - bounds.xRange.min) / 2 -
            (returnBounds.xRange.max - returnBounds.xRange.min) / 2;
      if (centreing < 0) xOffset += centreing;
    }
    if (bottom) {
      yOffset = returnBounds.yRange.max - bounds.yRange.min + spacing;
    } else {
      yOffset = returnBounds.yRange.max - bounds.yRange.max;
      centreing = (bounds.yRange.max - bounds.yRange.min) / 2 -
            (returnBounds.yRange.max - returnBounds.yRange.min) / 2;
      if (centreing < 0) yOffset += centreing;
    }
    for (var j = 0; j < cluster.length; ++j) {
      cluster[j].x += xOffset;
      cluster[j].y += yOffset;
      circles.push(cluster[j]);
    }
  }
  var index = 1;
  while (index < clusters.length) {
    addCluster(clusters[index], true, false);
    addCluster(clusters[index+1], false, true);
    addCluster(clusters[index+2], true, true);
    index += 3;
    // have one cluster (in top left). lay out next three relative
    // to it in a grid
    returnBounds = getBoundingBox(circles);
  }
  // convert back to solution form
  var ret = {};
  for (i = 0; i < circles.length; ++i) {
    ret[circles[i].setid] = circles[i];
  }
  return ret;
}
/** Scales a solution from venn.venn or venn.greedyLayout such that it fits in
  a rectangle of width/height - with padding around the borders. also
  centers the diagram in the available space at the same time */
function scaleSolution(solution, width, height, padding) {
  // flatten the solution into parallel arrays of ids and circles
  var circles = [], setids = [];
  for (var setid in solution) {
    if (solution.hasOwnProperty(setid)) {
      setids.push(setid);
      circles.push(solution[setid]);
    }
  }
  // reserve padding on every side
  width -= 2 * padding;
  height -= 2 * padding;
  var bounds = getBoundingBox(circles);
  var xRange = bounds.xRange;
  var yRange = bounds.yRange;
  if ((xRange.max == xRange.min) ||
    (yRange.max == yRange.min)) {
    console.log("not scaling solution: zero size detected");
    return solution;
  }
  // pick the scaling that fits both dimensions, then centre the diagram
  var xScaling = width / (xRange.max - xRange.min);
  var yScaling = height / (yRange.max - yRange.min);
  var scaling = Math.min(yScaling, xScaling);
  var xOffset = (width - (xRange.max - xRange.min) * scaling) / 2;
  var yOffset = (height - (yRange.max - yRange.min) * scaling) / 2;
  var scaled = {};
  circles.forEach(function(circle, i) {
    scaled[setids[i]] = {
      radius: scaling * circle.radius,
      x: padding + xOffset + (circle.x - xRange.min) * scaling,
      y: padding + yOffset + (circle.y - yRange.min) * scaling
    };
  });
  return scaled;
}
/*global console:true*/
/** Returns a configurable, reusable d3 chart function for drawing venn/euler
 * diagrams. Apply to a selection whose datum is a list of {sets, size, ...}
 * areas; the chart lays out the circles, draws the area paths and labels,
 * and transitions between successive datasets. Configure via the chained
 * accessor methods (width, height, padding, duration, ...). */
function VennDiagram() {
  var width = 600,
    height = 350,
    padding = 15,
    duration = 1000,
    orientation = Math.PI / 2,
    normalize = true,
    wrap = true,
    styled = true,
    fontSize = null,
    orientationOrder = null,
    // mimic the behaviour of d3.scale.category10 from the previous
    // version of d3
    colourMap = {},
    // so this is the same as d3.schemeCategory10, which is only defined in d3 4.0
    // since we can support older versions of d3 as long as we don't force this,
    // I'm hackily redefining below. TODO: remove this and change to d3.schemeCategory10
    colourScheme = ["#1f77b4", "#ff7f0e", "#2ca02c", "#d62728", "#9467bd", "#8c564b", "#e377c2", "#7f7f7f", "#bcbd22", "#17becf"],
    colourIndex = 0,
    // assigns each set key the next colour in the scheme (cycling when the
    // scheme is exhausted), remembering assignments in colourMap
    colours = function(key) {
      if (key in colourMap) {
        return colourMap[key];
      }
      var ret = colourMap[key] = colourScheme[colourIndex];
      colourIndex += 1;
      if (colourIndex >= colourScheme.length) {
        colourIndex = 0;
      }
      return ret;
    },
    layoutFunction = venn;
  // the chart function applied to a d3 selection carrying the area data
  function chart(selection) {
    var data = selection.datum();
    // compute the layout in data space, then scale it to pixel space
    var solution = layoutFunction(data);
    if (normalize) {
      solution = normalizeSolution(solution,
                     orientation,
                     orientationOrder);
    }
    var circles = scaleSolution(solution, width, height, padding);
    var textCentres = computeTextCentres(circles, data);
    // Figure out the current label for each set. These can change
    // and D3 won't necessarily update (fixes https://github.com/benfred/venn.js/issues/103)
    var labels = {};
    data.forEach(function(datum) { labels[datum.sets] = datum.label; });
    // label for a datum: its explicit label if any, else the set name
    // for single sets (intersections default to no label)
    function label(d) {
      if (d.sets in labels) {
        return labels[d.sets];
      }
      if (d.sets.length == 1) {
        return '' + d.sets[0];
      }
    }
    // create svg if not already existing
    selection.selectAll("svg").data([circles]).enter().append("svg");
    var svg = selection.select("svg")
      .attr("width", width)
      .attr("height", height);
    // to properly transition intersection areas, we need the
    // previous circles locations. load from elements
    var previous = {}, hasPrevious = false;
    svg.selectAll(".venn-area path").each(function (d) {
      var path = d3Selection.select(this).attr("d");
      if ((d.sets.length == 1) && path) {
        hasPrevious = true;
        previous[d.sets[0]] = circleFromPath(path);
      }
    });
    // interpolate intersection area paths between previous and
    // current paths
    var pathTween = function(d) {
      return function(t) {
        // linearly interpolate each circle between its previous and
        // current position/radius, then recompute the area path
        var c = d.sets.map(function(set) {
          var start = previous[set], end = circles[set];
          if (!start) {
            start = {x : width/2, y : height/2, radius : 1};
          }
          if (!end) {
            end = {x : width/2, y : height/2, radius : 1};
          }
          return {'x' : start.x * (1 - t) + end.x * t,
              'y' : start.y * (1 - t) + end.y * t,
              'radius' : start.radius * (1 - t) + end.radius * t};
        });
        return intersectionAreaPath(c);
      };
    };
    // update data, joining on the set ids
    var nodes = svg.selectAll(".venn-area")
      .data(data, function(d) { return d.sets; });
    // create new nodes
    var enter = nodes.enter()
      .append('g')
      .attr("class", function(d) {
        return "venn-area venn-" +
          (d.sets.length == 1 ? "circle" : "intersection");
      })
      .attr("data-venn-sets", function(d) {
        return d.sets.join("_");
      });
    var enterPath = enter.append("path"),
      enterText = enter.append("text")
        .attr("class", "label")
        .text(function (d) { return label(d); } )
        .attr("text-anchor", "middle")
        .attr("dy", ".35em")
        .attr("x", width/2)
        .attr("y", height/2);
    // apply minimal style if wanted
    if (styled) {
      enterPath.style("fill-opacity", "0")
        .filter(function (d) { return d.sets.length == 1; } )
        .style("fill", function(d) { return colours(d.sets); })
        .style("fill-opacity", ".25");
      enterText
        .style("fill", function(d) { return d.sets.length == 1 ? colours(d.sets) : "#444"; });
    }
    // update existing, using pathTween if necessary
    var update = selection;
    if (hasPrevious) {
      update = selection.transition("venn").duration(duration);
      update.selectAll("path")
        .attrTween("d", pathTween);
    } else {
      update.selectAll("path")
        .attr("d", function(d) {
          return intersectionAreaPath(d.sets.map(function (set) { return circles[set]; }));
        });
    }
    var updateText = update.selectAll("text")
      .filter(function (d) { return d.sets in textCentres; })
      .text(function (d) { return label(d); } )
      .attr("x", function(d) { return Math.floor(textCentres[d.sets].x);})
      .attr("y", function(d) { return Math.floor(textCentres[d.sets].y);});
    if (wrap) {
      if (hasPrevious) {
        // d3 4.0 uses 'on' for events on transitions,
        // but d3 3.0 used 'each' instead. switch appropriately
        if ('on' in updateText) {
          updateText.on("end", wrapText(circles, label));
        } else {
          updateText.each("end", wrapText(circles, label));
        }
      } else {
        updateText.each(wrapText(circles, label));
      }
    }
    // remove old
    var exit = nodes.exit().transition('venn').duration(duration).remove();
    exit.selectAll("path")
      .attrTween("d", pathTween);
    var exitText = exit.selectAll("text")
      .attr("x", width/2)
      .attr("y", height/2);
    // if we've been passed a fontSize explicitly, use it to
    // transition
    if (fontSize !== null) {
      enterText.style("font-size", "0px");
      updateText.style("font-size", fontSize);
      exitText.style("font-size", "0px");
    }
    return {'circles': circles,
        'textCentres': textCentres,
        'nodes': nodes,
        'enter': enter,
        'update': update,
        'exit': exit};
  }
  // chained getter/setters below: with no argument they return the current
  // value; with an argument they set it and return the chart for chaining
  chart.wrap = function(_) {
    if (!arguments.length) return wrap;
    wrap = _;
    return chart;
  };
  chart.width = function(_) {
    if (!arguments.length) return width;
    width = _;
    return chart;
  };
  chart.height = function(_) {
    if (!arguments.length) return height;
    height = _;
    return chart;
  };
  chart.padding = function(_) {
    if (!arguments.length) return padding;
    padding = _;
    return chart;
  };
  chart.colours = function(_) {
    if (!arguments.length) return colours;
    colours = _;
    return chart;
  };
  chart.fontSize = function(_) {
    if (!arguments.length) return fontSize;
    fontSize = _;
    return chart;
  };
  chart.duration = function(_) {
    if (!arguments.length) return duration;
    duration = _;
    return chart;
  };
  chart.layoutFunction = function(_) {
    if (!arguments.length) return layoutFunction;
    layoutFunction = _;
    return chart;
  };
  chart.normalize = function(_) {
    if (!arguments.length) return normalize;
    normalize = _;
    return chart;
  };
  chart.styled = function(_) {
    if (!arguments.length) return styled;
    styled = _;
    return chart;
  };
  chart.orientation = function(_) {
    if (!arguments.length) return orientation;
    orientation = _;
    return chart;
  };
  chart.orientationOrder = function(_) {
    if (!arguments.length) return orientationOrder;
    orientationOrder = _;
    return chart;
  };
  return chart;
}
// sometimes text doesn't fit inside the circle, if thats the case lets wrap
// the text here such that it fits
// todo: looks like this might be merged into d3 (
// https://github.com/mbostock/d3/issues/1642),
// also worth checking out is
// http://engineering.findthebest.com/wrapping-axis-labels-in-d3-js/
// this seems to be one of those things that should be easy but isn't
/** Returns an each/on callback that wraps the bound element's label into up
 * to three tspan lines sized to fit inside the datum's circle. Requires a
 * rendered DOM (uses getComputedTextLength to measure). */
function wrapText(circles, labeller) {
  return function() {
    var text = d3Selection.select(this),
      data = text.datum(),
      // target line width: the circle's radius (fallback 50 when falsy)
      width = circles[data.sets[0]].radius || 50,
      label = labeller(data) || '';
    var words = label.split(/\s+/).reverse(),
      maxLines = 3,
      // minimum characters per line, so we never exceed maxLines
      minChars = (label.length + words.length) / maxLines,
      word = words.pop(),
      line = [word],
      joined,
      lineNumber = 0,
      lineHeight = 1.1, // ems
      tspan = text.text(null).append("tspan").text(word);
    // greedily pack words onto the current tspan; start a new tspan once
    // the rendered line exceeds the width (and has enough characters)
    while (true) {
      word = words.pop();
      if (!word) break;
      line.push(word);
      joined = line.join(" ");
      tspan.text(joined);
      if (joined.length > minChars && tspan.node().getComputedTextLength() > width) {
        line.pop();
        tspan.text(line.join(" "));
        line = [word];
        tspan = text.append("tspan").text(word);
        lineNumber++;
      }
    }
    // vertically centre the stack of lines around the original y position
    var initial = 0.35 - lineNumber * lineHeight / 2,
      x = text.attr("x"),
      y = text.attr("y");
    text.selectAll("tspan")
      .attr("x", x)
      .attr("y", y)
      .attr("dy", function(d, i) {
        return (initial + i * lineHeight) + "em";
      });
  };
}
// Signed margin of the point `current` with respect to the region formed by
// the `interior` circles (the point should be inside each) minus the
// `exterior` circles (the point should be outside each). Larger is better
// centred; a negative value means some constraint is violated.
// `interior` must be non-empty.
function circleMargin(current, interior, exterior) {
    var signedMargin = function (circle, inside) {
        var d = distance(circle, current);
        return inside ? circle.radius - d : d - circle.radius;
    };
    var margin = signedMargin(interior[0], true);
    var index, candidate;
    for (index = 1; index < interior.length; ++index) {
        candidate = signedMargin(interior[index], true);
        if (candidate <= margin) {
            margin = candidate;
        }
    }
    for (index = 0; index < exterior.length; ++index) {
        candidate = signedMargin(exterior[index], false);
        if (candidate <= margin) {
            margin = candidate;
        }
    }
    return margin;
}
// compute the center of some circles by maximizing the margin of
// the center point relative to the circles (interior) after subtracting
// nearby circles (exterior)
function computeTextCentre(interior, exterior) {
// get an initial estimate by sampling around the interior circles
// and taking the point with the biggest margin
var points = [], i;
for (i = 0; i < interior.length; ++i) {
var c = interior[i];
// sample each centre plus four axis-aligned half-radius offsets
points.push({x: c.x, y: c.y});
points.push({x: c.x + c.radius/2, y: c.y});
points.push({x: c.x - c.radius/2, y: c.y});
points.push({x: c.x, y: c.y + c.radius/2});
points.push({x: c.x, y: c.y - c.radius/2});
}
var initial = points[0], margin = circleMargin(points[0], interior, exterior);
for (i = 1; i < points.length; ++i) {
var m = circleMargin(points[i], interior, exterior);
if (m >= margin) {
initial = points[i];
margin = m;
}
}
// maximize the margin numerically; nelderMead minimizes, hence the -1 factor
var solution = nelderMead(
function(p) { return -1 * circleMargin({x: p[0], y: p[1]}, interior, exterior); },
[initial.x, initial.y],
{maxIterations:500, minErrorDelta:1e-10}).x;
var ret = {x: solution[0], y: solution[1]};
// check solution, fallback as needed (happens if fully overlapped
// etc)
var valid = true;
// the centre must lie inside every interior circle ...
for (i = 0; i < interior.length; ++i) {
if (distance(ret, interior[i]) > interior[i].radius) {
valid = false;
break;
}
}
// ... and outside every exterior circle
for (i = 0; i < exterior.length; ++i) {
if (distance(ret, exterior[i]) < exterior[i].radius) {
valid = false;
break;
}
}
if (!valid) {
if (interior.length == 1) {
// single circle: its centre is always a safe choice
ret = {x: interior[0].x, y: interior[0].y};
} else {
var areaStats = {};
intersectionArea(interior, areaStats);
if (areaStats.arcs.length === 0) {
// no intersection at all: park the label off-screen and flag it
ret = {'x': 0, 'y': -1000, disjoint:true};
} else if (areaStats.arcs.length == 1) {
// intersection degenerates to one circle: use that circle's centre
ret = {'x': areaStats.arcs[0].circle.x,
'y': areaStats.arcs[0].circle.y};
} else if (exterior.length) {
// try again without other circles
ret = computeTextCentre(interior, []);
} else {
// take average of all the points in the intersection
// polygon. this should basically never happen
// and has some issues:
// https://github.com/benfred/venn.js/issues/48#issuecomment-146069777
ret = getCenter(areaStats.arcs.map(function (a) { return a.p1; }));
}
}
}
return ret;
}
// Given a dictionary of {setid: circle}, returns a dictionary mapping each
// setid to the list of setids whose circles completely overlap it (contain
// it entirely, within a small numerical tolerance).
function getOverlappingCircles(circles) {
    var ids = Object.keys(circles);
    var containedBy = {};
    ids.forEach(function (id) {
        containedBy[id] = [];
    });
    for (var i = 0; i < ids.length; i++) {
        var first = circles[ids[i]];
        for (var j = i + 1; j < ids.length; ++j) {
            var second = circles[ids[j]];
            var separation = distance(first, second);
            if (separation + second.radius <= first.radius + 1e-10) {
                // circle j lies entirely inside circle i
                containedBy[ids[j]].push(ids[i]);
            } else if (separation + first.radius <= second.radius + 1e-10) {
                // circle i lies entirely inside circle j
                containedBy[ids[i]].push(ids[j]);
            }
        }
    }
    return containedBy;
}
// For each area in `areas`, computes where its text label should be drawn.
// Returns a map keyed by the area's set list (implicitly stringified).
function computeTextCentres(circles, areas) {
var ret = {}, overlapped = getOverlappingCircles(circles);
for (var i = 0; i < areas.length; ++i) {
var area = areas[i].sets, areaids = {}, exclude = {};
for (var j = 0; j < area.length; ++j) {
areaids[area[j]] = true;
var overlaps = overlapped[area[j]];
// keep track of any circles that overlap this area,
// and don't consider for purposes of computing the text
// centre
for (var k = 0; k < overlaps.length; ++k) {
exclude[overlaps[k]] = true;
}
}
// partition circles: members of this area vs. everything else that
// wasn't excluded above
var interior = [], exterior = [];
for (var setid in circles) {
if (setid in areaids) {
interior.push(circles[setid]);
} else if (!(setid in exclude)) {
exterior.push(circles[setid]);
}
}
var centre = computeTextCentre(interior, exterior);
// NOTE: `area` is an array used as an object key, so the key is its
// implicit comma-joined string form
ret[area] = centre;
if (centre.disjoint && (areas[i].size > 0)) {
console.log("WARNING: area " + area + " not represented on screen");
}
}
return ret;
}
// sorts all areas in the venn diagram, so that
// a particular area is on top (relativeTo) - and
// all other areas are so that the smallest areas are on top
function sortAreas(div, relativeTo) {
// figure out sets that are completely overlapped by relativeTo
var overlaps = getOverlappingCircles(div.selectAll("svg").datum());
var exclude = {};
for (var i = 0; i < relativeTo.sets.length; ++i) {
var check = relativeTo.sets[i];
for (var setid in overlaps) {
var overlap = overlaps[setid];
for (var j = 0; j < overlap.length; ++j) {
if (overlap[j] == check) {
exclude[setid] = true;
break;
}
}
}
}
// checks that all sets are in exclude;
function shouldExclude(sets) {
for (var i = 0; i < sets.length; ++i) {
if (!(sets[i] in exclude)) {
return false;
}
}
return true;
}
// need to sort div's so that Z order is correct
div.selectAll("g").sort(function (a, b) {
// highest order set intersections first
if (a.sets.length != b.sets.length) {
return a.sets.length - b.sets.length;
}
// relativeTo is ordered specially against areas it completely covers
if (a == relativeTo) {
return shouldExclude(b.sets) ? -1 : 1;
}
if (b == relativeTo) {
return shouldExclude(a.sets) ? 1 : -1;
}
// finally by size
return b.size - a.size;
});
}
// Builds an SVG path string drawing a full circle of radius r centred at
// (x, y) as two 180-degree relative arcs. The exact token layout is relied
// upon by circleFromPath, so keep the format stable.
function circlePath(x, y, r) {
    var tokens = [
        "\nM", x, y,
        "\nm", -r, 0,
        "\na", r, r, 0, 1, 0, r * 2, 0,
        "\na", r, r, 0, 1, 0, -r * 2, 0
    ];
    return tokens.join(" ");
}
// Inverse of circlePath: parses a path string produced by circlePath back
// into a circle object {x, y, radius}.
function circleFromPath(path) {
    var parts = path.split(' ');
    // layout is ["\nM", x, y, "\nm", -r, ...]: the radius appears negated
    // as the fifth token, hence the sign flip below
    return {
        'x': parseFloat(parts[1]),
        'y': parseFloat(parts[2]),
        'radius': -parseFloat(parts[4])
    };
}
/** returns a svg path of the intersection area of a bunch of circles */
function intersectionAreaPath(circles) {
    var stats = {};
    intersectionArea(circles, stats);
    var arcs = stats.arcs;
    if (arcs.length === 0) {
        // empty intersection: degenerate path
        return "M 0 0";
    }
    if (arcs.length == 1) {
        // intersection is a whole circle
        var c = arcs[0].circle;
        return circlePath(c.x, c.y, c.radius);
    }
    // trace the boundary arc by arc, starting at the first arc's p2
    var path = ["\nM", arcs[0].p2.x, arcs[0].p2.y];
    arcs.forEach(function (arc) {
        var r = arc.circle.radius;
        // use the large-arc flag when the arc spans more than a semicircle
        path.push("\nA", r, r, 0, arc.width > r ? 1 : 0, 1, arc.p1.x, arc.p1.y);
    });
    return path.join(" ");
}
// Public API of the venn module: geometry helpers, layout algorithms and the
// d3-based VennDiagram chart.
exports.intersectionArea = intersectionArea;
exports.circleCircleIntersection = circleCircleIntersection;
exports.circleOverlap = circleOverlap;
exports.circleArea = circleArea;
exports.distance = distance;
exports.venn = venn;
exports.greedyLayout = greedyLayout;
exports.scaleSolution = scaleSolution;
exports.normalizeSolution = normalizeSolution;
exports.bestInitialLayout = bestInitialLayout;
exports.lossFunction = lossFunction;
exports.disjointCluster = disjointCluster;
exports.distanceFromIntersectArea = distanceFromIntersectArea;
exports.VennDiagram = VennDiagram;
exports.wrapText = wrapText;
exports.computeTextCentres = computeTextCentres;
exports.computeTextCentre = computeTextCentre;
exports.sortAreas = sortAreas;
exports.circlePath = circlePath;
exports.circleFromPath = circleFromPath;
exports.intersectionAreaPath = intersectionAreaPath;
// interop flag so bundlers treat this namespace as an ES module
Object.defineProperty(exports, '__esModule', { value: true });
})); | {
"content_hash": "81ca0b28123d67eeeeaf5ce62bb8ca79",
"timestamp": "",
"source": "github",
"line_count": 1814,
"max_line_length": 138,
"avg_line_length": 36.895259095920615,
"alnum_prop": 0.4770798469997609,
"repo_name": "rluo/rluo.github.io",
"id": "77e609205044be014db5e21a9f3d3f9a8891e20b",
"size": "66930",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "statcomp/bower_components/venn.js/venn.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "788091"
},
{
"name": "HTML",
"bytes": "4331888"
},
{
"name": "JavaScript",
"bytes": "1829605"
},
{
"name": "Jupyter Notebook",
"bytes": "320557"
},
{
"name": "Less",
"bytes": "695"
},
{
"name": "Makefile",
"bytes": "734"
},
{
"name": "SCSS",
"bytes": "196697"
}
],
"symlink_target": ""
} |
# Per-test timeout in milliseconds, passed to mocha via -t.
TIMEOUT = 3000
# Tooling binaries resolved from the local node_modules.
MOCHA = ./node_modules/.bin/_mocha
MOCHA_OPTIONS = -t $(TIMEOUT) --recursive -r ./test/util/common.js
ISTANBUL = ./node_modules/.bin/istanbul
COVERALLS = ./node_modules/coveralls/bin/coveralls.js
# Remove all installed dependencies.
clean:
@rm -rf node_modules
# Install dependencies (uses the taobao npm registry mirror).
install:
@npm install -d --registry=http://registry.npm.taobao.org/
# Run the tests with toshihiko debug logging enabled.
debug-test:
@NODE_ENV=test DEBUG=toshihiko:* $(MOCHA) -t $(TIMEOUT) --recursive
# Run the test suite.
test:
@NODE_ENV=test $(MOCHA) $(MOCHA_OPTIONS)
# Run the test suite under istanbul to produce a coverage report.
coverage:
@NODE_ENV=test $(ISTANBUL) cover $(MOCHA) -- $(MOCHA_OPTIONS)
# CI setup: create the test database and start a local memcached daemon.
before-test-travis: install
@mysql -e 'create database toshihiko;' & \
memcached -p 11211 -d
# Run coverage, upload the lcov report to Coveralls, then clean up.
test-coveralls: install
NODE_ENV=test $(ISTANBUL) cover $(MOCHA) \
--report lcovonly \
-- \
$(MOCHA_OPTIONS) \
-R spec && cat ./coverage/lcov.info | \
\
$(COVERALLS) && rm -rf ./coverage
.PHONY: test coverage
| {
"content_hash": "4b8fa1fe76a8c0e927d2b49cf7b5f94d",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 68,
"avg_line_length": 23.942857142857143,
"alnum_prop": 0.6754176610978521,
"repo_name": "plusmancn/Toshihiko",
"id": "77a472ddd5902fc46398667db42ccbad891524cb",
"size": "838",
"binary": false,
"copies": "1",
"ref": "refs/heads/feature/v1",
"path": "Makefile",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "262970"
},
{
"name": "Makefile",
"bytes": "838"
}
],
"symlink_target": ""
} |
<?php
namespace Vest\SharedBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
 * Entity for the shop category <-> room relation
 * ("Kategorie" / "Raum" join table).
 *
 * The surrogate primary key is generated by the persistence layer, so only
 * a getter is exposed for it; both foreign-key fields have fluent setters.
 */
class ShopKategorieRvRaum
{
    /** @var integer surrogate primary key of this relation row */
    private $kategorieRaumId;

    /** @var integer id of the related room */
    private $raumId;

    /** @var integer id of the related category */
    private $kategorieId;

    /**
     * Returns the surrogate primary key of this relation row.
     *
     * @return integer
     */
    public function getKategorieRaumId()
    {
        return $this->kategorieRaumId;
    }

    /**
     * Sets the related room id.
     *
     * @param integer $raumId
     * @return ShopKategorieRvRaum this instance, for chaining
     */
    public function setRaumId($raumId)
    {
        $this->raumId = $raumId;

        return $this;
    }

    /**
     * Returns the related room id.
     *
     * @return integer
     */
    public function getRaumId()
    {
        return $this->raumId;
    }

    /**
     * Sets the related category id.
     *
     * @param integer $kategorieId
     * @return ShopKategorieRvRaum this instance, for chaining
     */
    public function setKategorieId($kategorieId)
    {
        $this->kategorieId = $kategorieId;

        return $this;
    }

    /**
     * Returns the related category id.
     *
     * @return integer
     */
    public function getKategorieId()
    {
        return $this->kategorieId;
    }
}
| {
"content_hash": "9f616d8ad0d36b10586310a5109e0940",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 48,
"avg_line_length": 15.024096385542169,
"alnum_prop": 0.5252606255012029,
"repo_name": "mehnert-vecam/Symfony",
"id": "3398431f81abf2a85e4b289edb499427fcb02713",
"size": "1247",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Vest/SharedBundle/Entity/ShopKategorieRvRaum.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1047816"
},
{
"name": "JavaScript",
"bytes": "62726"
},
{
"name": "PHP",
"bytes": "594439"
},
{
"name": "Shell",
"bytes": "77"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>compcert: Not compatible</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.8.1 / compcert - 3.6+8.11</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
compcert
<small>
3.6+8.11
<span class="label label-info">Not compatible</span>
</small>
</h1>
<p><em><script>document.write(moment("2020-09-01 14:15:30 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2020-09-01 14:15:30 UTC)</em><p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-num base Num library distributed with the OCaml compiler
base-threads base
base-unix base
camlp5 7.12 Preprocessor-pretty-printer of OCaml
conf-findutils 1 Virtual package relying on findutils
conf-m4 1 Virtual package relying on m4
coq 8.8.1 Formal proof management system.
num 0 The Num library for arbitrary-precision integer and rational arithmetic
ocaml 4.05.0 The OCaml compiler (virtual package)
ocaml-base-compiler 4.05.0 Official 4.05.0 release
ocaml-config 1 OCaml Switch Configuration
ocamlfind 1.8.1 A library manager for OCaml
# opam file:
opam-version: "2.0"
authors: "Xavier Leroy <[email protected]>"
maintainer: "Jacques-Henri Jourdan <[email protected]>"
homepage: "http://compcert.inria.fr/"
dev-repo: "git+https://github.com/AbsInt/CompCert.git"
bug-reports: "https://github.com/AbsInt/CompCert/issues"
license: "INRIA Non-Commercial License Agreement"
build: [
["./configure" "ia32-linux" {os = "linux"}
"ia32-macosx" {os = "macos"}
"ia32-cygwin" {os = "cygwin"}
"-bindir" "%{bin}%"
"-libdir" "%{lib}%/compcert"
"-install-coqdev"
"-clightgen"
"-coqdevdir" "%{lib}%/coq/user-contrib/compcert"
"-ignore-coq-version"]
[make "-j%{jobs}%" {ocaml:version >= "4.06"}]
]
patches: "compat-8-11.patch"
extra-files: ["compat-8-11.patch" "sha256=1d54e39e9cda9ce8a408158580c09d0d76ff2accbd7524d1986aee4a7b0563dd"]
install: [
[make "install"]
["install" "-m" "0644" "VERSION" "%{lib}%/coq/user-contrib/compcert/"]
]
depends: [
# This are the release versions of Coq which include this version of compcert
# See compcert_CI_REF in
# https://github.com/coq/coq/blob/V8.11.0/dev/ci/ci-basic-overlay.sh
# See make_addon_compcert in
# https://github.com/coq/coq/blob/V8.11.0/dev/build/windows/makecoq_mingw.sh
"coq" {= "8.11.0"}
"menhir" {>= "20190626" & < "20200123"}
"ocaml" {>= "4.05.0"}
]
synopsis: "The CompCert C compiler (patched for Coq 8.11 compatibility)"
tags: [
"category:CS/Semantics and Compilation/Compilation"
"category:CS/Semantics and Compilation/Semantics"
"keyword:C"
"keyword:compiler"
"logpath:compcert"
"date:2019-10-02"
]
url {
src: "https://github.com/AbsInt/CompCert/archive/v3.6.tar.gz"
checksum: "sha256=7a77839f6b990ab632ba14feccf4f17da189f0e3b95d6ce2ef0986e4caebc575"
}
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-compcert.3.6+8.11 coq.8.8.1</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.8.1).
The following dependencies couldn't be met:
- coq-compcert -> coq = 8.11.0
Your request can't be satisfied:
- No available version of coq satisfies the constraints
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-compcert.3.6+8.11</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
<small>Sources are on <a href="https://github.com/coq-bench">GitHub</a>. © Guillaume Claret.</small>
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
| {
"content_hash": "2fd63a1781a58bc858a83025f3b379ca",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 157,
"avg_line_length": 42.16315789473684,
"alnum_prop": 0.5649731619023842,
"repo_name": "coq-bench/coq-bench.github.io",
"id": "1be03ef8c65836cbdd063c5991e6a3de738ff61c",
"size": "8013",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clean/Linux-x86_64-4.05.0-2.0.6/released/8.8.1/compcert/3.6+8.11.html",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.