repo_name (string, lengths 4–116) | path (string, lengths 3–942) | size (string, lengths 1–7) | content (string, lengths 3–1.05M) | license (string, 15 classes)
---|---|---|---|---|
skyisle/iosched2011
|
android/src/com/google/android/apps/iosched/ui/BaseSinglePaneActivity.java
|
2149
|
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.iosched.ui;
import com.google.android.apps.iosched.R;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
/**
* A {@link BaseActivity} that simply contains a single fragment. The intent used to invoke this
* activity is forwarded to the fragment as arguments during fragment instantiation. Derived
* activities should only need to implement
* {@link com.google.android.apps.iosched.ui.BaseSinglePaneActivity#onCreatePane()}.
*/
public abstract class BaseSinglePaneActivity extends BaseActivity {
private Fragment mFragment;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_singlepane_empty);
final String customTitle = getIntent().getStringExtra(Intent.EXTRA_TITLE);
//getActivityHelper().setActionBarTitle(customTitle != null ? customTitle : getTitle());
if (savedInstanceState == null) {
mFragment = onCreatePane();
mFragment.setArguments(intentToFragmentArguments(getIntent()));
getSupportFragmentManager().beginTransaction()
.add(R.id.root_container, mFragment)
.commit();
}
}
/**
* Called in <code>onCreate</code> when the fragment constituting this activity is needed.
* The returned fragment's arguments will be set to the intent used to invoke this activity.
*/
protected abstract Fragment onCreatePane();
}
|
apache-2.0
|
GoogleCloudPlatform/spring-cloud-gcp
|
spring-cloud-gcp-data-datastore/src/main/java/com/google/cloud/spring/data/datastore/core/mapping/event/ReadEvent.java
|
1298
|
/*
* Copyright 2017-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.spring.data.datastore.core.mapping.event;
import org.springframework.context.ApplicationEvent;
/** An event published when entities are read from Cloud Datastore. */
public class ReadEvent extends ApplicationEvent {
/**
* Constructor.
*
* @param results A list of results from the read operation where each item was mapped from a
* Cloud Datastore entity.
*/
public ReadEvent(Iterable results) {
super(results);
}
/**
* Get the list of results from the read operation.
*
* @return the list of results from the read operation.
*/
public Iterable getResults() {
return (Iterable) getSource();
}
}
|
apache-2.0
|
bisigc/art
|
doc/controllers/smell/class-use/routes.javascript.html
|
4269
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="de">
<head>
<!-- Generated by javadoc (1.8.0_60) on Thu Feb 18 18:53:20 CET 2016 -->
<title>Uses of Class controllers.smell.routes.javascript</title>
<meta name="date" content="2016-02-18">
<link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class controllers.smell.routes.javascript";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../controllers/smell/routes.javascript.html" title="class in controllers.smell">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?controllers/smell/class-use/routes.javascript.html" target="_top">Frames</a></li>
<li><a href="routes.javascript.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class controllers.smell.routes.javascript" class="title">Uses of Class<br>controllers.smell.routes.javascript</h2>
</div>
<div class="classUseContainer">No usage of controllers.smell.routes.javascript</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../controllers/smell/routes.javascript.html" title="class in controllers.smell">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?controllers/smell/class-use/routes.javascript.html" target="_top">Frames</a></li>
<li><a href="routes.javascript.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
|
apache-2.0
|
richardliaw/ray
|
python/ray/tune/registry.py
|
5525
|
import logging
from types import FunctionType
import ray
import ray.cloudpickle as pickle
from ray.experimental.internal_kv import _internal_kv_initialized, \
_internal_kv_get, _internal_kv_put
from ray.tune.error import TuneError
TRAINABLE_CLASS = "trainable_class"
ENV_CREATOR = "env_creator"
RLLIB_MODEL = "rllib_model"
RLLIB_PREPROCESSOR = "rllib_preprocessor"
RLLIB_ACTION_DIST = "rllib_action_dist"
TEST = "__test__"
KNOWN_CATEGORIES = [
TRAINABLE_CLASS, ENV_CREATOR, RLLIB_MODEL, RLLIB_PREPROCESSOR,
RLLIB_ACTION_DIST, TEST
]
logger = logging.getLogger(__name__)
def has_trainable(trainable_name):
return _global_registry.contains(TRAINABLE_CLASS, trainable_name)
def get_trainable_cls(trainable_name):
validate_trainable(trainable_name)
return _global_registry.get(TRAINABLE_CLASS, trainable_name)
def validate_trainable(trainable_name):
if not has_trainable(trainable_name):
# Make sure everything rllib-related is registered.
from ray.rllib import _register_all
_register_all()
if not has_trainable(trainable_name):
raise TuneError("Unknown trainable: " + trainable_name)
def register_trainable(name, trainable, warn=True):
"""Register a trainable function or class.
This enables a class or function to be accessed on every Ray process
in the cluster.
Args:
name (str): Name to register.
trainable (obj): Function or tune.Trainable class. Functions must
take (config, status_reporter) as arguments and will be
automatically converted into a class during registration.
"""
from ray.tune.trainable import Trainable
from ray.tune.function_runner import wrap_function
if isinstance(trainable, type):
logger.debug("Detected class for trainable.")
elif isinstance(trainable, FunctionType):
logger.debug("Detected function for trainable.")
trainable = wrap_function(trainable, warn=warn)
elif callable(trainable):
logger.info(
"Detected unknown callable for trainable. Converting to class.")
trainable = wrap_function(trainable, warn=warn)
if not issubclass(trainable, Trainable):
raise TypeError("Second argument must be convertable to Trainable",
trainable)
_global_registry.register(TRAINABLE_CLASS, name, trainable)
def register_env(name, env_creator):
"""Register a custom environment for use with RLlib.
This enables the environment to be accessed on every Ray process
in the cluster.
Args:
name (str): Name to register.
env_creator (obj): Function that creates an env.
"""
if not isinstance(env_creator, FunctionType):
raise TypeError("Second argument must be a function.", env_creator)
_global_registry.register(ENV_CREATOR, name, env_creator)
def check_serializability(key, value):
_global_registry.register(TEST, key, value)
def _make_key(category, key):
"""Generate a binary key for the given category and key.
Args:
category (str): The category of the item
key (str): The unique identifier for the item
Returns:
The key to use for storing the value.
"""
return (b"TuneRegistry:" + category.encode("ascii") + b"/" +
key.encode("ascii"))
class _Registry:
def __init__(self):
self._to_flush = {}
def register(self, category, key, value):
"""Registers the value with the global registry.
Raises:
PicklingError: If the value cannot be pickled.
"""
if category not in KNOWN_CATEGORIES:
from ray.tune import TuneError
raise TuneError("Unknown category {} not among {}".format(
category, KNOWN_CATEGORIES))
self._to_flush[(category, key)] = pickle.dumps_debug(value)
if _internal_kv_initialized():
self.flush_values()
def contains(self, category, key):
if _internal_kv_initialized():
value = _internal_kv_get(_make_key(category, key))
return value is not None
else:
return (category, key) in self._to_flush
def get(self, category, key):
if _internal_kv_initialized():
value = _internal_kv_get(_make_key(category, key))
if value is None:
raise ValueError(
"Registry value for {}/{} doesn't exist.".format(
category, key))
return pickle.loads(value)
else:
return pickle.loads(self._to_flush[(category, key)])
def flush_values(self):
for (category, key), value in self._to_flush.items():
_internal_kv_put(_make_key(category, key), value, overwrite=True)
self._to_flush.clear()
_global_registry = _Registry()
ray.worker._post_init_hooks.append(_global_registry.flush_values)
class _ParameterRegistry:
def __init__(self):
self.to_flush = {}
self.references = {}
def put(self, k, v):
self.to_flush[k] = v
if ray.is_initialized():
self.flush()
def get(self, k):
if not ray.is_initialized():
return self.to_flush[k]
return ray.get(self.references[k])
def flush(self):
for k, v in self.to_flush.items():
self.references[k] = ray.put(v)
self.to_flush.clear()
parameter_registry = _ParameterRegistry()
ray.worker._post_init_hooks.append(parameter_registry.flush)
|
apache-2.0
|
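The `registry.py` module above explains how `register_trainable` and `register_env` publish objects to every Ray process. A minimal usage sketch, assuming a Ray 1.x-era Tune matching that source; the names `my_trainable` and `my_env_creator` are illustrative, not from the source.

```python
# Hedged sketch: registering a trainable function and an env creator with the
# registry module above. Assumes a Ray 1.x-era Tune; names are illustrative.
import ray
from ray import tune
from ray.tune.registry import register_trainable, register_env, get_trainable_cls

def my_trainable(config):
    # Report a single dummy metric via the function-trainable reporting hook.
    tune.report(score=config.get("x", 0) * 2)

def my_env_creator(env_config):
    raise NotImplementedError("return a gym.Env-like object here")

ray.init(ignore_reinit_error=True)
register_trainable("my_trainable", my_trainable)  # pickled into the global registry
register_env("my_env", my_env_creator)            # later resolvable by name, e.g. by RLlib

assert get_trainable_cls("my_trainable") is not None
```

Because `register()` pickles the value and `flush_values()` writes it to Ray's internal KV store once Ray is initialized, the same registered names resolve on every worker process.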
uber/vertica-python
|
vertica_python/vertica/messages/frontend_messages/copy_data.py
|
2393
|
# Copyright (c) 2018-2021 Micro Focus or one of its affiliates.
# Copyright (c) 2018 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright (c) 2013-2017 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import print_function, division, absolute_import
from six import text_type, binary_type
from ..message import BulkFrontendMessage
class CopyData(BulkFrontendMessage):
message_id = b'd'
def __init__(self, data, unicode_error='strict'):
BulkFrontendMessage.__init__(self)
if isinstance(data, text_type):
self.bytes_ = data.encode(encoding='utf-8', errors=unicode_error)
elif isinstance(data, binary_type):
self.bytes_ = data
else:
raise TypeError("Data should be string or bytes")
def read_bytes(self):
return self.bytes_
|
apache-2.0
|
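As a small illustration of the str/bytes handling in `CopyData` above, a hedged usage sketch (assuming the package is importable from the path shown; the payload values are made up):

```python
from vertica_python.vertica.messages.frontend_messages.copy_data import CopyData

# str payloads are UTF-8 encoded (honoring unicode_error); bytes pass through.
assert CopyData("1|foo\n").read_bytes() == b"1|foo\n"
assert CopyData(b"2|bar\n").read_bytes() == b"2|bar\n"

try:
    CopyData(12345)
except TypeError:
    pass  # anything other than str/bytes is rejected by the constructor above
```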
sdw2330976/SpringBC
|
springbootjpa/src/test/java/com/sdw/soft/demo/rxjava/TestRxJava.java
|
726
|
package com.sdw.soft.demo.rxjava;
import com.google.common.base.Joiner;
import org.junit.Test;
import rx.Observable;
import rx.functions.Action1;
import rx.functions.Func1;
/**
* Created by shangyd on 2017/5/13.
*/
public class TestRxJava {
@Test
public void test01() {
Observable observable = Observable.just("hello").map(new Func1() {
@Override
public Object call(Object o) {
return Joiner.on(" ").useForNull("").join(new Object[]{o,"world"});
}
});
observable.subscribe(new Action1() {
@Override
public void call(Object o) {
System.out.println(o.toString());
}
});
}
}
|
apache-2.0
|
lliss/model-my-watershed
|
src/mmw/js/src/draw/tests.js
|
7248
|
"use strict";
require('../core/setup');
var $ = require('jquery'),
L = require('leaflet'),
assert = require('chai').assert,
sinon = require('sinon'),
Marionette = require('../../shim/backbone.marionette'),
App = require('../app'),
models = require('./models'),
utils = require('./utils'),
views = require('./views'),
settings = require('../core/settings'),
testUtils = require('../core/testUtils');
var sandboxId = 'sandbox',
sandboxSelector = '#' + sandboxId,
TEST_SHAPE = {
'type': 'MultiPolygon',
'coordinates': [[[-5e6, -1e6], [-4e6, 1e6], [-3e6, -1e6]]]
};
var SandboxRegion = Marionette.Region.extend({
el: sandboxSelector
});
describe('Draw', function() {
before(function() {
// Ensure that draw tools are enabled before testing
settings.set('draw_tools', [
'SelectArea', // Boundary Selector
'Draw', // Custom Area or 1 Sq Km stamp
'PlaceMarker', // Delineate Watershed
'ResetDraw',
]);
});
beforeEach(function() {
$('body').append('<div id="sandbox">');
});
afterEach(function() {
$(sandboxSelector).remove();
window.location.hash = '';
testUtils.resetApp(App);
});
describe('ToolbarView', function() {
// Setup the toolbar controls, enable/disable them, and verify
// the correct CSS classes are applied.
it('enables/disables toolbar controls when the model enableTools/disableTools methods are called', function() {
var sandbox = new SandboxRegion(),
$el = sandbox.$el,
model = new models.ToolbarModel(),
view = new views.ToolbarView({
model: model
});
sandbox.show(view);
populateSelectAreaDropdown($el, model);
// Nothing should be disabled at this point.
// Test that toggling the `toolsEnabled` property on the model
// will disable all drawing tools.
assert.equal($el.find('.disabled').size(), 0);
model.disableTools();
assert.equal($el.find('.disabled').size(), 3);
model.enableTools();
assert.equal($el.find('.disabled').size(), 0);
});
it('adds an AOI to the map after calling getShapeAndAnalyze', function(done) {
var successCount = 2,
deferred = setupGetShapeAndAnalyze(successCount),
success;
deferred.
done(function() {
assert.equal(App.map.get('areaOfInterest'), TEST_SHAPE);
success = true;
}).
fail(function() {
success = false;
}).
always(function() {
assert.equal(success, true);
done();
});
});
it('fails to add AOI when shape id cannot be retrieved by getShapeAndAnalyze', function(done) {
// Set successCount high enough so that the polling will fail.
var successCount = 6,
deferred = setupGetShapeAndAnalyze(successCount),
success;
deferred.
done(function() {
success = true;
}).
fail(function() {
success = false;
}).
always(function() {
assert.equal(success, false);
done();
});
});
it('resets the current area of interest on Reset', function() {
var setup = setupResetTestObject();
App.map.set('areaOfInterest', TEST_SHAPE);
setup.resetRegion.currentView.resetDrawingState();
assert.isNull(App.map.get('areaOfInterest',
'Area of Interest was not removed on reset from the map'));
});
it('resets the boundary layer on Reset', function() {
var setup = setupResetTestObject(),
ofg = L.featureGroup(),
testFeature = {
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [-104.99404, 39.75621]
}
};
ofg.addLayer(L.geoJson(testFeature));
assert.equal(ofg.getLayers().length, 1);
setup.model.set('outlineFeatureGroup', ofg);
setup.resetRegion.currentView.resetDrawingState();
assert.equal(ofg.getLayers().length, 0,
'Boundary Layer should have been removed from layer group');
});
it('removes in progress drawing on Reset', function() {
var setup = setupResetTestObject(),
spy = sinon.spy(utils, 'cancelDrawing');
utils.drawPolygon(setup.map);
setup.resetRegion.currentView.resetDrawingState();
assert.equal(spy.callCount, 1);
});
});
});
function setupGetShapeAndAnalyze(successCount) {
var sandbox = new SandboxRegion(),
model = new models.ToolbarModel(),
view = new views.ToolbarView({
model: model
}),
shapeId = 1,
e = {latlng: L.latLng(50.5, 30.5)},
ofg = model.get('outlineFeatureGroup'),
grid = {
callCount: 0,
_objectForEvent: function() { //mock grid returns shapeId on second call
this.callCount++;
if (this.callCount >= successCount) {
return {data: {id: shapeId}};
} else {
return {};
}
}
},
tableId = 2;
sandbox.show(view);
App.restApi = {
getPolygon: function() {
return $.Deferred().resolve(TEST_SHAPE).promise();
}
};
return views.getShapeAndAnalyze(e, model, ofg, grid, tableId);
}
function setupResetTestObject() {
var sandbox = new SandboxRegion(),
model = new models.ToolbarModel(),
view = new views.ToolbarView({
model: model
}),
resetRegion = view.getRegion('resetRegion'),
map = App.getLeafletMap();
sandbox.show(view);
return {
sandbox: sandbox,
model: model,
view: view,
resetRegion: resetRegion,
map: map
};
}
function assertTextEqual($el, sel, text) {
assert.equal($el.find(sel).text().trim(), text);
}
function populateSelectAreaDropdown($el, toolbarModel) {
// This control should start off in a Loading state.
assertTextEqual($el, '#select-area-region button', 'Loading...');
// Load some shapes...
toolbarModel.set('predefinedShapeTypes', [
{
"endpoint": "http://localhost:4000/0/{z}/{x}/{y}",
"display": "Congressional Districts",
"name": "tiles"
}]);
// This dropdown should now be populated.
assertTextEqual($el, '#select-area-region button', 'Select by Boundary');
assertTextEqual($el, '#select-area-region li', 'Congressional Districts');
}
|
apache-2.0
|
gravitee-io/graviteeio-access-management
|
gravitee-am-service/src/test/java/io/gravitee/am/service/validators/PathValidatorTest.java
|
2272
|
/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.am.service.validators;
import io.gravitee.am.service.exception.InvalidPathException;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* @author Jeoffrey HAEYAERT (jeoffrey.haeyaert at graviteesource.com)
* @author GraviteeSource Team
*/
public class PathValidatorTest {
@Test
public void validate() {
Throwable throwable = PathValidator.validate("/test").blockingGet();
assertNull(throwable);
}
@Test
public void validateSpecialCharacters() {
Throwable throwable = PathValidator.validate("/test/subpath/subpath2_with-and.dot/AND_UPPERCASE").blockingGet();
assertNull(throwable);
}
@Test
public void validate_invalidEmptyPath() {
Throwable throwable = PathValidator.validate("").blockingGet();
assertNotNull(throwable);
assertTrue(throwable instanceof InvalidPathException);
}
@Test
public void validate_nullPath() {
Throwable throwable = PathValidator.validate(null).blockingGet();
assertNotNull(throwable);
assertTrue(throwable instanceof InvalidPathException);
}
@Test
public void validate_multipleSlashesPath() {
Throwable throwable = PathValidator.validate("/////test////").blockingGet();
assertNotNull(throwable);
assertTrue(throwable instanceof InvalidPathException);
}
@Test
public void validate_invalidCharacters() {
Throwable throwable = PathValidator.validate("/test$:\\;,+").blockingGet();
assertNotNull(throwable);
assertTrue(throwable instanceof InvalidPathException);
}
}
|
apache-2.0
|
JohnSnowLabs/spark-nlp
|
docs/_posts/dcecchini/2021-03-10-pos_vtb_vi.md
|
4467
|
---
layout: model
title: Part of Speech for Vietnamese
author: John Snow Labs
name: pos_vtb
date: 2021-03-10
tags: [open_source, pos, vi]
supported: true
task: Part of Speech Tagging
language: vi
edition: Spark NLP 2.7.5
spark_version: 2.4
article_header:
type: cover
use_language_switcher: "Python-Scala-Java"
---
## Description
A [Part of Speech](https://en.wikipedia.org/wiki/Part_of_speech) classifier predicts a grammatical label for every token in the input text. Implemented with an `averaged perceptron` architecture.
## Predicted Entities
- ADJ
- ADP
- ADV
- AUX
- CCONJ
- DET
- NOUN
- NUM
- PART
- PRON
- PROPN
- PUNCT
- VERB
- X
{:.btn-box}
[Live Demo](https://demo.johnsnowlabs.com/public/GRAMMAR_EN/){:.button.button-orange}
[Open in Colab](https://colab.research.google.com/github/JohnSnowLabs/spark-nlp-workshop/blob/master/tutorials/streamlit_notebooks/GRAMMAR_EN.ipynb){:.button.button-orange.button-orange-trans.co.button-icon}
[Download](https://s3.amazonaws.com/auxdata.johnsnowlabs.com/public/models/pos_vtb_vi_2.7.5_2.4_1615401332222.zip){:.button.button-orange.button-orange-trans.arr.button-icon}
## How to use
<div class="tabs-box" markdown="1">
{% include programmingLanguageSelectScalaPythonNLU.html %}
```python
document_assembler = DocumentAssembler() \
.setInputCol("text") \
.setOutputCol("document")
sentence_detector = SentenceDetector() \
.setInputCols(["document"]) \
.setOutputCol("sentence")
tokenizer = Tokenizer() \
.setInputCols(["sentence"]) \
.setOutputCol("token")
pos = PerceptronModel.pretrained("pos_vtb", "vi") \
.setInputCols(["document", "token"]) \
.setOutputCol("pos")
pipeline = Pipeline(stages=[
document_assembler,
sentence_detector,
tokenizer,
pos
])
example = spark.createDataFrame([['Thắng sẽ tìm nghề mới cho Lan .']], ["text"])
result = pipeline.fit(example).transform(example)
```
```scala
val document_assembler = new DocumentAssembler()
.setInputCol("text")
.setOutputCol("document")
val sentence_detector = new SentenceDetector()
.setInputCols(Array("document"))
.setOutputCol("sentence")
val tokenizer = new Tokenizer()
.setInputCols(Array("sentence"))
.setOutputCol("token")
val pos = PerceptronModel.pretrained("pos_vtb", "vi")
.setInputCols(Array("document", "token"))
.setOutputCol("pos")
val pipeline = new Pipeline().setStages(Array(document_assembler, sentence_detector, tokenizer, pos))
val data = Seq("Thắng sẽ tìm nghề mới cho Lan .").toDF("text")
val result = pipeline.fit(data).transform(data)
```
{:.nlu-block}
```python
import nlu
text = [""Thắng sẽ tìm nghề mới cho Lan .""]
token_df = nlu.load('vi.pos.vtb').predict(text)
token_df
```
</div>
## Results
```bash
+-------------------------------+--------------------------------------------+
|text |result |
+-------------------------------+--------------------------------------------+
|Thắng sẽ tìm nghề mới cho Lan .|[NOUN, X, VERB, NOUN, ADJ, ADP, NOUN, PUNCT]|
+-------------------------------+--------------------------------------------+
```
{:.model-param}
## Model Information
{:.table-model}
|---|---|
|Model Name:|pos_vtb|
|Compatibility:|Spark NLP 2.7.5+|
|License:|Open Source|
|Edition:|Official|
|Input Labels:|[sentence, token]|
|Output Labels:|[pos]|
|Language:|vi|
## Data Source
The model was trained on the [Universal Dependencies](https://www.universaldependencies.org) data set.
## Benchmarking
```bash
| | precision | recall | f1-score | support |
|--------------|-----------|--------|----------|---------|
| ADJ | 0.58 | 0.49 | 0.53 | 738 |
| ADP | 0.84 | 0.87 | 0.86 | 688 |
| AUX | 0.79 | 0.95 | 0.87 | 132 |
| CCONJ | 0.85 | 0.80 | 0.83 | 335 |
| DET | 0.95 | 0.85 | 0.90 | 232 |
| INTJ | 1.00 | 0.14 | 0.25 | 7 |
| NOUN | 0.84 | 0.86 | 0.85 | 3838 |
| NUM | 0.94 | 0.91 | 0.92 | 412 |
| PART | 0.53 | 0.30 | 0.38 | 87 |
| PROPN | 0.85 | 0.85 | 0.85 | 494 |
| PUNCT | 0.97 | 0.99 | 0.98 | 1722 |
| SCONJ | 0.99 | 0.98 | 0.98 | 122 |
| VERB | 0.73 | 0.76 | 0.74 | 2178 |
| X | 0.81 | 0.76 | 0.79 | 970 |
| accuracy | | | 0.83 | 11955 |
| macro avg | 0.83 | 0.75 | 0.77 | 11955 |
| weighted avg | 0.83 | 0.83 | 0.83 | 11955 |
```
|
apache-2.0
|
sxyunfeng/fcms
|
apps/admin/controllers/PayconfigController.php
|
7003
|
<?php
/**
* Payment configuration
* @author hfc
* @date 2015-8-31
*/
namespace apps\admin\controllers;
use apps\admin\models\Payment;
use apps\admin\models\PaymentPlugin;
use enums\SystemEnums;
use Phalcon\Db\Profiler;
use Phalcon\Paginator\Adapter\Model as PaginatorModel;
use Phalcon\Validation;
use Phalcon\Validation\Validator\PresenceOf;
class PayconfigController extends AdminBaseController
{
public function initialize()
{
parent::initialize();
}
/**
* @author( author='hfc' )
* @date( date = '2015-8-31' )
* @comment( comment = 'Payments in use' )
* @method( method = 'indexAction' )
* @op( op = 'r' )
*/
public function indexAction()
{
$pageNum = $this->request->getQuery( 'page', 'int' );
$currentPage = $pageNum ? $pageNum : 1;
$payModel = new Payment();
$pay = $payModel->getPayment( $this->shopId );
$pagination = new PaginatorModel( array( 'data' => $pay,
'limit' => 10,
'page' => $currentPage
));
$page = $pagination->getPaginate();
$this->view->page = $page;
}
/**
* @author( author='hfc' )
* @date( date = '2015-8-31' )
* @comment( comment = 'All payments' )
* @method( method = 'indexAction' )
* @op( op = 'r' )
*/
public function allAction()
{
$pageNum = $this->request->getQuery( 'page', 'int' );
$currentPage = $pageNum ? $pageNum : 1;
$pay = PaymentPlugin::find( 'delsign=' . SystemEnums::DELSIGN_NO );
$pagination = new PaginatorModel( array( 'data' => $pay,
'limit' => 10,
'page' => $currentPage
));
$page = $pagination->getPaginate();
$this->view->page = $page;
}
/**
* @author( author='hfc' )
* @date( date = '2015-8-31' )
* @comment( comment = 'Delete a payment in use' )
* @method( method = 'deleteAction' )
* @op( op = 'd' )
*/
public function deleteAction()
{
$id = $this->request->getPost( 'id', 'int' );
$profile = new Profiler();
$payment = Payment::findFirst( array( 'id=?0', 'bind' => array( $id )));
if( $payment )
{
$status = $payment->update( array( 'delsign' => SystemEnums::DELSIGN_YES ) );
if( $status )
{
$this->success( 'Delete succeeded' );
}
else
{
$this->error( 'Delete failed' );
}
}
}
/**
* @author( author='hfc' )
* @date( date = '2015-9-1' )
* @comment( comment = 'Show the add form for a payment in use' )
* @method( method = 'editAction' )
* @op( op = '' )
*/
public function addAction()
{
$id = $this->request->getQuery( 'id', 'int' );
if( $id )
{
$plugin = PaymentPlugin::findFirst( array( 'id=?0', 'bind' => array( $id ), 'columns' => 'id,name'));
if( $plugin )
{
$this->view->plugin = $plugin->toArray();
}
}
}
/**
* @author( author='hfc' )
* @date( date = '2015-9-1' )
* @comment( comment = 'Edit a payment in use' )
* @method( method = 'editAction' )
* @op( op = '' )
*/
public function editAction()
{
$id = $this->request->getQuery( 'id', 'int' );
$pay = Payment::findFirst( array( 'id=?0', 'bind' => array( $id )));
if( $pay )
{
$this->view->pay = $pay->toArray();
}
}
/**
* @author( author='hfc' )
* @date( date = '2015-9-1' )
* @comment( comment = 'Edit a payment in use' )
* @method( method = 'editAction' )
* @op( op = '' )
*/
public function readAction()
{
$this->editAction();
$this->view->isRead = true;
$this->view->pick( 'payconfig/edit' );
}
/**
* @author( author='hfc' )
* @date( date = '2015-9-1' )
* @comment( comment = 'Update a payment in use' )
* @method( method = 'updateAction' )
* @op( op = 'u' )
*/
public function updateAction()
{
$this->csrfCheck();
$id = $this->request->getPost( 'id', 'int' );
$data[ 'pay_name' ] = $this->request->getPost( 'pay_name', 'string' );
$data[ 'partner_id' ] = $this->request->getPost( 'partner_id', 'string' );
$data[ 'partner_key' ] = $this->request->getPost( 'partner_key', 'string' );
$data[ 'status' ] = $this->request->getPost( 'status', 'int' );
$data[ 'sort' ] = $this->request->getPost( 'sort', 'int' );
$this->validation( $data );
$pay = Payment::findFirst( array( 'id=?0', 'bind' => array( $id )));
if( $pay )
{
$status = $pay->update( $data );
if( $status )
{
$this->success( 'Update succeeded' );
}
}
$this->error( 'Update failed' );
}
/**
* @author( author='hfc' )
* @date( date = '2015-9-1' )
* @comment( comment = 'Add a payment in use' )
* @method( method = 'insertAction' )
* @op( op = 'c' )
*/
public function insertAction()
{
$this->csrfCheck();
$data[ 'pay_name' ] = $this->request->getPost( 'pay_name', 'string' );
$data[ 'plugin_id' ] = $this->request->getPost( 'plugin_id', 'string' );
$data[ 'partner_id' ] = $this->request->getPost( 'partner_id', 'string' );
$data[ 'partner_key' ] = $this->request->getPost( 'partner_key', 'string' );
$data[ 'status' ] = $this->request->getPost( 'status', 'int' );
$data[ 'sort' ] = $this->request->getPost( 'sort', 'int' );
$this->validation( $data );
$data[ 'delsign' ] = $data[ 'status' ] = 0;
$data[ 'shop_id' ] = $this->shopId;
$pay = new Payment();
if( $pay )
{
$status = $pay->save( $data );
if( $status )
{
$this->success( 'Add succeeded' );
}
}
$this->error( 'Add failed' );
}
/**
* @author( author='hfc' )
* @date( date = '2015-9-1' )
* @comment( comment = 'Validate data' )
* @method( method = 'validation' )
* @op( op = 'u' )
*/
private function validation( $data )
{
$validation = new Validation();
$validation->add( 'partner_id', new PresenceOf( array(
'message' => 'Partner ID is required'
) ) );
$validation->add( 'partner_key', new PresenceOf( array(
'message' => 'Security key is required'
) ) );
$msgs = $validation->validate( $data );
if( count( $msgs ))
{
foreach( $msgs as $m )
{
$this->error( $m->getMessage() );
}
}
}
}
|
apache-2.0
|
googleads/google-ads-php
|
src/Google/Ads/GoogleAds/V8/Services/Gapic/AdGroupServiceGapicClient.php
|
16250
|
<?php
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* GENERATED CODE WARNING
* Generated by gapic-generator-php from the file
* https://github.com/google/googleapis/blob/master/google/ads/googleads/v8/services/ad_group_service.proto
* Updates to the above are reflected here through a refresh process.
*/
namespace Google\Ads\GoogleAds\V8\Services\Gapic;
use Google\Ads\GoogleAds\V8\Resources\AdGroup;
use Google\Ads\GoogleAds\V8\Services\AdGroupOperation;
use Google\Ads\GoogleAds\V8\Services\GetAdGroupRequest;
use Google\Ads\GoogleAds\V8\Services\MutateAdGroupsRequest;
use Google\Ads\GoogleAds\V8\Services\MutateAdGroupsResponse;
use Google\ApiCore\ApiException;
use Google\ApiCore\CredentialsWrapper;
use Google\ApiCore\GapicClientTrait;
use Google\ApiCore\PathTemplate;
use Google\ApiCore\RequestParamsHeaderDescriptor;
use Google\ApiCore\RetrySettings;
use Google\ApiCore\Transport\TransportInterface;
use Google\ApiCore\ValidationException;
use Google\Auth\FetchAuthTokenInterface;
/**
* Service Description: Service to manage ad groups.
*
* This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* ```
* $adGroupServiceClient = new AdGroupServiceClient();
* try {
* $formattedResourceName = $adGroupServiceClient->adGroupName('[CUSTOMER_ID]', '[AD_GROUP_ID]');
* $response = $adGroupServiceClient->getAdGroup($formattedResourceName);
* } finally {
* $adGroupServiceClient->close();
* }
* ```
*
* Many parameters require resource names to be formatted in a particular way. To
* assist with these names, this class includes a format method for each type of
* name, and additionally a parseName method to extract the individual identifiers
* contained within formatted names that are returned by the API.
*/
class AdGroupServiceGapicClient
{
use GapicClientTrait;
/**
* The name of the service.
*/
const SERVICE_NAME = 'google.ads.googleads.v8.services.AdGroupService';
/**
* The default address of the service.
*/
const SERVICE_ADDRESS = 'googleads.googleapis.com';
/**
* The default port of the service.
*/
const DEFAULT_SERVICE_PORT = 443;
/**
* The name of the code generator, to be included in the agent header.
*/
const CODEGEN_NAME = 'gapic';
/**
* The default scopes required by the service.
*/
public static $serviceScopes = [
'https://www.googleapis.com/auth/adwords',
];
private static $adGroupNameTemplate;
private static $pathTemplateMap;
private static function getClientDefaults()
{
return [
'serviceName' => self::SERVICE_NAME,
'serviceAddress' => self::SERVICE_ADDRESS . ':' . self::DEFAULT_SERVICE_PORT,
'clientConfig' => __DIR__ . '/../resources/ad_group_service_client_config.json',
'descriptorsConfigPath' => __DIR__ . '/../resources/ad_group_service_descriptor_config.php',
'gcpApiConfigPath' => __DIR__ . '/../resources/ad_group_service_grpc_config.json',
'credentialsConfig' => [
'defaultScopes' => self::$serviceScopes,
],
'transportConfig' => [
'rest' => [
'restClientConfigPath' => __DIR__ . '/../resources/ad_group_service_rest_client_config.php',
],
],
];
}
private static function getAdGroupNameTemplate()
{
if (self::$adGroupNameTemplate == null) {
self::$adGroupNameTemplate = new PathTemplate('customers/{customer_id}/adGroups/{ad_group_id}');
}
return self::$adGroupNameTemplate;
}
private static function getPathTemplateMap()
{
if (self::$pathTemplateMap == null) {
self::$pathTemplateMap = [
'adGroup' => self::getAdGroupNameTemplate(),
];
}
return self::$pathTemplateMap;
}
/**
* Formats a string containing the fully-qualified path to represent an ad_group
* resource.
*
* @param string $customerId
* @param string $adGroupId
*
* @return string The formatted ad_group resource.
*/
public static function adGroupName($customerId, $adGroupId)
{
return self::getAdGroupNameTemplate()->render([
'customer_id' => $customerId,
'ad_group_id' => $adGroupId,
]);
}
/**
* Parses a formatted name string and returns an associative array of the components in the name.
* The following name formats are supported:
* Template: Pattern
* - adGroup: customers/{customer_id}/adGroups/{ad_group_id}
*
* The optional $template argument can be supplied to specify a particular pattern,
* and must match one of the templates listed above. If no $template argument is
* provided, or if the $template argument does not match one of the templates
* listed, then parseName will check each of the supported templates, and return
* the first match.
*
* @param string $formattedName The formatted name string
* @param string $template Optional name of template to match
*
* @return array An associative array from name component IDs to component values.
*
* @throws ValidationException If $formattedName could not be matched.
*/
public static function parseName($formattedName, $template = null)
{
$templateMap = self::getPathTemplateMap();
if ($template) {
if (!isset($templateMap[$template])) {
throw new ValidationException("Template name $template does not exist");
}
return $templateMap[$template]->match($formattedName);
}
foreach ($templateMap as $templateName => $pathTemplate) {
try {
return $pathTemplate->match($formattedName);
} catch (ValidationException $ex) {
// Swallow the exception to continue trying other path templates
}
}
throw new ValidationException("Input did not match any known format. Input: $formattedName");
}
/**
* Constructor.
*
* @param array $options {
* Optional. Options for configuring the service API wrapper.
*
* @type string $serviceAddress
* The address of the API remote host. May optionally include the port, formatted
* as "<uri>:<port>". Default 'googleads.googleapis.com:443'.
* @type string|array|FetchAuthTokenInterface|CredentialsWrapper $credentials
* The credentials to be used by the client to authorize API calls. This option
* accepts either a path to a credentials file, or a decoded credentials file as a
* PHP array.
* *Advanced usage*: In addition, this option can also accept a pre-constructed
* {@see \Google\Auth\FetchAuthTokenInterface} object or
* {@see \Google\ApiCore\CredentialsWrapper} object. Note that when one of these
* objects are provided, any settings in $credentialsConfig will be ignored.
* @type array $credentialsConfig
* Options used to configure credentials, including auth token caching, for the
* client. For a full list of supporting configuration options, see
* {@see \Google\ApiCore\CredentialsWrapper::build()} .
* @type bool $disableRetries
* Determines whether or not retries defined by the client configuration should be
* disabled. Defaults to `false`.
* @type string|array $clientConfig
* Client method configuration, including retry settings. This option can be either
* a path to a JSON file, or a PHP array containing the decoded JSON data. By
* default this settings points to the default client config file, which is
* provided in the resources folder.
* @type string|TransportInterface $transport
* The transport used for executing network requests. May be either the string
* `rest` or `grpc`. Defaults to `grpc` if gRPC support is detected on the system.
* *Advanced usage*: Additionally, it is possible to pass in an already
* instantiated {@see \Google\ApiCore\Transport\TransportInterface} object. Note
* that when this object is provided, any settings in $transportConfig, and any
* $serviceAddress setting, will be ignored.
* @type array $transportConfig
* Configuration options that will be used to construct the transport. Options for
* each supported transport type should be passed in a key for that transport. For
* example:
* $transportConfig = [
* 'grpc' => [...],
* 'rest' => [...],
* ];
* See the {@see \Google\ApiCore\Transport\GrpcTransport::build()} and
* {@see \Google\ApiCore\Transport\RestTransport::build()} methods for the
* supported options.
* @type callable $clientCertSource
* A callable which returns the client cert as a string. This can be used to
* provide a certificate and private key to the transport layer for mTLS.
* }
*
* @throws ValidationException
*/
public function __construct(array $options = [])
{
$clientOptions = $this->buildClientOptions($options);
$this->setClientOptions($clientOptions);
}
/**
* Returns the requested ad group in full detail.
*
* List of thrown errors:
* [AuthenticationError]()
* [AuthorizationError]()
* [HeaderError]()
* [InternalError]()
* [QuotaError]()
* [RequestError]()
*
* Sample code:
* ```
* $adGroupServiceClient = new AdGroupServiceClient();
* try {
* $formattedResourceName = $adGroupServiceClient->adGroupName('[CUSTOMER_ID]', '[AD_GROUP_ID]');
* $response = $adGroupServiceClient->getAdGroup($formattedResourceName);
* } finally {
* $adGroupServiceClient->close();
* }
* ```
*
* @param string $resourceName Required. The resource name of the ad group to fetch.
* @param array $optionalArgs {
* Optional.
*
* @type RetrySettings|array $retrySettings
* Retry settings to use for this call. Can be a
* {@see Google\ApiCore\RetrySettings} object, or an associative array of retry
* settings parameters. See the documentation on
* {@see Google\ApiCore\RetrySettings} for example usage.
* }
*
* @return \Google\Ads\GoogleAds\V8\Resources\AdGroup
*
* @throws ApiException if the remote call fails
*/
public function getAdGroup($resourceName, array $optionalArgs = [])
{
$request = new GetAdGroupRequest();
$requestParamHeaders = [];
$request->setResourceName($resourceName);
$requestParamHeaders['resource_name'] = $resourceName;
$requestParams = new RequestParamsHeaderDescriptor($requestParamHeaders);
$optionalArgs['headers'] = isset($optionalArgs['headers']) ? array_merge($requestParams->getHeader(), $optionalArgs['headers']) : $requestParams->getHeader();
return $this->startCall('GetAdGroup', AdGroup::class, $optionalArgs, $request)->wait();
}
/**
* Creates, updates, or removes ad groups. Operation statuses are returned.
*
* List of thrown errors:
* [AdGroupError]()
* [AdxError]()
* [AuthenticationError]()
* [AuthorizationError]()
* [BiddingError]()
* [BiddingStrategyError]()
* [DatabaseError]()
* [DateError]()
* [DistinctError]()
* [FieldError]()
* [FieldMaskError]()
* [HeaderError]()
* [IdError]()
* [InternalError]()
* [ListOperationError]()
* [MultiplierError]()
* [MutateError]()
* [NewResourceCreationError]()
* [NotEmptyError]()
* [NullError]()
* [OperationAccessDeniedError]()
* [OperatorError]()
* [QuotaError]()
* [RangeError]()
* [RequestError]()
* [ResourceCountLimitExceededError]()
* [SettingError]()
* [SizeLimitError]()
* [StringFormatError]()
* [StringLengthError]()
* [UrlFieldError]()
*
* Sample code:
* ```
* $adGroupServiceClient = new AdGroupServiceClient();
* try {
* $customerId = 'customer_id';
* $operations = [];
* $response = $adGroupServiceClient->mutateAdGroups($customerId, $operations);
* } finally {
* $adGroupServiceClient->close();
* }
* ```
*
* @param string $customerId Required. The ID of the customer whose ad groups are being modified.
* @param AdGroupOperation[] $operations Required. The list of operations to perform on individual ad groups.
* @param array $optionalArgs {
* Optional.
*
* @type bool $partialFailure
* If true, successful operations will be carried out and invalid
* operations will return errors. If false, all operations will be carried
* out in one transaction if and only if they are all valid.
* Default is false.
* @type bool $validateOnly
* If true, the request is validated but not executed. Only errors are
* returned, not results.
* @type int $responseContentType
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned post mutation.
* For allowed values, use constants defined on {@see \Google\Ads\GoogleAds\V8\Enums\ResponseContentTypeEnum\ResponseContentType}
* @type RetrySettings|array $retrySettings
* Retry settings to use for this call. Can be a
* {@see Google\ApiCore\RetrySettings} object, or an associative array of retry
* settings parameters. See the documentation on
* {@see Google\ApiCore\RetrySettings} for example usage.
* }
*
* @return \Google\Ads\GoogleAds\V8\Services\MutateAdGroupsResponse
*
* @throws ApiException if the remote call fails
*/
public function mutateAdGroups($customerId, $operations, array $optionalArgs = [])
{
$request = new MutateAdGroupsRequest();
$requestParamHeaders = [];
$request->setCustomerId($customerId);
$request->setOperations($operations);
$requestParamHeaders['customer_id'] = $customerId;
if (isset($optionalArgs['partialFailure'])) {
$request->setPartialFailure($optionalArgs['partialFailure']);
}
if (isset($optionalArgs['validateOnly'])) {
$request->setValidateOnly($optionalArgs['validateOnly']);
}
if (isset($optionalArgs['responseContentType'])) {
$request->setResponseContentType($optionalArgs['responseContentType']);
}
$requestParams = new RequestParamsHeaderDescriptor($requestParamHeaders);
$optionalArgs['headers'] = isset($optionalArgs['headers']) ? array_merge($requestParams->getHeader(), $optionalArgs['headers']) : $requestParams->getHeader();
return $this->startCall('MutateAdGroups', MutateAdGroupsResponse::class, $optionalArgs, $request)->wait();
}
}
|
apache-2.0
|
skuda/client-python
|
kubernetes/test/test_v1beta1_cluster_role_list.py
|
917
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1beta1_cluster_role_list import V1beta1ClusterRoleList
class TestV1beta1ClusterRoleList(unittest.TestCase):
""" V1beta1ClusterRoleList unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1beta1ClusterRoleList(self):
"""
Test V1beta1ClusterRoleList
"""
model = kubernetes.client.models.v1beta1_cluster_role_list.V1beta1ClusterRoleList()
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
michaelhkw/incubator-impala
|
be/src/exec/read-write-util.h
|
10069
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#ifndef IMPALA_EXEC_READ_WRITE_UTIL_H
#define IMPALA_EXEC_READ_WRITE_UTIL_H
#include <boost/cstdint.hpp>
#include <sstream>
#include "common/logging.h"
#include "common/status.h"
#include "util/bit-util.h"
namespace impala {
#define RETURN_IF_FALSE(x) if (UNLIKELY(!(x))) return false
/// Class for reading and writing various data types.
/// Note: be very careful using *signed* ints. Casting from a signed int to
/// an unsigned is not a problem. However, bit shifts will do sign extension
/// on unsigned ints, which is rarely the right thing to do for byte level
/// operations.
class ReadWriteUtil {
public:
/// Maximum length for Writeable VInt
static const int MAX_VINT_LEN = 9;
/// Maximum lengths for Zigzag encodings.
const static int MAX_ZINT_LEN = 5;
const static int MAX_ZLONG_LEN = 10;
/// Put a zigzag encoded integer into a buffer and return its length.
static int PutZInt(int32_t integer, uint8_t* buf);
/// Put a zigzag encoded long integer into a buffer and return its length.
static int PutZLong(int64_t longint, uint8_t* buf);
/// Get a big endian integer from a buffer. The buffer does not have to be word aligned.
template<typename T>
static T GetInt(const uint8_t* buffer);
/// Get a variable-length Long or int value from a byte buffer.
/// Returns the length of the long/int
/// If the size byte is corrupted then return -1;
static int GetVLong(uint8_t* buf, int64_t* vlong);
static int GetVInt(uint8_t* buf, int32_t* vint);
/// Writes a variable-length Long or int value to a byte buffer.
/// Returns the number of bytes written
static int64_t PutVLong(int64_t val, uint8_t* buf);
static int64_t PutVInt(int32_t val, uint8_t* buf);
/// returns size of the encoded long value, not including the 1 byte for length
static int VLongRequiredBytes(int64_t val);
/// Read a variable-length Long value from a byte buffer starting at the specified
/// byte offset.
static int GetVLong(uint8_t* buf, int64_t offset, int64_t* vlong);
/// Put an Integer into a buffer in big endian order. The buffer must be big
/// enough.
static void PutInt(uint8_t* buf, uint16_t integer);
static void PutInt(uint8_t* buf, uint32_t integer);
static void PutInt(uint8_t* buf, uint64_t integer);
/// Dump the first length bytes of buf to a Hex string.
static std::string HexDump(const uint8_t* buf, int64_t length);
static std::string HexDump(const char* buf, int64_t length);
/// Determines the sign of a VInt/VLong from the first byte.
static bool IsNegativeVInt(int8_t byte);
/// Determines the total length in bytes of a Writable VInt/VLong from the first byte.
static int DecodeVIntSize(int8_t byte);
/// Return values for ReadZLong() and ReadZInt(). We return these in a single struct,
/// rather than using an output parameter, for performance (this way both values are
/// returned as registers).
template <typename T>
struct ZResult {
/// False if there was a problem reading the value.
bool ok;
/// The decoded value. Only valid if 'ok' is true.
T val;
ZResult(T v) : ok(true), val(v) { }
static ZResult error() { return ZResult(); }
private:
ZResult() : ok(false) { }
};
typedef ZResult<int64_t> ZLongResult;
typedef ZResult<int32_t> ZIntResult;
/// Read a zig-zag encoded long. This is the integer encoding defined by google.com
/// protocol-buffers: https://developers.google.com/protocol-buffers/docs/encoding. *buf
/// is incremented past the encoded long. 'buf_end' should point to the end of 'buf'
/// (i.e. the first invalid byte).
///
/// Returns a non-OK result if the encoded int spans too many bytes. Unspecified
/// for values that have the correct number of bytes but overflow the destination type
/// (for both long and int, there are extra bits in the highest-order byte).
static inline ZLongResult ReadZLong(uint8_t** buf, uint8_t* buf_end) {
return ReadZInteger<MAX_ZLONG_LEN, ZLongResult>(buf, buf_end);
}
/// Read a zig-zag encoded int.
static inline ZIntResult ReadZInt(uint8_t** buf, uint8_t* buf_end) {
return ReadZInteger<MAX_ZINT_LEN, ZIntResult>(buf, buf_end);
}
/// The following methods read data from a buffer without assuming the buffer is long
/// enough. If the buffer isn't long enough or another error occurs, they return false
/// and update the status with the error. Otherwise they return true. buffer is advanced
/// past the data read and buf_len is decremented appropriately.
/// Read a native type T (e.g. bool, float) directly into output (i.e. input is cast
/// directly to T and incremented by sizeof(T)).
template <class T>
static bool Read(uint8_t** buf, int* buf_len, T* val, Status* status);
/// Skip the next num_bytes bytes.
static bool SkipBytes(uint8_t** buf, int* buf_len, int num_bytes, Status* status);
private:
/// Implementation for ReadZLong() and ReadZInt(). MAX_LEN is MAX_ZLONG_LEN or
/// MAX_ZINT_LEN.
template<int MAX_LEN, typename ZResult>
static ZResult ReadZInteger(uint8_t** buf, uint8_t* buf_end);
};
template<>
inline uint16_t ReadWriteUtil::GetInt(const uint8_t* buf) {
return (buf[0] << 8) | buf[1];
}
template<>
inline uint32_t ReadWriteUtil::GetInt(const uint8_t* buf) {
return (buf[0] << 24) | (buf[1] << 16) | (buf[2] << 8) | buf[3];
}
template<>
inline uint64_t ReadWriteUtil::GetInt(const uint8_t* buf) {
uint64_t upper_half = GetInt<uint32_t>(buf);
uint64_t lower_half = GetInt<uint32_t>(buf + 4);
return lower_half | upper_half << 32;
}
inline void ReadWriteUtil::PutInt(uint8_t* buf, uint16_t integer) {
buf[0] = integer >> 8;
buf[1] = integer;
}
inline void ReadWriteUtil::PutInt(uint8_t* buf, uint32_t integer) {
uint32_t big_endian = BitUtil::ByteSwap(integer);
memcpy(buf, &big_endian, sizeof(uint32_t));
}
inline void ReadWriteUtil::PutInt(uint8_t* buf, uint64_t integer) {
uint64_t big_endian = BitUtil::ByteSwap(integer);
memcpy(buf, &big_endian, sizeof(uint64_t));
}
inline int ReadWriteUtil::GetVInt(uint8_t* buf, int32_t* vint) {
int64_t vlong = 0;
int len = GetVLong(buf, &vlong);
*vint = static_cast<int32_t>(vlong);
return len;
}
inline int ReadWriteUtil::GetVLong(uint8_t* buf, int64_t* vlong) {
return GetVLong(buf, 0, vlong);
}
inline int ReadWriteUtil::GetVLong(uint8_t* buf, int64_t offset, int64_t* vlong) {
int8_t firstbyte = (int8_t) buf[0 + offset];
int len = DecodeVIntSize(firstbyte);
if (len > MAX_VINT_LEN) return -1;
if (len == 1) {
*vlong = static_cast<int64_t>(firstbyte);
return len;
}
*vlong &= ~*vlong;
for (int i = 1; i < len; i++) {
*vlong = (*vlong << 8) | buf[i+offset];
}
if (IsNegativeVInt(firstbyte)) {
*vlong = *vlong ^ ((int64_t) - 1);
}
return len;
}
inline int ReadWriteUtil::VLongRequiredBytes(int64_t val) {
// returns size of the encoded long value, not including the 1 byte for length
if (val & 0xFF00000000000000llu) return 8;
if (val & 0x00FF000000000000llu) return 7;
if (val & 0x0000FF0000000000llu) return 6;
if (val & 0x000000FF00000000llu) return 5;
if (val & 0x00000000FF000000llu) return 4;
if (val & 0x0000000000FF0000llu) return 3;
if (val & 0x000000000000FF00llu) return 2;
// Values between -112 and 127 are stored using 1 byte,
// values between -127 and -112 are stored using 2 bytes
// See ReadWriteUtil::DecodeVIntSize for this case
if (val < -112) return 2;
return 1;
}
inline int64_t ReadWriteUtil::PutVLong(int64_t val, uint8_t* buf) {
int64_t num_bytes = VLongRequiredBytes(val);
if (num_bytes == 1) {
// store the value itself instead of the length
buf[0] = static_cast<int8_t>(val);
return 1;
}
// This is how we encode the length for a length less than or equal to 8
buf[0] = -119 + num_bytes;
// write to buffer in reversed endianness
for (int i = 0; i < num_bytes; ++i) {
buf[i+1] = (val >> (8 * (num_bytes - i - 1))) & 0xFF;
}
// +1 for the length byte
return num_bytes + 1;
}
inline int64_t ReadWriteUtil::PutVInt(int32_t val, uint8_t* buf) {
return PutVLong(val, buf);
}
template <class T>
inline bool ReadWriteUtil::Read(uint8_t** buf, int* buf_len, T* val, Status* status) {
int val_len = sizeof(T);
if (UNLIKELY(val_len > *buf_len)) {
std::stringstream ss;
ss << "Cannot read " << val_len << " bytes, buffer length is " << *buf_len;
*status = Status(ss.str());
return false;
}
*val = *reinterpret_cast<T*>(*buf);
*buf += val_len;
*buf_len -= val_len;
return true;
}
inline bool ReadWriteUtil::SkipBytes(uint8_t** buf, int* buf_len, int num_bytes,
Status* status) {
DCHECK_GE(*buf_len, 0);
if (UNLIKELY(num_bytes > *buf_len)) {
std::stringstream ss;
ss << "Cannot skip " << num_bytes << " bytes, buffer length is " << *buf_len;
*status = Status(ss.str());
return false;
}
*buf += num_bytes;
*buf_len -= num_bytes;
return true;
}
inline bool ReadWriteUtil::IsNegativeVInt(int8_t byte) {
return byte < -120 || (byte >= -112 && byte < 0);
}
inline int ReadWriteUtil::DecodeVIntSize(int8_t byte) {
if (byte >= -112) {
return 1;
} else if (byte < -120) {
return -119 - byte;
}
return -111 - byte;
}
}
#endif
|
apache-2.0
|
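The ReadZLong()/ReadZInt() comments above point to the protocol-buffers zig-zag varint encoding. A minimal Python sketch of that decoding, for illustration only (it is not the Impala implementation and ignores the MAX_LEN bounds checking the header performs):

```python
def read_zlong(buf: bytes, pos: int = 0):
    """Decode one zig-zag varint (protobuf/Avro style) starting at `pos`.

    Returns (value, new_pos): little-endian base-128 groups are accumulated,
    then the zig-zag mapping restores the sign, mirroring the encoding that
    ReadZLong() above consumes.
    """
    shift = 0
    result = 0
    while True:
        b = buf[pos]
        pos += 1
        result |= (b & 0x7F) << shift
        if not (b & 0x80):  # high bit clear means this is the last byte
            break
        shift += 7
    return (result >> 1) ^ -(result & 1), pos

# zigzag(75) == 150, which encodes as the varint bytes 0x96 0x01
assert read_zlong(bytes([0x96, 0x01])) == (75, 2)
assert read_zlong(bytes([0x01])) == (-1, 1)
```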
play2-maven-plugin/play2-maven-plugin.github.io
|
play2-maven-plugin/1.0.0-rc3/play2-provider-api/distribution-management.html
|
5770
|
<!DOCTYPE html>
<!--
| Generated by Apache Maven Doxia Site Renderer 1.8.1 at 2018-06-24
| Rendered using Apache Maven Fluido Skin 1.6
-->
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta name="Date-Revision-yyyymmdd" content="20180624" />
<meta http-equiv="Content-Language" content="en" />
<title>Play! 2.x Provider API – Project Distribution Management</title>
<link rel="stylesheet" href="./css/apache-maven-fluido-1.6.min.css" />
<link rel="stylesheet" href="./css/site.css" />
<link rel="stylesheet" href="./css/print.css" media="print" />
<script type="text/javascript" src="./js/apache-maven-fluido-1.6.min.js"></script>
<link rel="stylesheet" href="./css/site.css" type="text/css" />
<!-- Google Analytics -->
<script type="text/javascript">
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-17472708-2']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
</head>
<body class="topBarDisabled">
<div class="container-fluid">
<div id="banner">
<div class="pull-left"><div id="bannerLeft"><h2>Play! 2.x Provider API</h2>
</div>
</div>
<div class="pull-right"><div id="bannerRight"><img src="images/my-avatar-80.png" alt="avatar"/></div>
</div>
<div class="clear"><hr/></div>
</div>
<div id="breadcrumbs">
<ul class="breadcrumb">
<li id="publishDate">Last Published: 2018-06-24<span class="divider">|</span>
</li>
<li id="projectVersion">Version: 1.0.0-rc3</li>
</ul>
</div>
<div class="row-fluid">
<div id="leftColumn" class="span3">
<div class="well sidebar-nav">
<ul class="nav nav-list">
<li class="nav-header">Parent Project</li>
<li><a href="../index.html" title="Play! 2.x"><span class="none"></span>Play! 2.x</a> </li>
<li class="nav-header">Overview</li>
<li><a href="index.html" title="Introduction"><span class="none"></span>Introduction</a> </li>
<li><a href="apidocs/index.html" title="JavaDocs"><span class="none"></span>JavaDocs</a> </li>
<li class="nav-header">Project Documentation</li>
<li><a href="project-info.html" title="Project Information"><span class="icon-chevron-down"></span>Project Information</a>
<ul class="nav nav-list">
<li><a href="integration.html" title="CI Management"><span class="none"></span>CI Management</a> </li>
<li><a href="dependency-convergence.html" title="Dependency Convergence"><span class="none"></span>Dependency Convergence</a> </li>
<li><a href="dependency-info.html" title="Dependency Information"><span class="none"></span>Dependency Information</a> </li>
<li class="active"><a href="#"><span class="none"></span>Distribution Management</a>
</li>
<li><a href="issue-tracking.html" title="Issue Management"><span class="none"></span>Issue Management</a> </li>
<li><a href="license.html" title="Licenses"><span class="none"></span>Licenses</a> </li>
<li><a href="plugin-management.html" title="Plugin Management"><span class="none"></span>Plugin Management</a> </li>
<li><a href="plugins.html" title="Plugins"><span class="none"></span>Plugins</a> </li>
<li><a href="team-list.html" title="Team"><span class="none"></span>Team</a> </li>
<li><a href="source-repository.html" title="Source Code Management"><span class="none"></span>Source Code Management</a> </li>
<li><a href="project-summary.html" title="Summary"><span class="none"></span>Summary</a> </li>
</ul>
</li>
<li><a href="project-reports.html" title="Project Reports"><span class="icon-chevron-right"></span>Project Reports</a> </li>
</ul>
<hr />
<div id="poweredBy">
<div class="clear"></div>
<div class="clear"></div>
<div class="clear"></div>
<div class="clear"></div>
<a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
</div>
</div>
</div>
<div id="bodyColumn" class="span9" >
<div class="section">
<h2><a name="Overview"></a>Overview</h2><a name="Overview"></a>
<p>The following is the distribution management information used by this project.</p>
<div class="section">
<h3><a name="Repository_-_sonatype-nexus-staging"></a>Repository - sonatype-nexus-staging</h3><a name="Repository_-_sonatype-nexus-staging"></a><a class="externalLink" href="https://oss.sonatype.org/service/local/staging/deploy/maven2/">https://oss.sonatype.org/service/local/staging/deploy/maven2/</a></div>
<div class="section">
<h3><a name="Snapshot_Repository_-_sonatype-nexus-snapshots"></a>Snapshot Repository - sonatype-nexus-snapshots</h3><a name="Snapshot_Repository_-_sonatype-nexus-snapshots"></a><a class="externalLink" href="https://oss.sonatype.org/content/repositories/snapshots/">https://oss.sonatype.org/content/repositories/snapshots/</a></div></div>
</div>
</div>
</div>
<hr/>
<footer>
<div class="container-fluid">
<div class="row-fluid">
<p>Copyright ©2013–2018.
All rights reserved.</p>
</div>
</div>
</footer>
</body>
</html>
|
apache-2.0
|
killbill/killbill
|
beatrix/src/test/java/org/killbill/billing/beatrix/integration/TestIntegrationVoidInvoice.java
|
12168
|
/*
* Copyright 2014-2020 Groupon, Inc
* Copyright 2020-2021 Equinix, Inc
* Copyright 2014-2021 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.beatrix.integration;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.killbill.billing.ErrorCode;
import org.killbill.billing.ObjectType;
import org.killbill.billing.account.api.Account;
import org.killbill.billing.api.TestApiListener.NextEvent;
import org.killbill.billing.beatrix.util.InvoiceChecker.ExpectedInvoiceItemCheck;
import org.killbill.billing.catalog.api.BillingActionPolicy;
import org.killbill.billing.catalog.api.BillingPeriod;
import org.killbill.billing.catalog.api.PlanPhaseSpecifier;
import org.killbill.billing.catalog.api.ProductCategory;
import org.killbill.billing.entitlement.api.DefaultEntitlement;
import org.killbill.billing.entitlement.api.DefaultEntitlementSpecifier;
import org.killbill.billing.entitlement.api.Entitlement;
import org.killbill.billing.entitlement.api.Entitlement.EntitlementActionPolicy;
import org.killbill.billing.invoice.api.Invoice;
import org.killbill.billing.invoice.api.InvoiceApiException;
import org.killbill.billing.invoice.api.InvoiceItem;
import org.killbill.billing.invoice.api.InvoiceItemType;
import org.killbill.billing.invoice.api.InvoiceStatus;
import org.killbill.billing.invoice.model.CreditAdjInvoiceItem;
import org.killbill.billing.payment.api.Payment;
import org.killbill.billing.payment.api.PluginProperty;
import org.killbill.billing.subscription.api.user.DefaultSubscriptionBase;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
public class TestIntegrationVoidInvoice extends TestIntegrationBase {
@Test(groups = "slow")
public void testVoidInvoice() throws Exception {
final int billingDay = 14;
final DateTime initialCreationDate = new DateTime(2015, 5, 15, 0, 0, 0, 0, testTimeZone);
// set clock to the initial start date
clock.setTime(initialCreationDate);
log.info("Beginning test with BCD of " + billingDay);
final Account account = createAccountWithNonOsgiPaymentMethod(getAccountData(billingDay));
add_AUTO_PAY_OFF_Tag(account.getId(), ObjectType.ACCOUNT);
DefaultEntitlement baseEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "bundleKey", "Shotgun", ProductCategory.BASE, BillingPeriod.MONTHLY, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
DefaultSubscriptionBase subscription = subscriptionDataFromSubscription(baseEntitlement.getSubscriptionBase());
final List<ExpectedInvoiceItemCheck> expectedInvoices = new ArrayList<ExpectedInvoiceItemCheck>();
expectedInvoices.add(new ExpectedInvoiceItemCheck(new LocalDate(2015, 6, 14), new LocalDate(2015, 7, 14), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
// Move through time and verify we get the same invoice
busHandler.pushExpectedEvents(NextEvent.PHASE, NextEvent.INVOICE);
clock.addDays(30);
assertListenerStatus();
List<Invoice> invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext);
invoiceChecker.checkInvoice(invoices.get(1).getId(), callContext, expectedInvoices);
// Void the invoice
busHandler.pushExpectedEvents(NextEvent.INVOICE_ADJUSTMENT);
invoiceUserApi.voidInvoice(invoices.get(1).getId(), callContext);
assertListenerStatus();
remove_AUTO_PAY_OFF_Tag(account.getId(), ObjectType.ACCOUNT);
// Move through time
busHandler.pushExpectedEvents(NextEvent.INVOICE, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
clock.addDays(31);
assertListenerStatus();
// get all invoices including the VOIDED; includeVoidedInvoices = true;
invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, true, callContext);
assertEquals(invoices.size(), 3);
        // verify the integrity of the voided invoice
invoiceChecker.checkInvoice(invoices.get(1).getId(), callContext, expectedInvoices);
assertEquals(invoices.get(1).getStatus(), InvoiceStatus.VOID);
// verify that the new invoice contains current and VOIDED charge
expectedInvoices.add(new ExpectedInvoiceItemCheck(new LocalDate(2015, 7, 14), new LocalDate(2015, 8, 14), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkInvoice(invoices.get(2).getId(), callContext, expectedInvoices);
// verify that the account balance is fully paid and a payment exists
final BigDecimal accountBalance = invoiceUserApi.getAccountBalance(account.getId(), callContext);
assertTrue(accountBalance.compareTo(BigDecimal.ZERO) == 0);
final List<Payment> payments = paymentApi.getAccountPayments(account.getId(), false, false, ImmutableList.<PluginProperty>of(), callContext);
assertEquals(payments.size(), 1);
final Payment payment = payments.get(0);
assertTrue(payment.getPurchasedAmount().compareTo(invoices.get(2).getChargedAmount()) == 0);
        // Trying to void an invoice that is already paid should fail.
try {
invoiceUserApi.voidInvoice(invoices.get(2).getId(), callContext);
Assert.fail("Should fail to void invoice that is already paid");
} catch (final InvoiceApiException e) {
Assert.assertEquals(e.getCode(), ErrorCode.CAN_NOT_VOID_INVOICE_THAT_IS_PAID.getCode());
}
// Refund the payment
busHandler.pushExpectedEvents(NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
paymentApi.createRefundWithPaymentControl(account, payment.getId(), payment.getPurchasedAmount(), payment.getCurrency(), clock.getUTCNow(), null, PLUGIN_PROPERTIES, PAYMENT_OPTIONS, callContext);
assertListenerStatus();
busHandler.pushExpectedEvents(NextEvent.INVOICE_ADJUSTMENT);
invoiceUserApi.voidInvoice(invoices.get(2).getId(), callContext);
assertListenerStatus();
invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, true, callContext);
assertEquals(invoices.size(), 3);
assertEquals(invoices.get(1).getStatus(), InvoiceStatus.VOID);
assertEquals(invoices.get(2).getStatus(), InvoiceStatus.VOID);
}
@Test(groups = "slow")
public void testVoidRepairedInvoice() throws Exception {
final DateTime initialDate = new DateTime(2013, 6, 15, 0, 0, 0, 0, testTimeZone);
final LocalDate startDate = initialDate.toLocalDate();
clock.setDeltaFromReality(initialDate.getMillis() - clock.getUTCNow().getMillis());
final Account account = createAccountWithNonOsgiPaymentMethod(getAccountData(15));
assertNotNull(account);
add_AUTO_PAY_OFF_Tag(account.getId(), ObjectType.ACCOUNT);
final PlanPhaseSpecifier spec = new PlanPhaseSpecifier("pistol-monthly-notrial");
busHandler.pushExpectedEvents(NextEvent.INVOICE);
final InvoiceItem inputCredit = new CreditAdjInvoiceItem(null, account.getId(), startDate, "credit invoice", new BigDecimal("20.00"), account.getCurrency(), null);
invoiceUserApi.insertCredits(account.getId(), startDate, ImmutableList.of(inputCredit), true, null, callContext);
assertListenerStatus();
final BigDecimal accountBalance1 = invoiceUserApi.getAccountBalance(account.getId(), callContext);
final BigDecimal accountCBA1 = invoiceUserApi.getAccountCBA(account.getId(), callContext);
busHandler.pushExpectedEvents(NextEvent.BLOCK, NextEvent.CREATE, NextEvent.INVOICE);
final UUID entitlementId = entitlementApi.createBaseEntitlement(account.getId(), new DefaultEntitlementSpecifier(spec, null, null, null), null, startDate, startDate, false, false, ImmutableList.<PluginProperty>of(), callContext);
final Entitlement bpEntitlement = entitlementApi.getEntitlementForId(entitlementId, callContext);
assertListenerStatus();
final Invoice invoice2 = invoiceChecker.checkInvoice(account.getId(), 2, callContext,
new ExpectedInvoiceItemCheck(new LocalDate(2013, 6, 15), new LocalDate(2013, 7, 15), InvoiceItemType.RECURRING, new BigDecimal("19.95")),
new ExpectedInvoiceItemCheck(new LocalDate(2013, 6, 15), new LocalDate(2013, 6, 15), InvoiceItemType.CBA_ADJ, new BigDecimal("-19.95")));
// 2013-07-01
clock.addDays(16);
busHandler.pushExpectedEvents(NextEvent.BLOCK, NextEvent.CANCEL, NextEvent.INVOICE);
bpEntitlement.cancelEntitlementWithPolicyOverrideBillingPolicy(EntitlementActionPolicy.IMMEDIATE, BillingActionPolicy.IMMEDIATE, ImmutableList.<PluginProperty>of(), callContext);
assertListenerStatus();
final Invoice invoice3 = invoiceChecker.checkInvoice(account.getId(), 3, callContext,
new ExpectedInvoiceItemCheck(new LocalDate(2013, 7, 1), new LocalDate(2013, 7, 15), InvoiceItemType.REPAIR_ADJ, new BigDecimal("-9.31")),
new ExpectedInvoiceItemCheck(new LocalDate(2013, 7, 1), new LocalDate(2013, 7, 1), InvoiceItemType.CBA_ADJ, new BigDecimal("9.31")));
        // Voiding this invoice is disallowed because it has been repaired
try {
invoiceUserApi.voidInvoice(invoice2.getId(), callContext);
Assert.fail("Should fail to void a repaired invoice");
} catch (final RuntimeException e) {
assertTrue(e.getMessage().contains("because it contains items being repaired"));
}
// Void the invoice where the REPAIR_ADJ occurred first
busHandler.pushExpectedEvents(NextEvent.INVOICE_ADJUSTMENT);
invoiceUserApi.voidInvoice(invoice3.getId(), callContext);
assertListenerStatus();
        // Now check that voiding invoice2 is allowed
busHandler.pushExpectedEvents(NextEvent.INVOICE_ADJUSTMENT);
invoiceUserApi.voidInvoice(invoice2.getId(), callContext);
assertListenerStatus();
        // VOIDing the previous periods left the account in an unstable state, so trigger a new invoice run
busHandler.pushExpectedEvents(NextEvent.INVOICE);
invoiceUserApi.triggerInvoiceGeneration(account.getId(), clock.getUTCToday(), callContext);
assertListenerStatus();
invoiceChecker.checkInvoice(account.getId(), 2, callContext,
new ExpectedInvoiceItemCheck(new LocalDate(2013, 6, 15), new LocalDate(2013, 7, 1), InvoiceItemType.RECURRING, new BigDecimal("10.64")),
new ExpectedInvoiceItemCheck(new LocalDate(2013, 7, 1), new LocalDate(2013, 7, 1), InvoiceItemType.CBA_ADJ, new BigDecimal("-10.64")));
// 20 - 10.64 = 9.36
final BigDecimal accountBalance2 = invoiceUserApi.getAccountBalance(account.getId(), callContext);
Assert.assertEquals(accountBalance2.compareTo(new BigDecimal("-9.36")), 0);
final BigDecimal accountCBA2 = invoiceUserApi.getAccountCBA(account.getId(), callContext);
Assert.assertEquals(accountCBA2.compareTo(new BigDecimal("9.36")), 0);
checkNoMoreInvoiceToGenerate(account.getId());
}
}
|
apache-2.0
|
lshain/hbase-0.98.6-hadoop2
|
docs/devapidocs/org/apache/hadoop/hbase/protobuf/generated/class-use/ZooKeeperProtos.ReplicationHLogPosition.html
|
22576
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_65) on Wed Sep 03 20:05:58 PDT 2014 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Class org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition (HBase 0.98.6-hadoop2 API)</title>
<meta name="date" content="2014-09-03">
<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition (HBase 0.98.6-hadoop2 API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/protobuf/generated/class-use/ZooKeeperProtos.ReplicationHLogPosition.html" target="_top">Frames</a></li>
<li><a href="ZooKeeperProtos.ReplicationHLogPosition.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition" class="title">Uses of Class<br>org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.apache.hadoop.hbase.protobuf.generated">org.apache.hadoop.hbase.protobuf.generated</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="org.apache.hadoop.hbase.protobuf.generated">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a> in <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/package-summary.html">org.apache.hadoop.hbase.protobuf.generated</a></h3>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
<caption><span>Fields in <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/package-summary.html">org.apache.hadoop.hbase.protobuf.generated</a> with type parameters of type <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Field and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>static com.google.protobuf.Parser<<a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a>></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#PARSER">PARSER</a></strong></code> </td>
</tr>
</tbody>
</table>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/package-summary.html">org.apache.hadoop.hbase.protobuf.generated</a> that return <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.Builder.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.Builder.html#build()">build</a></strong>()</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.Builder.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.Builder.html#buildPartial()">buildPartial</a></strong>()</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#getDefaultInstance()">getDefaultInstance</a></strong>()</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#getDefaultInstanceForType()">getDefaultInstanceForType</a></strong>()</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.Builder.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.Builder.html#getDefaultInstanceForType()">getDefaultInstanceForType</a></strong>()</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#parseDelimitedFrom(java.io.InputStream)">parseDelimitedFrom</a></strong>(<a href="http://docs.oracle.com/javase/6/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a> input)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#parseDelimitedFrom(java.io.InputStream,%20com.google.protobuf.ExtensionRegistryLite)">parseDelimitedFrom</a></strong>(<a href="http://docs.oracle.com/javase/6/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a> input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#parseFrom(byte[])">parseFrom</a></strong>(byte[] data)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#parseFrom(byte[],%20com.google.protobuf.ExtensionRegistryLite)">parseFrom</a></strong>(byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#parseFrom(com.google.protobuf.ByteString)">parseFrom</a></strong>(com.google.protobuf.ByteString data)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#parseFrom(com.google.protobuf.ByteString,%20com.google.protobuf.ExtensionRegistryLite)">parseFrom</a></strong>(com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#parseFrom(com.google.protobuf.CodedInputStream)">parseFrom</a></strong>(com.google.protobuf.CodedInputStream input)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#parseFrom(com.google.protobuf.CodedInputStream,%20com.google.protobuf.ExtensionRegistryLite)">parseFrom</a></strong>(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#parseFrom(java.io.InputStream)">parseFrom</a></strong>(<a href="http://docs.oracle.com/javase/6/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a> input)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#parseFrom(java.io.InputStream,%20com.google.protobuf.ExtensionRegistryLite)">parseFrom</a></strong>(<a href="http://docs.oracle.com/javase/6/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a> input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)</code> </td>
</tr>
</tbody>
</table>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/package-summary.html">org.apache.hadoop.hbase.protobuf.generated</a> that return types with arguments of type <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>com.google.protobuf.Parser<<a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a>></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#getParserForType()">getParserForType</a></strong>()</code> </td>
</tr>
</tbody>
</table>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/package-summary.html">org.apache.hadoop.hbase.protobuf.generated</a> with parameters of type <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition.Builder</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.Builder.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.Builder.html#mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition)">mergeFrom</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a> other)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition.Builder</a></code></td>
<td class="colLast"><span class="strong">ZooKeeperProtos.ReplicationHLogPosition.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html#newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition)">newBuilder</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">ZooKeeperProtos.ReplicationHLogPosition</a> prototype)</code> </td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.ReplicationHLogPosition.html" title="class in org.apache.hadoop.hbase.protobuf.generated">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/protobuf/generated/class-use/ZooKeeperProtos.ReplicationHLogPosition.html" target="_top">Frames</a></li>
<li><a href="ZooKeeperProtos.ReplicationHLogPosition.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2014 <a href="http://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
</body>
</html>
|
apache-2.0
|
EvilMcJerkface/crate
|
server/src/main/java/io/crate/execution/ddl/RelationNameSwap.java
|
1976
|
/*
* Licensed to Crate under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership. Crate licenses this file
* to you under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial
* agreement.
*/
package io.crate.execution.ddl;
import io.crate.metadata.RelationName;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException;
public final class RelationNameSwap implements Writeable {
private final RelationName source;
private final RelationName target;
public RelationNameSwap(RelationName source, RelationName target) {
this.source = source;
this.target = target;
}
public RelationNameSwap(StreamInput in) throws IOException {
this.source = new RelationName(in);
this.target = new RelationName(in);
}
public RelationName source() {
return source;
}
public RelationName target() {
return target;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
source.writeTo(out);
target.writeTo(out);
}
}
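// Editor's note: an illustrative serialization round-trip, not part of the original
// file; it assumes org.elasticsearch.common.io.stream.BytesStreamOutput is available.
//
//   BytesStreamOutput out = new BytesStreamOutput();
//   new RelationNameSwap(source, target).writeTo(out);
//   RelationNameSwap copy = new RelationNameSwap(out.bytes().streamInput());
//   // copy.source() and copy.target() now equal the original relation names.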
|
apache-2.0
|
conversationai/harassment-manager
|
src/server/middleware/twitter.middleware.ts
|
11910
|
/**
* Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import axios, { AxiosBasicCredentials, AxiosError, AxiosInstance } from 'axios';
import addOAuthInterceptor from 'axios-oauth-1.0a';
import { Request, Response } from 'express';
import firebase from 'firebase/app';
import * as fs from 'fs';
import {
BlockTwitterUsersRequest,
BlockTwitterUsersResponse,
GetTweetsRequest,
GetTweetsResponse,
HideRepliesTwitterRequest,
HideRepliesTwitterResponse,
MuteTwitterUsersRequest,
MuteTwitterUsersResponse,
Tweet,
TweetObject,
TwitterApiResponse,
} from '../../common-types';
import { TwitterApiCredentials } from '../serving';
// Max results per twitter call.
const BATCH_SIZE = 500;
interface TwitterApiRequest {
query: string;
maxResults?: number;
fromDate?: string;
toDate?: string;
next?: string;
}
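// Editor's note: an illustrative shape for the full-archive search request built in
// loadTwitterData() below. This is not part of the original file and the field
// values are made-up placeholders.
//
// const exampleRequest: TwitterApiRequest = {
//   query: '(@someuser OR url:twitter.com/someuser) -from:someuser -is:retweet',
//   maxResults: BATCH_SIZE,
//   fromDate: '202001010000', // assumed YYYYMMDDhhmm format
//   toDate: '202001310000',
// };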
export async function getTweets(
req: Request,
res: Response,
apiCredentials: TwitterApiCredentials
) {
let twitterDataPromise: Promise<TwitterApiResponse>;
if (fs.existsSync('src/server/twitter_sample_results.json')) {
twitterDataPromise = loadLocalTwitterData();
} else {
if (!enterpriseSearchCredentialsAreValid(apiCredentials)) {
res.send(new Error('Invalid Twitter Enterprise Search API credentials'));
return;
}
twitterDataPromise = loadTwitterData(apiCredentials, req.body);
}
try {
const twitterData = await twitterDataPromise;
const tweets = twitterData.results.map(parseTweet);
res.send({ tweets, nextPageToken: twitterData.next } as GetTweetsResponse);
} catch (e) {
console.error('Error loading Twitter data: ' + e);
res.status(500).send('Error loading Twitter data');
}
}
export async function blockTwitterUsers(
req: Request,
res: Response,
apiCredentials: TwitterApiCredentials
) {
if (!standardApiCredentialsAreValid(apiCredentials)) {
res.send(new Error('Invalid Twitter Standard API credentials'));
return;
}
const request = req.body as BlockTwitterUsersRequest;
const userCredential = firebase.auth.AuthCredential.fromJSON(
request.credential
) as firebase.auth.OAuthCredential;
const response = await blockUsers(apiCredentials, userCredential, request);
if (response.error) {
// All block API requests failed. Send an error.
res.status(500).send(response);
} else {
res.send(response);
}
}
export async function muteTwitterUsers(
req: Request,
res: Response,
apiCredentials: TwitterApiCredentials
) {
if (!standardApiCredentialsAreValid(apiCredentials)) {
res.send(new Error('Invalid Twitter Standard API credentials'));
return;
}
const request = req.body as MuteTwitterUsersRequest;
const userCredential = firebase.auth.AuthCredential.fromJSON(
request.credential
) as firebase.auth.OAuthCredential;
const response = await muteUsers(apiCredentials, userCredential, request);
if (response.error) {
// All mute API requests failed. Send an error.
res.status(500).send(response);
} else {
res.send(response);
}
}
export async function hideTwitterReplies(
req: Request,
res: Response,
apiCredentials: TwitterApiCredentials
) {
if (!standardApiCredentialsAreValid(apiCredentials)) {
res.send(new Error('Invalid Twitter Standard API credentials'));
return;
}
const request = req.body as HideRepliesTwitterRequest;
const userCredential = firebase.auth.AuthCredential.fromJSON(
request.credential
) as firebase.auth.OAuthCredential;
const response = await hideReplies(apiCredentials, userCredential, request);
if (response.error) {
// All hide reply API requests failed. Send an error.
res.status(500).send(response);
} else {
res.send(response);
}
}
async function blockUsers(
apiCredentials: TwitterApiCredentials,
userCredential: firebase.auth.OAuthCredential,
request: BlockTwitterUsersRequest
): Promise<BlockTwitterUsersResponse> {
const client = createAxiosInstance(apiCredentials, userCredential);
const requestUrl = 'https://api.twitter.com/1.1/blocks/create.json';
const response: BlockTwitterUsersResponse = {};
const requests = request.users.map(user =>
client
.post<BlockTwitterUsersResponse>(
requestUrl,
{},
{ params: { screen_name: user } }
)
.catch(e => {
console.error(`Unable to block Twitter user: @${user} because ${e}`);
response.failedScreennames = [
...(response.failedScreennames ?? []),
user,
];
})
);
await Promise.all(requests);
if (request.users.length === response.failedScreennames?.length) {
response.error = 'Unable to block Twitter users';
}
return response;
}
async function muteUsers(
apiCredentials: TwitterApiCredentials,
userCredential: firebase.auth.OAuthCredential,
request: MuteTwitterUsersRequest
): Promise<MuteTwitterUsersResponse> {
const client = createAxiosInstance(apiCredentials, userCredential);
const requestUrl = 'https://api.twitter.com/1.1/mutes/users/create.json';
const response: MuteTwitterUsersResponse = {};
const requests = request.users.map(user =>
client
.post<MuteTwitterUsersResponse>(
requestUrl,
{},
{ params: { screen_name: user } }
)
.catch(e => {
console.error(`Unable to mute Twitter user: @${user} because ${e}`);
response.failedScreennames = [
...(response.failedScreennames ?? []),
user,
];
})
);
await Promise.all(requests);
if (request.users.length === response.failedScreennames?.length) {
response.error = 'Unable to mute Twitter users';
}
return response;
}
async function hideReplies(
apiCredentials: TwitterApiCredentials,
userCredential: firebase.auth.OAuthCredential,
request: HideRepliesTwitterRequest
): Promise<HideRepliesTwitterResponse> {
const client = createAxiosInstance(apiCredentials, userCredential);
const response: HideRepliesTwitterResponse = {};
let quotaExhaustedErrors = 0;
let otherErrors = 0;
const requests = request.tweetIds.map(id =>
client
.put<HideRepliesTwitterResponse>(
`https://api.twitter.com/2/tweets/${id}/hidden`,
{ hidden: true }
)
.catch((e: AxiosError) => {
console.error(`Unable to hide tweet ID: ${id} because ${e}`);
if (
e.response?.status === 429 ||
e.response?.statusText.includes('Too Many Requests')
) {
quotaExhaustedErrors += 1;
} else {
otherErrors += 1;
}
})
);
await Promise.all(requests);
response.numQuotaFailures = quotaExhaustedErrors;
response.numOtherFailures = otherErrors;
if (otherErrors === request.tweetIds.length) {
response.error = 'Unable to hide replies';
}
return response;
}
function loadTwitterData(
credentials: TwitterApiCredentials,
request: GetTweetsRequest
): Promise<TwitterApiResponse> {
const requestUrl = `https://gnip-api.twitter.com/search/fullarchive/accounts/${credentials.accountName}/prod.json`;
// These are the *user's* credentials for Twitter.
const user = request.credentials?.additionalUserInfo?.username;
if (!user) {
throw new Error('No user credentials in GetTweetsRequest');
}
const twitterApiRequest: TwitterApiRequest = {
query: `(@${user} OR url:twitter.com/${user}) -from:${user} -is:retweet`,
maxResults: BATCH_SIZE,
};
if (request.fromDate) {
twitterApiRequest.fromDate = request.fromDate;
}
if (request.toDate) {
twitterApiRequest.toDate = request.toDate;
}
if (request.nextPageToken) {
twitterApiRequest.next = request.nextPageToken;
}
const auth: AxiosBasicCredentials = {
username: credentials!.username,
password: credentials!.password,
};
return axios
.post<TwitterApiResponse>(requestUrl, twitterApiRequest, { auth })
.then(response => response.data)
.catch(error => {
const errorStr =
`Error while fetching tweets with request ` +
`${JSON.stringify(request)}: ${error}`;
throw new Error(errorStr);
});
}
function loadLocalTwitterData(): Promise<TwitterApiResponse> {
return fs.promises
.readFile('src/server/twitter_sample_results.json')
.then((data: Buffer) => {
// Remove the `next` page token so the client doesn't infinitely issue
// requests for a next page of data.
const response = JSON.parse(data.toString()) as TwitterApiResponse;
response.next = '';
return response;
});
}
function enterpriseSearchCredentialsAreValid(
credentials: TwitterApiCredentials
): boolean {
return (
!!credentials.accountName &&
!!credentials.username &&
!!credentials.password
);
}
function standardApiCredentialsAreValid(
credentials: TwitterApiCredentials
): boolean {
return !!credentials.appKey && !!credentials.appToken;
}
function createAxiosInstance(
apiCredentials: TwitterApiCredentials,
userCredential: firebase.auth.OAuthCredential
): AxiosInstance {
const token = userCredential.accessToken;
const tokenSecret = userCredential.secret;
if (!token || !tokenSecret) {
throw new Error('Twitter user access token and secret are missing');
}
const client = axios.create();
// Add OAuth 1.0a credentials.
addOAuthInterceptor(client, {
algorithm: 'HMAC-SHA1',
key: apiCredentials.appKey,
includeBodyHash: false,
secret: apiCredentials.appToken,
token,
tokenSecret,
});
return client;
}
function parseTweet(tweetObject: TweetObject): Tweet {
// Still pass the rest of the metadata in case we want to use it
// later, but surface the comment in a top-level field.
//
// Firestore doesn't support writing nested arrays, so we have to
// manually build the Tweet object to avoid accidentally including the nested
// arrays in the TweetObject from the Twitter API response.
const tweet: Tweet = {
created_at: tweetObject.created_at,
date: new Date(),
display_text_range: tweetObject.display_text_range,
entities: tweetObject.entities,
extended_entities: tweetObject.extended_entities,
extended_tweet: tweetObject.extended_tweet,
favorite_count: tweetObject.favorite_count,
favorited: tweetObject.favorited,
in_reply_to_status_id: tweetObject.in_reply_to_status_id,
id_str: tweetObject.id_str,
lang: tweetObject.lang,
reply_count: tweetObject.reply_count,
retweet_count: tweetObject.retweet_count,
retweeted_status: tweetObject.retweeted_status,
source: tweetObject.source,
text: tweetObject.text,
truncated: tweetObject.truncated,
url: `https://twitter.com/i/web/status/${tweetObject.id_str}`,
user: tweetObject.user,
};
if (tweetObject.truncated && tweetObject.extended_tweet) {
tweet.text = tweetObject.extended_tweet.full_text;
}
if (tweetObject.created_at) {
tweet.date = new Date(tweetObject.created_at);
}
if (tweetObject.user) {
tweet.authorName = tweetObject.user.name;
tweet.authorScreenName = tweetObject.user.screen_name;
tweet.authorUrl = `https://twitter.com/${tweetObject.user.screen_name}`;
tweet.authorAvatarUrl = tweetObject.user.profile_image_url;
tweet.verified = tweetObject.user.verified;
}
if (tweetObject.extended_entities && tweetObject.extended_entities.media) {
tweet.hasImage = true;
}
return tweet;
}
|
apache-2.0
|
lnc2014/school
|
template/js/add_per.js
|
13016
|
/**
 * JS for adding a teacher; mainly handles the credit-point questions.
 * Organized into modules (one block per question).
* Created by admin on 2016/9/26.
*/
$(function(){
    // Question 1
var work_load = $('#is_work_load').val();
$("#work_load input[type='checkbox']").live('click', function(e){
var is_work_load = $(this).val();
if($(this).is(':checked') && is_work_load == 1){
$("#is_work_load").val(1);
$('#work_load').find(".no").attr('checked', false);
$('#work_load').find(".yes").attr('checked', true);
}else{
$("#is_work_load").val(0);
$('#work_load').find(".no").attr('checked', true);
$('#work_load').find(".yes").attr('checked', false);
}
});
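    // Editor's note: the blocks below all repeat the same yes/no toggle pattern as
    // the one above. A data-driven sketch (not part of the original file) could
    // replace them; the section ids mirror the ones used in this file.
    //
    // var sections = ['work_load', 'teaching_good', /* ... */ 'work_for_school'];
    // $.each(sections, function (i, id) {
    //     $('#' + id + " input[type='checkbox']").live('click', function () {
    //         var yes = $(this).is(':checked') && $(this).val() == 1;
    //         $('#is_' + id).val(yes ? 1 : 0);
    //         $('#' + id).find('.no').attr('checked', !yes);
    //         $('#' + id).find('.yes').attr('checked', yes);
    //     });
    // });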
    // Question 2
var teaching_good = $('#is_teaching_good').val();
$("#teaching_good input[type='checkbox']").live('click', function(e){
var is_teaching_good = $(this).val();
if($(this).is(':checked') && is_teaching_good == 1){
$("#is_teaching_good").val(1);
$('#teaching_good').find(".no").attr('checked', false);
$('#teaching_good').find(".yes").attr('checked', true);
}else{
$("#is_teaching_good").val(0);
$('#teaching_good').find(".no").attr('checked', true);
$('#teaching_good').find(".yes").attr('checked', false);
}
});
    // Question 3
var teaching_behavior = $('#is_teaching_behavior').val();
$("#teaching_behavior input[type='checkbox']").live('click', function(e){
var is_teaching_behavior = $(this).val();
if($(this).is(':checked') && is_teaching_behavior == 1){
$("#is_teaching_behavior").val(1);
$('#teaching_behavior').find(".no").attr('checked', false);
$('#teaching_behavior').find(".yes").attr('checked', true);
}else{
$("#is_teaching_behavior").val(0);
$('#teaching_behavior').find(".no").attr('checked', true);
$('#teaching_behavior').find(".yes").attr('checked', false);
}
});
    // Question 4
var student_statis = $('#is_student_statis').val();
$("#student_statis input[type='checkbox']").live('click', function(e){
var is_student_statis = $(this).val();
if($(this).is(':checked') && is_student_statis == 1){
$("#is_student_statis").val(1);
$('#student_statis').find(".no").attr('checked', false);
$('#student_statis').find(".yes").attr('checked', true);
}else{
$("#is_student_statis").val(0);
$('#student_statis').find(".no").attr('checked', true);
$('#student_statis').find(".yes").attr('checked', false);
}
});
    // Question 5
var subject_good = $('#is_subject_good').val();
$("#subject_good input[type='checkbox']").live('click', function(e){
var is_subject_good = $(this).val();
if($(this).is(':checked') && is_subject_good == 1){
$("#is_subject_good").val(1);
$('#subject_good').find(".no").attr('checked', false);
$('#subject_good').find(".yes").attr('checked', true);
}else{
$("#is_subject_good").val(0);
$('#subject_good').find(".no").attr('checked', true);
$('#subject_good').find(".yes").attr('checked', false);
}
});
    // Question 6
var academic = $('#is_academic').val();
$("#academic input[type='checkbox']").live('click', function(e){
var is_academic = $(this).val();
if($(this).is(':checked') && is_academic == 1){
$("#is_academic").val(1);
$('#academic').find(".no").attr('checked', false);
$('#academic').find(".yes").attr('checked', true);
}else{
$("#is_academic").val(0);
$('#academic').find(".no").attr('checked', true);
$('#academic').find(".yes").attr('checked', false);
}
});
    // Question 7
var organ_sub = $('#is_organ_sub').val();
$("#organ_sub input[type='checkbox']").live('click', function(e){
var is_organ_sub = $(this).val();
if($(this).is(':checked') && is_organ_sub == 1){
$("#is_organ_sub").val(1);
$('#organ_sub').find(".no").attr('checked', false);
$('#organ_sub').find(".yes").attr('checked', true);
}else{
$("#is_organ_sub").val(0);
$('#organ_sub').find(".no").attr('checked', true);
$('#organ_sub').find(".yes").attr('checked', false);
}
});
    // Question 8
var school_forum = $('#is_school_forum').val();
$("#school_forum input[type='checkbox']").live('click', function(e){
var is_school_forum = $(this).val();
if($(this).is(':checked') && is_school_forum == 1){
$("#is_school_forum").val(1);
$('#school_forum').find(".no").attr('checked', false);
$('#school_forum').find(".yes").attr('checked', true);
}else{
$("#is_school_forum").val(0);
$('#school_forum').find(".no").attr('checked', true);
$('#school_forum').find(".yes").attr('checked', false);
}
});
    // Question 9
var backtone_teacher = $('#is_backtone_teacher').val();
$("#backtone_teacher input[type='checkbox']").live('click', function(e){
var is_backtone_teacher = $(this).val();
if($(this).is(':checked') && is_backtone_teacher == 1){
$("#is_backtone_teacher").val(1);
$('#backtone_teacher').find(".no").attr('checked', false);
$('#backtone_teacher').find(".yes").attr('checked', true);
}else{
$("#is_backtone_teacher").val(0);
$('#backtone_teacher').find(".no").attr('checked', true);
$('#backtone_teacher').find(".yes").attr('checked', false);
}
});
    // Question 10
var teach_intern = $('#is_teach_intern').val();
$("#teach_intern input[type='checkbox']").live('click', function(e){
var is_teach_intern = $(this).val();
if($(this).is(':checked') && is_teach_intern == 1){
$("#is_teach_intern").val(1);
$('#teach_intern').find(".no").attr('checked', false);
$('#teach_intern').find(".yes").attr('checked', true);
}else{
$("#is_teach_intern").val(0);
$('#teach_intern').find(".no").attr('checked', true);
$('#teach_intern').find(".yes").attr('checked', false);
}
});
    // Question 11
var person_write = $('#is_person_write').val();
$("#person_write input[type='checkbox']").live('click', function(e){
var is_person_write = $(this).val();
if($(this).is(':checked') && is_person_write == 1){
$("#is_teach_intern").val(1);
$('#person_write').find(".no").attr('checked', false);
$('#person_write').find(".yes").attr('checked', true);
}else{
$("#is_person_write").val(0);
$('#person_write').find(".no").attr('checked', true);
$('#person_write').find(".yes").attr('checked', false);
}
});
    // Question 12
var semester = $('#is_semester').val();
$("#semester input[type='checkbox']").live('click', function(e){
var is_semester = $(this).val();
if($(this).is(':checked') && is_semester == 1){
$("#is_teach_intern").val(1);
$('#semester').find(".no").attr('checked', false);
$('#semester').find(".yes").attr('checked', true);
}else{
$("#is_semester").val(0);
$('#semester').find(".no").attr('checked', true);
$('#semester').find(".yes").attr('checked', false);
}
});
    // Question 13
var head_teacher = $('#is_head_teacher').val();
$("#head_teacher input[type='checkbox']").live('click', function(e){
var is_head_teacher = $(this).val();
if($(this).is(':checked') && is_head_teacher == 1){
$("#is_head_teacher").val(1);
$('#head_teacher').find(".no").attr('checked', false);
$('#head_teacher').find(".yes").attr('checked', true);
}else{
$("#is_head_teacher").val(0);
$('#head_teacher').find(".no").attr('checked', true);
$('#head_teacher').find(".yes").attr('checked', false);
}
});
    // Question 14
var learning_exper = $('#is_learning_exper').val();
$("#learning_exper input[type='checkbox']").live('click', function(e){
var is_learning_exper = $(this).val();
if($(this).is(':checked') && is_learning_exper == 1){
$("#is_learning_exper").val(1);
$('#learning_exper').find(".no").attr('checked', false);
$('#learning_exper').find(".yes").attr('checked', true);
}else{
$("#is_learning_exper").val(0);
$('#learning_exper').find(".no").attr('checked', true);
$('#learning_exper').find(".yes").attr('checked', false);
}
});
    // Question 15
var class_meeting = $('#is_class_meeting').val();
$("#class_meeting input[type='checkbox']").live('click', function(e){
var is_class_meeting = $(this).val();
if($(this).is(':checked') && is_class_meeting == 1){
$("#is_class_meeting").val(1);
$('#class_meeting').find(".no").attr('checked', false);
$('#class_meeting').find(".yes").attr('checked', true);
}else{
$("#is_class_meeting").val(0);
$('#class_meeting').find(".no").attr('checked', true);
$('#class_meeting').find(".yes").attr('checked', false);
}
});
    // Question 16
var good_head_teacher = $('#is_good_head_teacher').val();
$("#good_head_teacher input[type='checkbox']").live('click', function(e){
var is_good_head_teacher = $(this).val();
if($(this).is(':checked') && is_good_head_teacher == 1){
$("#is_good_head_teacher").val(1);
$('#good_head_teacher').find(".no").attr('checked', false);
$('#good_head_teacher').find(".yes").attr('checked', true);
}else{
$("#is_good_head_teacher").val(0);
$('#good_head_teacher').find(".no").attr('checked', true);
$('#good_head_teacher').find(".yes").attr('checked', false);
}
});
    // Question 17
var fes_activity = $('#is_fes_activity').val();
$("#fes_activity input[type='checkbox']").live('click', function(e){
var is_fes_activity = $(this).val();
if($(this).is(':checked') && is_fes_activity == 1){
$("#is_fes_activity").val(1);
$('#fes_activity').find(".no").attr('checked', false);
$('#fes_activity').find(".yes").attr('checked', true);
}else{
$("#is_fes_activity").val(0);
$('#fes_activity').find(".no").attr('checked', true);
$('#fes_activity').find(".yes").attr('checked', false);
}
});
    // Question 18
var work_for_school = $('#is_work_for_school').val();
$("#work_for_school input[type='checkbox']").live('click', function(e){
var is_work_for_school = $(this).val();
if($(this).is(':checked') && is_work_for_school == 1){
$("#is_work_for_school").val(1);
$('#work_for_school').find(".no").attr('checked', false);
$('#work_for_school').find(".yes").attr('checked', true);
}else{
$("#is_work_for_school").val(0);
$('#work_for_school').find(".no").attr('checked', true);
$('#work_for_school').find(".yes").attr('checked', false);
}
});
    // Question 19
var manange_school = $('#is_manange_school').val();
$("#manange_school input[type='checkbox']").live('click', function(e){
var is_manange_school = $(this).val();
if($(this).is(':checked') && is_manange_school == 1){
$("#is_manange_school").val(1);
$('#manange_school').find(".no").attr('checked', false);
$('#manange_school').find(".yes").attr('checked', true);
}else{
$("#is_manange_school").val(0);
$('#manange_school').find(".no").attr('checked', true);
$('#manange_school').find(".yes").attr('checked', false);
}
});
    // Question 20
var research_school = $('#is_research_school').val();
$("#research_school input[type='checkbox']").live('click', function(e){
var is_research_school = $(this).val();
if($(this).is(':checked') && is_research_school == 1){
$("#is_research_school").val(1);
$('#research_school').find(".no").attr('checked', false);
$('#research_school').find(".yes").attr('checked', true);
}else{
$("#is_research_school").val(0);
$('#research_school').find(".no").attr('checked', true);
$('#research_school').find(".yes").attr('checked', false);
}
});
});
|
apache-2.0
|
ospray/OSPRay
|
modules/mpi/ospray/render/distributed/AlphaCompositeTileOperation.cpp
|
4549
|
// Copyright 2009-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
#include "AlphaCompositeTileOperation.h"
#include <memory>
#include "../../fb/DistributedFrameBuffer.h"
#include "fb/DistributedFrameBuffer_ispc.h"
namespace ospray {
struct BufferedTile
{
ospray::Tile tile;
/*! determines order of this tile relative to other tiles.
Tiles will get blended with the 'over' operator in
increasing 'BufferedTile::sortOrder' value */
float sortOrder;
};
/* LiveTileOperation for data-parallel or hybrid-parallel rendering, where
* different (or partially shared) data is rendered on each rank
*/
struct LiveAlphaCompositeTile : public LiveTileOperation
{
LiveAlphaCompositeTile(DistributedFrameBuffer *dfb,
const vec2i &begin,
size_t tileID,
size_t ownerID);
void newFrame() override;
void process(const ospray::Tile &tile) override;
private:
std::vector<std::unique_ptr<BufferedTile>> bufferedTiles;
int currentGeneration;
int expectedInNextGeneration;
int missingInCurrentGeneration;
std::mutex mutex;
void reportCompositingError(const vec2i &tile);
};
LiveAlphaCompositeTile::LiveAlphaCompositeTile(DistributedFrameBuffer *dfb,
const vec2i &begin,
size_t tileID,
size_t ownerID)
: LiveTileOperation(dfb, begin, tileID, ownerID)
{}
void LiveAlphaCompositeTile::newFrame()
{
std::lock_guard<std::mutex> lock(mutex);
currentGeneration = 0;
expectedInNextGeneration = 0;
missingInCurrentGeneration = 1;
if (!bufferedTiles.empty()) {
handleError(OSP_INVALID_OPERATION,
std::to_string(mpicommon::workerRank())
+ " is starting with buffered tiles!");
}
}
void LiveAlphaCompositeTile::process(const ospray::Tile &tile)
{
std::lock_guard<std::mutex> lock(mutex);
{
auto addTile = rkcommon::make_unique<BufferedTile>();
std::memcpy(&addTile->tile, &tile, sizeof(tile));
bufferedTiles.push_back(std::move(addTile));
}
if (tile.generation == currentGeneration) {
--missingInCurrentGeneration;
expectedInNextGeneration += tile.children;
if (missingInCurrentGeneration < 0) {
reportCompositingError(tile.region.lower);
}
while (missingInCurrentGeneration == 0 && expectedInNextGeneration > 0) {
currentGeneration++;
missingInCurrentGeneration = expectedInNextGeneration;
expectedInNextGeneration = 0;
for (uint32_t i = 0; i < bufferedTiles.size(); i++) {
const BufferedTile *bt = bufferedTiles[i].get();
if (bt->tile.generation == currentGeneration) {
--missingInCurrentGeneration;
expectedInNextGeneration += bt->tile.children;
}
if (missingInCurrentGeneration < 0) {
reportCompositingError(tile.region.lower);
}
}
}
}
if (missingInCurrentGeneration < 0) {
reportCompositingError(tile.region.lower);
}
if (missingInCurrentGeneration == 0) {
// Sort for back-to-front blending
std::sort(bufferedTiles.begin(),
bufferedTiles.end(),
[](const std::unique_ptr<BufferedTile> &a,
const std::unique_ptr<BufferedTile> &b) {
return a->tile.sortOrder > b->tile.sortOrder;
});
Tile **tileArray = STACK_BUFFER(Tile *, bufferedTiles.size());
std::transform(bufferedTiles.begin(),
bufferedTiles.end(),
tileArray,
[](std::unique_ptr<BufferedTile> &t) { return &t->tile; });
ispc::DFB_sortAndBlendFragments(
(ispc::VaryingTile **)tileArray, bufferedTiles.size());
finished.region = tile.region;
finished.fbSize = tile.fbSize;
finished.rcp_fbSize = tile.rcp_fbSize;
accumulate(bufferedTiles[0]->tile);
bufferedTiles.clear();
tileIsFinished();
}
}
void LiveAlphaCompositeTile::reportCompositingError(const vec2i &tile)
{
std::stringstream str;
str << "negative missing on " << mpicommon::workerRank()
<< ", missing = " << missingInCurrentGeneration
<< ", expectedInNex = " << expectedInNextGeneration
<< ", current generation = " << currentGeneration << ", tile = " << tile;
handleError(OSP_INVALID_OPERATION, str.str());
}
std::shared_ptr<LiveTileOperation> AlphaCompositeTileOperation::makeTile(
DistributedFrameBuffer *dfb,
const vec2i &tileBegin,
size_t tileID,
size_t ownerID)
{
return std::make_shared<LiveAlphaCompositeTile>(
dfb, tileBegin, tileID, ownerID);
}
std::string AlphaCompositeTileOperation::toString() const
{
return "ospray::AlphaCompositeTileOperation";
}
} // namespace ospray
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Plantaginaceae/Plantago/Plantago maritima/Plantago maritima alpina/README.md
|
219
|
# Plantago maritima subsp. alpina (L.) O.Bolòs & Vigo SUBSPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
Plantago alpina L.
### Remarks
null
|
apache-2.0
|
pedroigor/keycloak
|
server-spi/src/main/java/org/keycloak/services/clientpolicy/ClientPolicyEvent.java
|
1353
|
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.services.clientpolicy;
/**
 * Events that the client policies mechanism detects and acts upon
*
* @author <a href="mailto:[email protected]">Takashi Norimatsu</a>
*/
public enum ClientPolicyEvent {
REGISTER,
REGISTERED,
UPDATE,
UPDATED,
VIEW,
UNREGISTER,
AUTHORIZATION_REQUEST,
TOKEN_REQUEST,
SERVICE_ACCOUNT_TOKEN_REQUEST,
TOKEN_REFRESH,
TOKEN_REVOKE,
TOKEN_INTROSPECT,
USERINFO_REQUEST,
LOGOUT_REQUEST,
BACKCHANNEL_AUTHENTICATION_REQUEST,
BACKCHANNEL_TOKEN_REQUEST,
PUSHED_AUTHORIZATION_REQUEST,
DEVICE_AUTHORIZATION_REQUEST,
DEVICE_TOKEN_REQUEST
}
|
apache-2.0
|
zstackio/zstack-woodpecker
|
integrationtest/vm/e2e_mini/host/host.py
|
1882
|
# -*- coding:utf-8 -*-
from test_stub import *
class HOST(MINI):
def __init__(self, uri=None, initialized=False):
self.host_name = None
self.host_list = []
if initialized:
# if initialized is True, uri should not be None
self.uri = uri
return
super(HOST, self).__init__()
def host_ops(self, host_name, action, details_page=False):
self.navigate('minihost')
host_list = []
if isinstance(host_name, types.ListType):
host_list = host_name
else:
host_list.append(host_name)
ops_list = {'enable': u'启用',
'disable': u'停用',
'reconnect': u'重连',
'maintenance': u'维护模式',
'light': u'识别灯亮'}
test_util.test_logger('Host (%s) execute action[%s]' % (' '.join(host_list), action))
for host in host_list:
for elem in self.get_elements('ant-row-flex-middle'):
if host in elem.text:
if not details_page:
if not elem.get_element(CHECKBOX).selected:
elem.get_element(CHECKBOX).click()
else:
elem.get_element('left-part').click()
time.sleep(1)
break
if details_page:
self.get_element(MOREOPERATIONBTN).click()
time.sleep(1)
self.operate(ops_list[action])
else:
if action in ['enable', 'disable']:
self.click_button(ops_list[action])
else:
self.get_element(MOREOPERATIONBTN).click()
time.sleep(1)
self.operate(ops_list[action])
self.wait_for_element(MESSAGETOAST, timeout=300, target='disappear')
|
apache-2.0
|
Saber-Team/soi
|
README.md
|
10289
|
__Build Info__
[![project][project-image]][project-url]
[![Build status][travis-image]][travis-url]
[![Dependency Status][david-image]][david-url]
__Downloads Info__
[![Downloads][downloads-image]][downloads-url]
[![Downloads][downloads-all-image]][downloads-url]
__Miscellaneous__
[![NPM version][npm-image]][npm-url]
[![License][license-image]][license-url]
[![maintain][maintain-image]][project-url]
## Front-end build tool
[](https://www.codacy.com/app/zmike86/soi?utm_source=github.com&utm_medium=referral&utm_content=Saber-Team/soi&utm_campaign=badger)
<img src="./doc/assets/sloc.png" alt="sloc stats" style="display: inline-block; position: relative; width: 80%; height: auto;" />
**soi** is a build and packaging tool for front-end projects. It integrates [neo](https://github.com/AceMood/neo) as its resource scanner; soi acts as neo's post-processing service and provides the common packaging plugins such as minification, concatenation, file fingerprinting, CommonJS wrapping, less compilation and so on.
Within the overall front-end engineering picture, the tasks and the role that **soi** takes on as a build tool are shown in the figure below:
<img src="./doc/assets/arch.png" alt="sloc stats" style="display: inline-block; position: relative; width: 80%; height: auto;" />
## Processing workflow
<img src="./doc/assets/workflow.png" alt="workflow" style="display: inline-block; position: relative; width: 80%; height: auto;" />
Roughly as the flow chart shows:
1. The neo-core module scans all matching files under the project directory and parses and records the dependencies between them
2. Once the raw dependency map has been produced, it is emitted for the external program that runs next
3. soi, as that external consumer of neo-core, takes the raw map and performs a series of compile operations according to the user configuration
4. The processed file contents and the simplified resource map are written together to the location on disk given in the configuration
It is only "roughly" because some details and mechanisms are adjusted from time to time, for example the implementation of the compile cache; the overall workflow, however, has never changed, and neither has the plugin mechanism. Two resource maps are produced in the end. One records individual resources in roughly the format below; combined with a server-side resource loading framework it enables loading schemes such as bigpipe, quickling and bigrender. The other, packages.json, stores the packaging information; keeping it separate makes integration easier once an automatic packaging system is in place, since that system can generate the file without touching the resource map.
```
{
"resource": {
"js": {
"Backbone": {
"uri": "https://bd.static0.com/dev/static/bb/7fedd6102.backbone.js",
"type": "js",
"path": "src/bb/backbone.js",
"localPathName": "/static/bb/7fedd6102.backbone.js",
"version": "7fedd6102",
"deps": [
"underscore",
"jquery"
]
},
"jquery": {
"uri": "https://bd.static0.com/dev/static/bb/5cd353a52.jquery.js",
"type": "js",
"path": "src/bb/jquery.js",
"localPathName": "/static/bb/5cd353a52.jquery.js",
"version": "5cd353a52"
},
"underscore": {
"uri": "https://bd.static0.com/dev/static/bb/71b05f21b.underscore.js",
"type": "js",
"path": "src/bb/underscore.js",
"localPathName": "/static/bb/71b05f21b.underscore.js",
"version": "71b05f21b"
}
},
"css": {
"icon-style": {
"uri": "https://bd.static0.com/dev/static/css/82e2712e8.font.css",
"type": "css",
"path": "src/css/font.css",
"localPathName": "/static/css/82e2712e8.font.css",
"version": "82e2712e8",
"within": [
"p0"
]
},
"reset-style": {
"uri": "https://bd.static0.com/dev/static/css/566f3b0ba.reset.css",
"type": "css",
"path": "src/css/reset.css",
"localPathName": "/static/css/566f3b0ba.reset.css",
"version": "566f3b0ba",
"within": [
"p0"
]
},
"main": {
"uri": "https://bd.static0.com/dev/static/css/31d9b6967.withId.css",
"type": "css",
"path": "src/css/withId.css",
"localPathName": "/static/css/31d9b6967.withId.css",
"version": "31d9b6967",
"within": [
"p0"
]
}
}
},
"paths": {
"src/bb/backbone.js": "Backbone",
"src/bb/jquery.js": "jquery",
"src/bb/underscore.js": "underscore",
"src/css/font.css": "icon-style",
"src/css/reset.css": "reset-style",
"src/css/withId.css": "main"
},
"cssClassMap": {}
}
```
## Installation
Make sure node (a version newer than v4.0.0) is installed locally, then install through the npm package manager.
**Note**:
If you have installed a version of **soi** older than v0.14.0, first uninstall the soi-cli dependency with the following command:
```
$ npm uninstall -g soi-cli
```
The reason is that older versions of **soi** bound the command-line executable through the soi-cli module, while the new version binds it directly via the bin field of package.json, so soi-cli is no longer needed.
If this is your first time installing **soi**, or you never installed soi-cli, simply run:
```
$ npm install -g soi
```
When installing the new version of **soi**, be sure to pass the -g global flag so that **soi** can be used to build front-end projects from any directory.
Next install soi's dependency modules: change into the directory where the soi module is installed and run
```
$ npm install
```
## Usage
Create your code repository in any directory; when development is done, add a configuration file to that directory:
```
soi.conf.js
```
Sample projects ship in the samples directory of the soi installation, one for the **soi release** task and one for the **soi deploy** task; they can be used as references.
**Note**:
When scanning a directory, the resource scanner skips file names that start with `_` by default. With some preprocessing plugins you can rename files such as js files that only provide mock data, or less files that only hold extracted variables and functions, to start with `_`, so that no empty files end up in the output. An alternative is to configure the scanner's ignorePaths property or a plugin's ignore property (all built-in plugins
support it): the function receives the file's project path as its argument, and returning true means the resource is ignored.
### Configuring soi
**soi** is a global object that carries the operations invoked from the command line. It can also be configured inside the `soi.conf.js` file through the API exposed by the global soi object. By default soi treats the current directory as the project directory to scan. The full list of soi methods can be found [here](./doc/api/soi.md).
### Configuring tasks
The **task** object is the heart of a build and can be thought of as a series of tasks. The tool currently ships two kinds of Task: a ReleaseTask compiles files and writes the output locally, and is created with the `soi.release.task` method; a DeployTask compiles files and pushes the output to an integration or pre-production environment, and is created with the `soi.deploy.task` method. The full list of Task instance methods can be found [here](./doc/api/task.md).
## Preprocessors
**soi** currently ships the following preprocessors, which can be used directly without installing any additional modules:
### less precompiler
Compiles files with the .less suffix into css files. Some less files only contain separated-out functions and related variables; after compilation they produce no content, yet the resource map would still record them. Name such files in the `_xxx.less` form and the scanner will automatically ignore anything starting with `_`.
### babel-es2015 compiler
Compiles js files written in es6 syntax down to es5. Use it with care: even though the [Reactjs](https://facebook.github.io/react/) ecosystem is thriving, it has its downsides. The one relevant to this plugin is the redundancy in the compiled output; for example, every module that uses the class extends keywords gets the inheritance helper statements inlined into it, so unless an extra extraction pass is added, each compiled file carries roughly 900 bytes of overhead, which makes it a poor fit for scenarios with tight loading-speed requirements.
### babel-jsx compiler
Compiles jsx files into plain js files. If you use jsx syntax but have not adopted es6 precompilation, this plugin is quite convenient; it suits [Reactjs](https://facebook.github.io/react/)-based scenarios.
## Plugin system
soi bundles the plugins commonly needed for front-end builds, usable directly without installing additional modules, including support for the [modux](https://github.com/AceMood/modux) module loader, the UglifyJS plugin for js minification, the cleanCss plugin for css minification, and more. Further details are [available here](./doc/plugins.md)
## Finally
**soi** strives to be future proof, including in the implementation of its plugin system; splitting the build tool into a resource scanner plus post-processing service plugins serves exactly that goal. The inspiration comes from Facebook's **Haste Internal System**; among domestic predecessors there is Baidu's **F.I.S**.
For the parts that are not yet implemented or not yet satisfactory, [see the todo list](./doc/todos.md); the most eagerly awaited items are scanning of html static resources and a compile cache built on ipc.
[travis-image]: https://img.shields.io/travis/Saber-Team/soi.svg
[travis-url]: https://travis-ci.org/Saber-Team/soi
[npm-image]: https://img.shields.io/npm/v/soi.svg
[npm-url]: https://npmjs.org/package/soi
[node-image]: https://img.shields.io/node/v/soi.svg
[node-url]: https://npmjs.org/package/soi
[david-image]: http://img.shields.io/david/Saber-Team/soi.svg
[david-url]: https://david-dm.org/Saber-Team/soi
[coveralls-image]: https://img.shields.io/coveralls/Saber-Team/soi.svg
[coveralls-url]: https://coveralls.io/r/Saber-Team/soi?branch=master
[license-image]: http://img.shields.io/npm/l/soi.svg
[license-url]: LICENSE.md
[maintain-image]: https://img.shields.io/badge/maintained-Yes-blue.svg
[project-image]: https://img.shields.io/badge/soi-Excellent-brightgreen.svg
[project-url]: https://github.com/Saber-Team/soi
[downloads-image]: https://img.shields.io/npm/dm/soi.svg
[downloads-url]: https://npmjs.org/package/soi
[downloads-all-image]: https://img.shields.io/npm/dt/soi.svg
[coverage-image]: https://api.codacy.com/project/badge/coverage/43c442e150024a5fb80c876bb426c139
[codacy-image]: https://api.codacy.com/project/badge/grade/43c442e150024a5fb80c876bb426c139
[codacy-url]: https://www.codacy.com/app/zmike86/neo
|
apache-2.0
|
gromgull/cgajs
|
test/test_splitting.js
|
1531
|
var THREE = require('three');
var cgaprocessor = require('./cgaprocessor')
// function t(sizes, size, repeat) {
// console.log(sizes, size, repeat);
// console.log(cgaprocessor._compute_splits( sizes, size, repeat ));
// }
// t([ { size: 2 } ], 4, true);
// t([ { size: 2 } ], 5, true);
// t([ { size: 2 }, { size: 2 } ], 4, true);
// t([ { size: 2 }, { size: 2 } ], 5, true);
// t([ { size: 2, _float: true }, { size: 2 } ], 3, true);
// t([ { size: 2 }, { size: 2, _float: true }, { size: 2 } ], 4.5, true);
// t([ { size: 2 }, { size: 1, _float: true }, { size: 2, _float: true }, { size: 2 } ], 4.5, true);
//var preg = new THREE.BoxGeometry(2,2,2);
var preg = new THREE.Geometry();
preg.vertices.push(new THREE.Vector3(0,0,0),
new THREE.Vector3(1,1,0),
new THREE.Vector3(2,0,0));
preg.faces.push(new THREE.Face3(0,1,2));
//preg.translate(2,0,0);
// var preg = new THREE.Geometry();
// preg.vertices.push(new THREE.Vector3(-1,0,0),
// new THREE.Vector3(-1,1,0),
// new THREE.Vector3(1,0,0));
// preg.faces.push(new THREE.Face3(0,1,2));
// preg.translate(2,0,0);
preg.computeBoundingBox();
console.dir(preg.boundingBox);
console.log(preg.vertices.length, preg.faces.length);
debugger;
var g = cgaprocessor.split_geometry('x', preg, 0.5, 1.5);
g.computeBoundingBox();
console.dir(g.boundingBox);
console.log(g.vertices.length, g.faces.length);
g.vertices.forEach( v => console.log(v) );
g.faces.forEach( v => console.log(v.a,v.b,v.c) );
|
apache-2.0
|
karagog/GKChess
|
src/third_party/pg_utils/book.h
|
2350
|
#ifndef BOOK_H
#define BOOK_H
#include "pg_utils.h"
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
/** The size of one polyglot entry, in bytes. */
#define POLYGLOT_ENTRY_SIZE 16
/** The size of a polyglot entry key, in bytes. */
#define POLYGLOT_KEY_SIZE 8
/** These are the enumerations given in the Polyglot spec. */
enum promoted_piece_enum
{
promote_none = 0,
promote_knight = 1,
promote_bishop = 2,
promote_rook = 3,
promote_queen = 4
};
/** Describes a move with its weight and app-specific 'learn' value. */
typedef struct
{
// 0-based indices describe the source and dest squares
uint8 source_col, source_row;
uint8 dest_col, dest_row;
// The value of this corresponds to the promoted_piece_enum
uint8 promoted_piece;
// The weight is between 0 and 100%
float weight;
// The learn value only means something to the application that's using it
uint32 learn;
} pg_move_t;
/** Returns the polyglot hash for the given position.
\param fen The position of the board in FEN
\returns The position hash, or 0 if there was a problem (invalid FEN)
*/
uint64 pg_compute_key(char const *fen);
/** Looks up the position in the book and returns all the moves it found.
\param handle The handle created by calling open_file()
\param key The position key, acquired from pg_compute_key()
\param array An array of moves which will be populated with return values,
whose length is given by the max_array_length parameter.
In case there are more moves in the book than can be held in the return array,
the results will be truncated.
\param max_array_length An input that gives the length of the return array
\returns The number of items in the return array (can be 0)
*/
unsigned int pg_lookup_moves(void *handle,
uint64 key,
pg_move_t *array,
unsigned int max_array_length);
/** Converts the move to a string.
\param s An array of memory to fill with the string. It must be
at least length 6 (4 for move text, 1 for promoted piece, 1 for null terminator)
*/
void pg_move_to_string(pg_move_t *, char *s);
#ifdef __cplusplus
}
#endif // __cplusplus
#endif
|
apache-2.0
|
nantesmetropole/docker-paas
|
lib/docker_paas/family/test.rb
|
880
|
require 'docker_paas/family/common'
module Docker_paas; module Family
class Test < Common
def test_target
assert_env 'TEST_TARGET', ['puppet']
end
def short_tag
test_target
end
def early_run
assert_env 'DIST', ['stretch']
super + [
# octocatalog-diff is not in stretch, take it from sid
'head -1 /etc/apt/sources.list | sed s/stretch/sid/ > /etc/apt/sources.list.d/sid.list &&',
"echo 'APT::Default-Release \"stretch\";' > /etc/apt/apt.conf.d/default-release &&",
]
end
def packages
super + [
'git',
'octocatalog-diff',
'puppet-lint',
'r10k',
'rgxg',
'ruby-puppetlabs-spec-helper',
'ruby-puppet-syntax',
'ruby-rspec-puppet',
]
end
def after_run
[
'USER nobody',
]
end
end
end; end
|
apache-2.0
|
ninetian/ffmpeginstaller
|
flac-1.3.2/doc/html/api/_09_2metadata_8h.html
|
13318
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.5"/>
<title>FLAC: include/FLAC++/metadata.h File Reference</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td style="padding-left: 0.5em;">
<div id="projectname">FLAC
 <span id="projectnumber">1.3.2</span>
</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.5 -->
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="index.html"><span>Main Page</span></a></li>
<li><a href="modules.html"><span>Modules</span></a></li>
<li><a href="annotated.html"><span>Classes</span></a></li>
<li class="current"><a href="files.html"><span>Files</span></a></li>
</ul>
</div>
<div id="navrow2" class="tabs2">
<ul class="tablist">
<li><a href="files.html"><span>File List</span></a></li>
<li><a href="globals.html"><span>File Members</span></a></li>
</ul>
</div>
<div id="nav-path" class="navpath">
<ul>
<li class="navelem"><a class="el" href="dir_d44c64559bbebec7f509842c48db8b23.html">include</a></li><li class="navelem"><a class="el" href="dir_527642952c2881b3e5b36abb4a29ebef.html">FLAC++</a></li> </ul>
</div>
</div><!-- top -->
<div class="header">
<div class="summary">
<a href="#nested-classes">Classes</a> |
<a href="#func-members">Functions</a> </div>
<div class="headertitle">
<div class="title">metadata.h File Reference</div> </div>
</div><!--header-->
<div class="contents">
<div class="textblock"><code>#include "<a class="el" href="_09_2export_8h_source.html">export.h</a>"</code><br/>
<code>#include "<a class="el" href="metadata_8h_source.html">FLAC/metadata.h</a>"</code><br/>
</div>
<p><a href="_09_2metadata_8h_source.html">Go to the source code of this file.</a></p>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="nested-classes"></a>
Classes</h2></td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1Prototype.html">FLAC::Metadata::Prototype</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1StreamInfo.html">FLAC::Metadata::StreamInfo</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1Padding.html">FLAC::Metadata::Padding</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1Application.html">FLAC::Metadata::Application</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1SeekTable.html">FLAC::Metadata::SeekTable</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1VorbisComment.html">FLAC::Metadata::VorbisComment</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1VorbisComment_1_1Entry.html">FLAC::Metadata::VorbisComment::Entry</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1CueSheet.html">FLAC::Metadata::CueSheet</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1CueSheet_1_1Track.html">FLAC::Metadata::CueSheet::Track</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1Picture.html">FLAC::Metadata::Picture</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1Unknown.html">FLAC::Metadata::Unknown</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1SimpleIterator.html">FLAC::Metadata::SimpleIterator</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1SimpleIterator_1_1Status.html">FLAC::Metadata::SimpleIterator::Status</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1Chain.html">FLAC::Metadata::Chain</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1Chain_1_1Status.html">FLAC::Metadata::Chain::Status</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">class  </td><td class="memItemRight" valign="bottom"><a class="el" href="classFLAC_1_1Metadata_1_1Iterator.html">FLAC::Metadata::Iterator</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="func-members"></a>
Functions</h2></td></tr>
<tr class="memitem:gae18d91726a320349b2c3fb45e79d21fc"><td class="memItemLeft" align="right" valign="top">Prototype * </td><td class="memItemRight" valign="bottom"><a class="el" href="group__flacpp__metadata__object.html#gae18d91726a320349b2c3fb45e79d21fc">FLAC::Metadata::clone</a> (const Prototype *)</td></tr>
<tr class="separator:gae18d91726a320349b2c3fb45e79d21fc"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:ga8fa8da652f33edeb4dabb4ce39fda04b"><td class="memItemLeft" align="right" valign="top">bool </td><td class="memItemRight" valign="bottom"><a class="el" href="group__flacpp__metadata__level0.html#ga8fa8da652f33edeb4dabb4ce39fda04b">FLAC::Metadata::get_streaminfo</a> (const char *filename, StreamInfo &streaminfo)</td></tr>
<tr class="separator:ga8fa8da652f33edeb4dabb4ce39fda04b"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:ga533a71ba745ca03068523a4a45fb0329"><td class="memItemLeft" align="right" valign="top">bool </td><td class="memItemRight" valign="bottom"><a class="el" href="group__flacpp__metadata__level0.html#ga533a71ba745ca03068523a4a45fb0329">FLAC::Metadata::get_tags</a> (const char *filename, VorbisComment *&tags)</td></tr>
<tr class="separator:ga533a71ba745ca03068523a4a45fb0329"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:ga85166e6206f3d5635684de4257f2b00e"><td class="memItemLeft" align="right" valign="top">bool </td><td class="memItemRight" valign="bottom"><a class="el" href="group__flacpp__metadata__level0.html#ga85166e6206f3d5635684de4257f2b00e">FLAC::Metadata::get_tags</a> (const char *filename, VorbisComment &tags)</td></tr>
<tr class="separator:ga85166e6206f3d5635684de4257f2b00e"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:ga4fad03d91f22d78acf35dd2f35df9ac7"><td class="memItemLeft" align="right" valign="top">bool </td><td class="memItemRight" valign="bottom"><a class="el" href="group__flacpp__metadata__level0.html#ga4fad03d91f22d78acf35dd2f35df9ac7">FLAC::Metadata::get_cuesheet</a> (const char *filename, CueSheet *&cuesheet)</td></tr>
<tr class="separator:ga4fad03d91f22d78acf35dd2f35df9ac7"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:gaea8f05f89e36af143d73b4280f05cc0e"><td class="memItemLeft" align="right" valign="top">bool </td><td class="memItemRight" valign="bottom"><a class="el" href="group__flacpp__metadata__level0.html#gaea8f05f89e36af143d73b4280f05cc0e">FLAC::Metadata::get_cuesheet</a> (const char *filename, CueSheet &cuesheet)</td></tr>
<tr class="separator:gaea8f05f89e36af143d73b4280f05cc0e"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:ga2ca454c644cb6548b05545c129e4d6ef"><td class="memItemLeft" align="right" valign="top">bool </td><td class="memItemRight" valign="bottom"><a class="el" href="group__flacpp__metadata__level0.html#ga2ca454c644cb6548b05545c129e4d6ef">FLAC::Metadata::get_picture</a> (const char *filename, Picture *&picture,::<a class="el" href="group__flac__format.html#gaf6d3e836cee023e0b8d897f1fdc9825d">FLAC__StreamMetadata_Picture_Type</a> type, const char *mime_type, const FLAC__byte *description, unsigned max_width, unsigned max_height, unsigned max_depth, unsigned max_colors)</td></tr>
<tr class="separator:ga2ca454c644cb6548b05545c129e4d6ef"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:ga82705f1c0ac6d36c0a508dc33e5e7181"><td class="memItemLeft" align="right" valign="top">bool </td><td class="memItemRight" valign="bottom"><a class="el" href="group__flacpp__metadata__level0.html#ga82705f1c0ac6d36c0a508dc33e5e7181">FLAC::Metadata::get_picture</a> (const char *filename, Picture &picture,::<a class="el" href="group__flac__format.html#gaf6d3e836cee023e0b8d897f1fdc9825d">FLAC__StreamMetadata_Picture_Type</a> type, const char *mime_type, const FLAC__byte *description, unsigned max_width, unsigned max_height, unsigned max_depth, unsigned max_colors)</td></tr>
<tr class="separator:ga82705f1c0ac6d36c0a508dc33e5e7181"><td class="memSeparator" colspan="2"> </td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock"><p>This module provides classes for creating and manipulating FLAC metadata blocks in memory, and three progressively more powerful interfaces for traversing and editing metadata in FLAC files. </p>
<p>See the detailed documentation for each interface in the <a class="el" href="group__flacpp__metadata.html">metadata </a> module. </p>
</div></div><!-- contents -->
<hr size="1">
<div class="copyright">
<!-- @@@ oh so hacky -->
<table>
<tr>
<td align="left">
Copyright (c) 2000-2009 Josh Coalson
Copyright (c) 2011-2016 Xiph.Org Foundation
</td>
<td width="1%" align="right">
<a href="http://sourceforge.net"><img src="http://sflogo.sourceforge.net/sflogo.php?group_id=13478&type=1" width="88" height="31" border="0" alt="SourceForge.net Logo" /></a>
</td>
</tr>
</table>
</div>
<!-- Copyright (c) 2000-2009 Josh Coalson -->
<!-- Copyright (c) 2011-2016 Xiph.Org Foundation -->
<!-- Permission is granted to copy, distribute and/or modify this document -->
<!-- under the terms of the GNU Free Documentation License, Version 1.1 -->
<!-- or any later version published by the Free Software Foundation; -->
<!-- with no invariant sections. -->
<!-- A copy of the license can be found at http://www.gnu.org/copyleft/fdl.html -->
</body>
</html>
|
apache-2.0
|
kaular/ArchRef
|
ArchRefClient/ArchRefClient/src/app/shared/datamodels/types/fragmenttype.ts
|
1002
|
import { Entity } from '../entity/entity';
/*******************************************************************************************************************************************************************************************************
*
 * @data FragmentType - FragmentType inherits from Entity; it is a specific type of a LevelGraphNode
*
* Entity
* @superFields - id: number - ID of the FragmentType
* @superFields - name: string - Name of the FragmentType
* @superFields - expectedProperties: ExpectedProperty[] - Array of expected properties of the FragmentType
* @superFields - providedProperties: ProvidedProperty[] - Array of provided properties of the FragmentType
*
* @author Arthur Kaul
*
******************************************************************************************************************************************************************************************************/
export class FragmentType extends Entity {
constructor() {
super();
}
}
|
apache-2.0
|
lizenn/erlang-dbus
|
doc/dbus_auth_external.md
|
1227
|
# Module dbus_auth_external #
* [Description](#description)
* [Function Index](#index)
* [Function Details](#functions)
Implements EXTERNAL SASL mechanism.
Copyright (c) (C) 2014, Jean Parpaillon
__Behaviours:__ [`dbus_auth`](dbus_auth.md).
__Authors:__ Jean Parpaillon ([`[email protected]`](mailto:[email protected])).
<a name="description"></a>
## Description ##
See [RFC 4422](https://tools.ietf.org/html/rfc4422) for the complete
specification.
<a name="index"></a>
## Function Index ##
<table width="100%" border="1" cellspacing="0" cellpadding="2" summary="function index"><tr><td valign="top"><a href="#challenge-2">challenge/2</a></td><td>Not implemented: ANONYMOUS does not require challenge.</td></tr><tr><td valign="top"><a href="#init-0">init/0</a></td><td>Initialize EXTERNAL mechanism.</td></tr></table>
<a name="functions"></a>
## Function Details ##
<a name="challenge-2"></a>
### challenge/2 ###
<pre><code>
challenge(X1::binary(), X2::any()) -> {error, invalid_challenge}
</code></pre>
<br />
Not implemented: EXTERNAL does not require a challenge
<a name="init-0"></a>
### init/0 ###
<pre><code>
init() -> {ok, binary()}
</code></pre>
<br />
Initialize EXTERNAL mechanism.
|
apache-2.0
|
GerritCodeReview/gerrit
|
java/com/google/gerrit/server/extensions/events/ChangeRestored.java
|
3450
|
// Copyright (C) 2015 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.extensions.events;
import com.google.common.flogger.FluentLogger;
import com.google.gerrit.entities.PatchSet;
import com.google.gerrit.exceptions.StorageException;
import com.google.gerrit.extensions.api.changes.NotifyHandling;
import com.google.gerrit.extensions.common.AccountInfo;
import com.google.gerrit.extensions.common.ChangeInfo;
import com.google.gerrit.extensions.common.RevisionInfo;
import com.google.gerrit.extensions.events.ChangeRestoredListener;
import com.google.gerrit.server.GpgException;
import com.google.gerrit.server.account.AccountState;
import com.google.gerrit.server.patch.PatchListNotAvailableException;
import com.google.gerrit.server.patch.PatchListObjectTooLargeException;
import com.google.gerrit.server.permissions.PermissionBackendException;
import com.google.gerrit.server.plugincontext.PluginSetContext;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import java.io.IOException;
import java.time.Instant;
/** Helper class to fire an event when a change has been restored. */
@Singleton
public class ChangeRestored {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
private final PluginSetContext<ChangeRestoredListener> listeners;
private final EventUtil util;
@Inject
ChangeRestored(PluginSetContext<ChangeRestoredListener> listeners, EventUtil util) {
this.listeners = listeners;
this.util = util;
}
public void fire(
ChangeData changeData, PatchSet ps, AccountState restorer, String reason, Instant when) {
if (listeners.isEmpty()) {
return;
}
try {
Event event =
new Event(
util.changeInfo(changeData),
util.revisionInfo(changeData.project(), ps),
util.accountInfo(restorer),
reason,
when);
listeners.runEach(l -> l.onChangeRestored(event));
} catch (PatchListObjectTooLargeException e) {
logger.atWarning().log("Couldn't fire event: %s", e.getMessage());
} catch (PatchListNotAvailableException
| GpgException
| IOException
| StorageException
| PermissionBackendException e) {
logger.atSevere().withCause(e).log("Couldn't fire event");
}
}
/** Event to be fired when a change has been restored. */
private static class Event extends AbstractRevisionEvent implements ChangeRestoredListener.Event {
private String reason;
Event(
ChangeInfo change,
RevisionInfo revision,
AccountInfo restorer,
String reason,
Instant when) {
super(change, revision, restorer, when, NotifyHandling.ALL);
this.reason = reason;
}
@Override
public String getReason() {
return reason;
}
}
}
|
apache-2.0
|
bogdansolga/spring-boot-training
|
d04/d04s01-async-processing/d04s01e05-async-messaging-using-solace/d04s01e05-product-publisher/src/main/java/net/safedata/spring/boot/training/solace/publisher/MessagePublisher.java
|
1255
|
package net.safedata.spring.boot.training.solace.publisher;
import net.safedata.spring.boot.training.solace.channel.OutboundChannels;
import net.safedata.spring.boot.training.solace.event.AddProductToOrderCommand;
import net.safedata.spring.boot.training.solace.event.OrderUpdatedEvent;
import net.safedata.spring.boot.training.solace.message.MessageCreator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.stereotype.Component;
@Component
@EnableBinding(OutboundChannels.class)
public class MessagePublisher {
private final OutboundChannels outboundChannels;
@Autowired
public MessagePublisher(final OutboundChannels outboundChannels) {
this.outboundChannels = outboundChannels;
}
public void publishAddProductToOrderEvent(final AddProductToOrderCommand addProductToOrderCommand) {
outboundChannels.addProductToOrder()
.send(MessageCreator.create(addProductToOrderCommand));
}
public void publishOrderUpdatedEvent(final OrderUpdatedEvent orderUpdatedEvent) {
outboundChannels.orderUpdated()
.send(MessageCreator.create(orderUpdatedEvent));
}
}
|
apache-2.0
|
cmdrmcdonald/EliteDangerousDataProvider
|
Contributors.md
|
231
|
# Contributors
* [Cmdr McDonald](https://github.com/cmdrmcdonald)
* [Handfeger](https://github.com/Handfeger)
* Cmdr Malandark
* [Baroness Galaxy](https://github.com/Javelias)
* [Michael Werle](https://github.com/mwerle)
|
apache-2.0
|
jxent/Demoz
|
Demoz/app/src/main/java/cn/demoz/www/adapter/DefaultAdapter.java
|
5112
|
package cn.demoz.www.adapter;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.BaseAdapter;
import android.widget.ListView;
import java.util.List;
import cn.demoz.www.holder.BaseHolder;
import cn.demoz.www.holder.MoreHolder;
import cn.demoz.www.manager.ThreadManager;
import cn.demoz.www.tools.UiUtils;
public abstract class DefaultAdapter<Data> extends BaseAdapter implements OnItemClickListener {
protected List<Data> datas;
private static final int DEFAULT_ITEM = 0;
private static final int MORE_ITEM = 1;
private ListView lv;
public List<Data> getDatas() {
return datas;
}
public void setDatas(List<Data> datas) {
this.datas = datas;
}
public DefaultAdapter(List<Data> datas, ListView lv) {
this.datas = datas;
        // register the item click listener on the ListView
lv.setOnItemClickListener(this);
this.lv = lv;
}
    // callback invoked when a ListView item is clicked
@Override
public void onItemClick(AdapterView<?> parent, View view, int position,
long id) {
//Toast.makeText(UiUtils.getContext(), "position:"+position, 0).show();
        position = position - lv.getHeaderViewsCount(); // subtract the number of header views from the reported position
onInnerItemClick(position);
}
/**
     * Handle the item click event in this method
*/
public void onInnerItemClick(int position) {
}
@Override
public int getCount() {
        return datas.size() + 1; // the extra last item is the "load more" item
}
@Override
public Object getItem(int position) {
return datas.get(position);
}
/**
     * Determine the type of the item at the given position
*/
@Override
public int getItemViewType(int position) { //20
        if (position == datas.size()) { // this is the last item
return MORE_ITEM;
}
        return getInnerItemViewType(position); // not the last item: return the default type
}
protected int getInnerItemViewType(int position) {
return DEFAULT_ITEM;
}
/**
     * The number of different item view types in this ListView
*/
@Override
public int getViewTypeCount() {
        return super.getViewTypeCount() + 1; // 2: there are two different item types
}
@Override
public long getItemId(int position) {
return position;
}
public View getView(int position, View convertView, ViewGroup parent) {
BaseHolder holder = null;
        switch (getItemViewType(position)) { // determine the type of the current item
case MORE_ITEM:
if (convertView == null) {
holder = getMoreHolder();
} else {
holder = (BaseHolder) convertView.getTag();
}
break;
default:
if (convertView == null) {
holder = getHolder();
} else {
holder = (BaseHolder) convertView.getTag();
}
if (position < datas.size()) {
holder.setData(datas.get(position));
}
break;
}
        return holder.getContentView(); // if this holder happens to be the MoreHolder, it is now being displayed
}
private MoreHolder holder;
private BaseHolder getMoreHolder() {
if (holder != null) {
return holder;
} else {
holder = new MoreHolder(this, hasMore());
return holder;
}
}
/**
     * Whether there is more data to load
     *
     * @return true if more data is available
*/
protected boolean hasMore() {
return true;
}
protected abstract BaseHolder<Data> getHolder();
/**
     * Called when the "load more" item becomes visible
*/
public void loadMore() {
ThreadManager.getInstance().createLongPool().execute(new Runnable() {
@Override
public void run() {
                // load more data on a background thread
final List<Data> newData = onload();
UiUtils.runOnUiThread(new Runnable() {
@Override
public void run() {
if (newData == null) {
holder.setData(MoreHolder.LOAD_ERROR);//
} else if (newData.size() == 0) {
holder.setData(MoreHolder.HAS_NO_MORE);
} else {
                            // success
holder.setData(MoreHolder.HAS_MORE);
                            datas.addAll(newData); // append the new data to the ListView's existing collection
                            notifyDataSetChanged(); // refresh the UI
}
}
});
}
});
}
/**
     * Load more data
*/
protected abstract List<Data> onload();
}
|
apache-2.0
|
gstamac/powermock
|
powermock-modules/powermock-module-javaagent/src/main/java/sun/tools/attach/LinuxVirtualMachine.java
|
11227
|
/*
* Copyright (c) 2005, 2010, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.tools.attach;
import com.sun.tools.attach.AgentLoadException;
import com.sun.tools.attach.AttachNotSupportedException;
import com.sun.tools.attach.spi.AttachProvider;
import java.io.InputStream;
import java.io.IOException;
import java.io.File;
/*
* Linux implementation of HotSpotVirtualMachine
*/
public class LinuxVirtualMachine extends HotSpotVirtualMachine {
// "/tmp" is used as a global well-known location for the files
// .java_pid<pid>. and .attach_pid<pid>. It is important that this
// location is the same for all processes, otherwise the tools
// will not be able to find all Hotspot processes.
// Any changes to this needs to be synchronized with HotSpot.
private static final String tmpdir = "/tmp";
// Indicates if this machine uses the old LinuxThreads
static boolean isLinuxThreads;
    // The path to the socket file created by the target VM
String path;
/**
* Attaches to the target VM
*/
public LinuxVirtualMachine(AttachProvider provider, String vmid)
throws AttachNotSupportedException, IOException
{
super(provider, vmid);
// This provider only understands pids
int pid;
try {
pid = Integer.parseInt(vmid);
} catch (NumberFormatException x) {
throw new AttachNotSupportedException("Invalid process identifier");
}
// Find the socket file. If not found then we attempt to start the
// attach mechanism in the target VM by sending it a QUIT signal.
// Then we attempt to find the socket file again.
path = findSocketFile(pid);
if (path == null) {
File f = createAttachFile(pid);
try {
// On LinuxThreads each thread is a process and we don't have the
// pid of the VMThread which has SIGQUIT unblocked. To workaround
// this we get the pid of the "manager thread" that is created
// by the first call to pthread_create. This is parent of all
// threads (except the initial thread).
if (isLinuxThreads) {
int mpid;
try {
mpid = getLinuxThreadsManager(pid);
} catch (IOException x) {
throw new AttachNotSupportedException(x.getMessage());
}
assert(mpid >= 1);
sendQuitToChildrenOf(mpid);
} else {
sendQuitTo(pid);
}
// give the target VM time to start the attach mechanism
int i = 0;
long delay = 200;
int retries = (int)(attachTimeout() / delay);
do {
try {
Thread.sleep(delay);
} catch (InterruptedException x) { }
path = findSocketFile(pid);
i++;
} while (i <= retries && path == null);
if (path == null) {
throw new AttachNotSupportedException(
"Unable to open socket file: target process not responding " +
"or HotSpot VM not loaded");
}
} finally {
f.delete();
}
}
// Check that the file owner/permission to avoid attaching to
// bogus process
checkPermissions(path);
// Check that we can connect to the process
// - this ensures we throw the permission denied error now rather than
// later when we attempt to enqueue a command.
int s = socket();
try {
connect(s, path);
} finally {
close(s);
}
}
/**
* Detach from the target VM
*/
@Override
public void detach() throws IOException {
synchronized (this) {
if (this.path != null) {
this.path = null;
}
}
}
// protocol version
private final static String PROTOCOL_VERSION = "1";
// known errors
private final static int ATTACH_ERROR_BADVERSION = 101;
/**
* Execute the given command in the target VM.
*/
@Override
InputStream execute(String cmd, Object ... args) throws AgentLoadException, IOException {
assert args.length <= 3; // includes null
// did we detach?
String p;
synchronized (this) {
if (this.path == null) {
throw new IOException("Detached from target VM");
}
p = this.path;
}
// create UNIX socket
int s = socket();
// connect to target VM
try {
connect(s, p);
} catch (IOException x) {
close(s);
throw x;
}
IOException ioe = null;
// connected - write request
// <ver> <cmd> <args...>
try {
writeString(s, PROTOCOL_VERSION);
writeString(s, cmd);
for (int i=0; i<3; i++) {
if (i < args.length && args[i] != null) {
writeString(s, (String)args[i]);
} else {
writeString(s, "");
}
}
} catch (IOException x) {
ioe = x;
}
// Create an input stream to read reply
SocketInputStream sis = new SocketInputStream(s);
// Read the command completion status
int completionStatus;
try {
completionStatus = readInt(sis);
} catch (IOException x) {
sis.close();
if (ioe != null) {
throw ioe;
} else {
throw x;
}
}
if (completionStatus != 0) {
sis.close();
// In the event of a protocol mismatch then the target VM
// returns a known error so that we can throw a reasonable
// error.
if (completionStatus == ATTACH_ERROR_BADVERSION) {
throw new IOException("Protocol mismatch with target VM");
}
// Special-case the "load" command so that the right exception is
// thrown.
if (cmd.equals("load")) {
throw new AgentLoadException("Failed to load agent library");
} else {
throw new IOException("Command failed in target VM");
}
}
// Return the input stream so that the command output can be read
return sis;
}
/*
* InputStream for the socket connection to get target VM
*/
private class SocketInputStream extends InputStream {
int s;
public SocketInputStream(int s) {
this.s = s;
}
@Override
public synchronized int read() throws IOException {
byte b[] = new byte[1];
int n = this.read(b, 0, 1);
if (n == 1) {
return b[0] & 0xff;
} else {
return -1;
}
}
@Override
public synchronized int read(byte[] bs, int off, int len) throws IOException {
if ((off < 0) || (off > bs.length) || (len < 0) ||
((off + len) > bs.length) || ((off + len) < 0)) {
throw new IndexOutOfBoundsException();
} else if (len == 0)
return 0;
return LinuxVirtualMachine.read(s, bs, off, len);
}
@Override
public void close() throws IOException {
LinuxVirtualMachine.close(s);
}
}
// Return the socket file for the given process.
private String findSocketFile(int pid) {
File f = new File(tmpdir, ".java_pid" + pid);
if (!f.exists()) {
return null;
}
return f.getPath();
}
// On Solaris/Linux a simple handshake is used to start the attach mechanism
// if not already started. The client creates a .attach_pid<pid> file in the
// target VM's working directory (or temp directory), and the SIGQUIT handler
// checks for the file.
private File createAttachFile(int pid) throws IOException {
String fn = ".attach_pid" + pid;
String path = "/proc/" + pid + "/cwd/" + fn;
File f = new File(path);
try {
f.createNewFile();
} catch (IOException x) {
f = new File(tmpdir, fn);
f.createNewFile();
}
return f;
}
/*
* Write/sends the given to the target VM. String is transmitted in
* UTF-8 encoding.
*/
private void writeString(int fd, String s) throws IOException {
if (s.length() > 0) {
byte b[];
try {
b = s.getBytes("UTF-8");
} catch (java.io.UnsupportedEncodingException x) {
throw new InternalError();
}
LinuxVirtualMachine.write(fd, b, 0, b.length);
}
byte b[] = new byte[1];
b[0] = 0;
write(fd, b, 0, 1);
}
//-- native methods
static native boolean isLinuxThreads();
static native int getLinuxThreadsManager(int pid) throws IOException;
static native void sendQuitToChildrenOf(int pid) throws IOException;
static native void sendQuitTo(int pid) throws IOException;
static native void checkPermissions(String path) throws IOException;
static native int socket() throws IOException;
static native void connect(int fd, String path) throws IOException;
static native void close(int fd) throws IOException;
static native int read(int fd, byte buf[], int off, int bufLen) throws IOException;
static native void write(int fd, byte buf[], int off, int bufLen) throws IOException;
static {
System.loadLibrary("attach");
isLinuxThreads = isLinuxThreads();
}
}
|
apache-2.0
|
miccl/VisualiseR
|
Assets/VisualiseR/Code/Scripts/Conversion/README.md
|
2151
|
# README
* _code_conversion.py_: Script for the conversion from code to jpg (including syntax highlighting and line numbering).
* _pdf_conversion.py_: Script for the conversion from pdf to jpg
## Code Conversion
The script converts code files into JPEG format.
Additionally it adds syntax highlighting and line numbering.
The script uses [Pygments](http://pygments.org) for the code conversion.
### Setup
In order to use the script you need the following:
* [Python 3.x](https://www.python.org/downloads/)
* [Pygments](http://pygments.org/download/)
### Usage
The script can be used with the following command in the console:
```
python code_conversion.py <input_path> <output_dir_path>
```
**Parameters:**
* _input_path_: Path of the file or directory that should be converted.
* _output_dir_path_: Path where the converted files should be stored.
**Examples:**
Convert all files in a directory.
```
python code_conversion.py C:/CodeDirectory C:/TargetDirectory/Test
```
Convert a file.
```
python code_conversion.py C:/CodeDirectory/test.py C:/TargetDirectory/Test
```
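For illustration, the core of such a conversion could look roughly like the sketch below. This is not the actual _code_conversion.py_; the function name and file names are placeholders, and it assumes Pygments is installed together with Pillow (needed by its image formatter).
```
# Minimal sketch (not the actual script): render a source file to JPG with
# Pygments, adding syntax highlighting and line numbers.
from pygments import highlight
from pygments.formatters import JpgImageFormatter
from pygments.lexers import guess_lexer_for_filename

def convert_code_to_jpg(input_path, output_path):
    with open(input_path, encoding="utf-8") as f:
        code = f.read()
    # Pick a lexer from the file name, falling back to the file contents.
    lexer = guess_lexer_for_filename(input_path, code)
    formatter = JpgImageFormatter(line_numbers=True)
    with open(output_path, "wb") as out:
        highlight(code, lexer, formatter, out)

convert_code_to_jpg("test.py", "test.jpg")
```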
## Pdf Conversion
The script converts PDF files into image format.
A new image file is created for every page of the PDF file.
The script uses [ImageMagick](https://www.imagemagick.org/script/index.php), which uses [Ghostscript](https://www.ghostscript.com/) for the conversion.
### Setup
In order to use the script you need the following:
* [Python 3.x](https://www.python.org/downloads/)
* [ImageMagick](https://www.imagemagick.org/script/download.php)
* [Ghostscript](https://www.ghostscript.com/download/)
### Usage
The script can be used with the following command in the console:
```
python pdf_conversion.py <input_path> <output_path>
```
**Parameters:**
* _input_path_: Path of the PDF file that should be converted.
* _output_path_: Path of the image file where the converted pages should be stored.
**Examples:**
Convert a PDF file.
```
python pdf_conversion.py C:/Path/To/Pdf/File/test.pdf D:/path/to/jpg/file/test.jpg
```
If the PDF has more than one page, image files named test-0.jpg, test-1.jpg, and so on are created.
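As a rough idea of what the script does internally, the conversion boils down to a single ImageMagick call. The sketch below is not the actual _pdf_conversion.py_; the density value and paths are only illustrative, and ImageMagick's `convert` must be on the PATH.
```
# Minimal sketch (not the actual script): shell out to ImageMagick's
# "convert", which delegates PDF rasterization to Ghostscript.
import subprocess

def convert_pdf_to_jpg(input_pdf, output_jpg):
    # For multi-page PDFs ImageMagick numbers the output automatically,
    # e.g. test.jpg becomes test-0.jpg, test-1.jpg, ...
    subprocess.run(
        ["convert", "-density", "150", input_pdf, output_jpg],
        check=True,
    )

convert_pdf_to_jpg("test.pdf", "test.jpg")
```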
|
apache-2.0
|
kamtschatka/amphtml
|
src/service/url-replacements-impl.js
|
42445
|
/**
* Copyright 2015 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {
AsyncResolverDef,
ResolverReturnDef,
SyncResolverDef,
VariableSource,
getNavigationData,
getTimingDataAsync,
getTimingDataSync,
} from './variable-source';
import {Expander, NOENCODE_WHITELIST} from './url-expander/expander';
import {Services} from '../services';
import {WindowInterface} from '../window-interface';
import {
addMissingParamsToUrl,
addParamsToUrl,
getSourceUrl,
isProtocolValid,
parseQueryString,
parseUrlDeprecated,
removeAmpJsParamsFromUrl,
removeFragment,
} from '../url';
import {dev, rethrowAsync, user} from '../log';
import {getMode} from '../mode';
import {getTrackImpressionPromise} from '../impression.js';
import {hasOwn} from '../utils/object';
import {
installServiceInEmbedScope,
registerServiceBuilderForDoc,
} from '../service';
import {isExperimentOn} from '../experiments';
import {tryResolve} from '../utils/promise';
/** @private @const {string} */
const TAG = 'UrlReplacements';
const EXPERIMENT_DELIMITER = '!';
const VARIANT_DELIMITER = '.';
const GEO_DELIM = ',';
const ORIGINAL_HREF_PROPERTY = 'amp-original-href';
const ORIGINAL_VALUE_PROPERTY = 'amp-original-value';
/**
* Returns an encoded URI component, or an empty string if the value is nullish.
* @param {*} val
* @return {string}
*/
function encodeValue(val) {
if (val == null) {
return '';
}
return encodeURIComponent(/** @type {string} */(val));
}
/**
* Returns a function that executes method on a new Date instance. This is a
* byte saving hack.
*
* @param {string} method
* @return {!SyncResolverDef}
*/
function dateMethod(method) {
return () => new Date()[method]();
}
/**
* Returns a function that returns property of screen. This is a byte saving
* hack.
*
* @param {!Screen} screen
* @param {string} property
* @return {!SyncResolverDef}
*/
function screenProperty(screen, property) {
return () => screen[property];
}
/**
* Class to provide variables that pertain to top level AMP window.
*/
export class GlobalVariableSource extends VariableSource {
/**
* @param {!./ampdoc-impl.AmpDoc} ampdoc
*/
constructor(ampdoc) {
super(ampdoc);
/** @private {?Promise<?Object<string, string>>} */
this.variants_ = null;
/** @private {?Promise<?ShareTrackingFragmentsDef>} */
this.shareTrackingFragments_ = null;
}
/**
* Utility function for setting resolver for timing data that supports
* sync and async.
* @param {string} varName
* @param {string} startEvent
* @param {string=} endEvent
* @return {!VariableSource}
* @private
*/
setTimingResolver_(varName, startEvent, endEvent) {
return this.setBoth(varName, () => {
return getTimingDataSync(this.ampdoc.win, startEvent, endEvent);
}, () => {
return getTimingDataAsync(this.ampdoc.win, startEvent, endEvent);
});
}
/** @override */
initialize() {
/** @const {!./viewport/viewport-impl.Viewport} */
const viewport = Services.viewportForDoc(this.ampdoc);
// Returns a random value for cache busters.
this.set('RANDOM', () => Math.random());
// Provides a counter starting at 1 per given scope.
const counterStore = Object.create(null);
this.set('COUNTER', scope => {
return counterStore[scope] = (counterStore[scope] | 0) + 1;
});
// Returns the canonical URL for this AMP document.
this.set('CANONICAL_URL', this.getDocInfoUrl_('canonicalUrl'));
// Returns the host of the canonical URL for this AMP document.
this.set('CANONICAL_HOST', this.getDocInfoUrl_('canonicalUrl', 'host'));
// Returns the hostname of the canonical URL for this AMP document.
this.set('CANONICAL_HOSTNAME', this.getDocInfoUrl_('canonicalUrl',
'hostname'));
// Returns the path of the canonical URL for this AMP document.
this.set('CANONICAL_PATH', this.getDocInfoUrl_('canonicalUrl', 'pathname'));
// Returns the referrer URL.
this.setAsync('DOCUMENT_REFERRER', /** @type {AsyncResolverDef} */(() => {
return Services.viewerForDoc(this.ampdoc).getReferrerUrl();
}));
// Like DOCUMENT_REFERRER, but returns null if the referrer is of
// same domain or the corresponding CDN proxy.
this.setAsync('EXTERNAL_REFERRER', /** @type {AsyncResolverDef} */(() => {
return Services.viewerForDoc(this.ampdoc).getReferrerUrl()
.then(referrer => {
if (!referrer) {
return null;
}
const referrerHostname = parseUrlDeprecated(getSourceUrl(referrer))
.hostname;
const currentHostname =
WindowInterface.getHostname(this.ampdoc.win);
return referrerHostname === currentHostname ? null : referrer;
});
}));
// Returns the title of this AMP document.
this.set('TITLE', () => {
// The environment may override the title and set originalTitle. Prefer
// that if available.
return this.ampdoc.win.document['originalTitle'] ||
this.ampdoc.win.document.title;
});
// Returns the URL for this AMP document.
this.set('AMPDOC_URL', () => {
return removeFragment(
this.addReplaceParamsIfMissing_(
this.ampdoc.win.location.href));
});
// Returns the host of the URL for this AMP document.
this.set('AMPDOC_HOST', () => {
const url = parseUrlDeprecated(this.ampdoc.win.location.href);
return url && url.host;
});
// Returns the hostname of the URL for this AMP document.
this.set('AMPDOC_HOSTNAME', () => {
const url = parseUrlDeprecated(this.ampdoc.win.location.href);
return url && url.hostname;
});
// Returns the Source URL for this AMP document.
const expandSourceUrl = () => {
const docInfo = Services.documentInfoForDoc(this.ampdoc);
return removeFragment(this.addReplaceParamsIfMissing_(docInfo.sourceUrl));
};
this.setBoth('SOURCE_URL',
() => expandSourceUrl(),
() => getTrackImpressionPromise().then(() => expandSourceUrl()));
// Returns the host of the Source URL for this AMP document.
this.set('SOURCE_HOST', this.getDocInfoUrl_('sourceUrl', 'host'));
// Returns the hostname of the Source URL for this AMP document.
this.set('SOURCE_HOSTNAME', this.getDocInfoUrl_('sourceUrl', 'hostname'));
// Returns the path of the Source URL for this AMP document.
this.set('SOURCE_PATH', this.getDocInfoUrl_('sourceUrl', 'pathname'));
// Returns a random string that remains constant for the duration of a
// single page view. It should have sufficient entropy to be unique for
// all the page views a single user is making at a time.
this.set('PAGE_VIEW_ID', this.getDocInfoUrl_('pageViewId'));
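// Returns the value of the given query string parameter. Second parameter
// is an optional default value.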
this.setBoth('QUERY_PARAM', (param, defaultValue = '') => {
return this.getQueryParamData_(param, defaultValue);
}, (param, defaultValue = '') => {
return getTrackImpressionPromise().then(() => {
return this.getQueryParamData_(param, defaultValue);
});
});
// Returns the value of the given field name in the fragment query string.
// Second parameter is an optional default value.
// For example, if location is 'pub.com/amp.html?x=1#y=2' then
// FRAGMENT_PARAM(y) returns '2' and FRAGMENT_PARAM(z, 3) returns 3.
this.setAsync('FRAGMENT_PARAM',
this.getViewerIntegrationValue_('fragmentParam', 'FRAGMENT_PARAM'));
// Returns the first item in the ancestorOrigins array, if available.
this.setAsync('ANCESTOR_ORIGIN',
this.getViewerIntegrationValue_('ancestorOrigin', 'ANCESTOR_ORIGIN'));
/**
* Stores client ids that were generated during this page view
* indexed by scope.
* @type {?Object<string, string>}
*/
let clientIds = null;
// Synchronous alternative. Only works for scopes that were previously
// requested using the async method.
this.setBoth('CLIENT_ID', scope => {
if (!clientIds) {
return null;
}
return clientIds[dev().assertString(scope)];
}, (scope, opt_userNotificationId, opt_cookieName) => {
user().assertString(scope,
'The first argument to CLIENT_ID, the fallback' +
/*OK*/' Cookie name, is required');
if (getMode().runtime == 'inabox') {
return /** @type {!Promise<ResolverReturnDef>} */(Promise.resolve(null));
}
let consent = Promise.resolve();
// If no `opt_userNotificationId` argument is provided then
// assume consent is given by default.
if (opt_userNotificationId) {
consent = Services.userNotificationManagerForDoc(this.ampdoc)
.then(service => {
return service.get(opt_userNotificationId);
});
}
return Services.cidForDoc(this.ampdoc).then(cid => {
return cid.get({
scope: dev().assertString(scope),
createCookieIfNotPresent: true,
cookieName: opt_cookieName,
}, consent);
}).then(cid => {
if (!clientIds) {
clientIds = Object.create(null);
}
// A temporary work around to extract Client ID from _ga cookie. #5761
// TODO: replace with "filter" when it's in place. #2198
const cookieName = opt_cookieName || scope;
if (cid && cookieName == '_ga') {
if (typeof cid === 'string') {
cid = extractClientIdFromGaCookie(cid);
} else {
// TODO(@jridgewell, #11120): remove once #11120 is figured out.
// Do not log the CID directly, that's PII.
dev().error(TAG, 'non-string cid, what is it?', Object.keys(cid));
}
}
clientIds[scope] = cid;
return cid;
});
});
// Returns assigned variant name for the given experiment.
this.setAsync('VARIANT', /** @type {AsyncResolverDef} */(experiment => {
return this.getVariantsValue_(variants => {
const variant = variants[/** @type {string} */(experiment)];
user().assert(variant !== undefined,
'The value passed to VARIANT() is not a valid experiment name:' +
experiment);
// When no variant assigned, use reserved keyword 'none'.
return variant === null ? 'none' : /** @type {string} */(variant);
}, 'VARIANT');
}));
// Returns all assigned experiment variants in a serialized form.
this.setAsync('VARIANTS', /** @type {AsyncResolverDef} */(() => {
return this.getVariantsValue_(variants => {
const experiments = [];
for (const experiment in variants) {
const variant = variants[experiment];
experiments.push(
experiment + VARIANT_DELIMITER + (variant || 'none'));
}
return experiments.join(EXPERIMENT_DELIMITER);
}, 'VARIANTS');
}));
// Returns assigned geo value for geoType or all groups.
this.setAsync('AMP_GEO', /** @type {AsyncResolverDef} */(geoType => {
return this.getGeo_(geos => {
if (geoType) {
user().assert(geoType === 'ISOCountry',
'The value passed to AMP_GEO() is not a valid name: ' + geoType);
return /** @type {string} */ (geos[geoType] || 'unknown');
}
return /** @type {string} */ (geos.ISOCountryGroups.join(GEO_DELIM));
}, 'AMP_GEO');
}));
// Returns incoming share tracking fragment.
this.setAsync('SHARE_TRACKING_INCOMING', /** @type {AsyncResolverDef} */(
() => {
return this.getShareTrackingValue_(fragments => {
return fragments.incomingFragment;
}, 'SHARE_TRACKING_INCOMING');
}));
// Returns outgoing share tracking fragment.
this.setAsync('SHARE_TRACKING_OUTGOING', /** @type {AsyncResolverDef} */(
() => {
return this.getShareTrackingValue_(fragments => {
return fragments.outgoingFragment;
}, 'SHARE_TRACKING_OUTGOING');
}));
// Returns the number of milliseconds since 1 Jan 1970 00:00:00 UTC.
this.set('TIMESTAMP', dateMethod('getTime'));
// Returns the human readable timestamp in format of
// 2011-01-01T11:11:11.612Z.
this.set('TIMESTAMP_ISO', dateMethod('toISOString'));
// Returns the user's time-zone offset from UTC, in minutes.
this.set('TIMEZONE', dateMethod('getTimezoneOffset'));
// Returns the IANA timezone code
this.set('TIMEZONE_CODE', () => {
let tzCode;
if ('Intl' in this.ampdoc.win &&
'DateTimeFormat' in this.ampdoc.win.Intl) {
// It could be undefined (i.e. IE11)
tzCode = new Intl.DateTimeFormat().resolvedOptions().timeZone;
}
return tzCode || '';
});
// Returns a promise resolving to viewport.getScrollTop.
this.set('SCROLL_TOP', () => viewport.getScrollTop());
// Returns a promise resolving to viewport.getScrollLeft.
this.set('SCROLL_LEFT', () => viewport.getScrollLeft());
// Returns a promise resolving to viewport.getScrollHeight.
this.set('SCROLL_HEIGHT', () => viewport.getScrollHeight());
// Returns a promise resolving to viewport.getScrollWidth.
this.set('SCROLL_WIDTH', () => viewport.getScrollWidth());
// Returns the viewport height.
this.set('VIEWPORT_HEIGHT', () => viewport.getHeight());
// Returns the viewport width.
this.set('VIEWPORT_WIDTH', () => viewport.getWidth());
const {screen} = this.ampdoc.win;
// Returns screen.width.
this.set('SCREEN_WIDTH', screenProperty(screen, 'width'));
// Returns screen.height.
this.set('SCREEN_HEIGHT', screenProperty(screen, 'height'));
// Returns screen.availHeight.
this.set('AVAILABLE_SCREEN_HEIGHT', screenProperty(screen, 'availHeight'));
// Returns screen.availWidth.
this.set('AVAILABLE_SCREEN_WIDTH', screenProperty(screen, 'availWidth'));
// Returns screen.ColorDepth.
this.set('SCREEN_COLOR_DEPTH', screenProperty(screen, 'colorDepth'));
// Returns document characterset.
this.set('DOCUMENT_CHARSET', () => {
const doc = this.ampdoc.win.document;
return doc.characterSet || doc.charset;
});
// Returns the browser language.
this.set('BROWSER_LANGUAGE', () => {
const nav = this.ampdoc.win.navigator;
return (nav.language || nav.userLanguage || nav.browserLanguage || '')
.toLowerCase();
});
// Returns the user agent.
this.set('USER_AGENT', () => {
const nav = this.ampdoc.win.navigator;
return nav.userAgent;
});
// Returns the time it took to load the whole page. (excludes amp-* elements
// that are not rendered by the system yet.)
this.setTimingResolver_(
'PAGE_LOAD_TIME', 'navigationStart', 'loadEventStart');
// Returns the time it took to perform DNS lookup for the domain.
this.setTimingResolver_(
'DOMAIN_LOOKUP_TIME', 'domainLookupStart', 'domainLookupEnd');
// Returns the time it took to connect to the server.
this.setTimingResolver_(
'TCP_CONNECT_TIME', 'connectStart', 'connectEnd');
// Returns the time it took for server to start sending a response to the
// request.
this.setTimingResolver_(
'SERVER_RESPONSE_TIME', 'requestStart', 'responseStart');
// Returns the time it took to download the page.
this.setTimingResolver_(
'PAGE_DOWNLOAD_TIME', 'responseStart', 'responseEnd');
// Returns the time it took for redirects to complete.
this.setTimingResolver_(
'REDIRECT_TIME', 'navigationStart', 'fetchStart');
// Returns the time it took for DOM to become interactive.
this.setTimingResolver_(
'DOM_INTERACTIVE_TIME', 'navigationStart', 'domInteractive');
// Returns the time it took for content to load.
this.setTimingResolver_(
'CONTENT_LOAD_TIME', 'navigationStart', 'domContentLoadedEventStart');
// Access: Reader ID.
this.setAsync('ACCESS_READER_ID', /** @type {AsyncResolverDef} */(() => {
return this.getAccessValue_(accessService => {
return accessService.getAccessReaderId();
}, 'ACCESS_READER_ID');
}));
// Access: data from the authorization response.
this.setAsync('AUTHDATA', /** @type {AsyncResolverDef} */(field => {
user().assert(field,
'The first argument to AUTHDATA, the field, is required');
return this.getAccessValue_(accessService => {
return accessService.getAuthdataField(field);
}, 'AUTHDATA');
}));
// Returns an identifier for the viewer.
this.setAsync('VIEWER', () => {
return Services.viewerForDoc(this.ampdoc)
.getViewerOrigin().then(viewer => {
return viewer == undefined ? '' : viewer;
});
});
// Returns the total engaged time since the content became viewable.
this.setAsync('TOTAL_ENGAGED_TIME', () => {
return Services.activityForDoc(this.ampdoc).then(activity => {
return activity.getTotalEngagedTime();
});
});
// Returns the incremental engaged time since the last push under the
// same name.
this.setAsync('INCREMENTAL_ENGAGED_TIME', (name, reset) => {
return Services.activityForDoc(this.ampdoc).then(activity => {
return activity.getIncrementalEngagedTime(name, reset !== 'false');
});
});
this.set('NAV_TIMING', (startAttribute, endAttribute) => {
user().assert(startAttribute, 'The first argument to NAV_TIMING, the ' +
'start attribute name, is required');
return getTimingDataSync(
this.ampdoc.win,
/**@type {string}*/(startAttribute),
/**@type {string}*/(endAttribute));
});
this.setAsync('NAV_TIMING', (startAttribute, endAttribute) => {
user().assert(startAttribute, 'The first argument to NAV_TIMING, the ' +
'start attribute name, is required');
return getTimingDataAsync(
this.ampdoc.win,
/**@type {string}*/(startAttribute),
/**@type {string}*/(endAttribute));
});
this.set('NAV_TYPE', () => {
return getNavigationData(this.ampdoc.win, 'type');
});
this.set('NAV_REDIRECT_COUNT', () => {
return getNavigationData(this.ampdoc.win, 'redirectCount');
});
// returns the AMP version number
this.set('AMP_VERSION', () => '$internalRuntimeVersion$');
this.set('BACKGROUND_STATE', () => {
return Services.viewerForDoc(this.ampdoc).isVisible() ? '0' : '1';
});
this.setAsync('VIDEO_STATE', (id, property) => {
const root = this.ampdoc.getRootNode();
const video = user().assertElement(
root.getElementById(/** @type {string} */ (id)),
`Could not find an element with id="${id}" for VIDEO_STATE`);
return Services.videoManagerForDoc(this.ampdoc)
.getAnalyticsDetails(video)
.then(details => details ? details[property] : '');
});
this.setAsync('STORY_PAGE_INDEX', this.getStoryValue_('pageIndex',
'STORY_PAGE_INDEX'));
this.setAsync('STORY_PAGE_ID', this.getStoryValue_('pageId',
'STORY_PAGE_ID'));
this.setAsync('FIRST_CONTENTFUL_PAINT', () => {
return tryResolve(() =>
Services.performanceFor(this.ampdoc.win).getFirstContentfulPaint());
});
this.setAsync('FIRST_VIEWPORT_READY', () => {
return tryResolve(() =>
Services.performanceFor(this.ampdoc.win).getFirstViewportReady());
});
this.setAsync('MAKE_BODY_VISIBLE', () => {
return tryResolve(() =>
Services.performanceFor(this.ampdoc.win).getMakeBodyVisible());
});
this.setAsync('AMP_STATE', key => {
return Services.bindForDocOrNull(this.ampdoc).then(bind => {
if (!bind) {
return '';
}
return bind.getStateValue(/** @type {string} */ (key));
});
});
}
/**
* Merges any replacement parameters into a given URL's query string,
* preferring values set in the original query string.
* @param {string} orig The original URL
* @return {string} The resulting URL
* @private
*/
addReplaceParamsIfMissing_(orig) {
const {replaceParams} =
/** @type {!Object} */ (Services.documentInfoForDoc(this.ampdoc));
if (!replaceParams) {
return orig;
}
return addMissingParamsToUrl(removeAmpJsParamsFromUrl(orig), replaceParams);
}
/**
* Resolves the value via one of document info's urls.
* @param {string} field A field on the docInfo
* @param {string=} opt_urlProp A subproperty of the field
* @return {T}
* @template T
*/
getDocInfoUrl_(field, opt_urlProp) {
return () => {
const docInfo = Services.documentInfoForDoc(this.ampdoc);
const value = docInfo[field];
return opt_urlProp ? parseUrlDeprecated(value)[opt_urlProp] : value;
};
}
/**
* Resolves the value via access service. If access service is not configured,
* the resulting value is `null`.
* @param {function(!../../extensions/amp-access/0.1/access-vars.AccessVars):(T|!Promise<T>)} getter
* @param {string} expr
* @return {T|null}
* @template T
* @private
*/
getAccessValue_(getter, expr) {
return Promise.all([
Services.accessServiceForDocOrNull(this.ampdoc),
Services.subscriptionsServiceForDocOrNull(this.ampdoc),
]).then(services => {
const service = /** @type {?../../extensions/amp-access/0.1/access-vars.AccessVars} */ (
services[0] || services[1]);
if (!service) {
// Access/subscriptions service is not installed.
user().error(
TAG,
'Access or subscriptions service is not installed to access: ',
expr);
return null;
}
return getter(service);
});
}
/**
* Return the QUERY_PARAM from the current location href
* @param {*} param
* @param {string} defaultValue
* @return {string}
* @private
*/
getQueryParamData_(param, defaultValue) {
user().assert(param,
'The first argument to QUERY_PARAM, the query string ' +
'param is required');
const url = parseUrlDeprecated(
removeAmpJsParamsFromUrl(this.ampdoc.win.location.href));
const params = parseQueryString(url.search);
const key = user().assertString(param);
const {replaceParams} = Services.documentInfoForDoc(this.ampdoc);
if (typeof params[key] !== 'undefined') {
return params[key];
}
if (replaceParams && typeof replaceParams[key] !== 'undefined') {
return /** @type {string} */(replaceParams[key]);
}
return defaultValue;
}
/**
* Resolves the value via amp-experiment's variants service.
* @param {function(!Object<string, string>):(?string)} getter
* @param {string} expr
* @return {!Promise<?string>}
* @template T
* @private
*/
getVariantsValue_(getter, expr) {
if (!this.variants_) {
this.variants_ = Services.variantForOrNull(this.ampdoc.win);
}
return this.variants_.then(variants => {
user().assert(variants,
'To use variable %s, amp-experiment should be configured',
expr);
return getter(variants);
});
}
/**
* Resolves the value via geo service.
* @param {function(Object<string, string>)} getter
* @param {string} expr
* @return {!Promise<Object<string,(string|Array<string>)>>}
* @template T
* @private
*/
getGeo_(getter, expr) {
return Services.geoForDocOrNull(this.ampdoc)
.then(geo => {
user().assert(geo,
'To use variable %s, amp-geo should be configured',
expr);
return getter(geo);
});
}
/**
* Resolves the value via amp-share-tracking's service.
* @param {function(!ShareTrackingFragmentsDef):T} getter
* @param {string} expr
* @return {!Promise<T>}
* @template T
* @private
*/
getShareTrackingValue_(getter, expr) {
if (!this.shareTrackingFragments_) {
this.shareTrackingFragments_ =
Services.shareTrackingForOrNull(this.ampdoc.win);
}
return this.shareTrackingFragments_.then(fragments => {
user().assert(fragments, 'To use variable %s, ' +
'amp-share-tracking should be configured',
expr);
return getter(/** @type {!ShareTrackingFragmentsDef} */ (fragments));
});
}
/**
* Resolves the value via amp-story's service.
* @param {string} property
* @param {string} name
* @return {!AsyncResolverDef}
* @private
*/
getStoryValue_(property, name) {
return () => {
const service = Services.storyVariableServiceForOrNull(this.ampdoc.win);
return service.then(storyVariables => {
user().assert(storyVariables,
'To use variable %s amp-story should be configured', name);
return storyVariables[property];
});
};
}
/**
* Resolves the value via amp-viewer-integration's service.
* @param {string} property
* @param {string} name
* @return {!AsyncResolverDef}
* @private
*/
getViewerIntegrationValue_(property, name) {
return /** @type {!AsyncResolverDef} */ (
(param, defaultValue = '') => {
const service =
Services.viewerIntegrationVariableServiceForOrNull(this.ampdoc.win);
return service.then(viewerIntegrationVariables => {
user().assert(viewerIntegrationVariables, 'To use variable %s ' +
'amp-viewer-integration must be installed', name);
return viewerIntegrationVariables[property](param, defaultValue);
});
});
}
}
/**
* This class replaces substitution variables with their values.
* Document new values in ../spec/amp-var-substitutions.md
* @package For export
*/
export class UrlReplacements {
/**
* @param {!./ampdoc-impl.AmpDoc} ampdoc
* @param {!VariableSource} variableSource
*/
constructor(ampdoc, variableSource) {
/** @const {!./ampdoc-impl.AmpDoc} */
this.ampdoc = ampdoc;
/** @type {VariableSource} */
this.variableSource_ = variableSource;
/** @type {!Expander} */
this.expander_ = new Expander(this.variableSource_);
}
/**
* Synchronously expands the provided source by replacing all known variables
* with their resolved values. Optional `opt_bindings` can be used to add new
* variables or override existing ones. Any async bindings are ignored.
* @param {string} source
* @param {!Object<string, (ResolverReturnDef|!SyncResolverDef)>=} opt_bindings
* @param {!Object<string, ResolverReturnDef>=} opt_collectVars
* @param {!Object<string, boolean>=} opt_whiteList Optional white list of
* names that can be substituted.
* @return {string}
*/
expandStringSync(source, opt_bindings, opt_collectVars, opt_whiteList) {
return /** @type {string} */ (
this.expand_(source, opt_bindings, opt_collectVars, /* opt_sync */ true,
opt_whiteList));
}
/**
* Expands the provided source by replacing all known variables with their
* resolved values. Optional `opt_bindings` can be used to add new variables
* or override existing ones.
* @param {string} source
* @param {!Object<string, *>=} opt_bindings
* @param {!Object<string, boolean>=} opt_whiteList
* @return {!Promise<string>}
*/
expandStringAsync(source, opt_bindings, opt_whiteList) {
return /** @type {!Promise<string>} */ (this.expand_(source, opt_bindings,
/* opt_collectVars */ undefined,
/* opt_sync */ undefined, opt_whiteList));
}
/**
* Synchronously expands the provided URL by replacing all known variables
* with their resolved values. Optional `opt_bindings` can be used to add new
* variables or override existing ones. Any async bindings are ignored.
* @param {string} url
* @param {!Object<string, (ResolverReturnDef|!SyncResolverDef)>=} opt_bindings
* @param {!Object<string, ResolverReturnDef>=} opt_collectVars
* @param {!Object<string, boolean>=} opt_whiteList Optional white list of
* names that can be substituted.
* @return {string}
*/
expandUrlSync(url, opt_bindings, opt_collectVars, opt_whiteList) {
return this.ensureProtocolMatches_(url, /** @type {string} */ (this.expand_(
url, opt_bindings, opt_collectVars, /* opt_sync */ true,
opt_whiteList)));
}
/**
* Expands the provided URL by replacing all known variables with their
* resolved values. Optional `opt_bindings` can be used to add new variables
* or override existing ones.
* @param {string} url
* @param {!Object<string, *>=} opt_bindings
* @param {!Object<string, boolean>=} opt_whiteList Optional white list of names
* that can be substituted.
* @return {!Promise<string>}
*/
expandUrlAsync(url, opt_bindings, opt_whiteList) {
return /** @type {!Promise<string>} */ (
this.expand_(url, opt_bindings, undefined, undefined,
opt_whiteList).then(
replacement => this.ensureProtocolMatches_(url, replacement)));
}
/**
* Expands an input element value attribute with variables substituted.
* @param {!HTMLInputElement} element
* @return {!Promise<string>}
*/
expandInputValueAsync(element) {
return /** @type {!Promise<string>} */ (
this.expandInputValue_(element, /*opt_sync*/ false));
}
/**
* Expands an input element value attribute with variables substituted.
* @param {!HTMLInputElement} element
* @return {string} Replaced string for testing
*/
expandInputValueSync(element) {
return /** @type {string} */ (
this.expandInputValue_(element, /*opt_sync*/ true));
}
/**
* Expands an input element value attribute with variables substituted.
* @param {!HTMLInputElement} element
* @param {boolean=} opt_sync
* @return {string|!Promise<string>}
*/
expandInputValue_(element, opt_sync) {
dev().assert(element.tagName == 'INPUT' &&
(element.getAttribute('type') || '').toLowerCase() == 'hidden',
'Input value expansion only works on hidden input fields: %s', element);
const whitelist = this.getWhitelistForElement_(element);
if (!whitelist) {
return opt_sync ? element.value : Promise.resolve(element.value);
}
if (element[ORIGINAL_VALUE_PROPERTY] === undefined) {
element[ORIGINAL_VALUE_PROPERTY] = element.value;
}
const result = this.expand_(
element[ORIGINAL_VALUE_PROPERTY] || element.value,
/* opt_bindings */ undefined,
/* opt_collectVars */ undefined,
/* opt_sync */ opt_sync,
/* opt_whitelist */ whitelist);
if (opt_sync) {
return element.value = result;
}
return result.then(newValue => {
element.value = newValue;
return newValue;
});
}
/**
* Returns a replacement whitelist from elements' data-amp-replace attribute.
* @param {!Element} element
* @param {!Object<string, boolean>=} opt_supportedReplacement Optional supported
* replacement that filters whitelist to a subset.
* @return {!Object<string, boolean>|undefined}
*/
getWhitelistForElement_(element, opt_supportedReplacement) {
const whitelist = element.getAttribute('data-amp-replace');
if (!whitelist) {
return;
}
const requestedReplacements = {};
whitelist.trim().split(/\s+/).forEach(replacement => {
if (!opt_supportedReplacement ||
hasOwn(opt_supportedReplacement, replacement)) {
requestedReplacements[replacement] = true;
} else {
user().warn('URL', 'Ignoring unsupported replacement', replacement);
}
});
return requestedReplacements;
}
/**
* Returns whether variable substitution is allowed for given url.
* @param {!Location} url
* @return {boolean}
*/
isAllowedOrigin_(url) {
const docInfo = Services.documentInfoForDoc(this.ampdoc);
if (url.origin == parseUrlDeprecated(docInfo.canonicalUrl).origin ||
url.origin == parseUrlDeprecated(docInfo.sourceUrl).origin) {
return true;
}
const meta = this.ampdoc.getRootNode().querySelector(
'meta[name=amp-link-variable-allowed-origin]');
if (meta && meta.hasAttribute('content')) {
const whitelist = meta.getAttribute('content').trim().split(/\s+/);
for (let i = 0; i < whitelist.length; i++) {
if (url.origin == parseUrlDeprecated(whitelist[i]).origin) {
return true;
}
}
}
return false;
}
/**
* Replaces values in the link of an anchor tag if
* - the link opts into it (via data-amp-replace argument)
* - the destination is the source or canonical origin of this doc.
* @param {!Element} element An anchor element.
* @param {?string} defaultUrlParams to expand link if caller request.
* @return {string|undefined} Replaced string for testing
*/
maybeExpandLink(element, defaultUrlParams) {
dev().assert(element.tagName == 'A');
const supportedReplacements = {
'CLIENT_ID': true,
'QUERY_PARAM': true,
'PAGE_VIEW_ID': true,
'NAV_TIMING': true,
};
const additionalUrlParameters =
element.getAttribute('data-amp-addparams') || '';
const whitelist = this.getWhitelistForElement_(
element, supportedReplacements);
if (!whitelist && !additionalUrlParameters && !defaultUrlParams) {
return;
}
// ORIGINAL_HREF_PROPERTY has the value of the href "pre-replacement".
// We set this to the original value before doing any work and use it
// on subsequent replacements, so that each run gets a fresh value.
let href = dev().assertString(
element[ORIGINAL_HREF_PROPERTY] || element.getAttribute('href'));
const url = parseUrlDeprecated(href);
if (element[ORIGINAL_HREF_PROPERTY] == null) {
element[ORIGINAL_HREF_PROPERTY] = href;
}
if (additionalUrlParameters) {
href = addParamsToUrl(
href,
parseQueryString(additionalUrlParameters));
}
const isAllowedOrigin = this.isAllowedOrigin_(url);
if (!isAllowedOrigin) {
if (whitelist) {
user().warn('URL', 'Ignoring link replacement', href,
' because the link does not go to the document\'s' +
' source, canonical, or whitelisted origin.');
}
return element.href = href;
}
// Note that defaultUrlParams is treated differently than
// additionalUrlParameters in two ways. #1: If the outgoing url origin is not
// whitelisted: additionalUrlParameters are always appended but not expanded,
// defaultUrlParams will not be appended. #2: If the expansion function is
// not whitelisted: additionalUrlParameters will not be expanded,
// defaultUrlParams will by default support QUERY_PARAM, and will still be
// expanded.
if (defaultUrlParams) {
if (!whitelist || !whitelist['QUERY_PARAM']) {
// override whitelist and expand defaultUrlParams;
const overrideWhitelist = {'QUERY_PARAM': true};
defaultUrlParams = this.expandUrlSync(
defaultUrlParams,
/* opt_bindings */ undefined,
/* opt_collectVars */ undefined,
/* opt_whitelist */ overrideWhitelist);
}
href = addParamsToUrl(href, parseQueryString(defaultUrlParams));
}
if (whitelist) {
href = this.expandUrlSync(
href,
/* opt_bindings */ undefined,
/* opt_collectVars */ undefined,
/* opt_whitelist */ whitelist);
}
return element.href = href;
}
/**
* @param {string} url
* @param {!Object<string, *>=} opt_bindings
* @param {!Object<string, *>=} opt_collectVars
* @param {boolean=} opt_sync
* @param {!Object<string, boolean>=} opt_whiteList Optional white list of names
* that can be substituted.
* @return {!Promise<string>|string}
* @private
*/
expand_(url, opt_bindings, opt_collectVars, opt_sync, opt_whiteList) {
const isV2ExperimentOn = isExperimentOn(this.ampdoc.win,
'url-replacement-v2');
if (isV2ExperimentOn) {
// TODO(ccordy) support opt_collectVars && opt_whitelist
return this.expander_./*OK*/expand(url, opt_bindings, opt_collectVars,
opt_sync, opt_whiteList);
}
// existing parsing method
const expr = this.variableSource_.getExpr(opt_bindings);
let replacementPromise;
let replacement = url.replace(expr, (match, name, opt_strargs) => {
let args = [];
if (typeof opt_strargs == 'string') {
args = opt_strargs.split(/,\s*/);
}
if (opt_whiteList && !opt_whiteList[name]) {
// Do not perform substitution and just return back the original
// match, so that the string doesn't change.
return match;
}
let binding;
if (opt_bindings && (name in opt_bindings)) {
binding = opt_bindings[name];
} else if ((binding = this.variableSource_.get(name))) {
if (opt_sync) {
binding = binding.sync;
if (!binding) {
user().error(TAG, 'ignoring async replacement key: ', name);
return '';
}
} else {
binding = binding.async || binding.sync;
}
}
let val;
try {
val = (typeof binding == 'function') ?
binding.apply(null, args) : binding;
} catch (e) {
// Report error, but do not disrupt URL replacement. This will
// interpolate as the empty string.
if (opt_sync) {
val = '';
}
rethrowAsync(e);
}
// In case the produced value is a promise, we don't actually
// replace anything here, but do it again when the promise resolves.
if (val && val.then) {
if (opt_sync) {
user().error(TAG, 'ignoring promise value for key: ', name);
return '';
}
/** @const {Promise<string>} */
const p = val.catch(err => {
// Report error, but do not disrupt URL replacement. This will
// interpolate as the empty string.
rethrowAsync(err);
}).then(v => {
replacement = replacement.replace(match,
NOENCODE_WHITELIST[match] ? v : encodeValue(v));
if (opt_collectVars) {
opt_collectVars[match] = v;
}
});
if (replacementPromise) {
replacementPromise = replacementPromise.then(() => p);
} else {
replacementPromise = p;
}
return match;
}
if (opt_collectVars) {
opt_collectVars[match] = val;
}
return NOENCODE_WHITELIST[match] ? val : encodeValue(val);
});
if (replacementPromise) {
replacementPromise = replacementPromise.then(() => replacement);
}
if (opt_sync) {
return replacement;
}
return replacementPromise || Promise.resolve(replacement);
}
/**
* Collects all substitutions in the provided URL and expands them to the
* values for known variables. Optional `opt_bindings` can be used to add
* new variables or override existing ones.
* @param {string} url
* @param {!Object<string, *>=} opt_bindings
* @return {!Promise<!Object<string, *>>}
*/
collectVars(url, opt_bindings) {
const vars = Object.create(null);
return this.expand_(url, opt_bindings, vars).then(() => vars);
}
/**
* Collects substitutions in the `src` attribute of the given element
* that are _not_ whitelisted via `data-amp-replace` opt-in attribute.
* @param {!Element} element
* @return {!Array<string>}
*/
collectUnwhitelistedVarsSync(element) {
const url = element.getAttribute('src');
const vars = Object.create(null);
this.expandStringSync(url, /* opt_bindings */ undefined, vars);
const varNames = Object.keys(vars);
const whitelist = this.getWhitelistForElement_(element);
if (whitelist) {
return varNames.filter(v => !whitelist[v]);
} else {
// All vars are unwhitelisted if the element has no whitelist.
return varNames;
}
}
/**
* Ensures that the protocol of the original url matches the protocol of the
* replacement url. Returns the replacement if they do, the original if they
* do not.
* @param {string} url
* @param {string} replacement
* @return {string}
*/
ensureProtocolMatches_(url, replacement) {
const newProtocol = parseUrlDeprecated(replacement, /* opt_nocache */ true)
.protocol;
const oldProtocol = parseUrlDeprecated(url, /* opt_nocache */ true)
.protocol;
if (newProtocol != oldProtocol) {
user().error(TAG, 'Illegal replacement of the protocol: ', url);
return url;
}
user().assert(isProtocolValid(replacement),
'The replacement url has invalid protocol: %s', replacement);
return replacement;
}
/**
* @return {VariableSource}
*/
getVariableSource() {
return this.variableSource_;
}
}
/**
* Extracts client ID from a _ga cookie.
* https://developers.google.com/analytics/devguides/collection/analyticsjs/cookies-user-id
* @param {string} gaCookie
* @return {string}
*/
export function extractClientIdFromGaCookie(gaCookie) {
return gaCookie.replace(/^(GA1|1)\.[\d-]+\./, '');
}
/**
* @param {!./ampdoc-impl.AmpDoc} ampdoc
*/
export function installUrlReplacementsServiceForDoc(ampdoc) {
registerServiceBuilderForDoc(
ampdoc,
'url-replace',
function(doc) {
return new UrlReplacements(doc, new GlobalVariableSource(doc));
});
}
/**
* @param {!./ampdoc-impl.AmpDoc} ampdoc
* @param {!Window} embedWin
* @param {!VariableSource} varSource
*/
export function installUrlReplacementsForEmbed(ampdoc, embedWin, varSource) {
installServiceInEmbedScope(embedWin, 'url-replace',
new UrlReplacements(ampdoc, varSource));
}
/**
* @typedef {{incomingFragment: string, outgoingFragment: string}}
*/
let ShareTrackingFragmentsDef;
|
apache-2.0
|
weiguolong/wglRepository
|
app/src/main/java/com/chinaweather/android/ChooseAreaFragment.java
|
10217
|
package com.chinaweather.android;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.chinaweather.android.db.City;
import com.chinaweather.android.db.County;
import com.chinaweather.android.db.Province;
import com.chinaweather.android.util.HttpUtil;
import com.chinaweather.android.util.Utility;
import org.litepal.crud.DataSupport;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;
public class ChooseAreaFragment extends Fragment {
public static final int LEVEL_PROVINCE = 0;
public static final int LEVEL_CITY = 1;
public static final int LEVEL_COUNTY = 2;
private ProgressDialog progressDialog;
private TextView titleText;
private Button backButton;
private ListView listView;
private ArrayAdapter<String> adapter;
private List<String> dataList = new ArrayList<>();
/**
* Province list
*/
private List<Province> provinceList;
/**
* City list
*/
private List<City> cityList;
/**
* County list
*/
private List<County> countyList;
/**
* Selected province
*/
private Province selectedProvince;
/**
* Selected city
*/
private City selectedCity;
/**
* Current level
*/
private int currentLevel;
public ChooseAreaFragment() {
// Required empty public constructor
}
/**
* First obtain the widget instances, then initialize the ArrayAdapter and set it as the ListView's adapter.
* @param inflater
* @param container
* @param savedInstanceState
* @return
*/
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.choose_area, container,false);
titleText = (TextView) view.findViewById(R.id.title_text);
backButton = (Button) view.findViewById(R.id.back_button);
listView = (ListView)view.findViewById(R.id.list_view);
adapter = new ArrayAdapter<>(getContext(), android.R.layout.simple_list_item_1, dataList);
listView.setAdapter(adapter);
return view;
}
/**
* Set click listeners for the ListView and the Button.
* @param savedInstanceState
*/
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
/**
* When an item is clicked, the ListView's onItemClick() method runs; based on the
* current level it decides whether to call queryCities() or queryCounties().
*/
listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
if (currentLevel == LEVEL_PROVINCE) {
selectedProvince = provinceList.get(position);
queryCities();
}else if (currentLevel == LEVEL_CITY ) {
selectedCity = cityList.get(position);
queryCounties();
}else if (currentLevel == LEVEL_COUNTY ) {
String weatherId = countyList.get(position).getWeatherId();
Intent intent = new Intent(getActivity(), WeatherActivity.class);
intent.putExtra("weather_id", weatherId);
startActivity(intent);
getActivity().finish();
}
}
});
backButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (currentLevel == LEVEL_COUNTY) {
queryCities();
}else if (currentLevel == LEVEL_CITY ) {
queryProvinces();
}
}
});
queryProvinces();
}
/**
* Query all provinces in the country, preferring the local database; if nothing is found, query the server instead.
*/
private void queryProvinces() {
titleText.setText("中国");
backButton.setVisibility(View.GONE); // hide the back button
// Read province-level data from the local database.
provinceList = DataSupport.findAll(Province.class);
if (provinceList.size() > 0 ) {
dataList.clear();
for (Province province : provinceList ) {
dataList.add(province.getProvinceName());
}
adapter.notifyDataSetChanged();
listView.setSelection(0);
currentLevel = LEVEL_PROVINCE;
}else {
// Issue a network request to fetch the data.
final String address = "http://guolin.tech/api/china";
queryFromServer(address, "province");
}
}
/**
* Query all cities of the selected province, preferring the local database; if nothing is found, query the server instead.
*/
private void queryCities() {
titleText.setText(selectedProvince.getProvinceName());
backButton.setVisibility(View.VISIBLE);
cityList = DataSupport.where("provinceid = ? " , String.valueOf(selectedProvince.getId())).find(City.class);
if (cityList.size() > 0) {
dataList.clear();
for (City city : cityList) {
dataList.add(city.getCityName());
}
adapter.notifyDataSetChanged();
listView.setSelection(0);
currentLevel = LEVEL_CITY;
}else {
int provinceCode = selectedProvince.getProvinceCode();
String address = "http://guolin.tech/api/china/" + provinceCode;
queryFromServer(address, "city");
}
}
/**
* Query all counties of the selected city, preferring the local database; if nothing is found, query the server instead.
*/
private void queryCounties() {
titleText.setText(selectedCity.getCityName());
backButton.setVisibility(View.VISIBLE);
countyList = DataSupport.where("cityid = ?" , String.valueOf(selectedCity.getId())).find(County.class);
if (countyList.size() > 0) {
dataList.clear();
for (County county : countyList ) {
dataList.add(county.getCountyName());
}
adapter.notifyDataSetChanged();
listView.setSelection(0);
currentLevel = LEVEL_COUNTY;
} else {
int provinceCode = selectedProvince.getProvinceCode();
int cityCode = selectedCity.getCityCode();
String address = "http://guolin.tech/api/china/"+provinceCode+"/"+cityCode;
queryFromServer(address , "county");
}
}
/**
* Query province/city/county data from the server according to the given address and type.
* @param address
* @param type
*/
private void queryFromServer(String address , final String type) {
showProgressDialog();
HttpUtil.sendOkHttpRequest(address, new Callback() {
@Override
public void onFailure(Call call, IOException e) {
// Switch back to the main thread via runOnUiThread() to handle the result.
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
closeProgressDialog();
Toast.makeText(getContext(),"加载失败",Toast.LENGTH_SHORT).show();
}
});
}
@Override
public void onResponse(Call call, Response response) throws IOException {
String responseText = response.body().string();
boolean result = false;
if ("province".equals(type)) {
result = Utility.handleProvinceResponse(responseText);
}else if ("city".equals(type)) {
result = Utility.handleCityResponse(responseText, selectedProvince.getId());
}else if ("county".equals(type)) {
result = Utility.handleCountyResponse(responseText, selectedCity.getId());
}
if (result) {
/**
* Since queryProvinces() touches the UI, it must be called on the main thread;
* runOnUiThread() switches from the worker thread back to the main thread.
*/
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
closeProgressDialog();
if ("province".equals(type)) {
queryProvinces();
}else if ("city".equals(type)) {
queryCities();
}else if ("county".equals(type)) {
queryCounties();
}
}
});
}
}
});
}
/**
* Show the progress dialog
*/
private void showProgressDialog() {
if (progressDialog == null) {
progressDialog = new ProgressDialog(getActivity());
progressDialog.setMessage("正在加载中...");
progressDialog.setCanceledOnTouchOutside(false);
}
progressDialog.show();
}
/**
* Close the progress dialog
*/
private void closeProgressDialog() {
if (progressDialog != null ) {
progressDialog.dismiss();
}
}
}
|
apache-2.0
|
Nickel671/JsInteropGenerator
|
target/generated-sources/gwt/org/niklas/elemental/Elemental/client/elements/DOMTokenList.java
|
423
|
package org.niklas.elemental.Elemental.client.elements;
import com.google.gwt.core.client.js.JsProperty;
import com.google.gwt.core.client.js.JsType;
@JsType(
prototype = "DOMTokenList"
)
interface DOMTokenList {
@JsProperty
int getLength();
String item(int index);
boolean contains(String token);
void add(String tokens);
void remove(String tokens);
boolean toggle(String token, boolean force);
}
|
apache-2.0
|
googlecreativelab/chrome-music-lab
|
spectrogram/src/javascripts/UI/player.js
|
5514
|
/********************************************************
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*********************************************************/
var Util = require('../util/util.js');
function Player() {
// Create an audio graph.
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();
var analyser = context.createAnalyser();
//analyser.fftSize = 2048 * 2 * 2
// analyser.fftSize = (window.isMobile)? 2048 : 8192;
analyser.fftSize = (window.isMobile)?1024 : 2048;
analyser.smoothingTimeConstant = 0;
// Create a mix.
var mix = context.createGain();
// Create a bandpass filter.
var bandpass = context.createBiquadFilter();
bandpass.Q.value = 10;
bandpass.type = 'bandpass';
var filterGain = context.createGain();
filterGain.gain.value = 1;
// Connect audio processing graph
mix.connect(analyser);
analyser.connect(filterGain);
filterGain.connect(context.destination);
this.context = context;
this.mix = mix;
this.bandpass = bandpass; // referenced later by setBandpassFrequency()
this.filterGain = filterGain;
this.analyser = analyser;
this.buffers = {};
// Connect an empty source node to the mix.
Util.loadTrackSrc(this.context, 'bin/snd/empty.mp3', function(buffer) {
var source = this.createSource_(buffer, true);
source.loop = true;
source.start(0);
}.bind(this));
}
Player.prototype.playSrc = function(src) {
// Stop all of the mic stuff.
this.filterGain.gain.value = 1;
if (this.input) {
this.input.disconnect();
this.input = null;
return;
}
if (this.buffers[src]) {
$('#loadingSound').fadeIn(100).delay(1000).fadeOut(500);
this.playHelper_(src);
return;
}
$('#loadingSound').fadeIn(100);
Util.loadTrackSrc(this.context, src, function(buffer) {
this.buffers[src] = buffer;
this.playHelper_(src);
$('#loadingSound').delay(500).fadeOut(500);
}.bind(this));
};
Player.prototype.playUserAudio = function(src) {
// Stop all of the mic stuff.
this.filterGain.gain.value = 1;
if (this.input) {
this.input.disconnect();
this.input = null;
return;
}
this.buffers['user'] = src.buffer;
this.playHelper_('user');
};
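// Plays the cached buffer for the given key; when not looping, schedules an automatic stop.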
Player.prototype.playHelper_ = function(src) {
var buffer = this.buffers[src];
this.source = this.createSource_(buffer, true);
this.source.start(0);
if (!this.loop) {
this.playTimer = setTimeout(function() {
this.stop();
}.bind(this), buffer.duration * 2000);
}
};
Player.prototype.live = function() {
// The AudioContext may be in a suspended state prior to the page receiving a user
// gesture. If it is, resume it.
if (this.context.state === 'suspended') {
this.context.resume();
}
if(window.isIOS){
window.parent.postMessage('error2','*');
console.log("cant use mic on ios");
}else{
if (this.input) {
this.input.disconnect();
this.input = null;
return;
}
var self = this;
navigator.mediaDevices.getUserMedia({audio: true}).then(function(stream) {
self.onStream_(stream);
}).catch(function(e) {
self.onStreamError_(e);
});
this.filterGain.gain.value = 0;
}
};
Player.prototype.onStream_ = function(stream) {
var input = this.context.createMediaStreamSource(stream);
input.connect(this.mix);
this.input = input;
this.stream = stream;
};
Player.prototype.onStreamError_ = function(e) {
// TODO: Error handling.
};
Player.prototype.setLoop = function(loop) {
this.loop = loop;
};
Player.prototype.createSource_ = function(buffer, loop) {
var source = this.context.createBufferSource();
source.buffer = buffer;
source.loop = loop;
source.connect(this.mix);
return source;
};
Player.prototype.setMicrophoneInput = function() {
// TODO: Implement me!
};
Player.prototype.stop = function() {
if (this.source) {
this.source.stop(0);
this.source = null;
clearTimeout(this.playTimer);
this.playTimer = null;
}
if (this.input) {
this.input.disconnect();
this.input = null;
return;
}
};
Player.prototype.getAnalyserNode = function() {
return this.analyser;
};
Player.prototype.setBandpassFrequency = function(freq) {
if (freq == null) {
console.log('Removing bandpass filter');
// Remove the effect of the bandpass filter completely, connecting the mix to the analyser directly.
this.mix.disconnect();
this.mix.connect(this.analyser);
} else {
// console.log('Setting bandpass frequency to %d Hz', freq);
// Only set the frequency if it's specified, otherwise use the old one.
this.bandpass.frequency.value = freq;
this.mix.disconnect();
this.mix.connect(this.bandpass);
// bandpass is connected to filterGain.
this.filterGain.connect(this.analyser);
}
};
Player.prototype.playTone = function(freq) {
if (!this.osc) {
this.osc = this.context.createOscillator();
this.osc.connect(this.mix);
this.osc.type = 'sine';
this.osc.start(0);
}
this.osc.frequency.value = freq;
this.filterGain.gain.value = .2;
};
Player.prototype.stopTone = function() {
this.osc.stop(0);
this.osc = null;
};
module.exports = Player;
|
apache-2.0
|
kuali/kpme
|
pm/impl/src/test/config/sql/PositionServiceTest-cleanup.sql
|
676
|
--
-- Copyright 2004-2014 The Kuali Foundation
--
-- Licensed under the Educational Community License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.opensource.org/licenses/ecl2.php
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
delete from hr_position_t where HR_POSITION_ID = '1';
|
apache-2.0
|
dadarom/dubbo
|
dubbo-rpc/dubbo-rpc-api/src/main/java/com/alibaba/dubbo/rpc/filter/GenericImplFilter.java
|
9168
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.dubbo.rpc.filter;
import com.alibaba.dubbo.common.Constants;
import com.alibaba.dubbo.common.beanutil.JavaBeanAccessor;
import com.alibaba.dubbo.common.beanutil.JavaBeanDescriptor;
import com.alibaba.dubbo.common.beanutil.JavaBeanSerializeUtil;
import com.alibaba.dubbo.common.extension.Activate;
import com.alibaba.dubbo.common.logger.Logger;
import com.alibaba.dubbo.common.logger.LoggerFactory;
import com.alibaba.dubbo.common.utils.PojoUtils;
import com.alibaba.dubbo.common.utils.ReflectUtils;
import com.alibaba.dubbo.rpc.Filter;
import com.alibaba.dubbo.rpc.Invocation;
import com.alibaba.dubbo.rpc.Invoker;
import com.alibaba.dubbo.rpc.Result;
import com.alibaba.dubbo.rpc.RpcException;
import com.alibaba.dubbo.rpc.RpcInvocation;
import com.alibaba.dubbo.rpc.RpcResult;
import com.alibaba.dubbo.rpc.service.GenericException;
import com.alibaba.dubbo.rpc.support.ProtocolUtils;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
/**
* GenericImplInvokerFilter
*/
@Activate(group = Constants.CONSUMER, value = Constants.GENERIC_KEY, order = 20000)
public class GenericImplFilter implements Filter {
private static final Logger logger = LoggerFactory.getLogger(GenericImplFilter.class);
private static final Class<?>[] GENERIC_PARAMETER_TYPES = new Class<?>[]{String.class, String[].class, Object[].class};
public Result invoke(Invoker<?> invoker, Invocation invocation) throws RpcException {
String generic = invoker.getUrl().getParameter(Constants.GENERIC_KEY);
if (ProtocolUtils.isGeneric(generic)
&& !Constants.$INVOKE.equals(invocation.getMethodName())
&& invocation instanceof RpcInvocation) {
RpcInvocation invocation2 = (RpcInvocation) invocation;
String methodName = invocation2.getMethodName();
Class<?>[] parameterTypes = invocation2.getParameterTypes();
Object[] arguments = invocation2.getArguments();
String[] types = new String[parameterTypes.length];
for (int i = 0; i < parameterTypes.length; i++) {
types[i] = ReflectUtils.getName(parameterTypes[i]);
}
Object[] args;
if (ProtocolUtils.isBeanGenericSerialization(generic)) {
args = new Object[arguments.length];
for (int i = 0; i < arguments.length; i++) {
args[i] = JavaBeanSerializeUtil.serialize(arguments[i], JavaBeanAccessor.METHOD);
}
} else {
args = PojoUtils.generalize(arguments);
}
invocation2.setMethodName(Constants.$INVOKE);
invocation2.setParameterTypes(GENERIC_PARAMETER_TYPES);
invocation2.setArguments(new Object[]{methodName, types, args});
Result result = invoker.invoke(invocation2);
if (!result.hasException()) {
Object value = result.getValue();
try {
Method method = invoker.getInterface().getMethod(methodName, parameterTypes);
if (ProtocolUtils.isBeanGenericSerialization(generic)) {
if (value == null) {
return new RpcResult(value);
} else if (value instanceof JavaBeanDescriptor) {
return new RpcResult(JavaBeanSerializeUtil.deserialize((JavaBeanDescriptor) value));
} else {
throw new RpcException(
new StringBuilder(64)
.append("The type of result value is ")
.append(value.getClass().getName())
.append(" other than ")
.append(JavaBeanDescriptor.class.getName())
.append(", and the result is ")
.append(value).toString());
}
} else {
return new RpcResult(PojoUtils.realize(value, method.getReturnType(), method.getGenericReturnType()));
}
} catch (NoSuchMethodException e) {
throw new RpcException(e.getMessage(), e);
}
} else if (result.getException() instanceof GenericException) {
GenericException exception = (GenericException) result.getException();
try {
String className = exception.getExceptionClass();
Class<?> clazz = ReflectUtils.forName(className);
Throwable targetException = null;
Throwable lastException = null;
try {
targetException = (Throwable) clazz.newInstance();
} catch (Throwable e) {
lastException = e;
for (Constructor<?> constructor : clazz.getConstructors()) {
try {
targetException = (Throwable) constructor.newInstance(new Object[constructor.getParameterTypes().length]);
break;
} catch (Throwable e1) {
lastException = e1;
}
}
}
if (targetException != null) {
try {
Field field = Throwable.class.getDeclaredField("detailMessage");
if (!field.isAccessible()) {
field.setAccessible(true);
}
field.set(targetException, exception.getExceptionMessage());
} catch (Throwable e) {
logger.warn(e.getMessage(), e);
}
result = new RpcResult(targetException);
} else if (lastException != null) {
throw lastException;
}
} catch (Throwable e) {
throw new RpcException("Can not deserialize exception " + exception.getExceptionClass() + ", message: " + exception.getExceptionMessage(), e);
}
}
return result;
}
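// Direct $invoke calls: validate that the arguments match the declared generic serialization and attach the generic flag for the provider.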
if (invocation.getMethodName().equals(Constants.$INVOKE)
&& invocation.getArguments() != null
&& invocation.getArguments().length == 3
&& ProtocolUtils.isGeneric(generic)) {
Object[] args = (Object[]) invocation.getArguments()[2];
if (ProtocolUtils.isJavaGenericSerialization(generic)) {
for (Object arg : args) {
if (!(byte[].class == arg.getClass())) {
error(byte[].class.getName(), arg.getClass().getName());
}
}
} else if (ProtocolUtils.isBeanGenericSerialization(generic)) {
for (Object arg : args) {
if (!(arg instanceof JavaBeanDescriptor)) {
error(JavaBeanDescriptor.class.getName(), arg.getClass().getName());
}
}
}
((RpcInvocation) invocation).setAttachment(
Constants.GENERIC_KEY, invoker.getUrl().getParameter(Constants.GENERIC_KEY));
}
return invoker.invoke(invocation);
}
private void error(String expected, String actual) throws RpcException {
throw new RpcException(
new StringBuilder(32)
.append("Generic serialization [")
.append(Constants.GENERIC_SERIALIZATION_NATIVE_JAVA)
.append("] only support message type ")
.append(expected)
.append(" and your message type is ")
.append(actual).toString());
}
}
|
apache-2.0
|
rgooch/Dominator
|
hypervisor/rpcd/changeAddressPool.go
|
1050
|
package rpcd
import (
"github.com/Cloud-Foundations/Dominator/lib/errors"
"github.com/Cloud-Foundations/Dominator/lib/srpc"
"github.com/Cloud-Foundations/Dominator/proto/hypervisor"
)
func (t *srpcType) ChangeAddressPool(conn *srpc.Conn,
request hypervisor.ChangeAddressPoolRequest,
reply *hypervisor.ChangeAddressPoolResponse) error {
*reply = hypervisor.ChangeAddressPoolResponse{
Error: errors.ErrorToString(t.changeAddressPool(conn, request))}
return nil
}
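// changeAddressPool applies the add, remove and excess-trim requests in order, returning the first error encountered.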
func (t *srpcType) changeAddressPool(conn *srpc.Conn,
request hypervisor.ChangeAddressPoolRequest) error {
if len(request.AddressesToAdd) > 0 {
err := t.manager.AddAddressesToPool(request.AddressesToAdd)
if err != nil {
return err
}
}
if len(request.AddressesToRemove) > 0 {
err := t.manager.RemoveAddressesFromPool(request.AddressesToRemove)
if err != nil {
return err
}
}
if len(request.MaximumFreeAddresses) > 0 {
err := t.manager.RemoveExcessAddressesFromPool(
request.MaximumFreeAddresses)
if err != nil {
return err
}
}
return nil
}
|
apache-2.0
|
347184068/jybl
|
src/main/java/com/wfu/common/utils/SystemPath.java
|
1970
|
/**
* Copyright © 2012-2016 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved.
*/
package com.wfu.common.utils;
import javax.servlet.http.HttpServletRequest;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
/**
* @author wanye
* @date Dec 14, 2008
* @version v 1.0
* @description Gets the file system path of the current application
*/
public class SystemPath {
public static String getSysPath() {
String path = Thread.currentThread().getContextClassLoader()
.getResource("").toString();
String temp = path.replaceFirst("file:/", "").replaceFirst(
"WEB-INF/classes/", "");
String separator = System.getProperty("file.separator");
String resultPath = temp.replaceAll("/", separator + separator);
return resultPath;
}
public static String getClassPath() {
String path = Thread.currentThread().getContextClassLoader()
.getResource("").toString();
String temp = path.replaceFirst("file:/", "");
String separator = System.getProperty("file.separator");
String resultPath = temp.replaceAll("/", separator + separator);
return resultPath;
}
public static String getSystempPath() {
return System.getProperty("java.io.tmpdir");
}
public static String getSeparator() {
return System.getProperty("file.separator");
}
/**
* Gets the server address
* @return
*/
public static String getServerPath(){
HttpServletRequest request = ((ServletRequestAttributes) RequestContextHolder.getRequestAttributes()).getRequest();
String path = request.getContextPath();
String basePath = request.getScheme()+"://"+request.getServerName()+":"+request.getServerPort()+path;
return basePath;
}
public static void main(String[] args) {
System.out.println(getSysPath());
System.out.println(System.getProperty("java.io.tmpdir"));
System.out.println(getSeparator());
System.out.println(getClassPath());
}
}
|
apache-2.0
|
tonyredondo/TWCore2
|
src/TWCore.Services/DasMulli.Win32.ServiceUtils/ServiceControlHandler.cs
|
232
|
using System;
// ReSharper disable CheckNamespace
namespace DasMulli.Win32.ServiceUtils
{
internal delegate void ServiceControlHandler(ServiceControlCommand control, uint eventType, IntPtr eventData, IntPtr eventContext);
}
|
apache-2.0
|
ilearninging/xxhis
|
all/493.html
|
1063
|
<table border="1" id="table1" style="border-collapse: collapse">
<tr>
<td height="25" align="center"><span style="font-size: 16px">战国</span></td>
<td height="25" align="center"><span style="font-size: 16px">公元前435年</span></td>
<td height="25" align="center"><span style="font-size: 16px">丙午</span></td>
<td height="25px" align="center"><span style="font-size: 16px">周考王嵬六年</span></td>
</tr>
<tr>
<td colspan="4">
<table border="0" width="100%">
<tr>
<td valign="top">
<b>历史纪事</b> </td>
<td>
<div></div></td>
</tr>
</table>
</td>
</tr>
<tr>
<td colspan="4">
<table border="0" width="100%">
<tr>
<td valign="top">
<b>文化纪事</b> </td>
<td>
<div></div></td>
</tr>
</table>
</td>
</tr>
<tr>
<td colspan="4">
<table border="0" width="100%">
<tr>
<td valign="top">
<b>杂谭逸事</b> </td>
<td>
<div></div></td>
</tr>
</table>
</td>
</tr>
<tr>
<td colspan="4">
<table border="0" width="100%">
<tr>
<td valign="top">
<b>注释</b></td>
<td>
<div>无记载</div></td>
</tr>
</table>
</td>
</tr>
<tr>
</tr></table>
|
apache-2.0
|
galak/zephyr
|
drivers/serial/uart_rom_esp32s2.c
|
1343
|
/*
* Copyright (c) 2021 Espressif Systems (Shanghai) Co., Ltd.
*
* SPDX-License-Identifier: Apache-2.0
*/
#define DT_DRV_COMPAT espressif_esp32s2_uart
/* Include esp-idf headers first to avoid redefining BIT() macro */
#include <soc.h>
#include <esp_attr.h>
#include <device.h>
#include <drivers/uart.h>
#include <drivers/clock_control.h>
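/* Minimal polled-mode UART shim that delegates to the ESP32-S2 ROM UART routines. */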
static int uart_rom_esp32s2_poll_in(const struct device *dev, unsigned char *p_char)
{
ARG_UNUSED(dev);
return (int)esp_rom_uart_rx_one_char(p_char);
}
static IRAM_ATTR void uart_rom_esp32s2_poll_out(const struct device *dev,
unsigned char c)
{
ARG_UNUSED(dev);
esp_rom_uart_tx_one_char(c);
}
static int uart_rom_esp32s2_poll_err_check(const struct device *dev)
{
ARG_UNUSED(dev);
return 0;
}
static int uart_rom_esp32s2_init(const struct device *dev)
{
ARG_UNUSED(dev);
return 0;
}
static const DRAM_ATTR struct uart_driver_api uart_rom_esp32s2_api = {
.poll_in = uart_rom_esp32s2_poll_in,
.poll_out = uart_rom_esp32s2_poll_out,
.err_check = uart_rom_esp32s2_poll_err_check,
};
#define ESP32S2_ROM_UART_INIT(idx) \
DEVICE_DT_DEFINE(DT_NODELABEL(uart##idx), \
&uart_rom_esp32s2_init, \
NULL, \
NULL, \
NULL, \
PRE_KERNEL_1, \
CONFIG_SERIAL_INIT_PRIORITY, \
&uart_rom_esp32s2_api);
DT_INST_FOREACH_STATUS_OKAY(ESP32S2_ROM_UART_INIT)
|
apache-2.0
|
MacdonaldRobinson/FlexDotnetCMS
|
FrameworkLibrary/Classes/ControlAdapters/TreeViewAdapter.cs
|
1555
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.UI;
using System.Web.UI.Adapters;
using System.Web.UI.WebControls;
namespace FrameworkLibrary.Classes.ControlAdapters
{
public class TreeViewAdapter : ControlAdapter
{
protected CustomTreeView Target
{
get
{
return (this.Control as CustomTreeView);
}
}
protected override void Render(HtmlTextWriter writer)
{
writer.WriteFullBeginTag("ul");
writer.WriteLine();
foreach (CustomTreeNode node in Target.Nodes)
{
RenderNode(node, writer);
}
writer.WriteLine();
writer.WriteEndTag("ul");
}
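// Recursively renders a node as an <li> containing a link, with its children in a nested <ul>.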
private void RenderNode(CustomTreeNode node, HtmlTextWriter writer)
{
writer.WriteFullBeginTag("li " + node.GetLIAttributesAsString());
writer.WriteFullBeginTag("a href=\"" + node.NavigateUrl + "\"" + node.GetLinkAttributesAsString());
writer.Write(node.Text);
writer.WriteEndTag("a");
if (node.ChildNodes.Count > 0)
{
writer.WriteFullBeginTag("ul");
foreach (CustomTreeNode childNode in node.ChildNodes)
{
RenderNode(childNode, writer);
}
writer.WriteEndTag("ul");
}
writer.WriteEndTag("li");
}
}
}
|
apache-2.0
|
Mainflux/mainflux-lite
|
lora/redis/streams.go
|
5245
|
package redis
import (
"encoding/json"
"errors"
"fmt"
"github.com/go-redis/redis"
"github.com/mainflux/mainflux/logger"
"github.com/mainflux/mainflux/lora"
)
const (
keyType = "lora"
keyDevEUI = "dev_eui"
keyAppID = "app_id"
group = "mainflux.lora"
stream = "mainflux.things"
thingPrefix = "thing."
thingCreate = thingPrefix + "create"
thingUpdate = thingPrefix + "update"
thingRemove = thingPrefix + "remove"
channelPrefix = "channel."
channelCreate = channelPrefix + "create"
channelUpdate = channelPrefix + "update"
channelRemove = channelPrefix + "remove"
exists = "BUSYGROUP Consumer Group name already exists"
)
var (
errMetadataType = errors.New("field lora is missing in the metadata")
errMetadataFormat = errors.New("malformed metadata")
errMetadataAppID = errors.New("application ID not found in channel metadata")
errMetadataDevEUI = errors.New("device EUI not found in thing metadata")
)
// Subscriber represents event source for things and channels provisioning.
type Subscriber interface {
// Subscribe subscribes to the given subject and receives events.
Subscribe(string) error
}
type eventStore struct {
svc lora.Service
client *redis.Client
consumer string
logger logger.Logger
}
// NewEventStore returns new event store instance.
func NewEventStore(svc lora.Service, client *redis.Client, consumer string, log logger.Logger) Subscriber {
return eventStore{
svc: svc,
client: client,
consumer: consumer,
logger: log,
}
}
func (es eventStore) Subscribe(subject string) error {
err := es.client.XGroupCreateMkStream(stream, group, "$").Err()
if err != nil && err.Error() != exists {
return err
}
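// Consume events from the stream indefinitely, acknowledging each handled message.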
for {
streams, err := es.client.XReadGroup(&redis.XReadGroupArgs{
Group: group,
Consumer: es.consumer,
Streams: []string{stream, ">"},
Count: 100,
}).Result()
if err != nil || len(streams) == 0 {
continue
}
for _, msg := range streams[0].Messages {
event := msg.Values
var err error
switch event["operation"] {
case thingCreate:
cte, derr := decodeCreateThing(event)
if derr != nil {
err = derr
break
}
err = es.handleCreateThing(cte)
case thingUpdate:
ute, derr := decodeCreateThing(event)
if derr != nil {
err = derr
break
}
err = es.handleCreateThing(ute)
case thingRemove:
rte := decodeRemoveThing(event)
err = es.handleRemoveThing(rte)
case channelCreate:
cce, derr := decodeCreateChannel(event)
if derr != nil {
err = derr
break
}
err = es.handleCreateChannel(cce)
case channelUpdate:
uce, derr := decodeCreateChannel(event)
if derr != nil {
err = derr
break
}
err = es.handleCreateChannel(uce)
case channelRemove:
rce := decodeRemoveChannel(event)
err = es.handleRemoveChannel(rce)
}
if err != nil && err != errMetadataType {
es.logger.Warn(fmt.Sprintf("Failed to handle event sourcing: %s", err.Error()))
break
}
es.client.XAck(stream, group, msg.ID)
}
}
}
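// decodeCreateThing extracts the thing ID and the LoRa DevEUI from the event metadata.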
func decodeCreateThing(event map[string]interface{}) (createThingEvent, error) {
strmeta := read(event, "metadata", "{}")
var metadata map[string]interface{}
if err := json.Unmarshal([]byte(strmeta), &metadata); err != nil {
return createThingEvent{}, err
}
cte := createThingEvent{
id: read(event, "id", ""),
}
m, ok := metadata[keyType]
if !ok {
return createThingEvent{}, errMetadataType
}
lm, ok := m.(map[string]interface{})
if !ok {
return createThingEvent{}, errMetadataFormat
}
val, ok := lm[keyDevEUI].(string)
if !ok {
return createThingEvent{}, errMetadataDevEUI
}
cte.loraDevEUI = val
return cte, nil
}
func decodeRemoveThing(event map[string]interface{}) removeThingEvent {
return removeThingEvent{
id: read(event, "id", ""),
}
}
func decodeCreateChannel(event map[string]interface{}) (createChannelEvent, error) {
strmeta := read(event, "metadata", "{}")
var metadata map[string]interface{}
if err := json.Unmarshal([]byte(strmeta), &metadata); err != nil {
return createChannelEvent{}, err
}
cce := createChannelEvent{
id: read(event, "id", ""),
}
m, ok := metadata[keyType]
if !ok {
return createChannelEvent{}, errMetadataType
}
lm, ok := m.(map[string]interface{})
if !ok {
return createChannelEvent{}, errMetadataFormat
}
val, ok := lm[keyAppID].(string)
if !ok {
return createChannelEvent{}, errMetadataAppID
}
cce.loraAppID = val
return cce, nil
}
func decodeRemoveChannel(event map[string]interface{}) removeChannelEvent {
return removeChannelEvent{
id: read(event, "id", ""),
}
}
func (es eventStore) handleCreateThing(cte createThingEvent) error {
return es.svc.CreateThing(cte.id, cte.loraDevEUI)
}
func (es eventStore) handleRemoveThing(rte removeThingEvent) error {
return es.svc.RemoveThing(rte.id)
}
func (es eventStore) handleCreateChannel(cce createChannelEvent) error {
return es.svc.CreateChannel(cce.id, cce.loraAppID)
}
func (es eventStore) handleRemoveChannel(rce removeChannelEvent) error {
return es.svc.RemoveChannel(rce.id)
}
func read(event map[string]interface{}, key, def string) string {
val, ok := event[key].(string)
if !ok {
return def
}
return val
}
|
apache-2.0
|
izenecloud/nginx
|
tengine/contrib/stylechecker.py
|
1596
|
#!/usr/bin/python
"""usage: python stylechecker.py /path/to/the/c/code"""
import os
import sys
import string
import re
WHITE = '\033[97m'
CYAN = '\033[96m'
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
def check_file(file):
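"""Report lines longer than 80 columns, TAB characters and whitespace-only lines in a .c/.h file."""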
if re.search(r'\.[ch]$', file) is None:
return
f = open(file)
i = 1
file_name_printed = False
for line in f:
line = line.replace('\n', '')
# check the number of columns greater than 80
if len(line) > 80:
if not file_name_printed:
print RED + file + ':' + ENDC
file_name_printed = True
print (GREEN + ' [>80]:' + BLUE + ' #%d(%d)' + WHITE + ':%s') % (i, len(line), line) + ENDC
# check the TAB key
if string.find(line, '\t') >= 0:
if not file_name_printed:
print RED + file + ':' + ENDC
file_name_printed = True
print (YELLOW + ' [TAB]:' + BLUE + ' #%d(%d)' + WHITE + ':%s') % (i, len(line), line) + ENDC
# check blank lines
if line.isspace():
if not file_name_printed:
print RED + file + ':' + ENDC
file_name_printed = True
print (CYAN + ' [BLK]:' + BLUE + ' #%d(%d)' + WHITE + ':%s') % (i, len(line), line) + ENDC
i = i + 1
f.close()
def walk_dir(dir):
for root, dirs, files in os.walk(dir):
for f in files:
s = root + '/' + f
check_file(s)
# os.walk already descends into subdirectories, so no explicit recursion is needed.
walk_dir(sys.argv[1])
|
apache-2.0
|
alonana/JavaAsc
|
entity/src/main/java/com/javaasc/entity/api/JascValues.java
|
119
|
package com.javaasc.entity.api;
import java.util.List;
public interface JascValues {
List<String> getValues();
}
|
apache-2.0
|
rx2130/Leetcode
|
python/19 Remove Nth Node From End of List.py
|
1039
|
# Definition for singly-linked list.
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def removeNthFromEnd(self, head, n):
"""
:type head: ListNode
:type n: int
:rtype: ListNode
"""
fast = slow = head
for i in range(n):
fast = fast.next
if fast is None:
return head.next
while fast.next:
slow = slow.next
fast = fast.next
# print(slow.next, fast)
slow.next = slow.next.next
return head
def test(self):
intarrToTest = [1, 2, 3, 4, 5]
head = ListNode(intarrToTest[0])
x = head
for s in intarrToTest[1:]:
temp = ListNode(s)
x.next = temp
x = x.next
head = self.removeNthFromEnd(head, 4)
x = head
while x:
print(x.val, end=' ')
x = x.next
print()
test = Solution()
test.test()
|
apache-2.0
|
cjlee112/socraticqs2
|
mysite/mysite/tests/celery.py
|
3119
|
import datetime
from unittest import mock
import unittest
from django.utils import timezone
from unittest.mock import Mock, PropertyMock
from django.test import TestCase
from django.contrib.sessions.models import Session
# from core.tasks import send_outcome, check_anonymous
from lti.tasks import send_outcome
class CeleryTasksTest(TestCase):
@unittest.skip("skip unless fixed")
@mock.patch('mysite.celery.UserSession.objects.filter')
@mock.patch('mysite.celery.User.objects.filter')
def test_check_anonymous_user_session_no_session(self, mock_User_filter, mock_UserSession_filter):
mock_user = Mock(id=1)
call_mock_User_filter = [mock_user]
mock_session = Mock(id=2)
# user_session.session
p = PropertyMock(return_value=3, side_effect=Session.DoesNotExist('Object Does not exist'))
type(mock_session).session = p
call_mock_UserSession_filter = [mock_session]
mock_User_filter.return_value = call_mock_User_filter
mock_UserSession_filter.return_value = call_mock_UserSession_filter
mock_user_del = Mock()
mock_user.delete = mock_user_del
# response = check_anonymous()
mock_user_del.assert_called_once_with()
mock_User_filter.assert_called_with(groups__name='Temporary')
mock_UserSession_filter.assert_called_with(user__groups__name='Temporary')
@unittest.skip("skip unless fixed")
@mock.patch('mysite.celery.UserSession.objects.filter')
@mock.patch('mysite.celery.User.objects.filter')
def test_check_anonymous_user_session_has_session(self, mock_User_filter, mock_UserSession_filter):
mock_user = Mock(id=1)
call_mock_User_filter = [mock_user]
mock_session = Mock(id=2)
# user_session.session
mock_session.session.expire_date = timezone.now() - datetime.timedelta(days=1)
sess_session_del = Mock()
sess_user_del = Mock()
mock_session.session.delete = sess_session_del
mock_session.user.delete = sess_user_del
call_mock_UserSession_filter = [mock_session]
mock_User_filter.return_value = call_mock_User_filter
mock_UserSession_filter.return_value = call_mock_UserSession_filter
mock_user_del = Mock()
mock_user.delete = mock_user_del
# response = check_anonymous()
sess_session_del.assert_called_once_with()
sess_user_del.assert_called_once_with()
mock_user_del.assert_called_once_with()
mock_User_filter.assert_called_with(groups__name='Temporary')
mock_UserSession_filter.assert_called_with(user__groups__name='Temporary')
@mock.patch('lti.tasks.GradedLaunch.objects.get')
@mock.patch('lti.tasks.send_score_update')
def test_send_outcome(self, mock_send_score_update, mock_GradedLaunch_get):
get_mock_ret_val = Mock()
mock_GradedLaunch_get.return_value = get_mock_ret_val
send_outcome('0', assignment_id=1)
mock_GradedLaunch_get.assert_called_once_with(id=1)
mock_send_score_update.assert_called_once_with(get_mock_ret_val, '0')
|
apache-2.0
|
orfeotoolbox/OTB
|
Modules/Core/ObjectList/include/otbObjectListToObjectListFilter.h
|
4437
|
/*
* Copyright (C) 2005-2020 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef otbObjectListToObjectListFilter_h
#define otbObjectListToObjectListFilter_h
#include "otbObjectListSource.h"
namespace otb
{
/** \class ObjectListToObjectListFilter
* \brief Base class for filters that take an ObjectList as input and produce an ObjectList as output.
*
* ObjectListToObjectListFilter is the base class for all process objects that output
* ObjectList data and require ObjectList data as input. Specifically, this class
* defines the SetInput() method for defining the input to a filter.
*
* Be aware that this class is templated over the list type, not the object type. It will
* be typically something like otb::ObjectList<ObjectType>. This is to enable the use of
* class derived from ObjectList or other implementations.
*
*
* \ingroup ObjectListFilter
*
* \ingroup OTBObjectList
*/
template <class TInputList, class TOutputList>
class ITK_EXPORT ObjectListToObjectListFilter : public otb::ObjectListSource<TOutputList>
{
public:
/** Standard class typedefs. */
typedef ObjectListToObjectListFilter Self;
typedef otb::ObjectListSource<TOutputList> Superclass;
typedef itk::SmartPointer<Self> Pointer;
typedef itk::SmartPointer<const Self> ConstPointer;
/** Method for creation through the object factory. */
itkNewMacro(Self);
/** Run-time type information (and related methods). */
itkTypeMacro(ObjectListToObjectListFilter, ObjectListSource);
/** Some typedefs. */
typedef TInputList InputListType;
typedef TOutputList OutputListType;
typedef typename TInputList::ConstPointer InputListPointer;
typedef typename TOutputList::Pointer OutputListPointer;
typedef typename TInputList::ConstIterator InputListIterator;
typedef typename InputListType::ObjectType InputObjectType;
typedef typename OutputListType::ObjectType OutputObjectType;
typedef itk::DataObject::Pointer DataObjectPointer;
using Superclass::SetInput;
virtual void SetInput(const InputListType* input);
const InputListType* GetInput(void);
protected:
/** Constructor */
ObjectListToObjectListFilter();
/** Destructor */
~ObjectListToObjectListFilter() override
{
}
/**PrintSelf method */
void PrintSelf(std::ostream& os, itk::Indent indent) const override;
void GenerateData(void) override;
/** Multi-threading implementation */
typedef std::vector<OutputListPointer> OutputListForThreadType;
virtual void BeforeThreadedGenerateData();
virtual void AfterThreadedGenerateData()
{
}
virtual int SplitRequestedRegion(itk::ThreadIdType threadId, int threadCount, unsigned int requestedElements, unsigned int& startIndex,
unsigned int& stopIndex);
/** startIndex and stopIndex represent the indices of the Objects
* to examine in thread threadId */
virtual void ThreadedGenerateData(unsigned int startIndex, unsigned int stopIndex, itk::ThreadIdType threadId);
/** Static function used as a "callback" by the MultiThreader. The threading
* library will call this routine for each thread, which will delegate the
* control to ThreadedGenerateData(). */
static ITK_THREAD_RETURN_TYPE ThreaderCallback(void* arg);
/** Internal structure used for passing image data into the threading library */
struct ThreadStruct
{
Pointer Filter;
};
OutputListForThreadType m_ObjectListPerThread;
/** End Multi-threading implementation */
private:
ObjectListToObjectListFilter(const Self&) = delete;
void operator=(const Self&) = delete;
};
} // end namespace otb
#ifndef OTB_MANUAL_INSTANTIATION
#include "otbObjectListToObjectListFilter.hxx"
#endif
#endif
|
apache-2.0
|
Commit451/ParcelCheck
|
app/src/main/java/com/commit451/parcelcheck/sample/brokenModels/Phone.java
|
1342
|
package com.commit451.parcelcheck.sample.brokenModels;
import android.os.Parcel;
import android.os.Parcelable;
import java.util.Date;
/**
* A phone model, which is missing a few parcelable calls
*/
public class Phone implements Parcelable {
private int modelNumber;
private String manufacturer;
private Date releaseDate;
public Phone() {
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeInt(this.modelNumber);
//Comment this out, which makes the parcel test fail
//dest.writeString(this.manufacturer);
dest.writeLong(this.releaseDate != null ? this.releaseDate.getTime() : -1);
}
protected Phone(Parcel in) {
this.modelNumber = in.readInt();
this.manufacturer = in.readString();
long tmpReleaseDate = in.readLong();
this.releaseDate = tmpReleaseDate == -1 ? null : new Date(tmpReleaseDate);
}
public static final Parcelable.Creator<Phone> CREATOR = new Parcelable.Creator<Phone>() {
@Override
public Phone createFromParcel(Parcel source) {
return new Phone(source);
}
@Override
public Phone[] newArray(int size) {
return new Phone[size];
}
};
}
|
apache-2.0
|
obulpathi/cdn1
|
cdn/transport/validators/stoplight/exceptions.py
|
1077
|
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class ValidationFailed(ValueError):
"""User input was inconsistent with API restrictions."""
def __init__(self, msg, *args, **kwargs):
msg = msg.format(*args, **kwargs)
super(ValidationFailed, self).__init__(msg)
class ValidationProgrammingError(ValueError):
"""Caller did not map validations correctly."""
def __init__(self, msg, *args, **kwargs):
msg = msg.format(*args, **kwargs)
super(ValidationProgrammingError, self).__init__(msg)
|
apache-2.0
|
PolymerLabs/async-demos
|
packages/scheduler/src/lib/queue-scheduler.ts
|
280
|
import {TaskQueue} from './task-queue.js';
import { Task } from './task.js';
/**
* An object that controls when tasks are executed from a queue or set of
* queues.
*/
export interface QueueScheduler<Q extends TaskQueue<T>, T extends Task<any>> {
schedule(queue: Q): void;
}
|
apache-2.0
|
SlalomDigital/iBeaconCMS
|
config/initializers/session_store.rb
|
148
|
# Be sure to restart your server when you modify this file.
IBeaconCMS::Application.config.session_store :cookie_store, key: '_iBeaconCMS_session'
|
apache-2.0
|
pulcy/vault-monkey
|
deps/github.com/hashicorp/vault/vendor/github.com/google/go-github/github/event_types.go
|
24483
|
// Copyright 2016 The go-github AUTHORS. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// These event types are shared between the Events API and used as Webhook payloads.
package github
// CommitCommentEvent is triggered when a commit comment is created.
// The Webhook event name is "commit_comment".
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#commitcommentevent
type CommitCommentEvent struct {
Comment *RepositoryComment `json:"comment,omitempty"`
// The following fields are only populated by Webhook events.
Action *string `json:"action,omitempty"`
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// CreateEvent represents a created repository, branch, or tag.
// The Webhook event name is "create".
//
// Note: webhooks will not receive this event for created repositories.
// Additionally, webhooks will not receive this event for tags if more
// than three tags are pushed at once.
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#createevent
type CreateEvent struct {
Ref *string `json:"ref,omitempty"`
// RefType is the object that was created. Possible values are: "repository", "branch", "tag".
RefType *string `json:"ref_type,omitempty"`
MasterBranch *string `json:"master_branch,omitempty"`
Description *string `json:"description,omitempty"`
// The following fields are only populated by Webhook events.
PusherType *string `json:"pusher_type,omitempty"`
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// DeleteEvent represents a deleted branch or tag.
// The Webhook event name is "delete".
//
// Note: webhooks will not receive this event for tags if more than three tags
// are deleted at once.
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#deleteevent
type DeleteEvent struct {
Ref *string `json:"ref,omitempty"`
// RefType is the object that was deleted. Possible values are: "branch", "tag".
RefType *string `json:"ref_type,omitempty"`
// The following fields are only populated by Webhook events.
PusherType *string `json:"pusher_type,omitempty"`
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// DeploymentEvent represents a deployment.
// The Webhook event name is "deployment".
//
// Events of this type are not visible in timelines, they are only used to trigger hooks.
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#deploymentevent
type DeploymentEvent struct {
Deployment *Deployment `json:"deployment,omitempty"`
Repo *Repository `json:"repository,omitempty"`
// The following fields are only populated by Webhook events.
Sender *User `json:"sender,omitempty"`
}
// DeploymentStatusEvent represents a deployment status.
// The Webhook event name is "deployment_status".
//
// Events of this type are not visible in timelines, they are only used to trigger hooks.
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#deploymentstatusevent
type DeploymentStatusEvent struct {
Deployment *Deployment `json:"deployment,omitempty"`
DeploymentStatus *DeploymentStatus `json:"deployment_status,omitempty"`
Repo *Repository `json:"repository,omitempty"`
// The following fields are only populated by Webhook events.
Sender *User `json:"sender,omitempty"`
}
// ForkEvent is triggered when a user forks a repository.
// The Webhook event name is "fork".
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#forkevent
type ForkEvent struct {
// Forkee is the created repository.
Forkee *Repository `json:"forkee,omitempty"`
// The following fields are only populated by Webhook events.
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// Page represents a single Wiki page.
type Page struct {
PageName *string `json:"page_name,omitempty"`
Title *string `json:"title,omitempty"`
Summary *string `json:"summary,omitempty"`
Action *string `json:"action,omitempty"`
SHA *string `json:"sha,omitempty"`
HTMLURL *string `json:"html_url,omitempty"`
}
// GollumEvent is triggered when a Wiki page is created or updated.
// The Webhook event name is "gollum".
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#gollumevent
type GollumEvent struct {
Pages []*Page `json:"pages,omitempty"`
// The following fields are only populated by Webhook events.
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// IssueActivityEvent represents the payload delivered by Issue webhook.
//
// Deprecated: Use IssuesEvent instead.
type IssueActivityEvent struct {
Action *string `json:"action,omitempty"`
Issue *Issue `json:"issue,omitempty"`
// The following fields are only populated by Webhook events.
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// EditChange represents the changes when an issue, pull request, or comment has
// been edited.
type EditChange struct {
Title *struct {
From *string `json:"from,omitempty"`
} `json:"title,omitempty"`
Body *struct {
From *string `json:"from,omitempty"`
} `json:"body,omitempty"`
}
// IntegrationInstallationEvent is triggered when an integration is created or deleted.
// The Webhook event name is "integration_installation".
//
// GitHub docs: https://developer.github.com/early-access/integrations/webhooks/#integrationinstallationevent
type IntegrationInstallationEvent struct {
// The action that was performed. Possible values for an "integration_installation"
// event are: "created", "deleted".
Action *string `json:"action,omitempty"`
Installation *Installation `json:"installation,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// IntegrationInstallationRepositoriesEvent is triggered when an integration repository
// is added or removed. The Webhook event name is "integration_installation_repositories".
//
// GitHub docs: https://developer.github.com/early-access/integrations/webhooks/#integrationinstallationrepositoriesevent
type IntegrationInstallationRepositoriesEvent struct {
// The action that was performed. Possible values for an "integration_installation_repositories"
// event are: "added", "removed".
Action *string `json:"action,omitempty"`
Installation *Installation `json:"installation,omitempty"`
RepositoriesAdded []*Repository `json:"repositories_added,omitempty"`
RepositoriesRemoved []*Repository `json:"repositories_removed,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// IssueCommentEvent is triggered when an issue comment is created on an issue
// or pull request.
// The Webhook event name is "issue_comment".
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#issuecommentevent
type IssueCommentEvent struct {
// Action is the action that was performed on the comment.
// Possible values are: "created", "edited", "deleted".
Action *string `json:"action,omitempty"`
Issue *Issue `json:"issue,omitempty"`
Comment *IssueComment `json:"comment,omitempty"`
// The following fields are only populated by Webhook events.
Changes *EditChange `json:"changes,omitempty"`
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// IssuesEvent is triggered when an issue is assigned, unassigned, labeled,
// unlabeled, opened, closed, or reopened.
// The Webhook event name is "issues".
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#issuesevent
type IssuesEvent struct {
// Action is the action that was performed. Possible values are: "assigned",
// "unassigned", "labeled", "unlabeled", "opened", "closed", "reopened", "edited".
Action *string `json:"action,omitempty"`
Issue *Issue `json:"issue,omitempty"`
Assignee *User `json:"assignee,omitempty"`
Label *Label `json:"label,omitempty"`
// The following fields are only populated by Webhook events.
Changes *EditChange `json:"changes,omitempty"`
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// LabelEvent is triggered when a repository's label is created, edited, or deleted.
// The Webhook event name is "label"
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#labelevent
type LabelEvent struct {
// Action is the action that was performed. Possible values are:
// "created", "edited", "deleted"
Action *string `json:"action,omitempty"`
Label *Label `json:"label,omitempty"`
// The following fields are only populated by Webhook events.
Changes *EditChange `json:"changes,omitempty"`
Repo *Repository `json:"repository,omitempty"`
Org *Organization `json:"organization,omitempty"`
}
// MemberEvent is triggered when a user is added as a collaborator to a repository.
// The Webhook event name is "member".
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#memberevent
type MemberEvent struct {
// Action is the action that was performed. Possible value is: "added".
Action *string `json:"action,omitempty"`
Member *User `json:"member,omitempty"`
// The following fields are only populated by Webhook events.
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// MembershipEvent is triggered when a user is added or removed from a team.
// The Webhook event name is "membership".
//
// Events of this type are not visible in timelines, they are only used to
// trigger organization webhooks.
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#membershipevent
type MembershipEvent struct {
// Action is the action that was performed. Possible values are: "added", "removed".
Action *string `json:"action,omitempty"`
// Scope is the scope of the membership. Possible value is: "team".
Scope *string `json:"scope,omitempty"`
Member *User `json:"member,omitempty"`
Team *Team `json:"team,omitempty"`
// The following fields are only populated by Webhook events.
Org *Organization `json:"organization,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// MilestoneEvent is triggered when a milestone is created, closed, opened, edited, or deleted.
// The Webhook event name is "milestone".
//
// Github docs: https://developer.github.com/v3/activity/events/types/#milestoneevent
type MilestoneEvent struct {
// Action is the action that was performed. Possible values are:
// "created", "closed", "opened", "edited", "deleted"
Action *string `json:"action,omitempty"`
Milestone *Milestone `json:"milestone,omitempty"`
// The following fields are only populated by Webhook events.
Changes *EditChange `json:"changes,omitempty"`
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
Org *Organization `json:"organization,omitempty"`
}
// OrganizationEvent is triggered when a user is added, removed, or invited to an organization.
// Events of this type are not visible in timelines. These events are only used to trigger organization hooks.
// Webhook event name is "organization".
//
// Github docs: https://developer.github.com/v3/activity/events/types/#organizationevent
type OrganizationEvent struct {
// Action is the action that was performed.
// Can be one of "member_added", "member_removed", or "member_invited".
Action *string `json:"action,omitempty"`
// Invitation is the invitation for the user or email if the action is "member_invited".
Invitation *Invitation `json:"invitation,omitempty"`
// Membership is the membership between the user and the organization.
// Not present when the action is "member_invited".
Membership *Membership `json:"membership,omitempty"`
Organization *Organization `json:"organization,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// PageBuildEvent represents an attempted build of a GitHub Pages site, whether
// successful or not.
// The Webhook event name is "page_build".
//
// This event is triggered on push to a GitHub Pages enabled branch (gh-pages
// for project pages, master for user and organization pages).
//
// Events of this type are not visible in timelines, they are only used to trigger hooks.
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#pagebuildevent
type PageBuildEvent struct {
Build *PagesBuild `json:"build,omitempty"`
// The following fields are only populated by Webhook events.
ID *int `json:"id,omitempty"`
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// PingEvent is triggered when a Webhook is added to GitHub.
//
// GitHub docs: https://developer.github.com/webhooks/#ping-event
type PingEvent struct {
// Random string of GitHub zen.
Zen *string `json:"zen,omitempty"`
// The ID of the webhook that triggered the ping.
HookID *int `json:"hook_id,omitempty"`
// The webhook configuration.
Hook *Hook `json:"hook,omitempty"`
}
// PublicEvent is triggered when a private repository is open sourced.
// According to GitHub: "Without a doubt: the best GitHub event."
// The Webhook event name is "public".
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#publicevent
type PublicEvent struct {
// The following fields are only populated by Webhook events.
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// PullRequestEvent is triggered when a pull request is assigned, unassigned,
// labeled, unlabeled, opened, closed, reopened, or synchronized.
// The Webhook event name is "pull_request".
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#pullrequestevent
type PullRequestEvent struct {
// Action is the action that was performed. Possible values are: "assigned",
// "unassigned", "labeled", "unlabeled", "opened", "closed", or "reopened",
// "synchronize", "edited". If the action is "closed" and the merged key is false,
// the pull request was closed with unmerged commits. If the action is "closed"
// and the merged key is true, the pull request was merged.
Action *string `json:"action,omitempty"`
Number *int `json:"number,omitempty"`
PullRequest *PullRequest `json:"pull_request,omitempty"`
// The following fields are only populated by Webhook events.
Changes *EditChange `json:"changes,omitempty"`
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// PullRequestReviewEvent is triggered when a review is submitted on a pull
// request.
// The Webhook event name is "pull_request_review".
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#pullrequestreviewevent
type PullRequestReviewEvent struct {
// Action is always "submitted".
Action *string `json:"action,omitempty"`
Review *PullRequestReview `json:"review,omitempty"`
PullRequest *PullRequest `json:"pull_request,omitempty"`
// The following fields are only populated by Webhook events.
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
// The following field is only present when the webhook is triggered on
// a repository belonging to an organization.
Organization *Organization `json:"organization,omitempty"`
}
// PullRequestReviewCommentEvent is triggered when a comment is created on a
// portion of the unified diff of a pull request.
// The Webhook event name is "pull_request_review_comment".
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#pullrequestreviewcommentevent
type PullRequestReviewCommentEvent struct {
// Action is the action that was performed on the comment.
// Possible values are: "created", "edited", "deleted".
Action *string `json:"action,omitempty"`
PullRequest *PullRequest `json:"pull_request,omitempty"`
Comment *PullRequestComment `json:"comment,omitempty"`
// The following fields are only populated by Webhook events.
Changes *EditChange `json:"changes,omitempty"`
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// PushEvent represents a git push to a GitHub repository.
//
// GitHub API docs: http://developer.github.com/v3/activity/events/types/#pushevent
type PushEvent struct {
PushID *int `json:"push_id,omitempty"`
Head *string `json:"head,omitempty"`
Ref *string `json:"ref,omitempty"`
Size *int `json:"size,omitempty"`
Commits []PushEventCommit `json:"commits,omitempty"`
Repo *PushEventRepository `json:"repository,omitempty"`
Before *string `json:"before,omitempty"`
DistinctSize *int `json:"distinct_size,omitempty"`
// The following fields are only populated by Webhook events.
After *string `json:"after,omitempty"`
Created *bool `json:"created,omitempty"`
Deleted *bool `json:"deleted,omitempty"`
Forced *bool `json:"forced,omitempty"`
BaseRef *string `json:"base_ref,omitempty"`
Compare *string `json:"compare,omitempty"`
HeadCommit *PushEventCommit `json:"head_commit,omitempty"`
Pusher *User `json:"pusher,omitempty"`
Sender *User `json:"sender,omitempty"`
}
func (p PushEvent) String() string {
return Stringify(p)
}
// PushEventCommit represents a git commit in a GitHub PushEvent.
type PushEventCommit struct {
Message *string `json:"message,omitempty"`
Author *CommitAuthor `json:"author,omitempty"`
URL *string `json:"url,omitempty"`
Distinct *bool `json:"distinct,omitempty"`
// The following fields are only populated by Events API.
SHA *string `json:"sha,omitempty"`
// The following fields are only populated by Webhook events.
ID *string `json:"id,omitempty"`
TreeID *string `json:"tree_id,omitempty"`
Timestamp *Timestamp `json:"timestamp,omitempty"`
Committer *CommitAuthor `json:"committer,omitempty"`
Added []string `json:"added,omitempty"`
Removed []string `json:"removed,omitempty"`
Modified []string `json:"modified,omitempty"`
}
func (p PushEventCommit) String() string {
return Stringify(p)
}
// PushEventRepository represents the repo object in a PushEvent payload.
type PushEventRepository struct {
ID *int `json:"id,omitempty"`
Name *string `json:"name,omitempty"`
FullName *string `json:"full_name,omitempty"`
Owner *PushEventRepoOwner `json:"owner,omitempty"`
Private *bool `json:"private,omitempty"`
Description *string `json:"description,omitempty"`
Fork *bool `json:"fork,omitempty"`
CreatedAt *Timestamp `json:"created_at,omitempty"`
PushedAt *Timestamp `json:"pushed_at,omitempty"`
UpdatedAt *Timestamp `json:"updated_at,omitempty"`
Homepage *string `json:"homepage,omitempty"`
Size *int `json:"size,omitempty"`
StargazersCount *int `json:"stargazers_count,omitempty"`
WatchersCount *int `json:"watchers_count,omitempty"`
Language *string `json:"language,omitempty"`
HasIssues *bool `json:"has_issues,omitempty"`
HasDownloads *bool `json:"has_downloads,omitempty"`
HasWiki *bool `json:"has_wiki,omitempty"`
HasPages *bool `json:"has_pages,omitempty"`
ForksCount *int `json:"forks_count,omitempty"`
OpenIssuesCount *int `json:"open_issues_count,omitempty"`
DefaultBranch *string `json:"default_branch,omitempty"`
MasterBranch *string `json:"master_branch,omitempty"`
Organization *string `json:"organization,omitempty"`
// The following fields are only populated by Webhook events.
URL *string `json:"url,omitempty"`
HTMLURL *string `json:"html_url,omitempty"`
}
// PushEventRepoOwner is a basic representation of user/org in a PushEvent payload.
type PushEventRepoOwner struct {
Name *string `json:"name,omitempty"`
Email *string `json:"email,omitempty"`
}
// ReleaseEvent is triggered when a release is published.
// The Webhook event name is "release".
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#releaseevent
type ReleaseEvent struct {
// Action is the action that was performed. Possible value is: "published".
Action *string `json:"action,omitempty"`
Release *RepositoryRelease `json:"release,omitempty"`
// The following fields are only populated by Webhook events.
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// RepositoryEvent is triggered when a repository is created.
// The Webhook event name is "repository".
//
// Events of this type are not visible in timelines, they are only used to
// trigger organization webhooks.
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#repositoryevent
type RepositoryEvent struct {
// Action is the action that was performed. Possible values are: "created", "deleted",
// "publicized", "privatized".
Action *string `json:"action,omitempty"`
Repo *Repository `json:"repository,omitempty"`
// The following fields are only populated by Webhook events.
Org *Organization `json:"organization,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// StatusEvent is triggered when the status of a Git commit changes.
// The Webhook event name is "status".
//
// Events of this type are not visible in timelines, they are only used to
// trigger hooks.
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#statusevent
type StatusEvent struct {
SHA *string `json:"sha,omitempty"`
// State is the new state. Possible values are: "pending", "success", "failure", "error".
State *string `json:"state,omitempty"`
Description *string `json:"description,omitempty"`
TargetURL *string `json:"target_url,omitempty"`
Branches []*Branch `json:"branches,omitempty"`
// The following fields are only populated by Webhook events.
ID *int `json:"id,omitempty"`
Name *string `json:"name,omitempty"`
Context *string `json:"context,omitempty"`
Commit *RepositoryCommit `json:"commit,omitempty"`
CreatedAt *Timestamp `json:"created_at,omitempty"`
UpdatedAt *Timestamp `json:"updated_at,omitempty"`
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// TeamAddEvent is triggered when a repository is added to a team.
// The Webhook event name is "team_add".
//
// Events of this type are not visible in timelines. These events are only used
// to trigger hooks.
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#teamaddevent
type TeamAddEvent struct {
Team *Team `json:"team,omitempty"`
Repo *Repository `json:"repository,omitempty"`
// The following fields are only populated by Webhook events.
Org *Organization `json:"organization,omitempty"`
Sender *User `json:"sender,omitempty"`
}
// WatchEvent is related to starring a repository, not watching. See this API
// blog post for an explanation: https://developer.github.com/changes/2012-09-05-watcher-api/
//
// The event’s actor is the user who starred a repository, and the event’s
// repository is the repository that was starred.
//
// GitHub docs: https://developer.github.com/v3/activity/events/types/#watchevent
type WatchEvent struct {
// Action is the action that was performed. Possible value is: "started".
Action *string `json:"action,omitempty"`
// The following fields are only populated by Webhook events.
Repo *Repository `json:"repository,omitempty"`
Sender *User `json:"sender,omitempty"`
}
|
apache-2.0
|
googleapis/java-accessapproval
|
.kokoro/continuous/propose_release.sh
|
1189
|
#!/bin/bash
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -eo pipefail
export NPM_CONFIG_PREFIX=/home/node/.npm-global
if [ -f ${KOKORO_KEYSTORE_DIR}/73713_github-magic-proxy-url-release-please ]; then
# Groom the release PR as new commits are merged.
npx release-please release-pr --token=${KOKORO_KEYSTORE_DIR}/73713_github-magic-proxy-token-release-please \
--repo-url=googleapis/java-accessapproval \
--package-name="accessapproval" \
--api-url=${KOKORO_KEYSTORE_DIR}/73713_github-magic-proxy-url-release-please \
--proxy-key=${KOKORO_KEYSTORE_DIR}/73713_github-magic-proxy-key-release-please \
--release-type=java-yoshi
fi
|
apache-2.0
|
28msec/nolap-report-editor
|
tests/e2e/basic-scenario.js
|
4515
|
'use strict';
//GetAttribute() returns "boolean" values and will return either "true" or null
describe('Report', function(){
var _ = require('lodash');
var Reports = require('../../app/reports/reports-page');
var Report = require('../../app/report/report-page');
var reports = new Reports();
var report, reportName, conceptName;
it('Should create a new empty report', function(){
reports.visitPage();
reportName = 'HelloWorld' + Math.floor((Math.random() * 10) + 1);
reports.createReport(reportName);
reports.getCurrentUrl()
.then(function(url){
var id = _.last(url.split('/'));
report = new Report(id);
expect(report.searchBox.isPresent()).toBe(true);
expect(report.label.getText()).toBe(reportName);
});
});
it('Should already contain an element', function(){
report.goToTaxonomy().concepts.goToConcept('h:ReportLineItems');
var concept = report.taxonomy.getConcept('h:ReportLineItems');
expect(concept.label.getAttribute('value')).toBe(reportName);
expect(report.taxonomy.elements.count()).toBe(1);
expect(report.taxonomy.rootElements.count()).toBe(1);
});
it('Shouldn\'t create a new concept with an invalid name', function(){
conceptName = 'hello World';
report.goToTaxonomy();
var concepts = report.taxonomy.concepts;
concepts.createConcept(conceptName);
expect(concepts.errorMessage.isDisplayed()).toBe(true);
expect(concepts.errorMessage.getText()).toBe('Invalid Concept Name');
});
it('Should create a new concept (1)', function(){
conceptName = 'h:helloWorldID';
report.goToTaxonomy();
var concepts = report.taxonomy.concepts;
concepts.createConcept(conceptName);
var concept = report.goToTaxonomy().concepts.goToConcept(conceptName);
expect(concept.label.getAttribute('value')).toBe('Hello World ID');
});
it('Taxonomy Section should be active', function(){
expect(report.getActiveSection()).toBe('Taxonomy');
report.goToFacts();
report.goToTaxonomy();
expect(report.getActiveSection()).toBe('Taxonomy');
});
it('Should create a new concept (2)', function(){
conceptName = 'h:assets';
var concepts = report.taxonomy.concepts;
report.goToTaxonomy();
concepts.createConcept(conceptName);
var concept = report.goToTaxonomy().concepts.goToConcept(conceptName);
expect(concept.label.getAttribute('value')).toBe('Assets');
});
it('Creates a new element', function(){
report.goToTaxonomy().concepts.goToConcept(conceptName);
report.taxonomy.getConcept(conceptName).createElement();
expect(report.taxonomy.elements.count()).toBe(2);
expect(report.taxonomy.rootElements.count()).toBe(1);
});
it('Renames the concept label', function(){
var overview = report.goToTaxonomy().concepts.goToConcept(conceptName).overview;
expect(overview.form.conceptLabel.getAttribute('value')).toBe('Assets');
overview.changeLabel('Assets Label');
expect(overview.form.conceptLabel.getAttribute('value')).toBe('Assets Label');
});
it('Creates a us-gaap:Assets synonym', function(){
var synonyms = report.taxonomy.getConcept(conceptName).goToSynonyms();
expect(synonyms.list.count()).toBe(0);
synonyms.addSynonym('us-gaap:Assets');
synonyms.addSynonym('us-gaap:AssetsCurrent');
synonyms.addSynonym('us-gaap:AssetsCurrent');
expect(synonyms.list.count()).toBe(2);
expect(synonyms.getName(synonyms.list.first())).toBe('us-gaap:Assets');
expect(synonyms.getName(synonyms.list.last())).toBe('us-gaap:AssetsCurrent');
});
it('Should display the fact table', function() {
report.goToFacts();
expect(report.facts.lineCount()).toBeGreaterThan(0);
});
it('Should display the preview', function() {
report.goToSpreadsheet();
expect(report.spreadsheet.getCellValueByCss('.constraints .header')).toBe('Component: (Network and Table)');
});
it('Should delete report', function() {
reports.visitPage();
reports.list.count().then(function(count){
reports.deleteReport(reportName).then(function(){
expect(reports.list.count()).toBe(count - 1);
});
});
});
});
|
apache-2.0
|
tensorflow/io
|
tensorflow_io/core/kernels/video_kernels.cc
|
5927
|
/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow_io/core/kernels/video_kernels.h"
extern "C" {
#if defined(__APPLE__)
void* VideoCaptureInitFunction(const char* device, int64_t* bytes,
int64_t* width, int64_t* height);
void VideoCaptureNextFunction(void* context, void* data, int64_t size);
void VideoCaptureFiniFunction(void* context);
#elif defined(_MSC_VER)
void* VideoCaptureInitFunction(const char* device, int64_t* bytes,
int64_t* width, int64_t* height) {
return NULL;
}
void VideoCaptureNextFunction(void* context, void* data, int64_t size) {}
void VideoCaptureFiniFunction(void* context) {}
#else
void* VideoCaptureInitFunction(const char* device, int64_t* bytes,
int64_t* width, int64_t* height) {
tensorflow::data::VideoCaptureContext* p =
new tensorflow::data::VideoCaptureContext();
if (p != nullptr) {
tensorflow::Status status = p->Init(device, bytes, width, height);
if (status.ok()) {
return p;
}
LOG(ERROR) << "unable to initialize video capture: " << status;
delete p;
}
return NULL;
}
void VideoCaptureNextFunction(void* context, void* data, int64_t size) {
tensorflow::data::VideoCaptureContext* p =
static_cast<tensorflow::data::VideoCaptureContext*>(context);
if (p != nullptr) {
tensorflow::Status status = p->Read(data, size);
if (!status.ok()) {
LOG(ERROR) << "unable to read video capture: " << status;
}
}
}
void VideoCaptureFiniFunction(void* context) {
tensorflow::data::VideoCaptureContext* p =
static_cast<tensorflow::data::VideoCaptureContext*>(context);
if (p != nullptr) {
delete p;
}
}
#endif
}
namespace tensorflow {
namespace data {
namespace {
class VideoCaptureReadableResource : public ResourceBase {
public:
VideoCaptureReadableResource(Env* env)
: env_(env), context_(nullptr, [](void* p) {
if (p != nullptr) {
VideoCaptureFiniFunction(p);
}
}) {}
~VideoCaptureReadableResource() {}
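// Opens the capture device and records the per-frame byte count and frame dimensions reported by the platform layer.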
Status Init(const string& input) {
mutex_lock l(mu_);
int64_t bytes, width, height;
context_.reset(
VideoCaptureInitFunction(input.c_str(), &bytes, &width, &height));
if (context_.get() == nullptr) {
return errors::InvalidArgument("unable to open device ", input);
}
bytes_ = static_cast<int64>(bytes);
width_ = static_cast<int64>(width);
height_ = static_cast<int64>(height);
return Status::OK();
}
Status Read(
std::function<Status(const TensorShape& shape, Tensor** value_tensor)>
allocate_func) {
mutex_lock l(mu_);
Tensor* value_tensor;
TF_RETURN_IF_ERROR(allocate_func(TensorShape({1}), &value_tensor));
string buffer;
buffer.resize(bytes_);
VideoCaptureNextFunction(context_.get(), (void*)&buffer[0],
static_cast<int64_t>(bytes_));
value_tensor->flat<tstring>()(0) = buffer;
return Status::OK();
}
string DebugString() const override {
mutex_lock l(mu_);
return "VideoCaptureReadableResource";
}
protected:
mutable mutex mu_;
Env* env_ TF_GUARDED_BY(mu_);
std::unique_ptr<void, void (*)(void*)> context_;
int64 bytes_;
int64 width_;
int64 height_;
};
class VideoCaptureReadableInitOp
: public ResourceOpKernel<VideoCaptureReadableResource> {
public:
explicit VideoCaptureReadableInitOp(OpKernelConstruction* context)
: ResourceOpKernel<VideoCaptureReadableResource>(context) {
env_ = context->env();
}
private:
void Compute(OpKernelContext* context) override {
ResourceOpKernel<VideoCaptureReadableResource>::Compute(context);
const Tensor* input_tensor;
OP_REQUIRES_OK(context, context->input("input", &input_tensor));
const string& input = input_tensor->scalar<tstring>()();
OP_REQUIRES_OK(context, resource_->Init(input));
}
Status CreateResource(VideoCaptureReadableResource** resource)
TF_EXCLUSIVE_LOCKS_REQUIRED(mu_) override {
*resource = new VideoCaptureReadableResource(env_);
return Status::OK();
}
private:
mutable mutex mu_;
Env* env_ TF_GUARDED_BY(mu_);
};
class VideoCaptureReadableReadOp : public OpKernel {
public:
explicit VideoCaptureReadableReadOp(OpKernelConstruction* context)
: OpKernel(context) {
env_ = context->env();
}
void Compute(OpKernelContext* context) override {
VideoCaptureReadableResource* resource;
OP_REQUIRES_OK(context,
GetResourceFromContext(context, "input", &resource));
core::ScopedUnref unref(resource);
OP_REQUIRES_OK(
context, resource->Read([&](const TensorShape& shape,
Tensor** value_tensor) -> Status {
TF_RETURN_IF_ERROR(context->allocate_output(0, shape, value_tensor));
return Status::OK();
}));
}
private:
mutable mutex mu_;
Env* env_ TF_GUARDED_BY(mu_);
};
REGISTER_KERNEL_BUILDER(Name("IO>VideoCaptureReadableInit").Device(DEVICE_CPU),
VideoCaptureReadableInitOp);
REGISTER_KERNEL_BUILDER(Name("IO>VideoCaptureReadableRead").Device(DEVICE_CPU),
VideoCaptureReadableReadOp);
} // namespace
} // namespace data
} // namespace tensorflow
|
apache-2.0
|
LorenzReinhart/ONOSnew
|
providers/lisp/mapping/src/main/java/org/onosproject/provider/lisp/mapping/util/MappingAddressBuilder.java
|
8703
|
/*
* Copyright 2017-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.provider.lisp.mapping.util;
import org.onlab.packet.IpAddress;
import org.onlab.packet.IpPrefix;
import org.onlab.packet.MacAddress;
import org.onosproject.lisp.ctl.ExtensionMappingAddressInterpreter;
import org.onosproject.lisp.msg.types.LispAfiAddress;
import org.onosproject.lisp.msg.types.LispAsAddress;
import org.onosproject.lisp.msg.types.LispDistinguishedNameAddress;
import org.onosproject.lisp.msg.types.LispIpv4Address;
import org.onosproject.lisp.msg.types.LispIpv6Address;
import org.onosproject.lisp.msg.types.LispMacAddress;
import org.onosproject.lisp.msg.types.lcaf.LispLcafAddress;
import org.onosproject.mapping.addresses.ExtensionMappingAddress;
import org.onosproject.mapping.addresses.MappingAddress;
import org.onosproject.mapping.addresses.MappingAddresses;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.device.DeviceService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.onosproject.mapping.addresses.ExtensionMappingAddressType.ExtensionMappingAddressTypes.*;
/**
* Mapping address builder class.
*/
public final class MappingAddressBuilder {
private static final Logger log =
LoggerFactory.getLogger(MappingAddressBuilder.class);
private static final int IPV4_PREFIX_LENGTH = 32;
private static final int IPV6_PREFIX_LENGTH = 128;
// prevents instantiation
private MappingAddressBuilder() {
}
/**
* Converts LispAfiAddress into abstracted mapping address.
*
* @param deviceService device service
* @param deviceId device identifier
* @param address LispAfiAddress
* @return abstracted mapping address
*/
protected static MappingAddress getAddress(DeviceService deviceService,
DeviceId deviceId,
LispAfiAddress address) {
if (address == null) {
log.warn("Address is not specified.");
return null;
}
switch (address.getAfi()) {
case IP4:
return afi2mapping(address);
case IP6:
return afi2mapping(address);
case AS:
int asNum = ((LispAsAddress) address).getASNum();
return MappingAddresses.asMappingAddress(String.valueOf(asNum));
case DISTINGUISHED_NAME:
String dn = ((LispDistinguishedNameAddress)
address).getDistinguishedName();
return MappingAddresses.dnMappingAddress(dn);
case MAC:
MacAddress macAddress = ((LispMacAddress) address).getAddress();
return MappingAddresses.ethMappingAddress(macAddress);
case LCAF:
return deviceService == null ? null :
lcaf2extension(deviceService, deviceId, (LispLcafAddress) address);
default:
log.warn("Unsupported address type {}", address.getAfi());
break;
}
return null;
}
/**
* Converts AFI address to generalized mapping address.
*
* @param afiAddress IP typed AFI address
* @return generalized mapping address
*/
private static MappingAddress afi2mapping(LispAfiAddress afiAddress) {
switch (afiAddress.getAfi()) {
case IP4:
IpAddress ipv4Address = ((LispIpv4Address) afiAddress).getAddress();
IpPrefix ipv4Prefix = IpPrefix.valueOf(ipv4Address, IPV4_PREFIX_LENGTH);
return MappingAddresses.ipv4MappingAddress(ipv4Prefix);
case IP6:
IpAddress ipv6Address = ((LispIpv6Address) afiAddress).getAddress();
IpPrefix ipv6Prefix = IpPrefix.valueOf(ipv6Address, IPV6_PREFIX_LENGTH);
return MappingAddresses.ipv6MappingAddress(ipv6Prefix);
default:
log.warn("Only support to convert IP address type");
break;
}
return null;
}
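    // Illustrative result: a LispIpv4Address holding 10.1.1.1 is converted to the
    // mapping address 10.1.1.1/32, and a LispIpv6Address to the corresponding /128 prefix.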
/**
* Converts LCAF address to extension mapping address.
*
* @param deviceService device service
* @param deviceId device identifier
* @param lcaf LCAF address
* @return extension mapping address
*/
private static MappingAddress lcaf2extension(DeviceService deviceService,
DeviceId deviceId,
LispLcafAddress lcaf) {
Device device = deviceService.getDevice(deviceId);
ExtensionMappingAddressInterpreter addressInterpreter;
ExtensionMappingAddress mappingAddress = null;
if (device.is(ExtensionMappingAddressInterpreter.class)) {
addressInterpreter = device.as(ExtensionMappingAddressInterpreter.class);
} else {
addressInterpreter = null;
}
switch (lcaf.getType()) {
case LIST:
if (addressInterpreter != null &&
addressInterpreter.supported(LIST_ADDRESS.type())) {
mappingAddress = addressInterpreter.mapLcafAddress(lcaf);
}
break;
case SEGMENT:
if (addressInterpreter != null &&
addressInterpreter.supported(SEGMENT_ADDRESS.type())) {
mappingAddress = addressInterpreter.mapLcafAddress(lcaf);
}
break;
case AS:
if (addressInterpreter != null &&
addressInterpreter.supported(AS_ADDRESS.type())) {
mappingAddress = addressInterpreter.mapLcafAddress(lcaf);
}
break;
case APPLICATION_DATA:
if (addressInterpreter != null &&
addressInterpreter.supported(APPLICATION_DATA_ADDRESS.type())) {
mappingAddress = addressInterpreter.mapLcafAddress(lcaf);
}
break;
case GEO_COORDINATE:
if (addressInterpreter != null &&
addressInterpreter.supported(GEO_COORDINATE_ADDRESS.type())) {
mappingAddress = addressInterpreter.mapLcafAddress(lcaf);
}
break;
case NAT:
if (addressInterpreter != null &&
addressInterpreter.supported(NAT_ADDRESS.type())) {
mappingAddress = addressInterpreter.mapLcafAddress(lcaf);
}
break;
case NONCE:
if (addressInterpreter != null &&
addressInterpreter.supported(NONCE_ADDRESS.type())) {
mappingAddress = addressInterpreter.mapLcafAddress(lcaf);
}
break;
case MULTICAST:
if (addressInterpreter != null &&
addressInterpreter.supported(MULTICAST_ADDRESS.type())) {
mappingAddress = addressInterpreter.mapLcafAddress(lcaf);
}
break;
case TRAFFIC_ENGINEERING:
if (addressInterpreter != null &&
addressInterpreter.supported(TRAFFIC_ENGINEERING_ADDRESS.type())) {
mappingAddress = addressInterpreter.mapLcafAddress(lcaf);
}
break;
case SOURCE_DEST:
if (addressInterpreter != null &&
addressInterpreter.supported(SOURCE_DEST_ADDRESS.type())) {
mappingAddress = addressInterpreter.mapLcafAddress(lcaf);
}
break;
default:
log.warn("Unsupported extension mapping address type {}", lcaf.getType());
break;
}
return mappingAddress != null ?
MappingAddresses.extensionMappingAddressWrapper(mappingAddress, deviceId) : null;
}
}
|
apache-2.0
|
faganpe/KafkaStreamingPOC
|
src/scripts/es/create_index_with_mappings.sh
|
6544
|
curl -XPUT 'http://192.168.99.100:9200/spark/' -d '{
"settings" : {
"index" : {
"number_of_shards" : 1,
"number_of_replicas" : 0
}
},
"mappings" : {
"netflow": {
"properties": {
"port_src_well_known_service": {
"type": "string"
},
"ts": {
"format": "yyyy/MM/dd HH:mm:ss.SSS||yyyy/MM/dd",
"type": "date"
},
"geoip_src_country": {
"type": "string"
},
"asset_dst_priority": {
"index": "not_analyzed",
"type": "integer"
},
"threat_dst_infrastructure": {
"type": "string"
},
"reason_for_flow": {
"type": "string"
},
"threat_src_infrastructure": {
"type": "string"
},
"te": {
"type": "string"
},
"geoip_src_lat": {
"type": "string"
},
"src_port": {
"type": "long"
},
"asset_src_org_type": {
"type": "string"
},
"geoip_src_subdivisions": {
"type": "string"
},
"threat_src_campaign": {
"type": "string"
},
"threat_src_type": {
"type": "string"
},
"geoip_dst_city": {
"type": "string"
},
"geoip_dst_country": {
"type": "string"
},
"geoip_dst_long": {
"type": "string"
},
"threat_dst_campaign": {
"type": "string"
},
"tcp_flag_a": {
"type": "string"
},
"threat_dst_malware": {
"type": "string"
},
"asset_dst_country": {
"type": "string"
},
"sensor_priority": {
"type": "string"
},
"sensor_id": {
"type": "string"
},
"asset_src_priority": {
"type": "string"
},
"dest_port": {
"type": "integer"
},
"asset_src_site": {
"type": "string"
},
"tcp_flag_s": {
"type": "string"
},
"geoip_src_as": {
"type": "string"
},
"tcp_flag_r": {
"type": "string"
},
"tcp_flag_p": {
"type": "string"
},
"yyyy": {
"type": "string"
},
"src_ip": {
"type": "ip"
},
"dst_ip": {
"type": "ip"
},
"asset_dst_org_sector": {
"type": "string"
},
"asset_src_org_sector": {
"type": "string"
},
"sensor_country": {
"type": "string"
},
"sensor_site": {
"type": "string"
},
"threat_dst_type": {
"type": "string"
},
"tcp_flag_f": {
"type": "string"
},
"tcp_flag_u": {
"type": "string"
},
"bytes": {
"type": "long"
},
"packets": {
"type": "long"
},
"geoip_src_city": {
"type": "string"
},
"hh": {
"type": "string"
},
"threat_src_malware": {
"type": "string"
},
"sensor_org_type": {
"type": "string"
},
"dd": {
"type": "string"
},
"tos": {
"type": "long"
},
"asset_dst_org_type": {
"type": "string"
},
"dest_ip": {
"type": "ip"
},
"asset_dst_org_name": {
"type": "string"
},
"geoip_src_long": {
"type": "string"
},
"asset_dst_site": {
"type": "string"
},
"sensor_org_name": {
"type": "string"
},
"threat_src_attacker": {
"type": "string"
},
"threat_dst_attacker": {
"type": "string"
},
"geoip_dst_as": {
"type": "string"
},
"geoip_dst_as_org": {
"type": "string"
},
"sensor_org_sector": {
"type": "string"
},
"mi": {
"type": "string"
},
"mm": {
"type": "string"
},
"protocol": {
"type": "long"
},
"ESDateStr": {
"type": "string"
},
"geoip_dst_lat": {
"type": "string"
},
"asset_src_country": {
"type": "string"
},
"geoip_dst_isp_org": {
"type": "string"
},
"port_dst_well_known_service": {
"type": "string"
},
"duration": {
"type": "string"
},
"geoip_dst_subdivisions": {
"type": "string"
},
"geoip_src_as_org": {
"type": "string"
},
"geoip_src_isp_org": {
"type": "string"
},
"ip_version": {
"type": "string"
},
"asset_src_org_name": {
"type": "string"
},
"asset_dst_priority": {
"type": "string"
}
}
}
}
}'
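# Optional sanity check (host, port and index assumed to match the PUT above):
# curl -XGET 'http://192.168.99.100:9200/spark/_mapping/netflow?pretty'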
|
apache-2.0
|
mynlp/ccg2lambda
|
en/eacl2017exp.sh
|
6306
|
#!/bin/bash
#
# Copyright 2016 Pascual Martinez-Gomez
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Usage:
#
# ./en/eacl2017exp.sh <ncores> <split> <templates.yaml>
#
# Example:
#
# ./en/eacl2017exp.sh 10 train en/semantic_templates_en_event.yaml
#
sick=en/SICK.semeval.txt
# Number of processes to run in parallel.
# This should not exceed the number of cores in your machine.
# Default: 3
cores=${1:-3}
# Split of the data (default train):
# train (4439 problems),
# test (4906 problems),
# trial (495 problems).
dataset=${2:-"train"}
templates=$3
plain_dir=plain
results_dir=results
# Extract training and test data from SICK dataset, removing the header line.
if [ ! -d ${plain_dir} ]; then
mkdir -p ${plain_dir}
fi
echo "Extracting problems from the SICK file."
tail -n +2 $sick | \
tr -d '\r' | \
awk -F'\t' -v tdir=${plain_dir} \
'{pair_id=$1;
sub(/\.$/,"",$2);
sub(/\.$/,"",$3);
premise=$2;
conclusion=$3;
if($5 == "CONTRADICTION"){
judgement="no";
} else if ($5 == "ENTAILMENT") {
judgement="yes";
} else if ($5 == "NEUTRAL") {
judgement="unknown";
}
set=$NF;
printf "%s.\n%s.\n", premise, conclusion > tdir"/sick_"tolower(set)"_"pair_id".txt";
printf "%s\n", judgement > tdir"/sick_"tolower(set)"_"pair_id".answer";
}'
# Create files that list all filenames of training, testing and trial.
for dset in {train,test,trial}; do
ls -v ${plain_dir}/sick_${dset}_*.txt > ${plain_dir}/sick_${dset}.files
done
# Split filename entries into several files, for parallel processing:
ntrain=`cat ${plain_dir}/sick_train.files | wc -l`
ntest=`cat ${plain_dir}/sick_test.files | wc -l`
ntrial=`cat ${plain_dir}/sick_trial.files | wc -l`
train_lines_per_split=`python -c "from math import ceil; print(int(ceil(float(${ntrain})/${cores})))"`
test_lines_per_split=`python -c "from math import ceil; print(int(ceil(float(${ntest})/${cores})))"`
trial_lines_per_split=`python -c "from math import ceil; print(int(ceil(float(${ntrial})/${cores})))"`
rm -f ${plain_dir}/sick_{train,test,trial}.files_??
split -l $train_lines_per_split ${plain_dir}/sick_train.files ${plain_dir}/sick_train.files_
split -l $test_lines_per_split ${plain_dir}/sick_test.files ${plain_dir}/sick_test.files_
split -l $trial_lines_per_split ${plain_dir}/sick_trial.files ${plain_dir}/sick_trial.files_
# Copy a coq static library and compile it.
cp en/coqlib_sick.v coqlib.v
coqc coqlib.v
cp en/tactics_coq_sick.txt tactics_coq.txt
# Run pipeline for each entailment problem.
for ff in ${plain_dir}/sick_${dataset}.files_??; do
for f in `cat ${ff}`; do
./en/rte_en_mp.sh $f $templates;
done &
done
# Wait for the parallel processes to finish.
wait
total=0
correct=0
for f in ${plain_dir}/sick_${dataset}_*.answer; do
let total++
base_filename=${f##*/}
sys_filename=${results_dir}/${base_filename/.answer/.txt.answer}
gold_answer=`head -1 $f`
if [ ! -e ${sys_filename} ]; then
sys_answer="unknown"
else
sys_answer=`head -1 ${sys_filename}`
if [ ! "${sys_answer}" == "unknown" ] && [ ! "${sys_answer}" == "yes" ] && [ ! "${sys_answer}" == "no" ]; then
sys_answer="unknown"
fi
fi
if [ "${gold_answer}" == "${sys_answer}" ]; then
let correct++
fi
echo -e $f"\t"$gold_answer"\t"$sys_answer
done
accuracy=`echo "scale=3; $correct / $total" | bc -l`
echo "Accuracy: "$correct" / "$total" = "$accuracy
# Print a summary (precision, recall, f-score) of the errors on individual problems,
# per problem category, and a global score.
echo "Evaluating."
echo "<!doctype html>
<html lang='en'>
<head>
<meta charset='UTF-8'>
<title>Evaluation results of "$templates"</title>
<style>
body {
font-size: 1.5em;
}
</style>
</head>
<body>
<table border='1'>
<tr>
<td>sick problem</td>
<td>gold answer</td>
<td>system answer</td>
<td>proving time</td>
</tr>" > $results_dir/main_${dataset}.html
total_observations=0
correct_recognitions=0
attempts=0
total_proving_time=0
red_color="rgb(255,0,0)"
green_color="rgb(0,255,0)"
white_color="rgb(255,255,255)"
gray_color="rgb(136,136,136)"
for gold_filename in `ls -v ${plain_dir}/sick_${dataset}_*.answer`; do
base_filename=${gold_filename##*/} # this line obtains the filename, without the directory path.
system_filename=${results_dir}/${base_filename/.answer/.txt.answer}
gold_answer=`cat $gold_filename`
system_answer=`cat $system_filename`
time_filename=${results_dir}/${base_filename/.answer/.txt.time}
proving_time=`cat $time_filename`
total_proving_time=`echo "$total_proving_time + $proving_time" | bc -l`
total_number=$((total_number + 1))
color=$white_color
if [ "$gold_answer" == "yes" ] || [ "$gold_answer" == "no" ]; then
total_observations=$((total_observations + 1))
if [ "$gold_answer" == "$system_answer" ]; then
correct_recognitions=$((correct_recognitions + 1))
color=$green_color
else
color=$red_color
fi
if [ "$system_answer" == "yes" ] || [ "$system_answer" == "no" ]; then
attempts=$((attempts + 1))
else
color=$gray_color
fi
fi
echo '
<tr>
<td><a style="background-color:'$color';" href="'${base_filename/.answer/.txt.html}'">'${base_filename/.answer/}'</a></td>
<td>'$gold_answer'</td>
<td>'$system_answer'</td>
<td>'$proving_time's</td>
</tr>' >> $results_dir/main_${dataset}.html
done
average_proving_time=`echo "scale=2; $total_proving_time / $total_number" | bc -l`
echo "
<h4><font color="red">Accuracy: "$correct" / "$total" = "$accuracy" </font></h4>
<h4><font color="red">Average proving time: "${average_proving_time}" </font></h4>
</body>
</html>
" >> $results_dir/main_${dataset}.html
./ja/accuracy.sh ${results_dir}/main_${dataset}.html > ${results_dir}/score.txt
|
apache-2.0
|
asanka88/apache-synapse
|
modules/transports/core/nhttp/src/main/java/org/apache/synapse/transport/nhttp/util/RESTUtil.java
|
16005
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.synapse.transport.nhttp.util;
import org.apache.axiom.om.OMAbstractFactory;
import org.apache.axiom.om.OMElement;
import org.apache.axis2.AxisFault;
import org.apache.axis2.Constants;
import org.apache.axis2.addressing.EndpointReference;
import org.apache.axis2.builder.Builder;
import org.apache.axis2.context.MessageContext;
import org.apache.axis2.description.AxisBindingOperation;
import org.apache.axis2.description.AxisEndpoint;
import org.apache.axis2.description.AxisOperation;
import org.apache.axis2.description.AxisService;
import org.apache.axis2.description.WSDL20DefaultValueHolder;
import org.apache.axis2.description.WSDL2Constants;
import org.apache.axis2.dispatchers.HTTPLocationBasedDispatcher;
import org.apache.axis2.dispatchers.RequestURIBasedDispatcher;
import org.apache.axis2.dispatchers.RequestURIOperationDispatcher;
import org.apache.axis2.engine.AxisEngine;
import org.apache.axis2.transport.http.HTTPConstants;
import org.apache.axis2.transport.http.util.URIEncoderDecoder;
import org.apache.http.Header;
import org.apache.synapse.transport.nhttp.NHttpConfiguration;
import org.apache.synapse.transport.nhttp.NhttpConstants;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.util.Iterator;
/**
* This class provides a set of utility methods to manage the REST invocation calls
 * going out from the nhttp transport using the HTTP GET method
*/
public class RESTUtil {
/**
* This method will return the URI part for the GET HTTPRequest by converting
* the SOAP infoset to the URL-encoded GET format
*
* @param messageContext - from which the SOAP infoset will be extracted to encode
* @param address - address of the actual service
 * @return uri - URI of the GET request
* @throws AxisFault - if the SOAP infoset cannot be converted in to the GET URL-encoded format
*/
public static String getURI(MessageContext messageContext, String address) throws AxisFault {
OMElement firstElement;
address = address.substring(address.indexOf("//") + 2);
address = address.substring(address.indexOf("/"));
String queryParameterSeparator = (String) messageContext
.getProperty(WSDL2Constants.ATTR_WHTTP_QUERY_PARAMETER_SEPARATOR);
// In case queryParameterSeparator is null we better use the default value
if (queryParameterSeparator == null) {
queryParameterSeparator = WSDL20DefaultValueHolder
.getDefaultValue(WSDL2Constants.ATTR_WHTTP_QUERY_PARAMETER_SEPARATOR);
}
firstElement = messageContext.getEnvelope().getBody().getFirstElement();
String params = "";
if (firstElement != null) {
// first element corresponds to the operation name
address = address + "/" + firstElement.getLocalName();
} else {
firstElement = messageContext.getEnvelope().getBody();
}
Iterator iter = firstElement.getChildElements();
String legalCharacters = WSDL2Constants
.LEGAL_CHARACTERS_IN_QUERY.replaceAll(queryParameterSeparator, "");
StringBuffer buff = new StringBuffer(params);
// iterate through the child elements and find the request parameters
while (iter.hasNext()) {
OMElement element = (OMElement) iter.next();
try {
buff.append(URIEncoderDecoder.quoteIllegal(element.getLocalName(),
legalCharacters)).append("=").append(URIEncoderDecoder.quoteIllegal(element.getText(),
legalCharacters)).append(queryParameterSeparator);
} catch (UnsupportedEncodingException e) {
throw new AxisFault("URI Encoding error : " + element.getLocalName()
+ "=" + element.getText(), e);
}
}
params = buff.toString();
if (params.trim().length() != 0) {
int index = address.indexOf("?");
if (index == -1) {
address = address + "?" + params.substring(0, params.length() - 1);
} else if (index == address.length() - 1) {
address = address + params.substring(0, params.length() - 1);
} else {
address = address
+ queryParameterSeparator + params.substring(0, params.length() - 1);
}
}
return address;
}
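    // Illustrative example (hypothetical service and payload): for the address
    // "http://localhost:9000/services/StockQuoteService" and a SOAP body of
    // <getQuote><symbol>IBM</symbol></getQuote>, this method returns
    // "/services/StockQuoteService/getQuote?symbol=IBM".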
/**
* Processes the HTTP GET / DELETE request and builds the SOAP info-set of the REST message
*
* @param msgContext The MessageContext of the Request Message
* @param out The output stream of the response
* @param requestURI The URL that the request came to
* @param contentTypeHeader The contentType header of the request
* @param httpMethod The http method of the request
 * @param dispatching Whether we should do service dispatching
* @throws AxisFault - Thrown in case a fault occurs
*/
public static void processGetAndDeleteRequest(MessageContext msgContext, OutputStream out,
String requestURI, Header contentTypeHeader,
String httpMethod, boolean dispatching)
throws AxisFault {
String contentType = contentTypeHeader != null ? contentTypeHeader.getValue() : null;
prepareMessageContext(msgContext, requestURI, httpMethod, out, contentType, dispatching);
msgContext.setProperty(NhttpConstants.NO_ENTITY_BODY, Boolean.TRUE);
org.apache.axis2.transport.http.util.RESTUtil.processURLRequest(msgContext, out,
contentType);
}
/**
* Processes the HTTP GET / DELETE request and builds the SOAP info-set of the REST message
*
* @param msgContext The MessageContext of the Request Message
* @param out The output stream of the response
* @param requestURI The URL that the request came to
* @param contentTypeHeader The contentType header of the request
* @param builder The message builder to use
* @param httpMethod The http method of the request
 * @param dispatching Whether we should do service dispatching
* @throws AxisFault - Thrown in case a fault occurs
*/
public static void processGetAndDeleteRequest(MessageContext msgContext, OutputStream out,
String requestURI, Header contentTypeHeader,
Builder builder, String httpMethod,
boolean dispatching)
throws AxisFault {
String contentType = contentTypeHeader != null ? contentTypeHeader.getValue() : null;
prepareMessageContext(msgContext, requestURI, httpMethod, out, contentType, dispatching);
msgContext.setProperty(NhttpConstants.NO_ENTITY_BODY, Boolean.TRUE);
org.apache.axis2.transport.http.util.RESTUtil.processURLRequest(msgContext, out,
contentType, builder);
}
/**
* Processes the HTTP GET request and builds the SOAP info-set of the REST message
*
* @param msgContext The MessageContext of the Request Message
* @param out The output stream of the response
* @param soapAction SoapAction of the request
* @param requestURI The URL that the request came to
* @throws AxisFault - Thrown in case a fault occurs
*/
public static void processURLRequest(MessageContext msgContext, OutputStream out,
String soapAction, String requestURI) throws AxisFault {
if ((soapAction != null) && soapAction.startsWith("\"") && soapAction.endsWith("\"")) {
soapAction = soapAction.substring(1, soapAction.length() - 1);
}
msgContext.setSoapAction(soapAction);
msgContext.setTo(new EndpointReference(requestURI));
msgContext.setProperty(MessageContext.TRANSPORT_OUT, out);
msgContext.setServerSide(true);
msgContext.setDoingREST(true);
msgContext.setEnvelope(OMAbstractFactory.getSOAP11Factory().getDefaultEnvelope());
msgContext.setProperty(NhttpConstants.NO_ENTITY_BODY, Boolean.TRUE);
AxisEngine.receive(msgContext);
}
/**
* Processes the HTTP POST request and builds the SOAP info-set of the REST message
*
* @param msgContext The MessageContext of the Request Message
* @param is The input stream of the request
* @param os The output stream of the response
* @param requestURI The URL that the request came to
* @param contentTypeHeader The contentType header of the request
 * @param dispatching Whether we should do dispatching
* @throws AxisFault - Thrown in case a fault occurs
*/
public static void processPOSTRequest(MessageContext msgContext, InputStream is,
OutputStream os, String requestURI,
Header contentTypeHeader,
boolean dispatching) throws AxisFault {
String contentType = contentTypeHeader != null ? contentTypeHeader.getValue() : null;
prepareMessageContext(msgContext, requestURI, HTTPConstants.HTTP_METHOD_POST,
os, contentType, dispatching);
org.apache.axis2.transport.http.util.RESTUtil.processXMLRequest(msgContext, is, os,
contentType);
}
/**
* Processes the HTTP POST request and builds the SOAP info-set of the REST message
*
* @param msgContext The MessageContext of the Request Message
* @param is The input stream of the request
* @param os The output stream of the response
* @param requestURI The URL that the request came to
* @param contentTypeHeader The contentType header of the request
* @param builder The message builder to use
 * @param dispatching Whether we should do dispatching
* @throws AxisFault - Thrown in case a fault occurs
*/
public static void processPOSTRequest(MessageContext msgContext, InputStream is,
OutputStream os, String requestURI,
Header contentTypeHeader, Builder builder,
boolean dispatching) throws AxisFault {
String contentType = contentTypeHeader != null ? contentTypeHeader.getValue() : null;
prepareMessageContext(msgContext, requestURI, HTTPConstants.HTTP_METHOD_POST,
os, contentType, dispatching);
org.apache.axis2.transport.http.util.RESTUtil.processXMLRequest(msgContext, is, os,
contentType, builder);
}
/**
* prepare message context prior to call axis2 RestUtils
*
* @param msgContext The MessageContext of the Request Message
* @param requestURI The URL that the request came to
* @param httpMethod The http method of the request
* @param out The output stream of the response
* @param contentType The content type of the request
 * @param dispatching Whether we should do dispatching
* @throws AxisFault Thrown in case a fault occurs
*/
private static void prepareMessageContext(MessageContext msgContext,
String requestURI,
String httpMethod,
OutputStream out,
String contentType,
boolean dispatching) throws AxisFault {
msgContext.setTo(new EndpointReference(requestURI));
msgContext.setProperty(HTTPConstants.HTTP_METHOD, httpMethod);
msgContext.setServerSide(true);
msgContext.setDoingREST(true);
msgContext.setProperty(MessageContext.TRANSPORT_OUT, out);
msgContext.setProperty(NhttpConstants.REST_REQUEST_CONTENT_TYPE, contentType);
        // Workaround to get REST working in two cases:
        // 1) The request URI maps to a service name and operation, but no such service
        //    (i.e. no proxy or other service) is actually deployed in Synapse,
        //    e.g. http://localhost:8280/services/StudentService/students where no proxy
        //    service named StudentService exists. This is the scenario where StudentService
        //    lives on an external server and must be called from Synapse via the main sequence.
        // 2) The request is to be injected into the main sequence, i.e. http://localhost:8280
        // This method does not cause any performance issue.
        // The proper fix would be to refactor the axis2 RESTUtil appropriately.
if (dispatching) {
RequestURIBasedDispatcher requestDispatcher = new RequestURIBasedDispatcher();
AxisService axisService = requestDispatcher.findService(msgContext);
if (axisService == null) {
String defaultSvcName = NHttpConfiguration.getInstance().getStringProperty(
"nhttp.default.service", "__SynapseService");
axisService = msgContext.getConfigurationContext()
.getAxisConfiguration().getService(defaultSvcName);
}
msgContext.setAxisService(axisService);
}
}
public static void dispatchAndVerify(MessageContext msgContext) throws AxisFault {
RequestURIBasedDispatcher requestDispatcher = new RequestURIBasedDispatcher();
requestDispatcher.invoke(msgContext);
AxisService axisService = msgContext.getAxisService();
if (axisService != null) {
HTTPLocationBasedDispatcher httpLocationBasedDispatcher =
new HTTPLocationBasedDispatcher();
httpLocationBasedDispatcher.invoke(msgContext);
if (msgContext.getAxisOperation() == null) {
RequestURIOperationDispatcher requestURIOperationDispatcher =
new RequestURIOperationDispatcher();
requestURIOperationDispatcher.invoke(msgContext);
}
AxisOperation axisOperation;
if ((axisOperation = msgContext.getAxisOperation()) != null) {
AxisEndpoint axisEndpoint =
(AxisEndpoint) msgContext.getProperty(WSDL2Constants.ENDPOINT_LOCAL_NAME);
if (axisEndpoint != null) {
AxisBindingOperation axisBindingOperation = (AxisBindingOperation) axisEndpoint
.getBinding().getChild(axisOperation.getName());
msgContext.setProperty(Constants.AXIS_BINDING_OPERATION, axisBindingOperation);
}
msgContext.setAxisOperation(axisOperation);
}
}
}
}
|
apache-2.0
|
firebase/firebase-android-sdk
|
firebase-firestore/src/main/java/com/google/firebase/firestore/model/mutation/NumericIncrementTransformOperation.java
|
4335
|
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.firebase.firestore.model.mutation;
import static com.google.firebase.firestore.model.Values.isDouble;
import static com.google.firebase.firestore.model.Values.isInteger;
import static com.google.firebase.firestore.util.Assert.fail;
import static com.google.firebase.firestore.util.Assert.hardAssert;
import androidx.annotation.Nullable;
import com.google.firebase.Timestamp;
import com.google.firebase.firestore.model.Values;
import com.google.firestore.v1.Value;
/**
* Implements the backend semantics for locally computed NUMERIC_ADD (increment) transforms.
* Converts all field values to longs or doubles and resolves overflows to
* Long.MAX_VALUE/Long.MIN_VALUE.
*/
public class NumericIncrementTransformOperation implements TransformOperation {
private Value operand;
public NumericIncrementTransformOperation(Value operand) {
hardAssert(
Values.isNumber(operand),
"NumericIncrementTransformOperation expects a NumberValue operand");
this.operand = operand;
}
@Override
public Value applyToLocalView(@Nullable Value previousValue, Timestamp localWriteTime) {
Value baseValue = computeBaseValue(previousValue);
    // Return an integer value only if both the previous value and the operand are integers.
if (isInteger(baseValue) && isInteger(operand)) {
long sum = safeIncrement(baseValue.getIntegerValue(), operandAsLong());
return Value.newBuilder().setIntegerValue(sum).build();
} else if (isInteger(baseValue)) {
double sum = baseValue.getIntegerValue() + operandAsDouble();
return Value.newBuilder().setDoubleValue(sum).build();
} else {
hardAssert(
isDouble(baseValue),
"Expected NumberValue to be of type DoubleValue, but was ",
previousValue.getClass().getCanonicalName());
double sum = baseValue.getDoubleValue() + operandAsDouble();
return Value.newBuilder().setDoubleValue(sum).build();
}
}
@Override
public Value applyToRemoteDocument(@Nullable Value previousValue, Value transformResult) {
return transformResult;
}
public Value getOperand() {
return operand;
}
/**
* Inspects the provided value, returning the provided value if it is already a NumberValue,
* otherwise returning a coerced IntegerValue of 0.
*/
@Override
public Value computeBaseValue(@Nullable Value previousValue) {
return Values.isNumber(previousValue)
? previousValue
: Value.newBuilder().setIntegerValue(0).build();
}
/**
* Implementation of Java 8's `addExact()` that resolves positive and negative numeric overflows
* to Long.MAX_VALUE or Long.MIN_VALUE respectively (instead of throwing an ArithmeticException).
*/
private long safeIncrement(long x, long y) {
long r = x + y;
// See "Hacker's Delight" 2-12: Overflow if both arguments have the opposite sign of the result
if (((x ^ r) & (y ^ r)) >= 0) {
return r;
}
if (r >= 0L) {
return Long.MIN_VALUE;
} else {
return Long.MAX_VALUE;
}
}
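  // Illustrative behavior: safeIncrement(Long.MAX_VALUE - 1, 5) saturates to
  // Long.MAX_VALUE and safeIncrement(Long.MIN_VALUE, -1) saturates to
  // Long.MIN_VALUE, instead of wrapping around or throwing.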
private double operandAsDouble() {
if (isDouble(operand)) {
return operand.getDoubleValue();
} else if (isInteger(operand)) {
return operand.getIntegerValue();
} else {
throw fail(
"Expected 'operand' to be of Number type, but was "
+ operand.getClass().getCanonicalName());
}
}
private long operandAsLong() {
if (isDouble(operand)) {
return (long) operand.getDoubleValue();
} else if (isInteger(operand)) {
return operand.getIntegerValue();
} else {
throw fail(
"Expected 'operand' to be of Number type, but was "
+ operand.getClass().getCanonicalName());
}
}
}
|
apache-2.0
|
bobwenx/HikariCP
|
hikaricp-common/src/main/java/com/zaxxer/hikari/pool/BaseHikariPool.java
|
18562
|
/*
* Copyright (C) 2013,2014 Brett Wooldridge
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zaxxer.hikari.pool;
import static com.zaxxer.hikari.pool.HikariMBeanElf.registerMBeans;
import static com.zaxxer.hikari.pool.HikariMBeanElf.unregisterMBeans;
import static com.zaxxer.hikari.util.IConcurrentBagEntry.STATE_IN_USE;
import static com.zaxxer.hikari.util.IConcurrentBagEntry.STATE_NOT_IN_USE;
import static com.zaxxer.hikari.util.IConcurrentBagEntry.STATE_REMOVED;
import static com.zaxxer.hikari.util.UtilityElf.createInstance;
import static com.zaxxer.hikari.util.UtilityElf.createThreadPoolExecutor;
import static com.zaxxer.hikari.util.UtilityElf.elapsedTimeMs;
import static com.zaxxer.hikari.util.UtilityElf.getTransactionIsolation;
import static com.zaxxer.hikari.util.UtilityElf.setRemoveOnCancelPolicy;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import javax.sql.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.codahale.metrics.MetricRegistry;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.IConnectionCustomizer;
import com.zaxxer.hikari.metrics.CodaHaleMetricsTracker;
import com.zaxxer.hikari.metrics.MetricsTracker;
import com.zaxxer.hikari.metrics.MetricsTracker.MetricsContext;
import com.zaxxer.hikari.proxy.IHikariConnectionProxy;
import com.zaxxer.hikari.proxy.ProxyFactory;
import com.zaxxer.hikari.util.ConcurrentBag;
import com.zaxxer.hikari.util.IBagStateListener;
import com.zaxxer.hikari.util.DefaultThreadFactory;
import com.zaxxer.hikari.util.GlobalPoolLock;
import com.zaxxer.hikari.util.LeakTask;
import com.zaxxer.hikari.util.PoolUtilities;
/**
* This is the primary connection pool class that provides the basic
* pooling behavior for HikariCP.
*
* @author Brett Wooldridge
*/
public abstract class BaseHikariPool implements HikariPoolMBean, IBagStateListener
{
protected static final Logger LOGGER = LoggerFactory.getLogger("HikariPool");
private static final long ALIVE_BYPASS_WINDOW = Long.getLong("com.zaxxer.hikari.aliveBypassWindow", 1000L);
public final String catalog;
public final boolean isReadOnly;
public final boolean isAutoCommit;
public int transactionIsolation;
protected final PoolUtilities poolUtils;
protected final HikariConfig configuration;
protected final AtomicInteger totalConnections;
protected final ConcurrentBag<PoolBagEntry> connectionBag;
protected final ThreadPoolExecutor addConnectionExecutor;
protected final ThreadPoolExecutor closeConnectionExecutor;
protected final ScheduledThreadPoolExecutor houseKeepingExecutorService;
protected final boolean isUseJdbc4Validation;
protected final boolean isIsolateInternalQueries;
protected volatile boolean isShutdown;
protected volatile long connectionTimeout;
protected volatile boolean isPoolSuspended;
private final LeakTask leakTask;
private final DataSource dataSource;
private final MetricsTracker metricsTracker;
private final GlobalPoolLock suspendResumeLock;
private final IConnectionCustomizer connectionCustomizer;
private final AtomicReference<Throwable> lastConnectionFailure;
private final String username;
private final String password;
private final boolean isRecordMetrics;
/**
* Construct a HikariPool with the specified configuration.
*
* @param configuration a HikariConfig instance
*/
public BaseHikariPool(HikariConfig configuration)
{
this(configuration, configuration.getUsername(), configuration.getPassword());
}
/**
* Construct a HikariPool with the specified configuration. We cache lots of configuration
* items in class-local final members for speed.
*
* @param configuration a HikariConfig instance
* @param username authentication username
* @param password authentication password
*/
public BaseHikariPool(HikariConfig configuration, String username, String password)
{
this.username = username;
this.password = password;
this.configuration = configuration;
this.poolUtils = new PoolUtilities();
this.connectionBag = createConcurrentBag(this);
this.totalConnections = new AtomicInteger();
this.connectionTimeout = configuration.getConnectionTimeout();
this.lastConnectionFailure = new AtomicReference<Throwable>();
this.isReadOnly = configuration.isReadOnly();
this.isAutoCommit = configuration.isAutoCommit();
this.suspendResumeLock = configuration.isAllowPoolSuspension() ? GlobalPoolLock.SUSPEND_RESUME_LOCK : GlobalPoolLock.FAUX_LOCK;
this.catalog = configuration.getCatalog();
this.connectionCustomizer = initializeCustomizer();
this.transactionIsolation = getTransactionIsolation(configuration.getTransactionIsolation());
this.isIsolateInternalQueries = configuration.isIsolateInternalQueries();
this.isUseJdbc4Validation = configuration.getConnectionTestQuery() == null;
this.isRecordMetrics = configuration.getMetricRegistry() != null;
this.metricsTracker = (isRecordMetrics ? new CodaHaleMetricsTracker(this, (MetricRegistry) configuration.getMetricRegistry()) : new MetricsTracker(this));
this.dataSource = poolUtils.initializeDataSource(configuration.getDataSourceClassName(), configuration.getDataSource(), configuration.getDataSourceProperties(), configuration.getJdbcUrl(), username, password);
this.addConnectionExecutor = createThreadPoolExecutor(configuration.getMaximumPoolSize(), "HikariCP connection filler (pool " + configuration.getPoolName() + ")", configuration.getThreadFactory(), new ThreadPoolExecutor.DiscardPolicy());
this.closeConnectionExecutor = createThreadPoolExecutor(4, "HikariCP connection closer (pool " + configuration.getPoolName() + ")", configuration.getThreadFactory(), new ThreadPoolExecutor.CallerRunsPolicy());
long delayPeriod = Long.getLong("com.zaxxer.hikari.housekeeping.periodMs", TimeUnit.SECONDS.toMillis(30L));
ThreadFactory threadFactory = configuration.getThreadFactory() != null ? configuration.getThreadFactory() : new DefaultThreadFactory("Hikari Housekeeping Timer (pool " + configuration.getPoolName() + ")", true);
this.houseKeepingExecutorService = new ScheduledThreadPoolExecutor(1, threadFactory, new ThreadPoolExecutor.DiscardPolicy());
this.houseKeepingExecutorService.scheduleAtFixedRate(getHouseKeeper(), delayPeriod, delayPeriod, TimeUnit.MILLISECONDS);
this.leakTask = (configuration.getLeakDetectionThreshold() == 0) ? LeakTask.NO_LEAK : new LeakTask(configuration.getLeakDetectionThreshold(), houseKeepingExecutorService);
setRemoveOnCancelPolicy(houseKeepingExecutorService);
poolUtils.setLoginTimeout(dataSource, connectionTimeout, LOGGER);
registerMBeans(configuration, this);
fillPool();
}
/**
* Get a connection from the pool, or timeout trying.
*
* @return a java.sql.Connection instance
* @throws SQLException thrown if a timeout occurs trying to obtain a connection
*/
public final Connection getConnection() throws SQLException
{
suspendResumeLock.acquire();
long timeout = connectionTimeout;
final long start = System.currentTimeMillis();
final MetricsContext metricsContext = (isRecordMetrics ? metricsTracker.recordConnectionRequest(start) : MetricsTracker.NO_CONTEXT);
try {
do {
final PoolBagEntry bagEntry = connectionBag.borrow(timeout, TimeUnit.MILLISECONDS);
if (bagEntry == null) {
break; // We timed out... break and throw exception
}
final long now = System.currentTimeMillis();
if (now - bagEntry.lastAccess > ALIVE_BYPASS_WINDOW && !isConnectionAlive(bagEntry.connection, timeout)) {
closeConnection(bagEntry); // Throw away the dead connection and try again
timeout = connectionTimeout - elapsedTimeMs(start);
}
else {
metricsContext.setConnectionLastOpen(bagEntry, now);
return ProxyFactory.getProxyConnection((HikariPool) this, bagEntry, leakTask.start());
}
}
while (timeout > 0L);
}
catch (InterruptedException e) {
throw new SQLException("Interrupted during connection acquisition", e);
}
finally {
suspendResumeLock.release();
metricsContext.stop();
}
logPoolState("Timeout failure ");
throw new SQLException(String.format("Timeout after %dms of waiting for a connection.", elapsedTimeMs(start)), lastConnectionFailure.getAndSet(null));
}
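   // Illustrative caller-side usage (assumed code, Java 7+ try-with-resources):
   //   try (Connection connection = pool.getConnection()) { /* use the connection */ }
   // A timeout surfaces as the SQLException above, with the most recent connection
   // failure attached as its cause.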
/**
* Release a connection back to the pool, or permanently close it if it is broken.
*
* @param bagEntry the PoolBagEntry to release back to the pool
*/
public final void releaseConnection(final PoolBagEntry bagEntry)
{
metricsTracker.recordConnectionUsage(bagEntry);
if (bagEntry.evicted) {
LOGGER.debug("Connection returned to pool {} is broken or evicted. Closing connection.", configuration.getPoolName());
closeConnection(bagEntry);
}
else {
bagEntry.lastAccess = System.currentTimeMillis();
connectionBag.requite(bagEntry);
}
}
/**
* Shutdown the pool, closing all idle connections and aborting or closing
* active connections.
*
* @throws InterruptedException thrown if the thread is interrupted during shutdown
*/
public final void shutdown() throws InterruptedException
{
if (!isShutdown) {
isShutdown = true;
LOGGER.info("HikariCP pool {} is shutting down.", configuration.getPoolName());
logPoolState("Before shutdown ");
connectionBag.close();
softEvictConnections();
houseKeepingExecutorService.shutdownNow();
addConnectionExecutor.shutdown();
addConnectionExecutor.awaitTermination(5L, TimeUnit.SECONDS);
final long start = System.currentTimeMillis();
do {
softEvictConnections();
abortActiveConnections();
}
while ((getIdleConnections() > 0 || getActiveConnections() > 0) && elapsedTimeMs(start) < TimeUnit.SECONDS.toMillis(5));
closeConnectionExecutor.shutdown();
closeConnectionExecutor.awaitTermination(5L, TimeUnit.SECONDS);
logPoolState("After shutdown ");
unregisterMBeans(configuration, this);
metricsTracker.close();
}
}
/**
* Evict a connection from the pool.
*
* @param proxyConnection the connection to evict
*/
public final void evictConnection(IHikariConnectionProxy proxyConnection)
{
closeConnection(proxyConnection.getPoolBagEntry());
}
/**
* Get the wrapped DataSource.
*
* @return the wrapped DataSource
*/
public final DataSource getDataSource()
{
return dataSource;
}
/**
* Get the pool configuration object.
*
* @return the {@link HikariConfig} for this pool
*/
public final HikariConfig getConfiguration()
{
return configuration;
}
@Override
public String toString()
{
return configuration.getPoolName();
}
// ***********************************************************************
// HikariPoolMBean methods
// ***********************************************************************
/** {@inheritDoc} */
@Override
public final int getActiveConnections()
{
return connectionBag.getCount(STATE_IN_USE);
}
/** {@inheritDoc} */
@Override
public final int getIdleConnections()
{
return connectionBag.getCount(STATE_NOT_IN_USE);
}
/** {@inheritDoc} */
@Override
public final int getTotalConnections()
{
return connectionBag.size() - connectionBag.getCount(STATE_REMOVED);
}
/** {@inheritDoc} */
@Override
public final int getThreadsAwaitingConnection()
{
return connectionBag.getPendingQueue();
}
/** {@inheritDoc} */
@Override
public final void suspendPool()
{
if (!isPoolSuspended) {
suspendResumeLock.suspend();
isPoolSuspended = true;
}
}
/** {@inheritDoc} */
@Override
public final void resumePool()
{
if (isPoolSuspended) {
isPoolSuspended = false;
addBagItem(); // re-populate the pool
suspendResumeLock.resume();
}
}
// ***********************************************************************
// Protected methods
// ***********************************************************************
/**
* Create and add a single connection to the pool.
*/
protected final boolean addConnection()
{
// Speculative increment of totalConnections with expectation of success
if (totalConnections.incrementAndGet() > configuration.getMaximumPoolSize()) {
totalConnections.decrementAndGet();
return true;
}
Connection connection = null;
try {
connection = (username == null && password == null) ? dataSource.getConnection() : dataSource.getConnection(username, password);
if (isUseJdbc4Validation && !poolUtils.isJdbc40Compliant(connection)) {
throw new SQLException("JDBC4 Connection.isValid() method not supported, connection test query must be configured");
}
final boolean timeoutEnabled = (connectionTimeout != Integer.MAX_VALUE);
final long timeoutMs = timeoutEnabled ? Math.max(250L, connectionTimeout) : 0L;
final int originalTimeout = poolUtils.setNetworkTimeout(houseKeepingExecutorService, connection, timeoutMs, timeoutEnabled);
transactionIsolation = (transactionIsolation < 0 ? connection.getTransactionIsolation() : transactionIsolation);
poolUtils.setupConnection(connection, isAutoCommit, isReadOnly, transactionIsolation, catalog);
connectionCustomizer.customize(connection);
poolUtils.executeSql(connection, configuration.getConnectionInitSql(), isAutoCommit);
poolUtils.setNetworkTimeout(houseKeepingExecutorService, connection, originalTimeout, timeoutEnabled);
connectionBag.add(new PoolBagEntry(connection, this));
lastConnectionFailure.set(null);
return true;
}
catch (Exception e) {
totalConnections.decrementAndGet(); // We failed, so undo speculative increment of totalConnections
lastConnectionFailure.set(e);
poolUtils.quietlyCloseConnection(connection);
LOGGER.debug("Connection attempt to database {} failed: {}", configuration.getPoolName(), e.getMessage(), e);
return false;
}
}
// ***********************************************************************
// Abstract methods
// ***********************************************************************
/**
* Permanently close the real (underlying) connection (eat any exception).
*
    * @param bagEntry the pool bag entry whose underlying connection should be closed
*/
protected abstract void closeConnection(final PoolBagEntry bagEntry);
/**
* Check whether the connection is alive or not.
*
* @param connection the connection to test
* @param timeoutMs the timeout before we consider the test a failure
* @return true if the connection is alive, false if it is not alive or we timed out
*/
protected abstract boolean isConnectionAlive(final Connection connection, final long timeoutMs);
/**
* Attempt to abort() active connections on Java7+, or close() them on Java6.
*
* @throws InterruptedException
*/
protected abstract void abortActiveConnections() throws InterruptedException;
/**
* Create the JVM version-specific ConcurrentBag instance used by the pool.
*
* @param listener the IBagStateListener instance
* @return a ConcurrentBag instance
*/
protected abstract ConcurrentBag<PoolBagEntry> createConcurrentBag(IBagStateListener listener);
/**
* Create the JVM version-specific Housekeeping runnable instance used by the pool.
* @return the HouseKeeper instance
*/
protected abstract Runnable getHouseKeeper();
// ***********************************************************************
// Private methods
// ***********************************************************************
/**
* Fill the pool up to the minimum size.
*/
private void fillPool()
{
if (configuration.getMinimumIdle() > 0) {
if (configuration.isInitializationFailFast() && !addConnection()) {
throw new RuntimeException("Fail-fast during pool initialization", lastConnectionFailure.getAndSet(null));
}
addBagItem();
}
}
/**
* Construct the user's connection customizer, if specified.
*
* @return an IConnectionCustomizer instance
*/
private IConnectionCustomizer initializeCustomizer()
{
if (configuration.getConnectionCustomizerClassName() != null) {
return createInstance(configuration.getConnectionCustomizerClassName(), IConnectionCustomizer.class);
}
return configuration.getConnectionCustomizer();
}
public final void logPoolState(String... prefix)
{
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("{}pool stats {} (total={}, inUse={}, avail={}, waiting={})",
(prefix.length > 0 ? prefix[0] : ""), configuration.getPoolName(),
getTotalConnections(), getActiveConnections(), getIdleConnections(), getThreadsAwaitingConnection());
}
}
}
|
apache-2.0
|
kubernetes/ingress-gce
|
pkg/svcneg/client/clientset/versioned/typed/svcneg/v1beta1/fake/fake_svcneg_client.go
|
1253
|
/*
Copyright 2020 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
package fake
import (
rest "k8s.io/client-go/rest"
testing "k8s.io/client-go/testing"
v1beta1 "k8s.io/ingress-gce/pkg/svcneg/client/clientset/versioned/typed/svcneg/v1beta1"
)
type FakeNetworkingV1beta1 struct {
*testing.Fake
}
func (c *FakeNetworkingV1beta1) ServiceNetworkEndpointGroups(namespace string) v1beta1.ServiceNetworkEndpointGroupInterface {
return &FakeServiceNetworkEndpointGroups{c, namespace}
}
// RESTClient returns a RESTClient that is used to communicate
// with API server by this client implementation.
func (c *FakeNetworkingV1beta1) RESTClient() rest.Interface {
var ret *rest.RESTClient
return ret
}
|
apache-2.0
|
yeuser/java-libs
|
abstract-service-library/dist/abstract-service-library-2.2.0-gamma/api/org/nise/ux/asl/face/class-use/Worker.html
|
6344
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_25) on Sun Dec 15 17:36:27 IRST 2013 -->
<title>Uses of Interface org.nise.ux.asl.face.Worker (Abstract Service Library)</title>
<meta name="date" content="2013-12-15">
<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Interface org.nise.ux.asl.face.Worker (Abstract Service Library)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../org/nise/ux/asl/face/Worker.html" title="interface in org.nise.ux.asl.face">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?org/nise/ux/asl/face/class-use/Worker.html" target="_top">Frames</a></li>
<li><a href="Worker.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Interface org.nise.ux.asl.face.Worker" class="title">Uses of Interface<br>org.nise.ux.asl.face.Worker</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../org/nise/ux/asl/face/Worker.html" title="interface in org.nise.ux.asl.face">Worker</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.nise.ux.asl.face">org.nise.ux.asl.face</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="org.nise.ux.asl.face">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../org/nise/ux/asl/face/Worker.html" title="interface in org.nise.ux.asl.face">Worker</a> in <a href="../../../../../../org/nise/ux/asl/face/package-summary.html">org.nise.ux.asl.face</a></h3>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../org/nise/ux/asl/face/package-summary.html">org.nise.ux.asl.face</a> that return <a href="../../../../../../org/nise/ux/asl/face/Worker.html" title="interface in org.nise.ux.asl.face">Worker</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../org/nise/ux/asl/face/Worker.html" title="interface in org.nise.ux.asl.face">Worker</a></code></td>
<td class="colLast"><span class="strong">WorkerFactory.</span><code><strong><a href="../../../../../../org/nise/ux/asl/face/WorkerFactory.html#getWorker()">getWorker</a></strong>()</code>
<div class="block">Create a new instance of <a href="../../../../../../org/nise/ux/asl/face/Worker.html" title="interface in org.nise.ux.asl.face"><code>Worker</code></a></div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../org/nise/ux/asl/face/Worker.html" title="interface in org.nise.ux.asl.face">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?org/nise/ux/asl/face/class-use/Worker.html" target="_top">Frames</a></li>
<li><a href="Worker.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>
<i>This library is a work of NISE organization User-eXperience team.</i>
</small></p>
</body>
</html>
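The class-use page above documents a single producer of Worker instances: WorkerFactory.getWorker(), which creates a new Worker. A hedged usage sketch of that relationship only; the class and method names WorkerUsageSketch/obtainWorker are invented for the example and are not part of the library:

import org.nise.ux.asl.face.Worker;
import org.nise.ux.asl.face.WorkerFactory;

class WorkerUsageSketch {
    // Ask the factory for a Worker; per the javadoc above,
    // each call creates a new Worker instance.
    static Worker obtainWorker(WorkerFactory factory) {
        return factory.getWorker();
    }
}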
|
apache-2.0
|
googleapis/java-domains
|
proto-google-cloud-domains-v1beta1/src/main/java/com/google/cloud/domains/v1beta1/RetrieveRegisterParametersRequest.java
|
27978
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/domains/v1beta1/domains.proto
package com.google.cloud.domains.v1beta1;
/**
*
*
* <pre>
* Request for the `RetrieveRegisterParameters` method.
* </pre>
*
* Protobuf type {@code google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest}
*/
public final class RetrieveRegisterParametersRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest)
RetrieveRegisterParametersRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use RetrieveRegisterParametersRequest.newBuilder() to construct.
private RetrieveRegisterParametersRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RetrieveRegisterParametersRequest() {
domainName_ = "";
location_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RetrieveRegisterParametersRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private RetrieveRegisterParametersRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
domainName_ = s;
break;
}
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
location_ = s;
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.domains.v1beta1.DomainsProto
.internal_static_google_cloud_domains_v1beta1_RetrieveRegisterParametersRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.domains.v1beta1.DomainsProto
.internal_static_google_cloud_domains_v1beta1_RetrieveRegisterParametersRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest.class,
com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest.Builder.class);
}
public static final int DOMAIN_NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object domainName_;
/**
*
*
* <pre>
* Required. The domain name. Unicode domain names must be expressed in Punycode format.
* </pre>
*
* <code>string domain_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The domainName.
*/
@java.lang.Override
public java.lang.String getDomainName() {
java.lang.Object ref = domainName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
domainName_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The domain name. Unicode domain names must be expressed in Punycode format.
* </pre>
*
* <code>string domain_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for domainName.
*/
@java.lang.Override
public com.google.protobuf.ByteString getDomainNameBytes() {
java.lang.Object ref = domainName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
domainName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LOCATION_FIELD_NUMBER = 2;
private volatile java.lang.Object location_;
/**
*
*
* <pre>
* Required. The location. Must be in the format `projects/*/locations/*`.
* </pre>
*
* <code>
* string location = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The location.
*/
@java.lang.Override
public java.lang.String getLocation() {
java.lang.Object ref = location_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
location_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The location. Must be in the format `projects/*/locations/*`.
* </pre>
*
* <code>
* string location = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for location.
*/
@java.lang.Override
public com.google.protobuf.ByteString getLocationBytes() {
java.lang.Object ref = location_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
location_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(domainName_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, domainName_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(location_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, location_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(domainName_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, domainName_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(location_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, location_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest)) {
return super.equals(obj);
}
com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest other =
(com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest) obj;
if (!getDomainName().equals(other.getDomainName())) return false;
if (!getLocation().equals(other.getLocation())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + DOMAIN_NAME_FIELD_NUMBER;
hash = (53 * hash) + getDomainName().hashCode();
hash = (37 * hash) + LOCATION_FIELD_NUMBER;
hash = (53 * hash) + getLocation().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for the `RetrieveRegisterParameters` method.
* </pre>
*
* Protobuf type {@code google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest)
com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.domains.v1beta1.DomainsProto
.internal_static_google_cloud_domains_v1beta1_RetrieveRegisterParametersRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.domains.v1beta1.DomainsProto
.internal_static_google_cloud_domains_v1beta1_RetrieveRegisterParametersRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest.class,
com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest.Builder.class);
}
// Construct using
// com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
domainName_ = "";
location_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.domains.v1beta1.DomainsProto
.internal_static_google_cloud_domains_v1beta1_RetrieveRegisterParametersRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest
getDefaultInstanceForType() {
return com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest build() {
com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest buildPartial() {
com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest result =
new com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest(this);
result.domainName_ = domainName_;
result.location_ = location_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest) {
return mergeFrom(
(com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest other) {
if (other
== com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest
.getDefaultInstance()) return this;
if (!other.getDomainName().isEmpty()) {
domainName_ = other.domainName_;
onChanged();
}
if (!other.getLocation().isEmpty()) {
location_ = other.location_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest)
e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object domainName_ = "";
/**
*
*
* <pre>
* Required. The domain name. Unicode domain names must be expressed in Punycode format.
* </pre>
*
* <code>string domain_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The domainName.
*/
public java.lang.String getDomainName() {
java.lang.Object ref = domainName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
domainName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The domain name. Unicode domain names must be expressed in Punycode format.
* </pre>
*
* <code>string domain_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for domainName.
*/
public com.google.protobuf.ByteString getDomainNameBytes() {
java.lang.Object ref = domainName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
domainName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The domain name. Unicode domain names must be expressed in Punycode format.
* </pre>
*
* <code>string domain_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The domainName to set.
* @return This builder for chaining.
*/
public Builder setDomainName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
domainName_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The domain name. Unicode domain names must be expressed in Punycode format.
* </pre>
*
* <code>string domain_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearDomainName() {
domainName_ = getDefaultInstance().getDomainName();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The domain name. Unicode domain names must be expressed in Punycode format.
* </pre>
*
* <code>string domain_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for domainName to set.
* @return This builder for chaining.
*/
public Builder setDomainNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
domainName_ = value;
onChanged();
return this;
}
private java.lang.Object location_ = "";
/**
*
*
* <pre>
* Required. The location. Must be in the format `projects/*/locations/*`.
* </pre>
*
* <code>
* string location = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The location.
*/
public java.lang.String getLocation() {
java.lang.Object ref = location_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
location_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The location. Must be in the format `projects/*/locations/*`.
* </pre>
*
* <code>
* string location = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for location.
*/
public com.google.protobuf.ByteString getLocationBytes() {
java.lang.Object ref = location_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
location_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The location. Must be in the format `projects/*/locations/*`.
* </pre>
*
* <code>
* string location = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The location to set.
* @return This builder for chaining.
*/
public Builder setLocation(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
location_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The location. Must be in the format `projects/*/locations/*`.
* </pre>
*
* <code>
* string location = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearLocation() {
location_ = getDefaultInstance().getLocation();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The location. Must be in the format `projects/*/locations/*`.
* </pre>
*
* <code>
* string location = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for location to set.
* @return This builder for chaining.
*/
public Builder setLocationBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
location_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest)
private static final com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest();
}
public static com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<RetrieveRegisterParametersRequest> PARSER =
new com.google.protobuf.AbstractParser<RetrieveRegisterParametersRequest>() {
@java.lang.Override
public RetrieveRegisterParametersRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RetrieveRegisterParametersRequest(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<RetrieveRegisterParametersRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<RetrieveRegisterParametersRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
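The generated comments above mark both fields of RetrieveRegisterParametersRequest as REQUIRED: domain_name (a Punycode domain name) and location (in the format projects/*/locations/*). A minimal construction sketch that uses only the builder methods defined in this file; the concrete domain and project values are placeholders:

import com.google.cloud.domains.v1beta1.RetrieveRegisterParametersRequest;

class RetrieveRegisterParametersRequestSketch {
    static RetrieveRegisterParametersRequest buildRequest() {
        // Both fields are required according to the field comments above;
        // "example.com" and "my-project" are placeholder values.
        return RetrieveRegisterParametersRequest.newBuilder()
                .setDomainName("example.com")
                .setLocation("projects/my-project/locations/global")
                .build();
    }
}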
|
apache-2.0
|
GerritCodeReview/homepage
|
pages/design-docs/delete-groups/conclusion.md
|
1163
|
---
title: "Design Doc - Deletion of groups - Conclusion"
permalink: design-docs/delete-groups-conclusion.html
hide_sidebar: true
hide_navtoggle: true
toc: false
---
# Conclusion
Deletion of groups is a generally desired feature we'd like to support in
Gerrit. However, there seem to be two major behaviors/modes depending on host
admin/owner preferences:
1) Remove a group without keeping a Git ref to it to reduce the number of Git
refs on the Gerrit server.
2) Remove a group but keep its complete audit and add an audit entry for the
group deletion.
In core Gerrit, we can't implement 1) without offering 2) by default, because
existing host admins/owners might rely on core Gerrit's audit for groups to
keep track of all modifications. Enabling traceless group deletion without
their explicit consent would likely result in unpleasant surprises.
Implementing 2) is more effort, though, and even more so if we want to offer
both 1) and 2) inside of core Gerrit. The proposer of the design wishes to
support just 1). Hence, we recommend implementing 1) in a plugin. Before that
plugin can become a core plugin in the future, it also needs to support 2).
|
apache-2.0
|
HongjianLi/cudart
|
thrust/Makefile
|
113
|
CC=nvcc
thrust: thrust.o
$(CC) -o $@ $^
thrust.o: thrust.cu
$(CC) -o $@ $< -c
clean:
rm -f thrust thrust.o
|
apache-2.0
|
hortonworks/cloudbreak
|
datalake/src/main/java/com/sequenceiq/datalake/flow/diagnostics/event/SdxCmDiagnosticsFailedEvent.java
|
934
|
package com.sequenceiq.datalake.flow.diagnostics.event;
import static com.sequenceiq.datalake.flow.diagnostics.SdxCmDiagnosticsEvent.SDX_CM_DIAGNOSTICS_COLLECTION_FAILED_EVENT;
import java.util.Map;
import com.sequenceiq.datalake.flow.SdxFailedEvent;
public class SdxCmDiagnosticsFailedEvent extends SdxFailedEvent {
private final Map<String, Object> properties;
public SdxCmDiagnosticsFailedEvent(Long sdxId, String userId, Map<String, Object> properties, Exception exception) {
super(sdxId, userId, exception);
this.properties = properties;
}
    public static SdxCmDiagnosticsFailedEvent from(BaseSdxCmDiagnosticsEvent event, Exception exception) {
        return new SdxCmDiagnosticsFailedEvent(event.getResourceId(), event.getUserId(), event.getProperties(), exception);
}
@Override
public String selector() {
return SDX_CM_DIAGNOSTICS_COLLECTION_FAILED_EVENT.event();
}
}
|
apache-2.0
|
vintsie/iblog
|
src/main/java/com/vint/iblog/web/servlet/ConfigurationServlet.java
|
3565
|
package com.vint.iblog.web.servlet;
import com.vint.iblog.datastore.define.SequenceManagerDAO;
import com.vint.iblog.datastore.define.StaticDataDAO;
import org.apache.commons.lang3.StringUtils;
import org.vintsie.jcobweb.proxy.ServiceFactory;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
/**
*
* Created by Vin on 14-2-17.
*/
public class ConfigurationServlet extends HttpServlet {
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
doPost(req, resp);
}
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
String cfgType = req.getParameter("cfgType");
PrintWriter pw = resp.getWriter();
// Configure static data
if (StringUtils.equals(cfgType, "sd")) {
String operation = req.getParameter("operation");
// Add a new static data entry
if (StringUtils.equals("add", operation)) {
String dataType = req.getParameter("dataType");
String dataValue = req.getParameter("dataValue");
String sort = req.getParameter("sort");
if (StringUtils.isEmpty(dataType) || StringUtils.isEmpty(dataValue)) {
pw.write("When creating new static data, neither dataType or dataValue can not be null.");
} else {
try {
StaticDataDAO sdd = ServiceFactory.getService(StaticDataDAO.class);
sdd.newStaticData(dataType, dataValue, Integer.parseInt(sort));
pw.write("success");
} catch (Exception e) {
pw.write(e.getMessage());
e.printStackTrace();
}
}
}
} else if (StringUtils.equals(cfgType, "seq")) {
/**
 * Sequence operations come in two kinds: read and write. If the incoming
 * operation is read, only the current sequence value for the given type is
 * queried. If the operation is write, the sequence data for that type is
 * cleared and replaced with the new hexadecimal string.
 *
 * Example URLs:
 * http://localhost:8080/cfg?cfgType=seq&operation=read&type=BLOG&hex=101c0701
 * http://localhost:8080/cfg?cfgType=seq&operation=write&type=BLOG&hex=101c0701
 */
String type = req.getParameter("type");
String hex = req.getParameter("hex");
String operation = req.getParameter("operation");
try {
SequenceManagerDAO sequenceManagerDAO = ServiceFactory.getService(SequenceManagerDAO.class);
if (StringUtils.equals("read", operation)) {
pw.write(type + "'s current Sequence is " + sequenceManagerDAO.getCurrentSeq(type));
} else if (StringUtils.equals("write", operation)) {
sequenceManagerDAO.createSequence(type, hex);
pw.write("Success!!");
}
} catch (Exception e) {
pw.write(e.getMessage());
e.printStackTrace();
}
} else {
pw.write("No mapping operation found according to " + cfgType);
}
pw.flush();
pw.close();
}
}
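The servlet above is driven entirely by query parameters (cfgType, operation, type, hex, dataType, dataValue, sort). A small client sketch for the sequence "read" operation, assuming the servlet is mapped to /cfg on localhost:8080 as in the example URLs in its comment; the host, port, mapping, and the class name ConfigurationClientSketch are assumptions for illustration:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

class ConfigurationClientSketch {
    public static void main(String[] args) throws Exception {
        // Read the current sequence value for type BLOG, mirroring the example URL above.
        URL url = new URL("http://localhost:8080/cfg?cfgType=seq&operation=read&type=BLOG&hex=101c0701");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try (BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);
            }
        } finally {
            conn.disconnect();
        }
    }
}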
|
apache-2.0
|
datacratic/aws-sdk-go
|
internal/protocol/rest/build.go
|
4968
|
package rest
import (
"bytes"
"encoding/base64"
"fmt"
"io"
"net/url"
"path"
"reflect"
"strconv"
"strings"
"time"
"github.com/datacratic/aws-sdk-go/aws"
)
// RFC822 is the RFC822 timestamp format used by AWS protocols
const RFC822 = "Mon, 2 Jan 2006 15:04:05 GMT"
func Build(r *aws.Request) {
if r.ParamsFilled() {
v := reflect.ValueOf(r.Params).Elem()
buildLocationElements(r, v)
buildBody(r, v)
}
}
func buildLocationElements(r *aws.Request, v reflect.Value) {
query := r.HTTPRequest.URL.Query()
for i := 0; i < v.NumField(); i++ {
m := v.Field(i)
if n := v.Type().Field(i).Name; n[0:1] == strings.ToLower(n[0:1]) {
continue
}
if m.IsValid() {
field := v.Type().Field(i)
name := field.Tag.Get("locationName")
if name == "" {
name = field.Name
}
if m.Kind() == reflect.Ptr {
m = m.Elem()
}
if !m.IsValid() {
continue
}
switch field.Tag.Get("location") {
case "headers": // header maps
buildHeaderMap(r, m, field.Tag.Get("locationName"))
case "header":
buildHeader(r, m, name)
case "uri":
buildURI(r, m, name)
case "querystring":
buildQueryString(r, m, name, query)
}
}
if r.Error != nil {
return
}
}
r.HTTPRequest.URL.RawQuery = query.Encode()
updatePath(r.HTTPRequest.URL, r.HTTPRequest.URL.Path)
}
func buildBody(r *aws.Request, v reflect.Value) {
if field, ok := v.Type().FieldByName("SDKShapeTraits"); ok {
if payloadName := field.Tag.Get("payload"); payloadName != "" {
pfield, _ := v.Type().FieldByName(payloadName)
if ptag := pfield.Tag.Get("type"); ptag != "" && ptag != "structure" {
payload := reflect.Indirect(v.FieldByName(payloadName))
if payload.IsValid() && payload.Interface() != nil {
switch reader := payload.Interface().(type) {
case io.ReadSeeker:
r.SetReaderBody(reader)
case []byte:
r.SetBufferBody(reader)
case string:
r.SetBufferBody([]byte(reader))
default:
r.Error = fmt.Errorf("unknown payload type %s", payload.Type())
}
}
}
}
}
}
func buildHeader(r *aws.Request, v reflect.Value, name string) {
str, err := convertType(v)
if err != nil {
r.Error = err
} else if str != nil {
r.HTTPRequest.Header.Add(name, *str)
}
}
func buildHeaderMap(r *aws.Request, v reflect.Value, prefix string) {
for _, key := range v.MapKeys() {
str, err := convertType(v.MapIndex(key))
if err != nil {
r.Error = err
} else if str != nil {
r.HTTPRequest.Header.Add(prefix+key.String(), *str)
}
}
}
func buildURI(r *aws.Request, v reflect.Value, name string) {
value, err := convertType(v)
if err != nil {
r.Error = err
} else if value != nil {
uri := r.HTTPRequest.URL.Path
uri = strings.Replace(uri, "{"+name+"}", escapePath(*value, true), -1)
uri = strings.Replace(uri, "{"+name+"+}", escapePath(*value, false), -1)
r.HTTPRequest.URL.Path = uri
}
}
func buildQueryString(r *aws.Request, v reflect.Value, name string, query url.Values) {
str, err := convertType(v)
if err != nil {
r.Error = err
} else if str != nil {
query.Set(name, *str)
}
}
func updatePath(url *url.URL, urlPath string) {
scheme, query := url.Scheme, url.RawQuery
// clean up path
urlPath = path.Clean(urlPath)
// get formatted URL minus scheme so we can build this into Opaque
url.Scheme, url.Path, url.RawQuery = "", "", ""
s := url.String()
url.Scheme = scheme
url.RawQuery = query
// build opaque URI
url.Opaque = s + urlPath
}
// Whether the byte value can be sent without escaping in AWS URLs
var noEscape [256]bool
var noEscapeInitialized = false
// initialise noEscape
func initNoEscape() {
for i := range noEscape {
// Amazon expects every character except these escaped
noEscape[i] = (i >= 'A' && i <= 'Z') ||
(i >= 'a' && i <= 'z') ||
(i >= '0' && i <= '9') ||
i == '-' ||
i == '.' ||
i == '_' ||
i == '~'
}
}
// escapePath escapes part of a URL path in Amazon style
func escapePath(path string, encodeSep bool) string {
if !noEscapeInitialized {
initNoEscape()
noEscapeInitialized = true
}
var buf bytes.Buffer
for i := 0; i < len(path); i++ {
c := path[i]
if noEscape[c] || (c == '/' && !encodeSep) {
buf.WriteByte(c)
} else {
buf.WriteByte('%')
buf.WriteString(strings.ToUpper(strconv.FormatUint(uint64(c), 16)))
}
}
return buf.String()
}
func convertType(v reflect.Value) (*string, error) {
v = reflect.Indirect(v)
if !v.IsValid() {
return nil, nil
}
var str string
switch value := v.Interface().(type) {
case string:
str = value
case []byte:
str = base64.StdEncoding.EncodeToString(value)
case bool:
str = strconv.FormatBool(value)
case int64:
str = strconv.FormatInt(value, 10)
case float64:
str = strconv.FormatFloat(value, 'f', -1, 64)
case time.Time:
str = value.UTC().Format(RFC822)
default:
err := fmt.Errorf("Unsupported value for param %v (%s)", v.Interface(), v.Type())
return nil, err
}
return &str, nil
}
|
apache-2.0
|
yunmel/rps
|
src/main/java/com/yunmel/db/utils/PropertyHolderUtil.java
|
2765
|
/**
* Copyright 2010-2016 the original author or authors.
*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.yunmel.db.utils;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import com.yunmel.db.common.DatasourceConfig;
public class PropertyHolderUtil {
private final static String CONFIG_FILE_PATH = "app.properties";
private static Properties props = null;
static {
props = new Properties();
try {
InputStream input = PropertyHolderUtil.class.getResourceAsStream("/" + CONFIG_FILE_PATH);
      if (input == null) {
        ClassLoader classLoader = PropertyHolderUtil.class.getClassLoader();
        input = classLoader.getResourceAsStream(CONFIG_FILE_PATH);
      }
if (input == null) {
String path = System.getProperty("user.dir") + "/" + CONFIG_FILE_PATH;
input = new FileInputStream(path);
}
props.load(input);
} catch (Exception e) {
e.printStackTrace();
}
}
private PropertyHolderUtil() {
super();
}
public static String getProperty(String propName) {
return props.getProperty(propName);
}
public static DatasourceConfig getDsConfig() {
DatasourceConfig config = new DatasourceConfig();
String driverClass = props.getProperty("jdbc.driverClass");
String jdbcUrl = props.getProperty("jdbc.url");
String username = props.getProperty("jdbc.user");
String password = props.getProperty("jdbc.password");
config.setDriverClass(driverClass);
config.setJdbcUrl(jdbcUrl);
config.setUsername(username);
config.setPassword(password);
String poolType = props.getProperty("jdbc.pool.type");
if (StringUtils.isNotBlank(poolType)) {
Map<String, String> poolPerperties = new HashMap<>();
poolPerperties.put(DatasourceConfig._POOL_TYPE, poolType);
config.setPoolPerperties(poolPerperties);
}
return config;
}
}
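PropertyHolderUtil.getDsConfig() builds a DatasourceConfig from keys in app.properties: jdbc.driverClass, jdbc.url, jdbc.user, jdbc.password, and optionally jdbc.pool.type. A usage sketch; the class name DataSourceBootstrapSketch and any property values implied here are placeholders, not taken from the project:

import com.yunmel.db.common.DatasourceConfig;
import com.yunmel.db.utils.PropertyHolderUtil;

class DataSourceBootstrapSketch {
    public static void main(String[] args) {
        // app.properties is expected on the classpath (or in the working directory)
        // and should define jdbc.driverClass, jdbc.url, jdbc.user and jdbc.password.
        DatasourceConfig config = PropertyHolderUtil.getDsConfig();
        // config can now be handed to whatever pool/bootstrap code consumes DatasourceConfig.
        // Individual keys can also be read directly:
        System.out.println("jdbc.url = " + PropertyHolderUtil.getProperty("jdbc.url"));
    }
}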
|
apache-2.0
|
tobster/openengsb
|
core/workflow/domains/src/main/java/org/openengsb/drools/NotificationDomain.java
|
784
|
/**
Copyright 2010 OpenEngSB Division, Vienna University of Technology
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openengsb.drools;
import org.openengsb.drools.model.Notification;
public interface NotificationDomain extends Domain {
void notify(Notification notification);
}
|
apache-2.0
|
quarkusio/quarkus
|
extensions/resteasy-reactive/quarkus-resteasy-reactive-jackson/deployment/src/test/java/io/quarkus/resteasy/reactive/jackson/deployment/test/VertxJsonTest.java
|
1773
|
package io.quarkus.resteasy.reactive.jackson.deployment.test;
import java.util.function.Supplier;
import org.hamcrest.Matchers;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import io.quarkus.test.QuarkusUnitTest;
import io.restassured.RestAssured;
public class VertxJsonTest {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.setArchiveProducer(new Supplier<JavaArchive>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(VertxJsonEndpoint.class);
}
});
@Test
public void testJsonObject() {
RestAssured.with()
.body("{\"name\": \"Bob\"}")
.contentType("application/json")
.post("/vertx/jsonObject")
.then()
.statusCode(200)
.contentType("application/json")
.body("name", Matchers.equalTo("Bob"))
.body("age", Matchers.equalTo(50))
.body("nested.foo", Matchers.equalTo("bar"))
.body("bools[0]", Matchers.equalTo(true));
}
@Test
public void testJsonArray() {
RestAssured.with()
.body("[\"first\"]")
.contentType("application/json")
.post("/vertx/jsonArray")
.then()
.statusCode(200)
.contentType("application/json")
.body("[0]", Matchers.equalTo("first"))
.body("[1]", Matchers.equalTo("last"));
}
}
|
apache-2.0
|
jcamachor/hive
|
ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java
|
26009
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.txn.compactor;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.ServerUtils;
import org.apache.hadoop.hive.common.ValidCompactorWriteIdList;
import org.apache.hadoop.hive.common.ValidTxnList;
import org.apache.hadoop.hive.common.ValidWriteIdList;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.TransactionalValidationListener;
import org.apache.hadoop.hive.metastore.api.AbortTxnRequest;
import org.apache.hadoop.hive.metastore.api.AllocateTableWriteIdsRequest;
import org.apache.hadoop.hive.metastore.api.AllocateTableWriteIdsResponse;
import org.apache.hadoop.hive.metastore.api.CommitTxnRequest;
import org.apache.hadoop.hive.metastore.api.CompactionRequest;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.GetValidWriteIdsRequest;
import org.apache.hadoop.hive.metastore.api.LockRequest;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
import org.apache.hadoop.hive.metastore.api.OpenTxnRequest;
import org.apache.hadoop.hive.metastore.api.OpenTxnsResponse;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
import org.apache.hadoop.hive.metastore.api.TxnType;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.metrics.AcidMetricService;
import org.apache.hadoop.hive.metastore.txn.CompactionInfo;
import org.apache.hadoop.hive.metastore.txn.TxnCommonUtils;
import org.apache.hadoop.hive.metastore.utils.TestTxnDbUtil;
import org.apache.hadoop.hive.metastore.txn.TxnStore;
import org.apache.hadoop.hive.metastore.txn.TxnUtils;
import org.apache.hadoop.hive.ql.io.AcidInputFormat;
import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.io.RecordIdentifier;
import org.apache.hadoop.hive.ql.io.RecordUpdater;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.Progressable;
import org.apache.thrift.TException;
import org.junit.Before;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.Stack;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.hive.ql.txn.compactor.CompactorTestUtilities.CompactorThreadType;
/**
* Super class for all of the compactor test modules.
*/
public abstract class CompactorTest {
static final private String CLASS_NAME = CompactorTest.class.getName();
static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
public static final String WORKER_VERSION = "4.0.0";
protected TxnStore txnHandler;
protected IMetaStoreClient ms;
protected HiveConf conf;
private final AtomicBoolean stop = new AtomicBoolean();
protected File tmpdir;
@Before
public void setup() throws Exception {
conf = new HiveConf();
TestTxnDbUtil.setConfValues(conf);
TestTxnDbUtil.cleanDb(conf);
TestTxnDbUtil.prepDb(conf);
ms = new HiveMetaStoreClient(conf);
txnHandler = TxnUtils.getTxnStore(conf);
tmpdir = new File(Files.createTempDirectory("compactor_test_table_").toString());
}
protected void compactorTestCleanup() throws IOException {
FileUtils.deleteDirectory(tmpdir);
}
protected void startInitiator() throws Exception {
startThread(CompactorThreadType.INITIATOR, true);
}
protected void startWorker() throws Exception {
startThread(CompactorThreadType.WORKER, true);
}
protected void startCleaner() throws Exception {
startThread(CompactorThreadType.CLEANER, true);
}
protected void runAcidMetricService() throws Exception {
TestTxnDbUtil.setConfValues(conf);
AcidMetricService t = new AcidMetricService();
t.setConf(conf);
t.run();
}
protected Table newTable(String dbName, String tableName, boolean partitioned) throws TException {
return newTable(dbName, tableName, partitioned, new HashMap<String, String>(), null, false);
}
protected Table newTable(String dbName, String tableName, boolean partitioned,
Map<String, String> parameters) throws TException {
return newTable(dbName, tableName, partitioned, parameters, null, false);
}
protected Table newTable(String dbName, String tableName, boolean partitioned,
Map<String, String> parameters, List<Order> sortCols,
boolean isTemporary)
throws TException {
Table table = new Table();
table.setTableType(TableType.MANAGED_TABLE.name());
table.setTableName(tableName);
table.setDbName(dbName);
table.setOwner("me");
table.setSd(newStorageDescriptor(getLocation(tableName, null), sortCols));
List<FieldSchema> partKeys = new ArrayList<FieldSchema>(1);
if (partitioned) {
partKeys.add(new FieldSchema("ds", "string", "no comment"));
table.setPartitionKeys(partKeys);
}
// Set the table as transactional for compaction to work
if (parameters == null) {
parameters = new HashMap<>();
}
parameters.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, "true");
if (sortCols != null) {
// Sort columns are not allowed for full ACID table. So, change it to insert-only table
parameters.put(hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES,
TransactionalValidationListener.INSERTONLY_TRANSACTIONAL_PROPERTY);
}
table.setParameters(parameters);
if (isTemporary) table.setTemporary(true);
// drop the table first, in case some previous test created it
ms.dropTable(dbName, tableName);
ms.createTable(table);
return table;
}
protected Partition newPartition(Table t, String value) throws Exception {
return newPartition(t, value, null);
}
protected Partition newPartition(Table t, String value, List<Order> sortCols) throws Exception {
Partition part = new Partition();
part.addToValues(value);
part.setDbName(t.getDbName());
part.setTableName(t.getTableName());
part.setSd(newStorageDescriptor(getLocation(t.getTableName(), value), sortCols));
part.setParameters(new HashMap<String, String>());
ms.add_partition(part);
return part;
}
protected long openTxn() throws MetaException {
return openTxn(TxnType.DEFAULT);
}
protected long openTxn(TxnType txnType) throws MetaException {
OpenTxnRequest rqst = new OpenTxnRequest(1, System.getProperty("user.name"), ServerUtils.hostname());
rqst.setTxn_type(txnType);
if (txnType == TxnType.REPL_CREATED) {
rqst.setReplPolicy("default.*");
rqst.setReplSrcTxnIds(Arrays.asList(1L));
}
List<Long> txns = txnHandler.openTxns(rqst).getTxn_ids();
return txns.get(0);
}
protected long allocateWriteId(String dbName, String tblName, long txnid)
throws MetaException, TxnAbortedException, NoSuchTxnException {
AllocateTableWriteIdsRequest awiRqst
= new AllocateTableWriteIdsRequest(dbName, tblName);
awiRqst.setTxnIds(Collections.singletonList(txnid));
AllocateTableWriteIdsResponse awiResp = txnHandler.allocateTableWriteIds(awiRqst);
return awiResp.getTxnToWriteIds().get(0).getWriteId();
}
protected void addDeltaFile(Table t, Partition p, long minTxn, long maxTxn, int numRecords)
throws Exception {
addFile(t, p, minTxn, maxTxn, numRecords, FileType.DELTA, 2, true);
}
protected void addLengthFile(Table t, Partition p, long minTxn, long maxTxn, int numRecords)
throws Exception {
addFile(t, p, minTxn, maxTxn, numRecords, FileType.LENGTH_FILE, 2, true);
}
protected void addBaseFile(Table t, Partition p, long maxTxn, int numRecords) throws Exception {
addFile(t, p, 0, maxTxn, numRecords, FileType.BASE, 2, true);
}
protected void addBaseFile(Table t, Partition p, long maxTxn, int numRecords, long visibilityId) throws Exception {
addFile(t, p, 0, maxTxn, numRecords, FileType.BASE, 2, true, visibilityId);
}
protected void addLegacyFile(Table t, Partition p, int numRecords) throws Exception {
addFile(t, p, 0, 0, numRecords, FileType.LEGACY, 2, true);
}
protected void addDeltaFile(Table t, Partition p, long minTxn, long maxTxn, int numRecords,
int numBuckets, boolean allBucketsPresent) throws Exception {
addFile(t, p, minTxn, maxTxn, numRecords, FileType.DELTA, numBuckets, allBucketsPresent);
}
protected void addBaseFile(Table t, Partition p, long maxTxn, int numRecords, int numBuckets,
boolean allBucketsPresent) throws Exception {
addFile(t, p, 0, maxTxn, numRecords, FileType.BASE, numBuckets, allBucketsPresent);
}
protected List<Path> getDirectories(HiveConf conf, Table t, Partition p) throws Exception {
String partValue = (p == null) ? null : p.getValues().get(0);
String location = getLocation(t.getTableName(), partValue);
Path dir = new Path(location);
FileSystem fs = FileSystem.get(conf);
FileStatus[] stats = fs.listStatus(dir);
List<Path> paths = new ArrayList<Path>(stats.length);
for (int i = 0; i < stats.length; i++) paths.add(stats[i].getPath());
return paths;
}
protected void burnThroughTransactions(String dbName, String tblName, int num)
throws MetaException, NoSuchTxnException, TxnAbortedException {
burnThroughTransactions(dbName, tblName, num, null, null);
}
protected void burnThroughTransactions(String dbName, String tblName, int num, Set<Long> open, Set<Long> aborted)
throws NoSuchTxnException, TxnAbortedException, MetaException {
burnThroughTransactions(dbName, tblName, num, open, aborted, null);
}
protected void burnThroughTransactions(String dbName, String tblName, int num, Set<Long> open, Set<Long> aborted, LockRequest lockReq)
throws MetaException, NoSuchTxnException, TxnAbortedException {
OpenTxnsResponse rsp = txnHandler.openTxns(new OpenTxnRequest(num, "me", "localhost"));
AllocateTableWriteIdsRequest awiRqst = new AllocateTableWriteIdsRequest(dbName, tblName);
awiRqst.setTxnIds(rsp.getTxn_ids());
AllocateTableWriteIdsResponse awiResp = txnHandler.allocateTableWriteIds(awiRqst);
int i = 0;
for (long tid : rsp.getTxn_ids()) {
assert(awiResp.getTxnToWriteIds().get(i++).getTxnId() == tid);
if(lockReq != null) {
lockReq.setTxnid(tid);
txnHandler.lock(lockReq);
}
if (aborted != null && aborted.contains(tid)) {
txnHandler.abortTxn(new AbortTxnRequest(tid));
      } else if (open == null || !open.contains(tid)) {
txnHandler.commitTxn(new CommitTxnRequest(tid));
}
}
}
protected void stopThread() {
stop.set(true);
}
private StorageDescriptor newStorageDescriptor(String location, List<Order> sortCols) {
StorageDescriptor sd = new StorageDescriptor();
List<FieldSchema> cols = new ArrayList<FieldSchema>(2);
cols.add(new FieldSchema("a", "varchar(25)", "still no comment"));
cols.add(new FieldSchema("b", "int", "comment"));
sd.setCols(cols);
sd.setLocation(location);
sd.setInputFormat(MockInputFormat.class.getName());
sd.setOutputFormat(MockOutputFormat.class.getName());
sd.setNumBuckets(1);
SerDeInfo serde = new SerDeInfo();
serde.setSerializationLib(LazySimpleSerDe.class.getName());
sd.setSerdeInfo(serde);
List<String> bucketCols = new ArrayList<String>(1);
bucketCols.add("a");
sd.setBucketCols(bucketCols);
if (sortCols != null) {
sd.setSortCols(sortCols);
}
return sd;
}
// I can't do this with @Before because I want to be able to control when the thread starts
private void startThread(CompactorThreadType type, boolean stopAfterOne) throws Exception {
TestTxnDbUtil.setConfValues(conf);
CompactorThread t;
switch (type) {
case INITIATOR: t = new Initiator(); break;
case WORKER: t = new Worker(); break;
case CLEANER: t = new Cleaner(); break;
default: throw new RuntimeException("Huh? Unknown thread type.");
}
t.setThreadId((int) t.getId());
t.setConf(conf);
stop.set(stopAfterOne);
t.init(stop);
if (stopAfterOne) t.run();
else t.start();
}
private String getLocation(String tableName, String partValue) {
String location = tmpdir.getAbsolutePath() +
System.getProperty("file.separator") + tableName;
if (partValue != null) {
location += System.getProperty("file.separator") + "ds=" + partValue;
}
return location;
}
private enum FileType {BASE, DELTA, LEGACY, LENGTH_FILE}
private void addFile(Table t, Partition p, long minTxn, long maxTxn, int numRecords, FileType type, int numBuckets,
boolean allBucketsPresent) throws Exception {
addFile(t, p, minTxn, maxTxn, numRecords, type, numBuckets, allBucketsPresent, 0);
}
private void addFile(Table t, Partition p, long minTxn, long maxTxn, int numRecords, FileType type, int numBuckets,
boolean allBucketsPresent, long visibilityId) throws Exception {
String partValue = (p == null) ? null : p.getValues().get(0);
Path location = new Path(getLocation(t.getTableName(), partValue));
String filename = null;
switch (type) {
case BASE: filename = AcidUtils.BASE_PREFIX + maxTxn + (visibilityId > 0 ? AcidUtils.VISIBILITY_PREFIX + visibilityId : ""); break;
case LENGTH_FILE: // Fall through to delta
case DELTA: filename = makeDeltaDirName(minTxn, maxTxn); break;
case LEGACY: break; // handled below
}
FileSystem fs = FileSystem.get(conf);
for (int bucket = 0; bucket < numBuckets; bucket++) {
if (bucket == 0 && !allBucketsPresent) continue; // skip one
Path partFile = null;
if (type == FileType.LEGACY) {
partFile = new Path(location, String.format(AcidUtils.LEGACY_FILE_BUCKET_DIGITS, bucket) + "_0");
} else {
Path dir = new Path(location, filename);
fs.mkdirs(dir);
partFile = AcidUtils.createBucketFile(dir, bucket);
if (type == FileType.LENGTH_FILE) {
partFile = new Path(partFile.toString() + AcidUtils.DELTA_SIDE_FILE_SUFFIX);
}
}
FSDataOutputStream out = fs.create(partFile);
if (type == FileType.LENGTH_FILE) {
out.writeInt(numRecords);//hmm - length files should store length in bytes...
} else {
for (int i = 0; i < numRecords; i++) {
RecordIdentifier ri = new RecordIdentifier(maxTxn - 1, bucket, i);
ri.write(out);
out.writeBytes("mary had a little lamb its fleece was white as snow\n");
}
}
out.close();
}
}
static class MockInputFormat implements AcidInputFormat<WritableComparable,Text> {
@Override
public AcidInputFormat.RowReader<Text> getReader(InputSplit split,
Options options) throws
IOException {
return null;
}
@Override
public RawReader<Text> getRawReader(Configuration conf, boolean collapseEvents, int bucket,
ValidWriteIdList validWriteIdList,
Path baseDirectory, Path[] deltaDirectory, Map<String, Integer> deltaToAttemptId) throws IOException {
List<Path> filesToRead = new ArrayList<Path>();
if (baseDirectory != null) {
if (baseDirectory.getName().startsWith(AcidUtils.BASE_PREFIX)) {
Path p = AcidUtils.createBucketFile(baseDirectory, bucket);
FileSystem fs = p.getFileSystem(conf);
if (fs.exists(p)) filesToRead.add(p);
} else {
filesToRead.add(new Path(baseDirectory, "000000_0"));
}
}
for (int i = 0; i < deltaDirectory.length; i++) {
Path p = AcidUtils.createBucketFile(deltaDirectory[i], bucket);
FileSystem fs = p.getFileSystem(conf);
if (fs.exists(p)) filesToRead.add(p);
}
return new MockRawReader(conf, filesToRead);
}
@Override
public InputSplit[] getSplits(JobConf entries, int i) throws IOException {
return new InputSplit[0];
}
@Override
public RecordReader<WritableComparable, Text> getRecordReader(InputSplit inputSplit, JobConf entries,
Reporter reporter) throws IOException {
return null;
}
@Override
public boolean validateInput(FileSystem fs, HiveConf conf, List<FileStatus> files) throws
IOException {
return false;
}
}
static class MockRawReader implements AcidInputFormat.RawReader<Text> {
private final Stack<Path> filesToRead;
private final Configuration conf;
private FSDataInputStream is = null;
private final FileSystem fs;
private boolean lastWasDelete = true;
MockRawReader(Configuration conf, List<Path> files) throws IOException {
filesToRead = new Stack<Path>();
for (Path file : files) filesToRead.push(file);
this.conf = conf;
fs = FileSystem.get(conf);
}
@Override
public ObjectInspector getObjectInspector() {
return null;
}
/**
* This is bogus, especially with split-update acid tables. It causes compaction to create
* delete_delta_x_y where none existed before, producing a data layout that would never be
* created by the 'real' code path.
*/
@Override
public boolean isDelete(Text value) {
// Alternate between returning deleted and not. This is easier than actually
// tracking operations. We test that this is getting properly called by checking that only
// half the records show up in base files after major compactions.
lastWasDelete = !lastWasDelete;
return lastWasDelete;
}
@Override
public boolean next(RecordIdentifier identifier, Text text) throws IOException {
if (is == null) {
// Open the next file
if (filesToRead.empty()) return false;
Path p = filesToRead.pop();
LOG.debug("Reading records from " + p.toString());
is = fs.open(p);
}
String line = null;
try {
identifier.readFields(is);
line = is.readLine();
} catch (EOFException e) {
}
if (line == null) {
// Set our current entry to null (since it's done) and try again.
is = null;
return next(identifier, text);
}
text.set(line);
return true;
}
@Override
public RecordIdentifier createKey() {
return new RecordIdentifier();
}
@Override
public Text createValue() {
return new Text();
}
@Override
public long getPos() throws IOException {
return 0;
}
@Override
public void close() throws IOException {
}
@Override
public float getProgress() throws IOException {
return 0;
}
}
// This class isn't used and I suspect does totally the wrong thing. It's only here so that I
// can provide some output format to the tables and partitions I create. I actually write to
// those tables' directories directly.
static class MockOutputFormat implements AcidOutputFormat<WritableComparable, Text> {
@Override
public RecordUpdater getRecordUpdater(Path path, Options options) throws
IOException {
return null;
}
@Override
public org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter getRawRecordWriter(Path path, Options options) throws IOException {
return new MockRecordWriter(path, options);
}
@Override
public org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter getHiveRecordWriter(JobConf jc, Path finalOutPath,
Class<? extends Writable> valueClass,
boolean isCompressed, Properties tableProperties,
Progressable progress) throws IOException {
return null;
}
@Override
public RecordWriter<WritableComparable, Text> getRecordWriter(FileSystem fileSystem, JobConf entries,
String s,
Progressable progressable) throws
IOException {
return null;
}
@Override
public void checkOutputSpecs(FileSystem fileSystem, JobConf entries) throws IOException {
}
}
// This class isn't used and I suspect does totally the wrong thing. It's only here so that I
// can provide some output format to the tables and partitions I create. I actually write to
// those tables' directories directly.
static class MockRecordWriter implements org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter {
private final FSDataOutputStream os;
MockRecordWriter(Path basedir, AcidOutputFormat.Options options) throws IOException {
FileSystem fs = FileSystem.get(options.getConfiguration());
Path p = AcidUtils.createFilename(basedir, options);
os = fs.create(p);
}
@Override
public void write(Writable w) throws IOException {
Text t = (Text)w;
os.writeBytes(t.toString());
os.writeBytes("\n");
}
@Override
public void close(boolean abort) throws IOException {
os.close();
}
}
/**
* in Hive 1.3.0 delta file names changed to delta_xxxx_yyyy_zzzz; prior to that
* the name was delta_xxxx_yyyy. We want to run compaction tests such that both formats
* are used since new (1.3) code has to be able to read old files.
*/
abstract boolean useHive130DeltaDirName();
String makeDeltaDirName(long minTxnId, long maxTxnId) {
if(minTxnId != maxTxnId) {
//covers both streaming api and post compaction style.
return makeDeltaDirNameCompacted(minTxnId, maxTxnId);
}
return useHive130DeltaDirName() ?
AcidUtils.deltaSubdir(minTxnId, maxTxnId, 0) : AcidUtils.deltaSubdir(minTxnId, maxTxnId);
}
/**
* delta dir name after compaction
*/
String makeDeltaDirNameCompacted(long minTxnId, long maxTxnId) {
return AcidUtils.deltaSubdir(minTxnId, maxTxnId);
}
String makeDeleteDeltaDirNameCompacted(long minTxnId, long maxTxnId) {
return AcidUtils.deleteDeltaSubdir(minTxnId, maxTxnId);
}
protected long compactInTxn(CompactionRequest rqst) throws Exception {
txnHandler.compact(rqst);
CompactionInfo ci = txnHandler.findNextToCompact("fred", WORKER_VERSION);
ci.runAs = System.getProperty("user.name");
long compactorTxnId = openTxn(TxnType.COMPACTION);
// Need to create a valid writeIdList to set the highestWriteId in ci
ValidTxnList validTxnList = TxnCommonUtils.createValidReadTxnList(txnHandler.getOpenTxns(), compactorTxnId);
GetValidWriteIdsRequest writeIdsRequest = new GetValidWriteIdsRequest();
writeIdsRequest.setValidTxnList(validTxnList.writeToString());
writeIdsRequest
.setFullTableNames(Collections.singletonList(TxnUtils.getFullTableName(rqst.getDbname(), rqst.getTablename())));
// with this, the ValidWriteIdList is capped at whatever HWM the validTxnList has
ValidCompactorWriteIdList tblValidWriteIds = TxnUtils
.createValidCompactWriteIdList(txnHandler.getValidWriteIds(writeIdsRequest).getTblValidWriteIds().get(0));
ci.highestWriteId = tblValidWriteIds.getHighWatermark();
txnHandler.updateCompactorState(ci, compactorTxnId);
txnHandler.markCompacted(ci);
txnHandler.commitTxn(new CommitTxnRequest(compactorTxnId));
Thread.sleep(MetastoreConf.getTimeVar(conf, MetastoreConf.ConfVars.TXN_OPENTXN_TIMEOUT, TimeUnit.MILLISECONDS));
return compactorTxnId;
}
}
|
apache-2.0
|
trentdm/Foos
|
src/Foos/App/app.js
|
4265
|
'use strict';
var app = angular.module('app', [
'ngAnimate',
'ngCookies',
'ui.router',
'ui.bootstrap'
]);
app.config(['$stateProvider', '$httpProvider',
function ($stateProvider, $httpProvider) {
$stateProvider.state('home', {
url: '',
templateUrl: 'App/partials/home.html',
controller: 'HomeCtrl',
data: {
requireLogin: false
}
});
$stateProvider.state('play', {
url: '/play',
templateUrl: 'App/partials/play.html',
controller: 'PlayCtrl',
data: {
requireLogin: true
}
});
$stateProvider.state('match', {
url: '/match',
templateUrl: 'App/partials/match.html',
controller: 'MatchCtrl',
data: {
requireLogin: false
}
});
$stateProvider.state('team', {
url: '/team',
templateUrl: 'App/partials/team.html',
controller: 'TeamCtrl',
data: {
requireLogin: false
}
});
$stateProvider.state('player', {
url: '/player',
templateUrl: 'App/partials/player.html',
controller: 'PlayerCtrl',
data: {
requireLogin: false
}
});
$stateProvider.state('about', {
url: '/about',
templateUrl: 'App/partials/about.html',
data: {
requireLogin: false
}
});
$stateProvider.state('preferences', {
url: '/preferences',
templateUrl: 'App/partials/preferences.html',
controller: 'PreferencesCtrl',
data: {
requireLogin: true
}
});
$httpProvider.interceptors.push(function ($timeout, $q, $injector) {
var authModal, $http, $state;
// this trick must be done so that we don't receive
// `Uncaught Error: [$injector:cdep] Circular dependency found`
$timeout(function () {
authModal = $injector.get('authModal');
$http = $injector.get('$http');
$state = $injector.get('$state');
});
return {
responseError: function (rejection) {
return $q.reject(rejection); // may want to force modal on 401 auth failure, but not at this time
if (rejection.status !== 401) {
return $q.reject(rejection);
}
var deferred = $q.defer();
authModal()
.then(function () {
deferred.resolve($http(rejection.config));
})
.catch(function () {
$state.go('home');
deferred.reject(rejection);
});
return deferred.promise;
}
};
});
}
]);
app.run(['$rootScope', '$state', 'authModal', 'authService', 'versionService',
function ($rootScope, $state, authModal, authService, versionService) {
$rootScope.$on('$stateChangeStart', function (event, toState, toParams) {
versionService.getVersionInfo(
function (version) {
if (version.isOutOfDate) {
$rootScope.$broadcast('alert', { type: 'warning', msg: 'Client version is out of date. Please refresh your browser.' });
}
},
function () {
$rootScope.$broadcast('alert', { type: 'danger', msg: 'Server could not be reached. Please try again later.' });
});
if (toState.data.requireLogin && !authService.user.isAuthenticated) {
event.preventDefault();
authModal()
.then(function(data) {
return $state.go(toState.name, toParams);
})
.catch(function() {
return $state.go('home');
});
}
});
}]);
|
apache-2.0
|
micrometer-metrics/micrometer
|
micrometer-binders/src/test/java/io/micrometer/binder/jetty/JettyConnectionMetricsTest.java
|
5449
|
/*
* Copyright 2019 VMware, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.micrometer.binder.jetty;
import io.micrometer.core.instrument.MockClock;
import io.micrometer.core.instrument.simple.SimpleConfig;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.eclipse.jetty.client.HttpClient;
import org.eclipse.jetty.client.api.Request;
import org.eclipse.jetty.client.util.StringContentProvider;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.util.component.LifeCycle;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import java.util.concurrent.CountDownLatch;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertTrue;
class JettyConnectionMetricsTest {
private SimpleMeterRegistry registry = new SimpleMeterRegistry(SimpleConfig.DEFAULT, new MockClock());
private Server server = new Server(0);
private ServerConnector connector = new ServerConnector(server);
private CloseableHttpClient client = HttpClients.createDefault();
void setup() throws Exception {
connector.addBean(new JettyConnectionMetrics(registry));
server.setConnectors(new Connector[]{connector});
server.start();
}
@AfterEach
void teardown() throws Exception {
if (server.isRunning()) {
server.stop();
}
}
@Test
void contributesServerConnectorMetrics() throws Exception {
setup();
HttpPost post = new HttpPost("http://localhost:" + connector.getLocalPort());
post.setEntity(new StringEntity("123456"));
try (CloseableHttpResponse ignored = client.execute(post)) {
try (CloseableHttpResponse ignored2 = client.execute(post)) {
assertThat(registry.get("jetty.connections.current").gauge().value()).isEqualTo(2.0);
assertThat(registry.get("jetty.connections.max").gauge().value()).isEqualTo(2.0);
}
}
CountDownLatch latch = new CountDownLatch(1);
connector.addLifeCycleListener(new LifeCycle.Listener() {
@Override
public void lifeCycleStopped(LifeCycle event) {
latch.countDown();
}
});
// Convenient way to get Jetty to flush its connections, which is required to update the sent/received bytes metrics
server.stop();
assertTrue(latch.await(10, SECONDS));
assertThat(registry.get("jetty.connections.max").gauge().value()).isEqualTo(2.0);
assertThat(registry.get("jetty.connections.request").tag("type", "server").timer().count())
.isEqualTo(2);
assertThat(registry.get("jetty.connections.bytes.in").summary().totalAmount()).isGreaterThan(1);
}
@Test
void contributesClientConnectorMetrics() throws Exception {
setup();
HttpClient httpClient = new HttpClient();
httpClient.setFollowRedirects(false);
httpClient.addBean(new JettyConnectionMetrics(registry));
CountDownLatch latch = new CountDownLatch(1);
httpClient.addLifeCycleListener(new LifeCycle.Listener() {
@Override
public void lifeCycleStopped(LifeCycle event) {
latch.countDown();
}
});
httpClient.start();
Request post = httpClient.POST("http://localhost:" + connector.getLocalPort());
post.content(new StringContentProvider("123456"));
post.send();
httpClient.stop();
assertTrue(latch.await(10, SECONDS));
assertThat(registry.get("jetty.connections.max").gauge().value()).isEqualTo(1.0);
assertThat(registry.get("jetty.connections.request").tag("type", "client").timer().count())
.isEqualTo(1);
assertThat(registry.get("jetty.connections.bytes.out").summary().totalAmount()).isGreaterThan(1);
}
@Test
void passingConnectorAddsConnectorNameTag() {
new JettyConnectionMetrics(registry, connector);
assertThat(registry.get("jetty.connections.messages.in").counter().getId().getTag("connector.name"))
.isEqualTo("unnamed");
}
@Test
void namedConnectorsGetTaggedWithName() {
connector.setName("super-fast-connector");
new JettyConnectionMetrics(registry, connector);
assertThat(registry.get("jetty.connections.messages.in").counter().getId().getTag("connector.name"))
.isEqualTo("super-fast-connector");
}
}
|
apache-2.0
|
lsinfo3/onos
|
core/store/dist/src/main/java/org/onosproject/store/resource/impl/DiscreteResources.java
|
1461
|
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.resource.impl;
import org.onosproject.net.resource.DiscreteResource;
import org.onosproject.net.resource.DiscreteResourceId;
import java.util.List;
import java.util.Optional;
import java.util.Set;
interface DiscreteResources {
static DiscreteResources empty() {
return NonEncodableDiscreteResources.empty();
}
Optional<DiscreteResource> lookup(DiscreteResourceId id);
DiscreteResources difference(DiscreteResources other);
boolean isEmpty();
boolean containsAny(List<DiscreteResource> other);
// returns a new instance; does not mutate the current instance
DiscreteResources add(DiscreteResources other);
// returns a new instance; does not mutate the current instance
DiscreteResources remove(List<DiscreteResource> removed);
Set<DiscreteResource> values();
}
|
apache-2.0
|
tanhaichao/leopard-data
|
leopard-jdbc/src/test/java/io/leopard/jdbc/JdbcDaoSupportTest.java
|
1196
|
package io.leopard.jdbc;
import io.leopard.jdbc.JdbcDaoSupport;
import java.sql.Connection;
import javax.sql.DataSource;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import org.springframework.jdbc.core.JdbcTemplate;
public class JdbcDaoSupportTest {
JdbcDaoSupport jdbcDao = new JdbcDaoSupport();
@Test
public void JdbcDaoSupport() {
}
@Test
public void setDataSource() {
DataSource dataSource = Mockito.mock(DataSource.class);
jdbcDao.setDataSource(dataSource);
Assert.assertNotNull(jdbcDao.getDataSource());
}
@Test
public void setJdbcTemplate() {
JdbcTemplate jdbcTemplate = Mockito.mock(JdbcTemplate.class);
jdbcDao.setJdbcTemplate(jdbcTemplate);
Assert.assertNotNull(jdbcDao.getJdbcTemplate());
jdbcDao.getExceptionTranslator();
}
@Test
public void getConnection() {
DataSource dataSource = Mockito.mock(DataSource.class);
jdbcDao.setDataSource(dataSource);
jdbcDao.getConnection();
}
@Test
public void releaseConnection() {
DataSource dataSource = Mockito.mock(DataSource.class);
jdbcDao.setDataSource(dataSource);
Connection conn = Mockito.mock(Connection.class);
jdbcDao.releaseConnection(conn);
}
}
|
apache-2.0
|
OpenConext/SamlValueObject
|
src/OpenConext/Value/Saml/Metadata/ShibbolethMetadataScope.php
|
2613
|
<?php
namespace OpenConext\Value\Saml\Metadata;
use OpenConext\Value\Assert\Assertion;
use OpenConext\Value\RegularExpression;
use OpenConext\Value\Serializable;
final class ShibbolethMetadataScope implements Serializable
{
/**
* @var string
*/
private $scope;
/**
* @var bool
*/
private $isRegexp;
/**
* @param string $literal
* @return ShibbolethMetadataScope
*/
public static function literal($literal)
{
Assertion::nonEmptyString($literal, 'literal');
return new self($literal);
}
/**
* @param string $regexp
* @return ShibbolethMetadataScope
*/
public static function regexp($regexp)
{
Assertion::nonEmptyString($regexp, 'regexp');
return new self($regexp, true);
}
/**
* @param string $scope the scope as defined
* @param bool $isRegexp whether or not the scope is a regular expression as identified by the regexp attribute
*/
public function __construct($scope, $isRegexp = false)
{
Assertion::nonEmptyString($scope, 'scope');
Assertion::boolean($isRegexp);
if ($isRegexp) {
Assertion::validRegularExpression('#' . $scope . '#i', 'scope');
}
$this->scope = $scope;
$this->isRegexp = $isRegexp;
}
/**
* @param string $string
* @return bool
*/
public function allows($string)
{
Assertion::string($string, 'Scope to check should be a string, "%s" given');
if (!$this->isRegexp) {
return strcasecmp($this->scope, $string) === 0;
}
$regexp = new RegularExpression('#' . $this->scope . '#i');
return $regexp->matches($string);
}
/**
* @param ShibbolethMetadataScope $other
* @return bool
*/
public function equals(ShibbolethMetadataScope $other)
{
return (strcasecmp($this->scope, $other->scope) === 0 && $this->isRegexp === $other->isRegexp);
}
public static function deserialize($data)
{
Assertion::isArray($data);
Assertion::keysExist($data, array('is_regexp', 'scope'));
return new self($data['scope'], $data['is_regexp']);
}
public function serialize()
{
return array(
'scope' => $this->scope,
'is_regexp' => $this->isRegexp
);
}
public function __toString()
{
return sprintf(
'ShibbolethMetadataScope(scope=%s, regexp=%s)',
$this->scope,
$this->isRegexp ? 'true' : 'false'
);
}
}
|
apache-2.0
|
zhongyi-zhang/meta-azure-service-broker
|
test/integration/cleaner.js
|
1244
|
/*jshint camelcase: false */
var common = require('../../lib/common');
var msRestRequest = require('../../lib/common/msRestRequest');
var chai = require('chai');
var should = chai.should();
var util = require('util');
exports.clean = function(provisioningParameters, done) {
var resourceGroupName = provisioningParameters.resourceGroup;
if (!resourceGroupName) {
return done();
}
var environmentName = process.env['ENVIRONMENT'];
var subscriptionId = process.env['SUBSCRIPTION_ID'];
var API_VERSIONS = common.API_VERSION[environmentName];
var environment = common.getEnvironment(environmentName);
var resourceManagerEndpointUrl = environment.resourceManagerEndpointUrl;
var resourceGroupUrl = util.format('%s/subscriptions/%s/resourcegroups/%s',
resourceManagerEndpointUrl,
subscriptionId,
resourceGroupName);
var headers = common.mergeCommonHeaders('Delete resource group for integration test', {});
msRestRequest.DELETE(resourceGroupUrl, headers, API_VERSIONS.RESOURCE_GROUP, function (err, res, body) {
should.not.exist(err);
res.statusCode.should.equal(202);
done();
});
};
|
apache-2.0
|
cpollet/sportracker
|
webapp/src/test/java/net/cpollet/sportracker/dozer/TestHashingDozerConverter.java
|
2427
|
/*
* Copyright 2014 Christophe Pollet
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.cpollet.sportracker.dozer;
import net.cpollet.sportracker.service.api.HashingService;
import org.dozer.MappingException;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import static org.fest.assertions.Assertions.assertThat;
/**
* @author Christophe Pollet
*/
@RunWith(MockitoJUnitRunner.class)
public class TestHashingDozerConverter {
private HashingDozerConverter hashingDozerConverter;
@Mock
private HashingService hashingService;
@Rule
public ExpectedException expectedException = ExpectedException.none();
@Before
public void setUp() {
Mockito.when(hashingService.hash("plain")).thenReturn("hash");
hashingDozerConverter = new HashingDozerConverter();
hashingDozerConverter.setHashingService(hashingService);
}
@Test
public void convertReturnsNullWhenNullPassed() {
// GIVEN
String plain = null;
// WHEN
String hash = (String) hashingDozerConverter.convert(null, plain, null, null);
// THEN
assertThat(hash).isNull();
}
@Test
public void convertThrowsExceptionWhenNoStringPassed() {
// GIVEN
Object plain = new Object();
// THEN
expectedException.expect(MappingException.class);
expectedException.expectMessage("Converter HashingDozerConverter used incorrectly. Arguments passed were " //
+ "null and " + plain.toString());
// WHEN
hashingDozerConverter.convert(null, plain, null, null);
}
@Test
public void convertHashesTheInputStringAndReturnsTheHash() {
// GIVEN
String plain = "plain";
// WHEN
String hash = (String) hashingDozerConverter.convert(null, plain, null, null);
// THEN
assertThat(hash).isEqualTo("hash");
}
}
|
apache-2.0
|
linsun/kubernetes11
|
docs/user-guide/kubectl/kubectl_apply.md
|
3124
|
<!-- BEGIN MUNGE: UNVERSIONED_WARNING -->
<!-- END MUNGE: UNVERSIONED_WARNING -->
## kubectl apply
Apply a configuration to a resource by filename or stdin
### Synopsis
Apply a configuration to a resource by filename or stdin.
JSON and YAML formats are accepted.
```
kubectl apply -f FILENAME
```
### Examples
```
# Apply the configuration in pod.json to a pod.
$ kubectl apply -f ./pod.json
# Apply the JSON passed into stdin to a pod.
$ cat pod.json | kubectl apply -f -
```
### Options
```
-f, --filename=[]: Filename, directory, or URL to file that contains the configuration to apply
-o, --output="": Output mode. Use "-o name" for shorter output (resource/name).
--schema-cache-dir="/tmp/kubectl.schema": If non-empty, load/store cached API schemas in this directory, default is '/tmp/kubectl.schema'
--validate[=true]: If true, use a schema to validate the input before sending it
```
### Options inherited from parent commands
```
--alsologtostderr[=false]: log to standard error as well as files
--api-version="": The API version to use when talking to the server
--certificate-authority="": Path to a cert. file for the certificate authority.
--client-certificate="": Path to a client certificate file for TLS.
--client-key="": Path to a client key file for TLS.
--cluster="": The name of the kubeconfig cluster to use
--context="": The name of the kubeconfig context to use
--insecure-skip-tls-verify[=false]: If true, the server's certificate will not be checked for validity. This will make your HTTPS connections insecure.
--kubeconfig="": Path to the kubeconfig file to use for CLI requests.
--log-backtrace-at=:0: when logging hits line file:N, emit a stack trace
--log-dir="": If non-empty, write log files in this directory
--log-flush-frequency=5s: Maximum number of seconds between log flushes
--logtostderr[=true]: log to standard error instead of files
--match-server-version[=false]: Require server version to match client version
--namespace="": If present, the namespace scope for this CLI request.
--password="": Password for basic authentication to the API server.
-s, --server="": The address and port of the Kubernetes API server
--stderrthreshold=2: logs at or above this threshold go to stderr
--token="": Bearer token for authentication to the API server.
--user="": The name of the kubeconfig user to use
--username="": Username for basic authentication to the API server.
--v=0: log level for V logs
--vmodule=: comma-separated list of pattern=N settings for file-filtered logging
```
### SEE ALSO
* [kubectl](kubectl.md) - kubectl controls the Kubernetes cluster manager
###### Auto generated by spf13/cobra on 10-Oct-2015
<!-- BEGIN MUNGE: IS_VERSIONED -->
<!-- TAG IS_VERSIONED -->
<!-- END MUNGE: IS_VERSIONED -->
<!-- BEGIN MUNGE: GENERATED_ANALYTICS -->
<!-- END MUNGE: GENERATED_ANALYTICS -->
|
apache-2.0
|
escidoc-ng/escidoc-ng
|
escidocng-backend/src/main/java/de/escidocng/WebInitializer.java
|
1132
|
/*
* Copyright 2014 FIZ Karlsruhe
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.escidocng;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.context.web.SpringBootServletInitializer;
/**
* @author mih
*/
public class WebInitializer extends SpringBootServletInitializer {
@Override
protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
return application.sources(EscidocngServerConfiguration.class, EscidocngServerSecurityConfiguration.class,
OAuth2ServerConfiguration.class);
}
}
|
apache-2.0
|
ebidel/WebFundamentals
|
src/content/en/fundamentals/performance/poor-connectivity/index.md
|
8159
|
project_path: /web/fundamentals/_project.yaml
book_path: /web/fundamentals/_book.yaml
description: It's important to understand what using your app or site feels like when connectivity is poor or unreliable, and build accordingly. A range of tools can help you.
{# wf_updated_on: 2018-01-10 #}
{# wf_published_on: 2016-05-09 #}
{# wf_blink_components: Platform>DevTools #}
# Understanding Low Bandwidth and High Latency {: .page-title }
{% include "web/_shared/contributors/samdutton.html" %}
It's important to understand what using your app or site feels like when
connectivity is poor or unreliable, and build accordingly. A range of tools
can help you.
## Test with low bandwidth and high latency {: #testing }
An <a href="http://adwords.blogspot.co.uk/2015/05/building-for-next-moment.html">
increasing proportion</a> of people experience the web on mobile devices. Even at home, <a
href="https://www.washingtonpost.com/news/the-switch/wp/2016/04/18/new-data-americans-are-abandoning-wired-home-internet/">
many people are abandoning fixed broadband for mobile</a>.
In this context, it's important to understand what using your app or site feels
like when connectivity is poor or unreliable. A range of software tools can help
you [emulate and simulate](https://stackoverflow.com/questions/1584617/simulator-or-emulator-what-is-the-difference)
low bandwidth and high [latency](https://www.igvita.com/2012/07/19/latency-the-new-web-performance-bottleneck/).
### Emulate network throttling
When building or updating a site, you must ensure adequate performance in a variety
of connectivity conditions. Several tools can help.
#### Browser tools
[Chrome DevTools](/web/tools/chrome-devtools) lets you test your site with a
variety of upload/download speeds and [round-trip times][rtt], using presets
or custom settings from the Network panel. See [Get Started with Analyze
Network Performance](/web/tools/chrome-devtools/network-performance) to
learn the basics.

[rtt]: https://www.igvita.com/2012/07/19/latency-the-new-web-performance-bottleneck/
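If you need the same throttling in an automated check rather than by hand, the
emulation behind the Network panel is also reachable through the Chrome DevTools
Protocol. The sketch below assumes Puppeteer is installed and sends the
`Network.emulateNetworkConditions` command directly; the latency and throughput
values are illustrative, not a standard preset.

    const puppeteer = require('puppeteer');

    (async () => {
      const browser = await puppeteer.launch();
      const page = await browser.newPage();

      // Open a raw DevTools Protocol session for this page and emulate a slow,
      // high-latency connection (values are illustrative).
      const client = await page.target().createCDPSession();
      await client.send('Network.emulateNetworkConditions', {
        offline: false,
        latency: 400,                   // added round-trip time in ms
        downloadThroughput: 50 * 1024,  // bytes per second
        uploadThroughput: 20 * 1024     // bytes per second
      });

      await page.goto('https://example.com');
      console.log(await page.title());
      await browser.close();
    })();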
#### System tools
Network Link Conditioner is a preference panel available on Mac if you install
[Hardware IO Tools](https://developer.apple.com/downloads/?q=Hardware%20IO%20Tools)
for Xcode:



#### Device emulation
[Android Emulator](http://developer.android.com/tools/devices/emulator.html#netspeed)
allows you to simulate various network conditions while running apps (including
web browsers and hybrid web apps) on Android:


For iPhone, Network Link Conditioner can be used to simulate impaired network
conditions (see above).
### Test from different locations and networks
Connectivity performance depends on server location as well as network type.
[WebPagetest](https://webpagetest.org) is an online service that enables a set
of performance tests to be run for your site using a variety of networks and host
locations. For example, you can try out your site from a server in India on a 2G
network, or over cable from a city in the US.

Select a location and, from advanced settings, select a connection type. You can
even automate testing using [scripts](https://sites.google.com/a/webpagetest.org/docs/using-webpagetest/scripting)
(for example, to log in to a site) or using their
[RESTful APIs](https://sites.google.com/a/webpagetest.org/docs/advanced-features/webpagetest-restful-apis).
This helps you to include connectivity testing into build processes or performance
logging.
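As a rough illustration of what that automation can look like, the sketch below
submits a single test run over HTTP. The endpoint and parameter names
(`runtest.php`, `url`, `location`, `f`, `k`) follow the public WebPagetest API,
but treat them as assumptions to verify against the linked documentation; the
API key and location string are placeholders.

    // Assumes a global fetch() (browser, or a polyfill such as node-fetch in Node.js).
    const WPT_API_KEY = 'YOUR_API_KEY_HERE';      // placeholder
    const targetUrl = 'https://example.com';

    async function submitWptTest() {
      const submitUrl = 'https://www.webpagetest.org/runtest.php' +
          '?url=' + encodeURIComponent(targetUrl) +
          '&location=Dulles:Chrome.3G' +          // placeholder location/connection profile
          '&f=json' +
          '&k=' + WPT_API_KEY;
      const response = await fetch(submitUrl);
      // The JSON response is expected to contain links to the queued test results.
      console.log(await response.json());
    }

    submitWptTest();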
[Fiddler](http://www.telerik.com/fiddler) supports Global proxying via
[GeoEdge](http://www.geoedge.com/faq), and its custom rules can be used to simulate
modem speeds:

### Test on an impaired network
Software and hardware proxies enable you to emulate problematic mobile network
conditions, such as bandwidth throttling, packet delay, and random packet loss.
A shared proxy or impaired network can enable a team of developers to incorporate
real-world network testing in their workflow.
Facebook's [Augmented Traffic Control](http://facebook.github.io/augmented-traffic-control/)
(ATC) is a BSD-licensed set of applications that can be used to shape traffic and
emulate impaired network conditions:

> Facebook even instituted [2G Tuesdays](https://code.facebook.com/posts/1556407321275493/building-for-emerging-markets-the-story-behind-2g-tuesdays/)
> to help understand how people on 2G use their product. On Tuesdays, employees
> get a pop-up that gives them the option to simulate a 2G connection.
The [Charles](https://www.charlesproxy.com/){: .external } HTTP/HTTPS proxy can
be used to [adjust bandwidth and latency](http://www.charlesproxy.com/documentation/proxying/throttling/).
Charles is commercial software, but a free trial is available.

More information about Charles is available from [codewithchris.com](http://codewithchris.com/tutorial-using-charles-proxy-with-your-ios-development-and-http-debugging/).
## Handle unreliable connectivity and "lie-fi" {: #lie-fi }
### What is lie-fi?
The term <a href="http://www.urbandictionary.com/define.php?term=lie-fi">lie-fi</a>
dates back to at least 2008 (when phones looked like
<a href="https://www.mobilegazette.com/2008-phones-wallchart.htm"
title="Images of phones from 2008">this</a>), and refers to connectivity that
isn't what it seems. Your browser behaves as if it has connectivity when, for
whatever reason, it doesn't.
Misinterpreted connectivity can result in a poor experience as the browser
(or JavaScript) persists in trying to retrieve resources rather than giving up
and choosing a sensible fallback. Lie-fi can actually be worse than offline; at
least if a device is definitely offline, your JavaScript can take appropriate
evasive action.
Lie-fi is likely to become a bigger problem as more people move to mobile and away
from fixed broadband. Recent [US Census data](https://www.ntia.doc.gov/blog/2016/evolving-technologies-change-nature-internet-use)
shows a [move away from fixed broadband](https://www.washingtonpost.com/news/the-switch/wp/2016/04/18/new-data-americans-are-abandoning-wired-home-internet/).
The following chart shows the use of mobile internet at home in 2015 compared with 2013:
<img src="images/home-broadband.png" class="center" alt="Chart from US census data
showing the move to mobile away from fixed broadband, particularly in lower-income households">
### Use timeouts to handle intermittent connectivity
In the past, [hacky methods using XHR](http://stackoverflow.com/questions/189430/detect-that-the-internet-connection-is-offline)
have been used to test for intermittent connectivity, but service worker enables
more reliable methods to set network timeouts. This can be achieved using
[Workbox](/web/tools/workbox/) with only a few lines of code:
    workboxSW.router.registerRoute(
      '/path/to/image',
      workboxSW.strategies.networkFirst({networkTimeoutSeconds: 3})
    );
You can learn more about Workbox in Jeff Posnick's Chrome Dev Summit talk,
[Workbox: Flexible PWA Libraries](https://www.youtube.com/watch?v=DtuJ55tmjps).
[Timeout functionality](/web/updates/2017/09/abortable-fetch) is also being developed
for the [Fetch API](https://developer.mozilla.org/en-US/docs/Web/API/GlobalFetch/fetch),
and the [Streams API](https://www.w3.org/TR/streams-api/) should help by optimizing
content delivery and avoiding monolithic requests. Jake Archibald gives more details
about tackling lie-fi in [Supercharging page load](https://youtu.be/d5_6yHixpsQ?t=6m42s).
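In browsers that have since shipped `AbortController`, a network timeout can also
be approximated directly on top of `fetch`, independently of Workbox. This is a
generic sketch for a service worker context, not part of any API mentioned above;
the 3-second timeout simply mirrors the Workbox example.

    // Race a fetch against a timeout; on timeout or network failure, fall back to the cache.
    function fetchWithTimeout(request, timeoutMs) {
      const controller = new AbortController();
      const timer = setTimeout(() => controller.abort(), timeoutMs);
      return fetch(request, { signal: controller.signal })
          .finally(() => clearTimeout(timer));
    }

    self.addEventListener('fetch', event => {
      event.respondWith(
        fetchWithTimeout(event.request, 3000)
            // caches.match() resolves to undefined when nothing is cached; a real
            // worker should handle that case explicitly.
            .catch(() => caches.match(event.request))
      );
    });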
|
apache-2.0
|
rudygt/dot-kafka
|
DotKafka.Prototype/Common/Errors/UnknownServerException.cs
|
346
|
using System;
namespace DotKafka.Prototype.Common.Errors
{
public class UnknownServerException : ApiException
{
public UnknownServerException() { }
public UnknownServerException(string message) : base(message) { }
public UnknownServerException(string message, Exception inner) : base(message, inner) { }
}
}
|
apache-2.0
|
di3goleite/ongometro
|
README.md
|
68
|
ongometro
=========
Final project for submission in hackathonJusBr
|
apache-2.0
|
grahamgilbert/munki-dnd
|
munki-dnd.py
|
1572
|
#!/usr/bin/python
import sys
# try to import from the default place Munki installs it
try:
from munkilib import FoundationPlist, munkicommon
except:
sys.path.append('/usr/local/munki')
from munkilib import FoundationPlist, munkicommon
import os
from datetime import datetime
FILE_LOCATION = "/Users/Shared/.msc-dnd.plist"
# Does the file exist?
if not os.path.isfile(FILE_LOCATION):
# File isn't here, set the Munki pref to False
munkicommon.set_pref('SuppressUserNotification', False)
sys.exit(0)
# If it does, read the plist and check the saved DND end date
else:
plist = FoundationPlist.readPlist(FILE_LOCATION)
if 'DNDEndDate' not in plist:
# The key we need isn't in there, remove the file, set pref and exit
os.remove(FILE_LOCATION)
munkicommon.set_pref('SuppressUserNotification', False)
sys.exit(0)
else:
# Is the current date greater than the DND date?
saved_time = datetime.strptime(str(plist['DNDEndDate']), "%Y-%m-%d %H:%M:%S +0000")
current_time = datetime.now()
if saved_time < current_time:
# print "Current time is greater"
# If yes, remove the file and set the Munki pref for suppress notifications to False
os.remove(FILE_LOCATION)
munkicommon.set_pref('SuppressUserNotification', False)
sys.exit(0)
else:
# print "Saved Time is greater"
munkicommon.set_pref('SuppressUserNotification', True)
sys.exit(0)
# If no, make sure suppress notifications is True
|
apache-2.0
|
NitorCreations/willow
|
willow-servers/src/main/java/com/nitorcreations/willow/metrics/MessageTypesMetric.java
|
206
|
package com.nitorcreations.willow.metrics;
import javax.inject.Named;
@Named("/types")
public class MessageTypesMetric extends TagListMetric {
public MessageTypesMetric() {
super("category");
}
}
|
apache-2.0
|
LQJJ/demo
|
126-go-common-master/app/job/live/xlottery/internal/model/model.go
|
2202
|
package model
// Pool .
type Pool struct {
Id int64 `json:"id"`
CoinId int64 `json:"coin_id"`
Title string `json:"title"`
Description string `json:"description"`
StartTime int64 `json:"start_time"`
EndTime int64 `json:"end_time"`
Status int64 `json:"status"`
IsBottom int64 `json:"is_bottom"`
}
// Coin .
type Coin struct {
Id int64 `json:"id"`
Title string `json:"title"`
GiftType int64 `json:"gift_type"`
ChangeNum int64 `json:"change_num"`
StartTime int64 `json:"start_time"`
EndTime int64 `json:"end_time"`
Status int64 `json:"status"`
}
// UserInfo .
type UserInfo struct {
Uid int64 `json:"uid"`
NormalScore int64 `json:"normal_score"`
ColorfulScore int64 `json:"colorful_score"`
}
// GiftMsg .
type GiftMsg struct {
// by notify
MsgContent string `form:"msg_content"`
}
// CoinConfig .
type CoinConfig struct {
CoinId int64 `json:"coin_id"`
Type int64 `json:"type"`
AreaV2ParentId int64 `json:"area_v2_parent_id"`
AreaV2Id int64 `json:"area_v2_id"`
GiftId int64 `json:"gift_id"`
IsAll int64 `json:"is_all"`
}
// PoolPrize .
type PoolPrize struct {
Id int64 `json:"id"`
PoolId int64 `json:"pool_id"`
Type int64 `json:"type"`
Num int64 `json:"num"`
ObjectId int64 `json:"object_id"`
Expire int64 `json:"expire"`
WebUrl string `json:"web_url"`
MobileUrl string `json:"mobile_url"`
Description string `json:"description"`
JumpUrl string `json:"jump_url"`
ProType int64 `json:"pro_type"`
Chance int64 `json:"chance"`
LoopNum int64 `json:"loop_num"`
LimitNum int64 `json:"limit_num"`
Weight int64 `json:"weight"`
}
// AddCapsule AddCapsule
type AddCapsule struct {
Uid int64 `json:"uid"`
Type string `json:"type"`
CoinId int64 `json:"coin_id"`
Num int64 `json:"num"`
Source string `json:"source"`
MsgId string `json:"msg_id"`
}
// ExtraData .
type ExtraData struct {
Id int64 `json:"id"`
Uid int64 `json:"uid"`
Type string `json:"type"`
ItemValue int64 `json:"item_value"`
ItemExtra string `json:"item_extra"`
}
|
apache-2.0
|
khartig/assimilator
|
rio-resolver/resolver-aether/src/main/java/org/rioproject/resolver/aether/util/ConsoleTransferListener.java
|
4898
|
/*
* Copyright to the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rioproject.resolver.aether.util;
import java.io.PrintStream;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.sonatype.aether.transfer.AbstractTransferListener;
import org.sonatype.aether.transfer.TransferEvent;
import org.sonatype.aether.transfer.TransferResource;
/**
* A simplistic transfer listener that logs uploads/downloads to the console.
*/
public class ConsoleTransferListener extends AbstractTransferListener {
private PrintStream out;
private Map<TransferResource, Long> downloads = new ConcurrentHashMap<TransferResource, Long>();
private int lastLength;
public ConsoleTransferListener() {
this(null);
}
public ConsoleTransferListener(PrintStream out) {
this.out = (out != null) ? out : System.out;
}
@Override
public void transferProgressed(TransferEvent event) {
TransferResource resource = event.getResource();
downloads.put(resource, event.getTransferredBytes());
StringBuilder buffer = new StringBuilder(64);
for (Map.Entry<TransferResource, Long> entry : downloads.entrySet()) {
long total = entry.getKey().getContentLength();
long complete = entry.getValue();
buffer.append(getStatus(complete, total)).append(" ");
}
int pad = lastLength - buffer.length();
lastLength = buffer.length();
pad(buffer, pad);
buffer.append('\r');
out.print(buffer);
}
private String getStatus(long complete, long total) {
if (total >= 1024) {
return toKB(complete) + "/" + toKB(total) + " KB ";
} else if (total >= 0) {
return complete + "/" + total + " B ";
} else if (complete >= 1024) {
return toKB(complete) + " KB ";
} else {
return complete + " B ";
}
}
private void pad(StringBuilder buffer, final int spaces) {
int spaceCountDown = spaces;
String block = " ";
while (spaceCountDown > 0) {
int n = Math.min(spaceCountDown, block.length());
buffer.append(block, 0, n);
spaceCountDown -= n;
}
}
@Override
public void transferSucceeded(TransferEvent event) {
transferCompleted(event);
TransferResource resource = event.getResource();
long contentLength = event.getTransferredBytes();
if (contentLength >= 0) {
String type = (event.getRequestType() == TransferEvent.RequestType.PUT ? "Uploaded" : "Downloaded");
String len = contentLength >= 1024 ? toKB(contentLength) + " KB" : contentLength + " B";
String throughput = "";
long duration = System.currentTimeMillis() - resource.getTransferStartTime();
if (duration > 0) {
DecimalFormat format = new DecimalFormat("0.0", new DecimalFormatSymbols(Locale.ENGLISH));
double kbPerSec = (contentLength / 1024.0) / (duration / 1000.0);
throughput = " at " + format.format(kbPerSec) + " KB/sec";
}
out.println(type + ": " + resource.getRepositoryUrl() + resource.getResourceName() + " (" + len
+ throughput + ")");
}
}
@Override
public void transferFailed(TransferEvent event) {
transferCompleted(event);
//out.println("[WARNING] "+event.getException().getLocalizedMessage());
}
private void transferCompleted(TransferEvent event) {
downloads.remove(event.getResource());
if (event.getDataLength() > 0) {
StringBuilder buffer = new StringBuilder(64);
pad(buffer, lastLength);
buffer.append('\r');
out.print(buffer);
}
}
public void transferCorrupted(TransferEvent event) {
TransferResource resource = event.getResource();
out.println("[WARNING] " + event.getException().getMessage() + " for " +
resource.getRepositoryUrl() + resource.getResourceName());
}
protected long toKB(long bytes) {
return (bytes + 1023) / 1024;
}
}
|
apache-2.0
|
gw1018/imageservice
|
images/heatmap.js
|
1415
|
var Canvas = require('canvas');
var Image = Canvas.Image;
// Constants
var canvasWidth = 350;
var canvasHeight = 150;
var cellSize = 30;
var colorMap = {
0: 'rgba(0, 0, 0, 0)',
1: '#F7F6EA',
2: '#E6E6E1',
3: '#EBC000'
};
module.exports = generate;
function generate(title, accuracy) {
var canvas = new Canvas(canvasWidth, canvasHeight);
var ctx = canvas.getContext('2d');
drawTitle(ctx, title);
drawAccuracy(ctx, accuracy);
drawAccuracySub(ctx);
drawHeatMap(ctx);
return canvas.toBuffer();
};
function drawHeatMap(ctx) {
var grid = [
[ 1, 3, 3, 2 ],
[ 0, 1, 2, 1 ],
[ 0, 2, 3, 1 ]
];
var y = 60;
grid.forEach(function(r) {
var x = canvasWidth - cellSize;
r.reverse().forEach(function(c, j) {
drawCell(ctx, x - (j * 2), y, colorMap[c]);
x -= cellSize;
});
y += cellSize;
});
}
function drawCell(ctx, x, y, fillColor) {
ctx.fillStyle = fillColor;
ctx.fillRect(x, y, cellSize, cellSize);
}
function drawTitle(ctx, title) {
//var title = 'Profit and Discount drives sales with a';
ctx.font = '14px Kozuka Gothic Pro';
ctx.fillText(title, 20, 30);
}
function drawAccuracy(ctx, accuracy) {
accuracy += '%';
ctx.font = '60px Kozuka Gothic Pro';
ctx.fillText(accuracy, 20, 100);
}
function drawAccuracySub(ctx) {
var sub = 'driver strength';
ctx.font = '14px Kozuka Gothic Pro';
ctx.fillText(sub, 20, 125);
}
|
apache-2.0
|
beforeeight/cpush-apns
|
src/main/java/com/cyou/cpush/apns/notification/DefaultNotification.java
|
1207
|
package com.cyou.cpush.apns.notification;
import java.util.concurrent.atomic.AtomicInteger;
public class DefaultNotification implements Notification {
private static AtomicInteger IDENTIFIER_GENERATOR = new AtomicInteger(Integer.MAX_VALUE-1);
private int identifier;
private Device device;
private Payload payload;
public DefaultNotification(Device device, Payload payload) {
this(IDENTIFIER_GENERATOR.incrementAndGet(), device, payload);
}
public DefaultNotification(int identifier, Device device, Payload payload) {
this.identifier = identifier;
this.device = device;
this.payload = payload;
}
/*
* (non-Javadoc)
*
* @see com.cyou.cpush.apns.Notification#getDevice()
*/
@Override
public Device getDevice() {
return device;
}
/*
* (non-Javadoc)
*
* @see com.cyou.cpush.apns.Notification#getPayload()
*/
@Override
public Payload getPayload() {
return payload;
}
public void setDevice(Device device) {
this.device = device;
}
public void setPayload(Payload payload) {
this.payload = payload;
}
public void setIdentifier(int identifier) {
this.identifier = identifier;
}
@Override
public int getIdentifier() {
return identifier;
}
}
|
apache-2.0
|
budthapa/social-network
|
README.md
|
80
|
# social-network
Social Network application using Spring Boot, Thymeleaf, MySQL
|
apache-2.0
|
ApocalypsjeNL/OpenAudioMc
|
plugin/src/main/java/com/craftmend/openaudiomc/spigot/modules/proxy/service/ProxyNetworkingService.java
|
3602
|
package com.craftmend.openaudiomc.spigot.modules.proxy.service;
import com.craftmend.openaudiomc.generic.networking.DefaultNetworkingService;
import com.craftmend.openaudiomc.generic.networking.abstracts.AbstractPacket;
import com.craftmend.openaudiomc.generic.networking.client.objects.player.ClientConnection;
import com.craftmend.openaudiomc.generic.networking.interfaces.Authenticatable;
import com.craftmend.openaudiomc.generic.networking.interfaces.INetworkingEvents;
import com.craftmend.openaudiomc.generic.networking.interfaces.NetworkingService;
import com.craftmend.openaudiomc.generic.node.packets.ForwardSocketPacket;
import com.craftmend.openaudiomc.generic.player.SpigotPlayerAdapter;
import com.craftmend.openaudiomc.spigot.OpenAudioMcSpigot;
import com.craftmend.openaudiomc.spigot.modules.proxy.listeners.BungeePacketListener;
import com.craftmend.openaudiomc.api.velocitypluginmessageframework.PacketPlayer;
import com.craftmend.openaudiomc.api.velocitypluginmessageframework.implementations.BukkitPacketManager;
import lombok.Getter;
import net.md_5.bungee.api.connection.ProxiedPlayer;
import org.bukkit.entity.Player;
import java.util.*;
public class ProxyNetworkingService extends NetworkingService {
@Getter private final Set<INetworkingEvents> eventHandlers = new HashSet<>();
private final DefaultNetworkingService realService = new DefaultNetworkingService();
private final BukkitPacketManager packetManager;
public ProxyNetworkingService() {
packetManager = new BukkitPacketManager(OpenAudioMcSpigot.getInstance(), "openaudiomc:node");
packetManager.registerListener(new BungeePacketListener());
}
@Override
public void connectIfDown() {
// unused in fake system
}
@Override
public void send(Authenticatable client, AbstractPacket packet) {
// handle packet if it should be passed to bungee
// forward every packet starting with PacketClient
if (!(client instanceof ClientConnection)) throw new UnsupportedOperationException("The bungee adapter for the networking service only supports client connections");
if (packet.getClass().getSimpleName().startsWith("PacketClient")) {
packet.setClient(client.getOwnerUUID());
Player player = ((SpigotPlayerAdapter) ((ClientConnection) client).getPlayer()).getPlayer();
packetManager.sendPacket(new PacketPlayer(player), new ForwardSocketPacket(packet));
}
}
@Override
public void triggerPacket(AbstractPacket abstractPacket) {
// unused in fake system
}
@Override
public ClientConnection getClient(UUID uuid) {
return realService.getClient(uuid);
}
@Override
public Collection<ClientConnection> getClients() {
return realService.getClients();
}
@Override
public void remove(UUID player) {
realService.remove(player);
}
@Override
public ClientConnection register(Player player) {
return realService.register(player);
}
@Override
public ClientConnection register(ProxiedPlayer player) {
return realService.register(player);
}
public ClientConnection register(com.velocitypowered.api.proxy.Player player) {
return realService.register(player);
}
@Override
public void stop() {
// unused in fake system
}
@Override
public Set<INetworkingEvents> getEvents() {
return eventHandlers;
}
@Override
public void addEventHandler(INetworkingEvents events) {
eventHandlers.add(events);
}
}
|
apache-2.0
|