text
stringlengths 2
1.04M
| meta
dict |
---|---|
<?php declare (strict_types=1);
namespace Sabre\DAV\Auth\Backend;
use Sabre\HTTP;
class ApacheTest extends \PHPUnit_Framework_TestCase {

    /**
     * The backend can be constructed without any configuration.
     */
    function testConstruct() {

        $auth = new Apache();
        $this->assertInstanceOf('Sabre\DAV\Auth\Backend\Apache', $auth);

    }

    /**
     * Without any authentication information from the web server, check()
     * must report failure.
     */
    function testNoHeader() {

        $req = new HTTP\Request('GET', '/');
        $resp = new HTTP\Response();

        $result = (new Apache())->check($req, $resp);
        $this->assertFalse($result[0]);

    }

    /**
     * A REMOTE_USER server variable is mapped to a principal uri.
     */
    function testRemoteUser() {

        $serverVars = [
            'REQUEST_METHOD' => 'GET',
            'REQUEST_URI'    => '/',
            'REMOTE_USER'    => 'username',
        ];
        $req = HTTP\Sapi::createFromServerArray($serverVars);
        $resp = new HTTP\Response();

        $this->assertEquals(
            [true, 'principals/username'],
            (new Apache())->check($req, $resp)
        );

    }

    /**
     * The REDIRECT_REMOTE_USER server variable is honoured the same way.
     */
    function testRedirectRemoteUser() {

        $serverVars = [
            'REQUEST_METHOD'       => 'GET',
            'REQUEST_URI'          => '/',
            'REDIRECT_REMOTE_USER' => 'username',
        ];
        $req = HTTP\Sapi::createFromServerArray($serverVars);
        $resp = new HTTP\Response();

        $this->assertEquals(
            [true, 'principals/username'],
            (new Apache())->check($req, $resp)
        );

    }

    /**
     * After challenge(), no WWW-Authenticate header is present on the
     * response for this backend.
     */
    function testRequireAuth() {

        $req = new HTTP\Request('GET', '/');
        $resp = new HTTP\Response();

        (new Apache())->challenge($req, $resp);
        $this->assertNull(
            $resp->getHeader('WWW-Authenticate')
        );

    }

}
| {
"content_hash": "7cd21f4e08b54c01140c873dcf76f5c3",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 75,
"avg_line_length": 23.293333333333333,
"alnum_prop": 0.5231825987406984,
"repo_name": "c0d3z3r0/sabre-dav",
"id": "a8949ad7e6114a34accc4a9545d6e909bf15e471",
"size": "1747",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/Sabre/DAV/Auth/Backend/ApacheTest.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "17108"
},
{
"name": "PHP",
"bytes": "2093731"
},
{
"name": "Python",
"bytes": "12922"
},
{
"name": "Shell",
"bytes": "56"
}
],
"symlink_target": ""
} |
package com.mateuszkoslacz.moviper.recyclerview.butterknife;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() throws Exception {
assertEquals(4, 2 + 2);
}
} | {
"content_hash": "6dbeb191a96df28741af7bbc859c2b60",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 81,
"avg_line_length": 25.235294117647058,
"alnum_prop": 0.7086247086247086,
"repo_name": "mkoslacz/Moviper",
"id": "558b4acdf6564d3869b1b748e352243beb7db398",
"size": "429",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "moviper-recyclerview-butterknife/src/test/java/com/mateuszkoslacz/moviper/recyclerview/butterknife/ExampleUnitTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "708328"
},
{
"name": "Kotlin",
"bytes": "23054"
}
],
"symlink_target": ""
} |
package org.cellprofiler.knimebridge.message;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonObject;
import javax.json.JsonReader;
import javax.json.JsonString;
import javax.json.JsonValue;
import org.cellprofiler.knimebridge.FeatureDescriptionImpl;
import org.cellprofiler.knimebridge.IFeatureDescription;
import org.cellprofiler.knimebridge.KBConstants;
import org.cellprofiler.knimebridge.PipelineException;
import org.cellprofiler.knimebridge.ProtocolException;
import org.zeromq.ZMQ.Socket;
import org.zeromq.ZMsg;
/**
* A reply to a pipeline info request
*
* @author Lee Kamentsky
*
* The PipelineInfoReply reports the required inputs,
* and the output measurements for the pipeline passed
* through PipelineInfoReq.
*
* Format of a PipelineInfoReply:
*
* msgName
* body: Json string
* Array of channel names
* Array of type names
* Dictionary of segmentation name to array of 2-tuples
* 2-tuple is feature name, index of type name
*
*/
public class PipelineInfoReply extends AbstractReply {
    private static final String msgName = "pipeline-info-reply-1";
    // Immutable views installed by setInfo() after a successful parse.
    private List<String> channels;
    private List<String> objects;
    private Map<String, List<IFeatureDescription>> objectFeatures;

    protected PipelineInfoReply() {
    }

    /**
     * Install the information retrieved from the server.
     *
     * @param channels the names of the image inputs to the pipeline
     * @param objectFeatures a map of object name to features produced
     */
    protected void setInfo(List<String> channels, Map<String, List<IFeatureDescription>> objectFeatures) {
        this.channels = Collections.unmodifiableList(channels);
        this.objectFeatures = Collections.unmodifiableMap(objectFeatures);
        // Per-image features are exposed via getImageFeatureDescriptions(),
        // so the image pseudo-object is excluded from the segmentation list.
        final Set<String> objects = new HashSet<String>(objectFeatures.keySet());
        objects.remove(KBConstants.IMAGE);
        this.objects = Collections.unmodifiableList(new ArrayList<String>(objects));
    }

    /**
     * @return the names of the image channels that need to be supplied to run()
     */
    public List<String> getChannels() {
        return this.channels;
    }

    /**
     * @return the names of the segmentations
     */
    public List<String> getObjects() {
        return objects;
    }

    /**
     * @param object the name of the segmentation
     * @return the per-object features; an empty list for unknown names
     */
    public List<IFeatureDescription> getFeatureDescriptions(String object) {
        if (! objectFeatures.containsKey(object))
            return Collections.emptyList();
        return Collections.unmodifiableList(this.objectFeatures.get(object));
    }

    /**
     * @return the per-image features
     */
    public List<IFeatureDescription> getImageFeatureDescriptions() {
        return getFeatureDescriptions(KBConstants.IMAGE);
    }

    /**
     * Receive a reply to a PipelineInfoReq.
     *
     * @param socket read the reply message on this socket
     * @return a PipelineInfoReply containing the inputs and outputs of the pipeline
     *
     * @throws ProtocolException if the client could not understand the server
     * @throws PipelineException if the pipeline could not be loaded
     */
    public static PipelineInfoReply recvReply(Socket socket) throws ProtocolException, PipelineException {
        PipelineInfoReply reply = new PipelineInfoReply();
        reply.recvNoCPException(socket);
        return reply;
    }

    @Override
    protected void parse(ZMsg reply) throws ProtocolException {
        final String body = popString(reply);
        if (body == null) {
            throw new ProtocolException("Pipeline info reply is missing its body");
        }
        // Body layout (see class javadoc):
        //   [ [channel...], [type name...], { object: [[feature, typeIdx]...] } ]
        final JsonArray wrapper;
        final JsonReader rdr = Json.createReader(new StringReader(body));
        try {
            wrapper = rdr.readArray();
        } finally {
            // BUGFIX: the reader was previously never closed; release its
            // resources even when readArray() throws on malformed JSON.
            rdr.close();
        }
        final JsonArray aChannels = wrapper.getJsonArray(0);
        if (aChannels == null) {
            throw new ProtocolException("Pipeline info is missing channel list");
        }
        final List<String> channels = new ArrayList<String>(aChannels.size());
        for (JsonValue v:aChannels) {
            if (!(v instanceof JsonString))
                throw new ProtocolException(String.format("Expected channel to be a String, was a %s", v.getValueType()));
            channels.add(((JsonString)v).getString());
        }
        final JsonArray aTypes = wrapper.getJsonArray(1);
        if (aTypes == null) {
            throw new ProtocolException("Pipeline info is missing list of types");
        }
        final List<Class<?>> types = new ArrayList<Class<?>>(aTypes.size());
        for (JsonValue v:aTypes) {
            if (!(v instanceof JsonString))
                // BUGFIX: message used to say "channel" (copy-paste from the
                // loop above) although this loop validates the type list.
                throw new ProtocolException(String.format("Expected type to be a String, was a %s", v.getValueType()));
            try {
                types.add(Class.forName(((JsonString)v).getString()));
            } catch (ClassNotFoundException e) {
                throw new ProtocolException(e.getMessage());
            }
        }
        Map<String, List<IFeatureDescription>> objectFeatures =
                new Hashtable<String, List<IFeatureDescription>>();
        final JsonObject sObjectFeatures = wrapper.getJsonObject(2);
        if (sObjectFeatures == null) {
            throw new ProtocolException("Pipeline info is missing feature list");
        }
        for (String key:sObjectFeatures.keySet()) {
            final JsonArray ofTuples = sObjectFeatures.getJsonArray(key);
            if (ofTuples == null) {
                throw new ProtocolException(String.format("Segmentation %s is missing its features", key));
            }
            List<IFeatureDescription> features = new ArrayList<IFeatureDescription>(ofTuples.size());
            objectFeatures.put(key, features);
            for (JsonValue v:ofTuples) {
                if (! (v instanceof JsonArray)) {
                    throw new ProtocolException(String.format("Expected Json array, got %s", v.getValueType().toString()));
                }
                // Each tuple is (feature name, index into the type list).
                final JsonArray ofTuple = (JsonArray)v;
                if (ofTuple.size() < 2) {
                    throw new ProtocolException("Expected 2-tuple for feature description");
                }
                final String name = ofTuple.getString(0);
                final int typeIdx = ofTuple.getInt(1);
                if (typeIdx >= types.size()) {
                    throw new ProtocolException(String.format("Got out of bounds type index: %d", typeIdx));
                }
                Class<?> type = types.get(typeIdx);
                addFeature(key, features, name, type);
            }
        }
        setInfo(channels, objectFeatures);
    }

    // Small factory helper: wraps the tuple into a FeatureDescriptionImpl.
    private static void addFeature(String key,
            List<IFeatureDescription> features, final String name,
            Class<?> type) {
        features.add(new FeatureDescriptionImpl(key, name, type));
    }

    @Override
    protected String getMsgName() {
        return msgName;
    }
}
| {
"content_hash": "a7bdaabbaa9e50847b682f474837f6c9",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 110,
"avg_line_length": 33.873684210526314,
"alnum_prop": 0.7316656308266004,
"repo_name": "CellProfiler/knime-bridge",
"id": "c0714c7cad20b506d12985cf6627ac5f5348df38",
"size": "6566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/org/cellprofiler/knimebridge/message/PipelineInfoReply.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "131067"
}
],
"symlink_target": ""
} |
template <typename T, size_t N, size_t... Elems>
struct swizzle;

namespace typefu {

// Variadic generalisation of std::is_arithmetic: true iff every argument
// type is arithmetic. A swizzle<T, N, A> counts as its element type T.
template <typename T, typename... Ts>
struct is_arithmetic {
  static const bool value =
      is_arithmetic<T>::value && is_arithmetic<Ts...>::value;
};
template <typename T, size_t N, size_t A>
struct is_arithmetic<swizzle<T, N, A>> {
  static const bool value = std::is_arithmetic<T>::value;
};
template <typename T>
struct is_arithmetic<T> {
  static const bool value = std::is_arithmetic<T>::value;
};

// Variadic generalisation of std::is_floating_point (swizzles use T).
template <typename T, typename... Ts>
struct is_floating_point {
  static const bool value =
      is_floating_point<T>::value && is_floating_point<Ts...>::value;
};
template <typename T, size_t N, size_t A>
struct is_floating_point<swizzle<T, N, A>> {
  static const bool value = std::is_floating_point<T>::value;
};
template <typename T>
struct is_floating_point<T> {
  static const bool value = std::is_floating_point<T>::value;
};

// Variadic generalisation of std::is_integral (swizzles use T).
template <typename T, typename... Ts>
struct is_integral {
  static const bool value =
      is_integral<T>::value && is_integral<Ts...>::value;
};
template <typename T, size_t N, size_t A>
struct is_integral<swizzle<T, N, A>> {
  static const bool value = std::is_integral<T>::value;
};
template <typename T>
struct is_integral<T> {
  static const bool value = std::is_integral<T>::value;
};

// Variadic generalisation of std::is_signed (swizzles use T).
template <typename T, typename... Ts>
struct is_signed {
  static const bool value =
      is_signed<T>::value && is_signed<Ts...>::value;
};
template <typename T, size_t N, size_t A>
struct is_signed<swizzle<T, N, A>> {
  static const bool value = std::is_signed<T>::value;
};
template <typename T>
struct is_signed<T> {
  static const bool value = std::is_signed<T>::value;
};

// Shorthand: true iff every argument is both signed and integral.
template <typename... Ts>
struct is_signed_integral {
  static const bool value =
      is_signed<Ts...>::value && is_integral<Ts...>::value;
};

// Variadic generalisation of std::is_unsigned (swizzles use T).
template <typename T, typename... Ts>
struct is_unsigned {
  static const bool value =
      is_unsigned<T>::value && is_unsigned<Ts...>::value;
};
template <typename T, size_t N, size_t A>
struct is_unsigned<swizzle<T, N, A>> {
  static const bool value = std::is_unsigned<T>::value;
};
template <typename T>
struct is_unsigned<T> {
  static const bool value = std::is_unsigned<T>::value;
};

// Variadic, reverse-ordered variant of std::is_convertible: true iff
// every argument type 'From' is convertible to the first type 'To'.
// NOTE the argument order is (To, From...) -- the opposite of std's.
template <typename To, typename From, typename... Froms>
struct is_convertible {
  static const bool value =
      is_convertible<To, From>::value && is_convertible<To, Froms...>::value;
};
template <typename To, typename From>
struct is_convertible<To, From> {
  static const bool value = std::is_convertible<From, To>::value;
};

}
| {
"content_hash": "fcecf4b9377667b468334ebf2b2446cc",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 77,
"avg_line_length": 29.107142857142858,
"alnum_prop": 0.65920245398773,
"repo_name": "Thhethssmuz/mmm",
"id": "5b2a78fef5b88b8f2623da538990feeb13364c5c",
"size": "3274",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/typefu/traits.hpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "338799"
},
{
"name": "CMake",
"bytes": "1920"
},
{
"name": "Shell",
"bytes": "812"
}
],
"symlink_target": ""
} |
#include <cstddef>
#include <gmp.h>
#include "util/memory.h"
#include "util/numerics/mpz.h"
#include "util/numerics/mpq.h"
#include "util/numerics/mpbq.h"
#include "util/numerics/mpfp.h"
#include "util/numerics/zpz.h"
#include "util/numerics/primes.h"
namespace lean {
// C-linkage adapters that let GMP route its memory management through
// Lean's allocator (lean::malloc / lean::realloc / lean::free).
extern "C" void * cxx_malloc(size_t size) { return lean::malloc(size); }
// GMP also passes the old block size (second parameter); lean::realloc
// does not need it, so it is unnamed and ignored.
extern "C" void * cxx_realloc(void * q, size_t, size_t new_size) { return lean::realloc(q, new_size); }
extern "C" void cxx_free(void * p, size_t) { return lean::free(p); }
// Initializes all numerics submodules. GMP's allocation hooks are
// installed first so every subsequent GMP allocation goes through
// Lean's allocator.
void initialize_numerics_module() {
mp_set_memory_functions(cxx_malloc, cxx_realloc, cxx_free);
initialize_mpz();
initialize_mpq();
initialize_mpbq();
initialize_mpfp();
initialize_zpz();
initialize_primes();
}
// Finalizes the submodules in the reverse order of initialization.
void finalize_numerics_module() {
finalize_primes();
finalize_zpz();
finalize_mpfp();
finalize_mpbq();
finalize_mpq();
finalize_mpz();
}
}
| {
"content_hash": "f243a98dc2fe9721cef47cc3e065f8c2",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 103,
"avg_line_length": 26.314285714285713,
"alnum_prop": 0.6655808903365906,
"repo_name": "sp3ctum/lean",
"id": "4ad081f3e43f606e5161ba65321b31aeb0722b4e",
"size": "1084",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "src/util/numerics/init_module.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "134"
},
{
"name": "C",
"bytes": "552"
},
{
"name": "C++",
"bytes": "3706838"
},
{
"name": "CMake",
"bytes": "86218"
},
{
"name": "Cucumber",
"bytes": "278"
},
{
"name": "Emacs Lisp",
"bytes": "221050"
},
{
"name": "Lean",
"bytes": "2820063"
},
{
"name": "Lex",
"bytes": "79"
},
{
"name": "Lua",
"bytes": "134720"
},
{
"name": "Makefile",
"bytes": "185"
},
{
"name": "Perl",
"bytes": "5792"
},
{
"name": "Python",
"bytes": "48235"
},
{
"name": "Shell",
"bytes": "20016"
},
{
"name": "TeX",
"bytes": "10138"
}
],
"symlink_target": ""
} |
package org.apache.camel.dataformat.univocity;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.apache.camel.EndpointInject;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.spi.DataFormat;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.junit.jupiter.api.Test;
import static org.apache.camel.dataformat.univocity.UniVocityTestHelper.asMap;
import static org.apache.camel.dataformat.univocity.UniVocityTestHelper.join;
import static org.apache.camel.test.junit5.TestSupport.assertIsInstanceOf;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
* This class tests the marshalling of {@link org.apache.camel.dataformat.univocity.UniVocityCsvDataFormat}.
*/
public final class UniVocityCsvDataFormatMarshalTest extends CamelTestSupport {
    @EndpointInject("mock:result")
    MockEndpoint result;

    /** Extracts the single marshalled message body as a String. */
    private String marshalledBody() {
        return assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody());
    }

    /**
     * Marshalling with the default configuration writes one CSV line per map.
     */
    @Test
    public void shouldMarshalWithDefaultConfiguration() throws Exception {
        template.sendBody("direct:default", Arrays.asList(
                asMap("A", "1", "B", "2", "C", "3"),
                asMap("A", "one", "B", "two", "C", "three")));

        result.expectedMessageCount(1);
        result.assertIsSatisfied();

        assertEquals(join("1,2,3", "one,two,three"), marshalledBody());
    }

    /**
     * A single map (not wrapped in a list) marshals to a single CSV line.
     */
    @Test
    public void shouldMarshalSingleLine() throws Exception {
        template.sendBody("direct:default", asMap("A", "1", "B", "2", "C", "3"));

        result.expectedMessageCount(1);
        result.assertIsSatisfied();

        assertEquals(join("1,2,3"), marshalledBody());
    }

    /**
     * Columns appearing only in later rows are added on the fly, keeping
     * the order in which they were first seen.
     */
    @Test
    public void shouldMarshalAndAddNewColumns() throws Exception {
        template.sendBody("direct:default", Arrays.asList(
                asMap("A", "1", "B", "2"),
                asMap("C", "three", "A", "one", "B", "two")));

        result.expectedMessageCount(1);
        result.assertIsSatisfied();

        assertEquals(join("1,2", "one,two,three"), marshalledBody());
    }

    /**
     * Only the explicitly configured headers are written.
     */
    @Test
    public void shouldMarshalWithSpecificHeaders() throws Exception {
        template.sendBody("direct:header", Arrays.asList(
                asMap("A", "1", "B", "2", "C", "3"),
                asMap("A", "one", "B", "two", "C", "three")));

        result.expectedMessageCount(1);
        result.assertIsSatisfied();

        assertEquals(join("1,3", "one,three"), marshalledBody());
    }

    /**
     * Null substitution, empty substitution, quoting and a custom delimiter
     * are all honoured by the advanced configuration.
     */
    @Test
    public void shouldMarshalUsingAdvancedConfiguration() throws Exception {
        template.sendBody("direct:advanced", Arrays.asList(
                asMap("A", null, "B", "", "C", "_"),
                asMap("A", "one", "B", "two", "C", "three")));

        result.expectedMessageCount(1);
        result.assertIsSatisfied();

        assertEquals(join("_N/A_;_empty_;_-__", "_one_;_two_;_three_"), marshalledBody());
    }

    @Override
    protected RouteBuilder createRouteBuilder() {
        final Map<String, DataFormat> formats = new HashMap<>();

        // Plain CSV writing with default settings.
        formats.put("default", new UniVocityCsvDataFormat());

        // Restrict output to an explicit set of headers.
        formats.put("header", new UniVocityCsvDataFormat()
                .setHeaders(new String[] { "A", "C" }));

        // Exercise null/empty substitution, quoting and a custom delimiter.
        formats.put("advanced", new UniVocityCsvDataFormat()
                .setNullValue("N/A")
                .setEmptyValue("empty")
                .setQuote('_')
                .setQuoteAllFields(true)
                .setQuoteEscape('-')
                .setDelimiter(';'));

        // One direct:<name> route per configured data format.
        return new RouteBuilder() {
            @Override
            public void configure() {
                for (Map.Entry<String, DataFormat> route : formats.entrySet()) {
                    from("direct:" + route.getKey()).marshal(route.getValue()).convertBodyTo(String.class).to("mock:result");
                }
            }
        };
    }
}
| {
"content_hash": "5de83b2b89516628e2026309ea460b49",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 123,
"avg_line_length": 36.11940298507463,
"alnum_prop": 0.6179752066115702,
"repo_name": "cunningt/camel",
"id": "e9cc0ffb21af5c6f618852b40dd2bab96ca26624",
"size": "5642",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "components/camel-univocity-parsers/src/test/java/org/apache/camel/dataformat/univocity/UniVocityCsvDataFormatMarshalTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Apex",
"bytes": "6695"
},
{
"name": "Batchfile",
"bytes": "2353"
},
{
"name": "CSS",
"bytes": "5472"
},
{
"name": "Dockerfile",
"bytes": "5676"
},
{
"name": "Elm",
"bytes": "10852"
},
{
"name": "FreeMarker",
"bytes": "8015"
},
{
"name": "Groovy",
"bytes": "396363"
},
{
"name": "HTML",
"bytes": "212954"
},
{
"name": "Java",
"bytes": "113234282"
},
{
"name": "JavaScript",
"bytes": "103655"
},
{
"name": "Jsonnet",
"bytes": "1734"
},
{
"name": "Kotlin",
"bytes": "41869"
},
{
"name": "Mustache",
"bytes": "525"
},
{
"name": "RobotFramework",
"bytes": "8461"
},
{
"name": "Ruby",
"bytes": "88"
},
{
"name": "Shell",
"bytes": "15221"
},
{
"name": "Tcl",
"bytes": "4974"
},
{
"name": "Thrift",
"bytes": "6979"
},
{
"name": "XQuery",
"bytes": "699"
},
{
"name": "XSLT",
"bytes": "276597"
}
],
"symlink_target": ""
} |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package es.ua.dlsi.probabilitiesfromhmm;
import es.ua.dlsi.suffixtree.Node;
import java.util.Set;
/**
*
* @author miquel
*/
public class StoredNode{
public StoredNode(Node current_node, Set<String> visited_paradigms,
Set<String> possible_paradigms){
this.current_node=current_node;
this.visited_paradigms=visited_paradigms;
this.possible_paradigms=possible_paradigms;
}
public Node current_node;
public Set<String> visited_paradigms;
public Set<String> possible_paradigms;
} | {
"content_hash": "cf24590637a6878aaed035fd681bcf28",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 71,
"avg_line_length": 23.107142857142858,
"alnum_prop": 0.6924265842349304,
"repo_name": "abumatran/pat",
"id": "86ce9751a36ade520f801c401adec5a2e254a3f3",
"size": "647",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DictionaryAnalyser/src/es/ua/dlsi/probabilitiesfromhmm/StoredNode.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Erlang",
"bytes": "40"
},
{
"name": "Groff",
"bytes": "228784"
},
{
"name": "Java",
"bytes": "599429"
},
{
"name": "M4",
"bytes": "1228"
},
{
"name": "Makefile",
"bytes": "67538"
},
{
"name": "Python",
"bytes": "28424"
},
{
"name": "Shell",
"bytes": "43019"
},
{
"name": "XSLT",
"bytes": "7247"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "d178f214fde23d26565aa2d7bcc3570a",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "be301817da2ce9afa952f4279f2d656cfa66e218",
"size": "205",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Saxifragales/Crassulaceae/Crassula/Crassula capensis/Crassula capensis albertiniae/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package org.apache.tools.ant.gui.event;
import org.apache.tools.ant.gui.acs.ACSElement;
import org.apache.tools.ant.gui.acs.ACSTaskElement;
import org.apache.tools.ant.gui.core.AppContext;
/**
* Event fired when one or more tasks are selected.
*
* @version $Revision: 1.1.1.1 $
* @author Simeon Fitch
*/
public class TaskSelectionEvent extends ElementSelectionEvent {

    /**
     * Standard constructor.
     *
     * @param context application context.
     * @param selected the selected elements.
     */
    public TaskSelectionEvent(AppContext context, ACSElement[] selected) {
        super(context, selected);
    }

    /**
     * Get the task elements contained in the current selection.
     *
     * @return the selected tasks.
     */
    public ACSTaskElement[] getSelectedTasks() {
        return (ACSTaskElement[]) getFiltered(ACSTaskElement.class);
    }
}
| {
"content_hash": "99e97ba40d7bcbcfa57cde3f1c59094a",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 68,
"avg_line_length": 25.12121212121212,
"alnum_prop": 0.6562123039806996,
"repo_name": "deepakalur/acre",
"id": "49c31e6bda17aa81fc7324fa9c0f5ffe2113d213",
"size": "3380",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "external/ant-antidote/src/java/org/apache/tools/ant/gui/event/TaskSelectionEvent.java",
"mode": "33261",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
namespace net {
// Abstract interface for the encryption half of a QUIC packet-protection
// algorithm: configure key material with SetKey()/SetNoncePrefix(), then
// encrypt per-packet with Encrypt() or EncryptPacket().
class NET_EXPORT_PRIVATE QuicEncrypter {
 public:
  virtual ~QuicEncrypter() {}

  // Factory for the encrypter implementing the algorithm identified by
  // |algorithm|. (Behaviour for unrecognized tags is not visible here;
  // confirm against the implementation.)
  static QuicEncrypter* Create(QuicTag algorithm);

  // Sets the encryption key. Returns true on success, false on failure.
  //
  // NOTE: The key is the client_write_key or server_write_key derived from
  // the master secret.
  virtual bool SetKey(base::StringPiece key) = 0;

  // Sets the fixed initial bytes of the nonce. Returns true on success,
  // false on failure.
  //
  // NOTE: The nonce prefix is the client_write_iv or server_write_iv
  // derived from the master secret. A 64-bit packet sequence number will
  // be appended to form the nonce.
  //
  //                          <------------ 64 bits ----------->
  //   +---------------------+----------------------------------+
  //   |    Fixed prefix     |      Packet sequence number      |
  //   +---------------------+----------------------------------+
  //                          Nonce format
  //
  // The security of the nonce format requires that QUIC never reuse a
  // packet sequence number, even when retransmitting a lost packet.
  virtual bool SetNoncePrefix(base::StringPiece nonce_prefix) = 0;

  // Encrypt encrypts |plaintext| and writes the ciphertext, plus a MAC over
  // both |associated_data| and |plaintext| to |output|, using |nonce| as the
  // nonce. |nonce| must be |8+GetNoncePrefixSize()| bytes long and |output|
  // must point to a buffer that is at least
  // |GetCiphertextSize(plaintext.size()| bytes long.
  virtual bool Encrypt(base::StringPiece nonce,
                       base::StringPiece associated_data,
                       base::StringPiece plaintext,
                       unsigned char* output) = 0;

  // Returns a newly created QuicData object containing the encrypted
  // |plaintext| as well as a MAC over both |plaintext| and |associated_data|,
  // or nullptr if there is an error. |sequence_number| is appended to the
  // |nonce_prefix| value provided in SetNoncePrefix() to form the nonce.
  virtual QuicData* EncryptPacket(QuicPacketSequenceNumber sequence_number,
                                  base::StringPiece associated_data,
                                  base::StringPiece plaintext) = 0;

  // GetKeySize() and GetNoncePrefixSize() tell the HKDF class how many bytes
  // of key material needs to be derived from the master secret.
  // NOTE: the sizes returned by GetKeySize() and GetNoncePrefixSize() are
  // also correct for the QuicDecrypter of the same algorithm. So only
  // QuicEncrypter has these two methods.

  // Returns the size in bytes of a key for the algorithm.
  virtual size_t GetKeySize() const = 0;

  // Returns the size in bytes of the fixed initial part of the nonce.
  virtual size_t GetNoncePrefixSize() const = 0;

  // Returns the maximum length of plaintext that can be encrypted
  // to ciphertext no larger than |ciphertext_size|.
  virtual size_t GetMaxPlaintextSize(size_t ciphertext_size) const = 0;

  // Returns the length of the ciphertext that would be generated by encrypting
  // to plaintext of size |plaintext_size|.
  virtual size_t GetCiphertextSize(size_t plaintext_size) const = 0;

  // For use by unit tests only.
  virtual base::StringPiece GetKey() const = 0;
  virtual base::StringPiece GetNoncePrefix() const = 0;
};
} // namespace net
#endif // NET_QUIC_CRYPTO_QUIC_ENCRYPTER_H_
| {
"content_hash": "62b110858496ed1cc9682599d0be6a4b",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 79,
"avg_line_length": 44.526315789473685,
"alnum_prop": 0.650709219858156,
"repo_name": "jaruba/chromium.src",
"id": "5cb40a1fa5afbe136ece0ad1acebf5639784dc8c",
"size": "3709",
"binary": false,
"copies": "11",
"ref": "refs/heads/nw12",
"path": "net/quic/crypto/quic_encrypter.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "34522"
},
{
"name": "Batchfile",
"bytes": "8451"
},
{
"name": "C",
"bytes": "9252710"
},
{
"name": "C++",
"bytes": "222931723"
},
{
"name": "CSS",
"bytes": "875874"
},
{
"name": "Dart",
"bytes": "74976"
},
{
"name": "Go",
"bytes": "18155"
},
{
"name": "HTML",
"bytes": "27190037"
},
{
"name": "Java",
"bytes": "7645280"
},
{
"name": "JavaScript",
"bytes": "18828195"
},
{
"name": "Makefile",
"bytes": "96270"
},
{
"name": "Objective-C",
"bytes": "1226550"
},
{
"name": "Objective-C++",
"bytes": "7575073"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "PLpgSQL",
"bytes": "248854"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "418340"
},
{
"name": "Python",
"bytes": "8032766"
},
{
"name": "Shell",
"bytes": "464218"
},
{
"name": "Standard ML",
"bytes": "4965"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "18335"
}
],
"symlink_target": ""
} |
package org.apache.jackrabbit.oak.plugins.segment;
import static junit.framework.Assert.assertEquals;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import java.util.Calendar;
import java.util.Collections;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;
import org.apache.jackrabbit.oak.plugins.segment.memory.MemoryStore;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.util.ISO8601;
import org.junit.Test;
import com.google.common.collect.ImmutableList;
/**
* Test case for ensuring that segment size remains within bounds.
*/
public class SegmentSizeTest {
@Test
public void testNodeSize() {
    // Empty node.
    NodeBuilder nb = EMPTY_NODE.builder();
    assertEquals(48, getSize(nb));
    assertEquals(4, getAmortizedSize(nb));

    // Single string property.
    nb = EMPTY_NODE.builder();
    nb.setProperty("foo", "bar");
    assertEquals(48, getSize(nb));
    assertEquals(8, getAmortizedSize(nb));

    // Two properties of different types.
    nb = EMPTY_NODE.builder();
    nb.setProperty("foo", "bar");
    nb.setProperty("baz", 123);
    assertEquals(80, getSize(nb));
    assertEquals(16, getAmortizedSize(nb));

    // Single child node.
    nb = EMPTY_NODE.builder();
    nb.child("foo");
    assertEquals(64, getSize(nb));
    assertEquals(12, getAmortizedSize(nb));

    // Two child nodes.
    nb = EMPTY_NODE.builder();
    nb.child("foo");
    nb.child("bar");
    assertEquals(96, getSize(nb));
    assertEquals(40, getAmortizedSize(nb));
}
@Test
public void testDuplicateStrings() {
    String content = "More than just a few bytes of example content.";

    SegmentWriter segmentWriter = new MemoryStore().getTracker().getWriter();
    SegmentNodeBuilder nodeBuilder = segmentWriter.writeNode(EMPTY_NODE).builder();

    nodeBuilder.setProperty(PropertyStates.createProperty(
            "test", Collections.nCopies(1, content), Type.STRINGS));
    RecordId single = nodeBuilder.getNodeState().getRecordId();

    // Growing the list to 12 copies should only cost list overhead plus one
    // record id per copy -- the string value itself must be deduplicated.
    nodeBuilder.setProperty(PropertyStates.createProperty(
            "test", Collections.nCopies(12, content), Type.STRINGS));
    RecordId dozen = nodeBuilder.getNodeState().getRecordId();
    assertEquals(16 + 12 * Segment.RECORD_ID_BYTES,
            single.getOffset() - dozen.getOffset());

    nodeBuilder.setProperty(PropertyStates.createProperty(
            "test", Collections.nCopies(100, content), Type.STRINGS));
    RecordId hundred = nodeBuilder.getNodeState().getRecordId();
    assertEquals(16 + 100 * Segment.RECORD_ID_BYTES,
            dozen.getOffset() - hundred.getOffset());
}
@Test
public void testDuplicateDates() {
    String timestamp = ISO8601.format(Calendar.getInstance());

    SegmentWriter segmentWriter = new MemoryStore().getTracker().getWriter();
    SegmentNodeBuilder nodeBuilder = segmentWriter.writeNode(EMPTY_NODE).builder();

    nodeBuilder.setProperty(PropertyStates.createProperty(
            "test", Collections.nCopies(1, timestamp), Type.DATES));
    RecordId single = nodeBuilder.getNodeState().getRecordId();

    // As with strings, repeated date values should be deduplicated: growth
    // is list overhead plus one record id per copy.
    nodeBuilder.setProperty(PropertyStates.createProperty(
            "test", Collections.nCopies(12, timestamp), Type.DATES));
    RecordId dozen = nodeBuilder.getNodeState().getRecordId();
    assertEquals(16 + 12 * Segment.RECORD_ID_BYTES,
            single.getOffset() - dozen.getOffset());

    nodeBuilder.setProperty(PropertyStates.createProperty(
            "test", Collections.nCopies(100, timestamp), Type.DATES));
    RecordId hundred = nodeBuilder.getNodeState().getRecordId();
    assertEquals(16 + 100 * Segment.RECORD_ID_BYTES,
            dozen.getOffset() - hundred.getOffset());
}
@Test
public void testAccessControlNodes() {
    // Sizes below are cumulative: each section adds to the same builder.
    NodeBuilder acl = EMPTY_NODE.builder();
    acl.setProperty("jcr:primaryType", "rep:ACL", Type.NAME);
    assertEquals(48, getSize(acl));
    assertEquals(4, getAmortizedSize(acl));

    NodeBuilder deny = acl.child("deny");
    deny.setProperty("jcr:primaryType", "rep:DenyACE", Type.NAME);
    deny.setProperty("rep:principalName", "everyone");
    deny.setProperty(PropertyStates.createProperty(
            "rep:privileges", ImmutableList.of("jcr:read"), Type.NAMES));
    assertEquals(176, getSize(acl));
    assertEquals(32, getAmortizedSize(acl));

    NodeBuilder allow = acl.child("allow");
    allow.setProperty("jcr:primaryType", "rep:GrantACE");
    allow.setProperty("rep:principalName", "administrators");
    allow.setProperty(PropertyStates.createProperty(
            "rep:privileges", ImmutableList.of("jcr:all"), Type.NAMES));
    assertEquals(320, getSize(acl));
    assertEquals(84, getAmortizedSize(acl));

    NodeBuilder deny0 = acl.child("deny0");
    deny0.setProperty("jcr:primaryType", "rep:DenyACE", Type.NAME);
    deny0.setProperty("rep:principalName", "everyone");
    deny0.setProperty("rep:glob", "*/activities/*");
    // NOTE(review): this sets rep:privileges on the root ACL node rather
    // than on deny0 -- possibly intentional for the size accounting, but
    // worth confirming against the original intent.
    acl.setProperty(PropertyStates.createProperty(
            "rep:privileges", ImmutableList.of("jcr:read"), Type.NAMES));
    assertEquals(416, getSize(acl));
    assertEquals(124, getAmortizedSize(acl));

    NodeBuilder allow0 = acl.child("allow0");
    allow0.setProperty("jcr:primaryType", "rep:GrantACE");
    allow0.setProperty("rep:principalName", "user-administrators");
    allow0.setProperty(PropertyStates.createProperty(
            "rep:privileges", ImmutableList.of("jcr:all"), Type.NAMES));
    assertEquals(480, getSize(acl));
    assertEquals(160, getAmortizedSize(acl));
}
@Test
public void testFlatNodeUpdate() {
SegmentStore store = new MemoryStore();
SegmentWriter writer = store.getTracker().getWriter();
NodeBuilder builder = EMPTY_NODE.builder();
for (int i = 0; i < 1000; i++) {
builder.child("child" + i);
}
SegmentNodeState state = writer.writeNode(builder.getNodeState());
writer.flush();
Segment segment = store.readSegment(state.getRecordId().getSegmentId());
assertEquals(27520, segment.size());
writer.flush(); // force flushing of the previous segment
builder = state.builder();
builder.child("child1000");
state = writer.writeNode(builder.getNodeState());
writer.flush();
segment = store.readSegment(state.getRecordId().getSegmentId());
assertEquals(496, segment.size());
}
private int getSize(NodeBuilder builder) {
SegmentWriter writer = new MemoryStore().getTracker().getWriter();
RecordId id = writer.writeNode(builder.getNodeState()).getRecordId();
writer.flush();
return id.getSegment().size();
}
private int getAmortizedSize(NodeBuilder builder) {
SegmentWriter writer = new MemoryStore().getTracker().getWriter();
NodeState state = builder.getNodeState();
RecordId id1 = writer.writeNode(state).getRecordId();
RecordId id2 = writer.writeNode(state).getRecordId();
return id1.getOffset() - id2.getOffset();
}
}
| {
"content_hash": "cd9bcd12105b4b4cadb86b6edf4605b2",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 81,
"avg_line_length": 40.28415300546448,
"alnum_prop": 0.6576234400434074,
"repo_name": "ieb/jackrabbit-oak",
"id": "6ab0deeb4c3f805154ef34b2edff4c840c60dc5d",
"size": "8174",
"binary": false,
"copies": "6",
"ref": "refs/heads/0dt",
"path": "oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/SegmentSizeTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3451"
},
{
"name": "Groovy",
"bytes": "99009"
},
{
"name": "Java",
"bytes": "15799095"
},
{
"name": "JavaScript",
"bytes": "42583"
},
{
"name": "Perl",
"bytes": "7585"
},
{
"name": "Shell",
"bytes": "17311"
}
],
"symlink_target": ""
} |
#include "facekit/model/orthographic_projection.hpp"
/**
* @namespace FaceKit
* @brief Development space
*/
namespace FaceKit {
#pragma mark -
#pragma mark Initialization
/*
 * @name  OrthographicProjection
 * @fn  OrthographicProjection(const T focal, const T width, const T height)
 * @brief Constructor. Places the principal point at the image center.
 * @param[in] focal   Focal length. NOTE(review): this parameter is currently
 *                    ignored — focal_ is hard-coded to 1 below, presumably
 *                    because an orthographic camera has no focal scaling.
 *                    Confirm this is intended.
 * @param[in] width   Image width (pixels)
 * @param[in] height  Image height (pixels)
 */
template<typename T>
OrthographicProjection<T>::OrthographicProjection(const T focal,
                                                  const T width,
                                                  const T height) {
  this->focal_ = T(1.0);        // fixed unit scale; |focal| argument unused
  this->cx_ = width / T(2.0);   // principal point x = image center
  this->cy_ = height / T(2.0);  // principal point y = image center
  n_param_ = 0;  // NOTE(review): no free parameters exposed — confirm
}
/*
 * @name  ToVector
 * @fn  void ToVector(T* vector)
 * @brief Export parameters to a given vector:
 *          vector = [f, cx, cy]
 * @param[in,out] vector  Caller-allocated output buffer; must hold at least
 *                        3 elements of T.
 */
template<typename T>
void OrthographicProjection<T>::ToVector(T* vector) const {
  vector[0] = this->focal_;  // always 1 for this model (set in constructor)
  vector[1] = this->cx_;
  vector[2] = this->cy_;
}
/*
 * @name  FromVector
 * @fn  void FromVector(const T* vector) const
 * @brief Initialize parameters from a given vector:
 *          vector = [f, cx, cy]
 * @param[in] vector  Input buffer of at least 3 elements.
 *
 * NOTE(review): vector[0] (the focal entry exported by ToVector) is NOT read
 * back here, so focal_ keeps its constructor value of 1. This asymmetry with
 * ToVector looks deliberate for an orthographic model — confirm.
 */
template<typename T>
void OrthographicProjection<T>::FromVector(const T* vector) {
  this->cx_ = vector[1];
  this->cy_ = vector[2];
}
#pragma mark -
#pragma mark Usage
/*
 * @name  operator()
 * @fn  void operator()(const Point3& pts, Point2* proj) const
 * @brief Project a single 3D point onto the image plane.
 * @param[in] pts   3D point to project
 * @param[out] proj 2D projected point
 */
template<typename T>
void OrthographicProjection<T>::operator()(const Point3& pts,
                                           Point2* proj) const {
  // Orthographic model: drop the z component, scale x/y by the focal
  // factor and shift by the principal point.
  const T& scale = this->focal_;
  proj->x_ = scale * pts.x_ + this->cx_;
  proj->y_ = scale * pts.y_ + this->cy_;
}
/*
 * @name  operator()
 * @fn  void operator()(const std::vector<Point3>& pts,
                        std::vector<Point2>* proj) const
 * @brief Project a list of 3D points onto the image plane.
 * @param[in] pts   List of 3D points to project
 * @param[out] proj 2D projected points (resized to match |pts|)
 */
template<typename T>
void OrthographicProjection<T>::operator()(const std::vector<Point3>& pts,
                                           std::vector<Point2>* proj) const {
  // Delegate each point to the single-point overload above.
  proj->resize(pts.size());
  for (size_t i = 0; i < pts.size(); ++i) {
    (*this)(pts[i], &(*proj)[i]);
  }
}
#pragma mark -
#pragma mark Explicit Instantiation
/** Explicit instantiation for single-precision cameras. */
template class OrthographicProjection<float>;
/** Explicit instantiation for double-precision cameras. */
template class OrthographicProjection<double>;
} // namespace FaceKit
| {
"content_hash": "6e8d559c548593895df82aecc7a329d7",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 78,
"avg_line_length": 27.64761904761905,
"alnum_prop": 0.6011023079572856,
"repo_name": "ChristopheEcabert/FaceKit",
"id": "2ac9e5cd43c8f99f76c77d0def0324be586d15ea",
"size": "3095",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "modules/model/src/orthographic_projection.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "793496"
},
{
"name": "CMake",
"bytes": "127596"
},
{
"name": "CSS",
"bytes": "2284"
},
{
"name": "Shell",
"bytes": "6856"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Annls Sci. Nat. , Bot. , sér. 4 19: 357 (1863)
#### Original name
Lecidea melina Kremp. ex Nyl.
### Remarks
null | {
"content_hash": "37b1039af0b751e60533665fb20c2889",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 46,
"avg_line_length": 15.384615384615385,
"alnum_prop": 0.67,
"repo_name": "mdoering/backbone",
"id": "b6945a4ebce1c2df874ca4ec079dd5c031290ef3",
"size": "254",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Ascomycota/Lecanoromycetes/Lecanorales/Mycoblastaceae/Mycoblastus/Mycoblastus affinis/ Syn. Lecidea melina/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
function [theta] = normalEqn(X, y)
%NORMALEQN Computes the closed-form solution to linear regression
%   theta = NORMALEQN(X, y) solves the normal equations
%       (X' * X) * theta = X' * y
%   and returns the least-squares parameter vector theta.
%
%   Inputs:
%     X - m-by-n design matrix (include a column of ones for an intercept)
%     y - m-by-1 vector of target values
%   Output:
%     theta - n-by-1 parameter vector minimizing ||X*theta - y||^2

% pinv keeps the solution well-defined even when X' * X is singular
% (e.g. redundant features). The dead zero-initialization of theta from
% the original template was removed: it was overwritten immediately.
theta = pinv(X' * X) * X' * y;

end
| {
"content_hash": "b6dfad2e88e4b3cd46833c8b608a46d2",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 69,
"avg_line_length": 31.941176470588236,
"alnum_prop": 0.5580110497237569,
"repo_name": "glennrfisher/coursera-machine-learning",
"id": "d4539cec82723f68b6d6c7cd0a46970a8faf3b43",
"size": "543",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "ex1/ex1/normalEqn.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Matlab",
"bytes": "606549"
}
],
"symlink_target": ""
} |
<?php
namespace Google\Service\BackupforGKE;
/**
 * BackupforGKE `Namespaces` message: a thin collection wrapper around a list
 * of Kubernetes namespace names. Auto-generated API model class.
 */
class Namespaces extends \Google\Collection
{
  /** Property name used when this collection is iterated/serialized. */
  protected $collection_key = 'namespaces';
  /**
   * Kubernetes namespace names.
   *
   * @var string[]
   */
  public $namespaces;
  /**
   * @param string[] $namespaces List of namespace names to store.
   */
  public function setNamespaces($namespaces)
  {
    $this->namespaces = $namespaces;
  }
  /**
   * @return string[] The namespace names previously set (null if unset).
   */
  public function getNamespaces()
  {
    return $this->namespaces;
  }
}
// Adding a class alias for backwards compatibility with the previous class name.
class_alias(Namespaces::class, 'Google_Service_BackupforGKE_Namespaces');
| {
"content_hash": "4865e18cfd08f548c5497f362db56720",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 81,
"avg_line_length": 18.70967741935484,
"alnum_prop": 0.6706896551724137,
"repo_name": "googleapis/google-api-php-client-services",
"id": "c6012e572a7df9c87181baf01cd878634e6d68be",
"size": "1170",
"binary": false,
"copies": "6",
"ref": "refs/heads/main",
"path": "src/BackupforGKE/Namespaces.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "55414116"
},
{
"name": "Python",
"bytes": "427325"
},
{
"name": "Shell",
"bytes": "787"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<servico xsi:schemaLocation="http://servicos.gov.br/v3/schema.../servico.xsd" xmlns="http://servicos.gov.br/v3/schema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<id>modificar-registro-de-frota-de-empresa-nacional</id>
<dbId>1059</dbId>
<nome>Modificar registro de frota de empresa nacional</nome>
<sigla></sigla>
<descricao><p style="justify">Compreende a solicitação de inclusão, exclusão ou alteração de dados de veículos da frota de transportador brasileiro que detenha Licença Originária, concedida pela Agência Nacional de Transportes Terrestres – ANTT, para realização de transporte rodoviário internacional de cargas.</p></descricao>
<contato>Entre em contato com a Ouvidoria da ANTT por meio do site www.antt.gov.br ou pelo telefone 166.</contato>
<gratuito>false</gratuito>
<porcentagem-manual>false</porcentagem-manual>
<servico-digital>true</servico-digital>
<link-servico-digital>https://mpdg-hom-ps.lecom.com.br/processos/iniciar</link-servico-digital>
<solicitantes>
<solicitante id="810">
<tipo>Empresas e cooperativas de transportadores rodoviários internacionais de cargas já autorizados pela ANTT.</tipo>
<requisitos></requisitos>
</solicitante>
</solicitantes>
<tempo-total-estimado>
<ate max="3" unidade="dias-uteis"/>
<descricao>Esse prazo começa a contar a partir do recebimento dos documentos pela ANTT.</descricao>
</tempo-total-estimado>
<validade-documento>
<descricao></descricao>
</validade-documento>
<etapas>
<etapa id="2065">
<titulo>Verificar requisitos dos veículos</titulo>
<descricao><p style="justify">Os veículos devem atender os requisitos em termos de estabelecidos em acordos internacionais previsto nas Resoluções MERCOSUL/GMC/RES. nº 25/11 e nº 26/11, aprovadas na LXXXVI Reunião do Grupo Mercado Comum – GMC em 18 de novembro de 2011, considerando os valores de carga útil informados <a href="https://servicos.nuvem.gov.br/pagina-tematica/requisitos-de-veiculos-para-frotas-nacionais" target="_blank">aqui</a>.</p></descricao>
<documentos>
<default/>
</documentos>
<custos>
<default/>
</custos>
<canais-de-prestacao>
<default>
<canal-de-prestacao id="2096" tipo="presencial">
<descricao>Local onde se encontram os veículos da frota objeto do serviço.</descricao>
</canal-de-prestacao>
</default>
</canais-de-prestacao>
</etapa>
<etapa id="2066">
<titulo>Gerar boleto e Pagar a taxa</titulo>
<descricao><p>A GRU deve ser emitida utilizando-se os seguintes dados:</p><br><ul><li><b>Unidade favorecida</b>: 393001/39250 - Agência Nacional de Transportes Terrestres</li><li><b>Código de recolhimento</b>: 28830-6</li><li><b>Número de referência</b>: 105</li><li><b>Nome do contribuinte</b>: Informar o nome do recolhedor, pessoa física ou jurídica.</li><li><b>CPF ou CNPJ</b>: informar o CPF ou CNPJ do recolhedor, conforme o caso.</li><li><b>Valor total</b>: valor calculado para pagamento.</li></ul></descricao>
<documentos>
<default>
<item id="1525">GRU</item>
</default>
</documentos>
<custos>
<default>
<custo id="203">
<descricao>Emolumento</descricao>
<moeda>R$</moeda>
<valorVariavel>Para cada ligação: R$10,00 + (nº de placas x R$5,00)</valorVariavel>
<statusCustoVariavel>1</statusCustoVariavel>
</custo>
</default>
</custos>
<canais-de-prestacao>
<default>
<canal-de-prestacao id="2097" tipo="presencial">
<descricao>Exclusivamente em <a href="https://www36.bb.com.br/encontreobb/s001t026p001,500830,500831,1,1,1,1.bb#/" target="_blank">uma agência do Banco do Brasil</a>.</descricao>
</canal-de-prestacao>
</default>
<caso id="424" descricao="Para gerar a GRU">
<canal-de-prestacao id="2066" tipo="web">
<descricao>https://appweb.antt.gov.br/gru/index.asp</descricao>
</canal-de-prestacao>
</caso>
</canais-de-prestacao>
</etapa>
<etapa id="2067">
<titulo>Enviar formulários e documentação</titulo>
<descricao>Devem ser encaminhados o formulário e anexados os documentos.</descricao>
<documentos>
<default>
<item id="1526">Comprovante de pagamento de emolumento. Não são aceitos comprovantes de agendamento.</item>
<item id="1527">Cópia do CRLV vigente (ano em curso ou ano anterior)</item>
</default>
<caso id="425" descricao="Veículos de cooperados:">
<documento>
<id>1499</id>
<descricao>Cópia autenticada do CRLV vigente (ano em curso ou ano anterior), juntamente com a comprovação de que o proprietário é cooperado.</descricao>
</documento>
</caso>
<caso id="426" descricao="Veículos arrendados/locados:">
<documento>
<id>1504</id>
<descricao>Além disso, o contrato deverá apresentar as seguintes informações dos veículos: tipo, ano, marca, modelo, chassis, número de eixos e placa</descricao>
</documento>
<documento>
<id>1503</id>
<descricao>"A LOCATÁRIA obriga-se à contratação de Seguro Obrigatório de Responsabilidade Civil, nos termos da legislação específica vigente e destinado à reparação dos danos causados a terceiros, em decorrência da utilização dos veículos locados.
Parágrafo único. A LOCATÁRIA responderá pelos prejuízos que excederem os limites previstos em lei, para o mencionado seguro compulsório."</descricao>
</documento>
<documento>
<id>1500</id>
<descricao>Cópia autenticada do CRLV vigente (ano em curso ou ano anterior), constando a anotação do contrato de aluguel ou arrendamento não vinculado ao financiamento do veículo ou;</descricao>
</documento>
<documento>
<id>1502</id>
<descricao>Cópia autenticada dos contratos de locação dos veículos firmados entre o proprietário do veículo e a empresa, registrado em cartório. Nesse caso, o contrato de locação deve conter a seguinte cláusula contratual:</descricao>
</documento>
<documento>
<id>1501</id>
<descricao>Caso a anotação não esteja registrada no CRLV vigente, encaminhar a certidão de arrendamento, por meio eletrônico, e advinda diretamente do órgão competente ou;</descricao>
</documento>
</caso>
<caso id="427" descricao="Veículos que apresentem no campo de observações pendência judicial:">
<documento>
<id>1505</id>
<descricao>Permissão expressa do Juízo</descricao>
</documento>
</caso>
<caso id="428" descricao="Veículos que apresentem no campo de observações pendência judicial/administrativa:">
<documento>
<id>1506</id>
<descricao>Documento comprovando se tratar de pendência administrativa ou permissão expressa do Juízo, para o caso de pendência judicial.</descricao>
</documento>
</caso>
<caso id="429" descricao="Veículos que não possuem CRLV">
<documento>
<id>1507</id>
<descricao>Licença para trânsito vigente.</descricao>
</documento>
</caso>
</documentos>
<custos>
<default/>
</custos>
<canais-de-prestacao>
<default>
<canal-de-prestacao id="2098" tipo="postal">
<descricao>Para uma <a href="http://www.antt.gov.br/textogeral/Enderecos.html" target="_blank">unidade da ANTT</a>.</descricao>
</canal-de-prestacao>
</default>
</canais-de-prestacao>
</etapa>
<etapa id="2068">
<titulo>Receber resultado</titulo>
<descricao>O resultado poderá ser deferimento, pendências ou indeferimento. Se deferido o pedido, será emitida a Licença Originária.</descricao>
<documentos>
<default/>
</documentos>
<custos>
<default/>
</custos>
<canais-de-prestacao>
<default>
<canal-de-prestacao id="2099" tipo="e-mail">
<descricao>E-mail informado pelo usuário no formulário.</descricao>
</canal-de-prestacao>
</default>
</canais-de-prestacao>
</etapa>
</etapas>
<orgao id="http://estruturaorganizacional.dados.gov.br/id/unidade-organizacional/54793" dbId="913">
<nomeOrgao>Agência Nacional de Transportes Terrestres (ANTT)</nomeOrgao>
</orgao>
<segmentos-da-sociedade>
<item idSegmento="3" idServicoSegmento="2355">Empresas</item>
</segmentos-da-sociedade>
<areas-de-interesse>
<item>Transportes</item>
<item>Transporte Rodoviário</item>
</areas-de-interesse>
<palavras-chave>
<item id="2418">frota</item>
<item id="2419">registro</item>
<item id="2420">caminhão</item>
</palavras-chave>
</servico>
| {
"content_hash": "d499bf1ad1497fd2668a41cdbffd11de",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 704,
"avg_line_length": 60.09142857142857,
"alnum_prop": 0.5820654241156333,
"repo_name": "servicosgovbr/cartas-de-homologacao",
"id": "ec972e1c008fc6facca277a7a7ea6fa28f73cb2f",
"size": "10647",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cartas-servico/v3/servicos/modificar-registro-de-frota-de-empresa-nacional.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Shell",
"bytes": "906"
}
],
"symlink_target": ""
} |
require("../../base/statistics.js");
require("../metric_registry.js");
require("./utils.js");
require("../../model/helpers/chrome_model_helper.js");
require("../../value/numeric.js");
require("../../value/value.js");
'use strict';
global.tr.exportTo('tr.metrics.sh', function() {
var timeDurationInMs_smallerIsBetter =
tr.v.Unit.byName.timeDurationInMs_smallerIsBetter;
function findTargetRendererHelper(model) {
var chromeHelper = model.getOrCreateHelper(
tr.model.helpers.ChromeModelHelper);
var largestPid = -1;
for (var pid in chromeHelper.rendererHelpers) {
var rendererHelper = chromeHelper.rendererHelpers[pid];
if (rendererHelper.isChromeTracingUI)
continue;
if (pid > largestPid)
largestPid = pid;
}
if (largestPid === -1)
return undefined;
return chromeHelper.rendererHelpers[largestPid];
}
function findNavigationStartEvent(rendererHelper) {
var navigationStartEvent = undefined;
rendererHelper.mainThread.sliceGroup.iterateAllEventsInThisContainer(
() => true, function(ev) {
if (navigationStartEvent !== undefined ||
ev.category !== 'blink.user_timing')
return;
if (ev.title === 'navigationStart')
navigationStartEvent = ev;
},
this);
return navigationStartEvent;
}
function findFirstPaintEvent(rendererHelper, title, frame) {
var firstPaintEvent = undefined;
rendererHelper.process.iterateAllEvents(
function(ev) {
if (firstPaintEvent !== undefined ||
ev.category !== 'blink.user_timing' ||
ev.title !== title ||
ev.args === undefined || ev.args['frame'] !== frame)
return;
firstPaintEvent = ev;
}, this);
return firstPaintEvent;
}
function firstPaintMetric(valueList, model) {
var rendererHelper = findTargetRendererHelper(model);
var navigationStartEvent = findNavigationStartEvent(rendererHelper);
if (navigationStartEvent === undefined)
throw new Error('Failed to find navigationStartEvent.');
var frame = navigationStartEvent.args['frame'];
var firstContentfulPaintEvent = findFirstPaintEvent(rendererHelper,
'firstContentfulPaint', frame);
if (firstContentfulPaintEvent === undefined)
throw new Error(
'Failed to find firstContentfulPaintEvent for frame ' + frame);
var grouping_keys = {};
var timeToFirstContentfulPaint =
firstContentfulPaintEvent.start - navigationStartEvent.start;
valueList.addValue(new tr.v.NumericValue(
model.canonicalUrlThatCreatedThisTrace, 'firstContentfulPaint',
new tr.v.ScalarNumeric(timeDurationInMs_smallerIsBetter,
timeToFirstContentfulPaint),
{ description: 'time to first contentful paint' },
grouping_keys));
}
  // The metric registry expects Function-like objects; keep the prototype
  // chain explicit.
  firstPaintMetric.prototype = {
    __proto__: Function.prototype
  };
  // Make the metric discoverable by name through the shared registry.
  tr.metrics.MetricRegistry.register(firstPaintMetric);
  // Public exports of the tr.metrics.sh module.
  return {
    firstPaintMetric: firstPaintMetric
  };
});
| {
"content_hash": "32a2adc808071024cce9cd5df00086dc",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 73,
"avg_line_length": 29.97087378640777,
"alnum_prop": 0.6663427275672174,
"repo_name": "zhaoz/lighthouse",
"id": "67d69409a8f8a1e1198dfd107dc198a65b34e3c2",
"size": "3250",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lighthouse-core/third_party/traceviewer-js/metrics/system_health/first_paint_metric.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "19710"
},
{
"name": "HTML",
"bytes": "19600"
},
{
"name": "JavaScript",
"bytes": "381024"
},
{
"name": "Shell",
"bytes": "2686"
}
],
"symlink_target": ""
} |
using UnityEngine;
namespace HoloToolkit.Unity
{
    /// <summary>
    /// Axis options used by Billboard-style components to constrain rotation.
    /// </summary>
    public enum PivotAxis
    {
        // Rotate about all axes.
        Free,
        // Rotate about an individual axis.
        Y
    }
/// <summary>
/// The Billboard class implements the behaviors needed to keep a GameObject oriented towards the user.
/// </summary>
public class Billboard : MonoBehaviour
{
/// <summary>
/// The axis about which the object will rotate.
/// </summary>
[Tooltip("Specifies the axis about which the object will rotate.")]
public PivotAxis PivotAxis = PivotAxis.Free;
private void OnEnable()
{
Update();
}
/// <summary>
/// Keeps the object facing the camera.
/// </summary>
private void Update()
{
if (!Camera.main)
{
return;
}
// Get a Vector that points from the target to the main camera.
Vector3 directionToTarget = Camera.main.transform.position - transform.position;
// Adjust for the pivot axis.
switch (PivotAxis)
{
case PivotAxis.Y:
directionToTarget.y = 0.0f;
break;
case PivotAxis.Free:
default:
// No changes needed.
break;
}
// If we are right next to the camera the rotation is undefined.
if (directionToTarget.sqrMagnitude < 0.001f)
{
return;
}
// Calculate and apply the rotation required to reorient the object
transform.rotation = Quaternion.LookRotation(-directionToTarget);
}
}
} | {
"content_hash": "acb451a794dc2b869ada6d003906fca0",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 107,
"avg_line_length": 27.46153846153846,
"alnum_prop": 0.5159663865546219,
"repo_name": "Go2bad/HoloLens_LandscapeAdventure",
"id": "c20bb2d5580c5f3e251b1c19cd0e325d75c9bc70",
"size": "1787",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Assets/HoloToolKit/Utilities/Scripts/Billboard.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "3149618"
},
{
"name": "FLUX",
"bytes": "275"
},
{
"name": "GLSL",
"bytes": "22717"
},
{
"name": "HLSL",
"bytes": "54233"
},
{
"name": "ShaderLab",
"bytes": "98421"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!-- Copyright (C) 2009 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2">
<string name="discoverable" msgid="3169265480789026010">"Synlighet"</string>
<string name="start_scan" msgid="6035699220942169744">"Starta sökning"</string>
<string name="stop_scan" msgid="527546916633745779">"Stoppa sökningen"</string>
<string name="operator_hello" msgid="292208161864910159">"Hej operatör!"</string>
<string name="operator_settings_title" msgid="1410094511974808567">"Operatör"</string>
<string name="operator_settings_summary" msgid="5916597343019392258">"Operatörsfunktion som kan användas för att starta en valfri aktivitet"</string>
<string name="manufacturer_hello" msgid="7525744633645544137">"Hej tillverkare!"</string>
<string name="manufacturer_settings_title" msgid="2503105049808838612">"Tillverkare"</string>
<string name="manufacturer_settings_summary" msgid="766746044826063472">"Tillverkarfunktion som kan användas för att starta en valfri aktivitet"</string>
</resources>
| {
"content_hash": "e44d6e493056d2564e5a4bc25035e5e1",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 157,
"avg_line_length": 62.25,
"alnum_prop": 0.7452667814113597,
"repo_name": "manuelmagix/android_packages_apps_Settings",
"id": "f06b18866514bafce9bec331ce15a38aa9f6a778",
"size": "1752",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "tests/res/values-sv/strings.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "6041065"
},
{
"name": "Makefile",
"bytes": "3909"
}
],
"symlink_target": ""
} |
namespace AngleSharp.Dom.Html
{
using AngleSharp.Extensions;
using AngleSharp.Html;
using System;
/// <summary>
/// Represents the HTML optgroup element.
/// </summary>
sealed class HtmlOptionsGroupElement : HtmlElement, IHtmlOptionsGroupElement
{
#region ctor
public HtmlOptionsGroupElement(Document owner, String prefix = null)
: base(owner, TagNames.Optgroup, prefix, NodeFlags.ImplicitelyClosed | NodeFlags.ImpliedEnd | NodeFlags.HtmlSelectScoped)
{
}
#endregion
#region Properties
public String Label
{
get { return this.GetOwnAttribute(AttributeNames.Label); }
set { this.SetOwnAttribute(AttributeNames.Label, value); }
}
public Boolean IsDisabled
{
get { return this.GetBoolAttribute(AttributeNames.Disabled); }
set { this.SetBoolAttribute(AttributeNames.Disabled, value); }
}
#endregion
}
}
| {
"content_hash": "775a57d91d9ec2bd01e38671315ed4bd",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 133,
"avg_line_length": 27.944444444444443,
"alnum_prop": 0.6292246520874751,
"repo_name": "FlorianRappl/AngleSharp",
"id": "e980f8a964826e0594e5f29d859a0315562eeca2",
"size": "1008",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/AngleSharp/Dom/Html/HtmlOptionsGroupElement.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "9902810"
},
{
"name": "HTML",
"bytes": "469943"
},
{
"name": "JavaScript",
"bytes": "75888"
},
{
"name": "PowerShell",
"bytes": "1922"
},
{
"name": "Shell",
"bytes": "2300"
},
{
"name": "Smalltalk",
"bytes": "694"
}
],
"symlink_target": ""
} |
module ArelExtensions
module Nodes
class Cast < Function
@return_type = :string
attr_accessor :as_attr
def initialize expr
@as_attr = expr[1]
case expr[1]
when :int, 'bigint', 'int', 'smallint', 'tinyint', 'bit'
@return_type = :int
when :float, :decimal, 'decimal', 'numeric', 'money', 'smallmoney', 'float', 'real'
@return_type = :decimal
when :number
@return_type = :number
when 'char', 'varchar', 'nchar', 'nvarchar'
@return_type = :string
when 'text', :text, 'ntext', :ntext
@as_attr = expr[1].to_sym
@return_type = :string
when :datetime, 'datetime', 'smalldatetime'
@return_type = :datetime
when :time, 'time'
@return_type = :time
when :date, 'date'
@return_type = :date
when :binary, 'binary', 'varbinary', 'image'
@return_type = :binary
else
@return_type = :string
@as_attr = :string
end
tab = [convert_to_node(expr.first)]
super(tab)
end
def +(other)
case @return_type
when :string
ArelExtensions::Nodes::Concat.new [self, other]
when :ruby_time
ArelExtensions::Nodes::DateAdd.new [self, other]
else
Arel.grouping(Arel::Nodes::Addition.new self, other)
end
end
def return_type
@return_type
end
end
end
end
| {
"content_hash": "1c68a5d87c28adf718d4a225ff675b0f",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 91,
"avg_line_length": 27.72222222222222,
"alnum_prop": 0.5250501002004008,
"repo_name": "Faveod/arel-extensions",
"id": "f44df4bc75928e9290134fbc4f6f452c91847f85",
"size": "1497",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/arel_extensions/nodes/cast.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "12657"
},
{
"name": "PLpgSQL",
"bytes": "2470"
},
{
"name": "Ruby",
"bytes": "345624"
},
{
"name": "Shell",
"bytes": "546"
},
{
"name": "TSQL",
"bytes": "3454"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "20297b5a556bf453028aa3b2ce957ef9",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "f2aae9bf74e84442ae30eeaf1333b796a01744ae",
"size": "179",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Acanthaceae/Anisotes/Anisotes umbrosus/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1996-1998
TcHsSyn: Specialisations of the @HsSyn@ syntax for the typechecker
This module is an extension of @HsSyn@ syntax, for use in the type
checker.
-}
{-# LANGUAGE CPP #-}
module TcHsSyn (
mkHsConApp, mkHsDictLet, mkHsApp,
hsLitType, hsLPatType, hsPatType,
mkHsAppTy, mkSimpleHsAlt,
nlHsIntLit,
shortCutLit, hsOverLitName,
conLikeResTy,
-- re-exported from TcMonad
TcId, TcIdSet,
zonkTopDecls, zonkTopExpr, zonkTopLExpr,
zonkTopBndrs, zonkTyBndrsX,
emptyZonkEnv, mkEmptyZonkEnv, mkTyVarZonkEnv,
zonkTcTypeToType, zonkTcTypeToTypes, zonkTyVarOcc,
) where
#include "HsVersions.h"
import HsSyn
import Id
import TcRnMonad
import PrelNames
import TypeRep -- We can see the representation of types
import TcType
import RdrName ( RdrName, rdrNameOcc )
import TcMType ( defaultKindVarToStar, zonkQuantifiedTyVar, writeMetaTyVar )
import TcEvidence
import Coercion
import TysPrim
import TysWiredIn
import Type
import ConLike
import DataCon
import PatSyn( patSynInstResTy )
import Name
import NameSet
import Var
import VarSet
import VarEnv
import DynFlags
import Literal
import BasicTypes
import Maybes
import SrcLoc
import Bag
import FastString
import Outputable
import Util
#if __GLASGOW_HASKELL__ < 709
import Data.Traversable ( traverse )
#endif
{-
************************************************************************
* *
\subsection[mkFailurePair]{Code for pattern-matching and other failures}
* *
************************************************************************
Note: If @hsLPatType@ doesn't bear a strong resemblance to @exprType@,
then something is wrong.
-}
-- | Type of a (located) typechecked pattern.
hsLPatType :: OutPat Id -> Type
hsLPatType (L _ p) = hsPatType p

-- | Type of a typechecked pattern, computed by dispatching on its shape.
-- The final catch-all panics on patterns that should not survive
-- typechecking.
hsPatType :: Pat Id -> Type
hsPatType pat = case pat of
  ParPat inner              -> hsLPatType inner
  WildPat ty                -> ty
  VarPat var                -> idType var
  BangPat inner             -> hsLPatType inner
  LazyPat inner             -> hsLPatType inner
  LitPat lit                -> hsLitType lit
  AsPat var _               -> idType (unLoc var)
  ViewPat _ _ ty            -> ty
  ListPat _ elt_ty Nothing  -> mkListTy elt_ty
  ListPat _ _ (Just (ty,_)) -> ty
  PArrPat _ elt_ty          -> mkPArrTy elt_ty
  TuplePat _ bx tys         -> mkTupleTy (boxityNormalTupleSort bx) tys
  ConPatOut { pat_con = L _ con, pat_arg_tys = tys }
                            -> conLikeResTy con tys
  SigPatOut _ ty            -> ty
  NPat (L _ lit) _ _        -> overLitType lit
  NPlusKPat n _ _ _         -> idType (unLoc n)
  CoPat _ _ ty              -> ty
  _                         -> pprPanic "hsPatType" (ppr pat)
-- | Result type of a data constructor or pattern synonym applied to the
-- given universal type arguments.
conLikeResTy :: ConLike -> [Type] -> Type
conLikeResTy con_like arg_tys = case con_like of
  RealDataCon dc -> mkTyConApp (dataConTyCon dc) arg_tys
  PatSynCon ps   -> patSynInstResTy ps arg_tys
-- | The (monomorphic) type of a Haskell literal.
hsLitType :: HsLit -> TcType
hsLitType lit = case lit of
  HsChar _ _       -> charTy
  HsCharPrim _ _   -> charPrimTy
  HsString _ _     -> stringTy
  HsStringPrim _ _ -> addrPrimTy
  HsInt _ _        -> intTy
  HsIntPrim _ _    -> intPrimTy
  HsWordPrim _ _   -> wordPrimTy
  HsInt64Prim _ _  -> int64PrimTy
  HsWord64Prim _ _ -> word64PrimTy
  HsInteger _ _ ty -> ty    -- type carried in the literal itself
  HsRat _ ty       -> ty    -- ditto
  HsFloatPrim _    -> floatPrimTy
  HsDoublePrim _   -> doublePrimTy
-- Overloaded literals. Here mainly because it uses isIntTy etc

-- | Try to turn an overloaded literal directly into a plain literal
-- expression, when the type it is checked against is one of the standard
-- numeric or string types.  Returns Nothing when no short-cut applies and
-- the normal fromInteger/fromRational/fromString desugaring must be used.
shortCutLit :: DynFlags -> OverLitVal -> TcType -> Maybe (HsExpr TcId)
shortCutLit dflags (HsIntegral src i) ty
  | isIntTy ty && inIntRange dflags i = Just (HsLit (HsInt src i))
  | isWordTy ty && inWordRange dflags i
  = Just (mkLit wordDataCon (HsWordPrim src i))
  | isIntegerTy ty = Just (HsLit (HsInteger src i ty))
  | otherwise = shortCutLit dflags (HsFractional (integralFractionalLit i)) ty
    -- The 'otherwise' case is important
    -- Consider (3 :: Float).  Syntactically it looks like an IntLit,
    -- so we'll call shortCutIntLit, but of course it's a float
    -- This can make a big difference for programs with a lot of
    -- literals, compiled without -O

shortCutLit _ (HsFractional f) ty
  | isFloatTy ty  = Just (mkLit floatDataCon  (HsFloatPrim f))
  | isDoubleTy ty = Just (mkLit doubleDataCon (HsDoublePrim f))
  | otherwise     = Nothing

shortCutLit _ (HsIsString src s) ty
  | isStringTy ty = Just (HsLit (HsString src s))
  | otherwise     = Nothing
-- | Apply a data constructor's wrapper to a literal, e.g. to build the
-- boxed form of an unboxed literal.
mkLit :: DataCon -> HsLit -> HsExpr Id
mkLit dc l = nlHsVar (dataConWrapId dc) `HsApp` nlHsLit l
------------------------------
-- | The canonical @fromX@ method name used to desugar each kind of
-- overloaded literal.
hsOverLitName :: OverLitVal -> Name
hsOverLitName olv = case olv of
  HsIntegral {}   -> fromIntegerName
  HsFractional {} -> fromRationalName
  HsIsString {}   -> fromStringName
{-
************************************************************************
* *
\subsection[BackSubst-HsBinds]{Running a substitution over @HsBinds@}
* *
************************************************************************
The rest of the zonking is done *after* typechecking.
The main zonking pass runs over the bindings
a) to convert TcTyVars to TyVars etc, dereferencing any bindings etc
b) convert unbound TcTyVar to Void
c) convert each TcId to an Id by zonking its type
The type variables are converted by binding mutable tyvars to immutable ones
and then zonking as normal.
The Ids are converted by binding them in the normal Tc envt; that
way we maintain sharing; eg an Id is zonked at its binding site and then
all occurrences of that Id point to the common zonked copy
It's all pretty boring stuff, because HsSyn is such a large type, and
the environment manipulation is tiresome.
-}
type UnboundTyVarZonker = TcTyVar-> TcM Type
        -- How to zonk an unbound type variable
        -- Note [Zonking the LHS of a RULE]

-- | Environment threaded through the zonking pass.
data ZonkEnv
  = ZonkEnv
      UnboundTyVarZonker   -- How to zonk an unbound type variable
      (TyVarEnv TyVar)     -- In-scope zonked type variables; each entry
                           -- maps a binder to its zonked version
                           -- (extended by zonkTyBndrX / mkTyVarZonkEnv)
      (IdEnv Var)          -- What variables are in scope
        -- Maps an Id or EvVar to its zonked version; both have the same Name
        -- Note that all evidence (coercion variables as well as dictionaries)
        --      are kept in the ZonkEnv
        -- Only *type* abstraction is done by side effect
        -- Is only consulted lazily; hence knot-tying

instance Outputable ZonkEnv where
  ppr (ZonkEnv _ _ty_env var_env) = vcat (map ppr (varEnvElts var_env))
-- | A ZonkEnv with nothing in scope, using the default 'zonkTypeZapping'
-- strategy for type variables that are still unbound.
emptyZonkEnv :: ZonkEnv
emptyZonkEnv = mkEmptyZonkEnv zonkTypeZapping

-- | A ZonkEnv with nothing in scope and the supplied strategy for
-- unbound type variables.
mkEmptyZonkEnv :: UnboundTyVarZonker -> ZonkEnv
mkEmptyZonkEnv how_to_zonk = ZonkEnv how_to_zonk emptyVarEnv emptyVarEnv
-- | Bring a list of zonked Ids into scope; each maps to itself.
extendIdZonkEnv :: ZonkEnv -> [Var] -> ZonkEnv
extendIdZonkEnv (ZonkEnv how ty_env id_env) vars
  = ZonkEnv how ty_env (extendVarEnvList id_env pairs)
  where
    pairs = [ (v, v) | v <- vars ]

-- | Bring one zonked Id into scope.
extendIdZonkEnv1 :: ZonkEnv -> Var -> ZonkEnv
extendIdZonkEnv1 (ZonkEnv how ty_env id_env) v
  = ZonkEnv how ty_env (extendVarEnv id_env v v)

-- | Bring one zonked type variable into scope.
extendTyZonkEnv1 :: ZonkEnv -> TyVar -> ZonkEnv
extendTyZonkEnv1 (ZonkEnv how ty_env id_env) tv
  = ZonkEnv how (extendVarEnv ty_env tv tv) id_env
-- | A ZonkEnv whose type-variable environment holds exactly the given
-- type variables (each mapped to itself), with no Ids in scope.
mkTyVarZonkEnv :: [TyVar] -> ZonkEnv
mkTyVarZonkEnv tvs
  = ZonkEnv zonkTypeZapping (mkVarEnv [ (tv, tv) | tv <- tvs ]) emptyVarEnv

-- | Replace the unbound-tyvar strategy, keeping both environments.
setZonkType :: ZonkEnv -> UnboundTyVarZonker -> ZonkEnv
setZonkType (ZonkEnv _ ty_env id_env) how = ZonkEnv how ty_env id_env

-- | All the Ids (and evidence variables) currently in scope.
zonkEnvIds :: ZonkEnv -> [Id]
zonkEnvIds (ZonkEnv _ _ id_env) = varEnvElts id_env
-- | Zonk an occurrence of an Id: look up its zonked version in the
-- environment, falling back to the Id itself when it is not there.
zonkIdOcc :: ZonkEnv -> TcId -> Id
-- Ids defined in this module should be in the envt;
-- ignore others.  (Actually, data constructors are also
-- not LocalVars, even when locally defined, but that is fine.)
-- (Also foreign-imported things aren't currently in the ZonkEnv;
-- that's ok because they don't need zonking.)
--
-- Actually, Template Haskell works in 'chunks' of declarations, and
-- an earlier chunk won't be in the 'env' that the zonking phase
-- carries around.  Instead it'll be in the tcg_gbl_env, already fully
-- zonked.  There's no point in looking it up there (except for error
-- checking), and it's not conveniently to hand; hence the simple
-- 'orElse' case in the LocalVar branch.
--
-- Even without template splices, in module Main, the checking of
-- 'main' is done as a separate chunk.
zonkIdOcc (ZonkEnv _zonk_ty _ty_env env) id
  | isLocalVar id = lookupVarEnv env id `orElse` id
  | otherwise     = id
-- | Zonk a list of Id occurrences; see 'zonkIdOcc'.
zonkIdOccs :: ZonkEnv -> [TcId] -> [Id]
zonkIdOccs env = map (zonkIdOcc env)
-- | Zonk an Id at its binding site: bring its type to final form.
-- Used *after* typechecking.
zonkIdBndr :: ZonkEnv -> TcId -> TcM Id
zonkIdBndr env bndr
  = do { final_ty <- zonkTcTypeToType env (idType bndr)
       ; return (Id.setIdType bndr final_ty) }

-- | Zonk a list of binding-site Ids.
zonkIdBndrs :: ZonkEnv -> [TcId] -> TcM [Id]
zonkIdBndrs env = mapM (zonkIdBndr env)

-- | Zonk top-level binders, starting from the empty environment.
zonkTopBndrs :: [TcId] -> TcM [Id]
zonkTopBndrs = zonkIdBndrs emptyZonkEnv
-- | Zonk a list of evidence binders, threading the environment so each
-- zonked binder is in scope for the ones that follow.
zonkEvBndrsX :: ZonkEnv -> [EvVar] -> TcM (ZonkEnv, [Var])
zonkEvBndrsX = mapAccumLM zonkEvBndrX

zonkEvBndrX :: ZonkEnv -> EvVar -> TcM (ZonkEnv, EvVar)
-- Works for dictionaries and coercions
zonkEvBndrX env var
  = do { var' <- zonkEvBndr env var
       ; return (extendIdZonkEnv1 env var', var') }
zonkEvBndr :: ZonkEnv -> EvVar -> TcM EvVar
-- Works for dictionaries and coercions
-- Does not extend the ZonkEnv
zonkEvBndr env var
  = do { let var_ty = varType var
       ; ty <-
           {-# SCC "zonkEvBndr_zonkTcTypeToType" #-}
           zonkTcTypeToType env var_ty
       ; return (setVarType var ty) }

-- | Zonk an occurrence of an evidence variable; just an Id lookup.
zonkEvVarOcc :: ZonkEnv -> EvVar -> EvVar
zonkEvVarOcc env v = zonkIdOcc env v
-- | Zonk a list of type-variable binders, threading the environment.
zonkTyBndrsX :: ZonkEnv -> [TyVar] -> TcM (ZonkEnv, [TyVar])
zonkTyBndrsX = mapAccumLM zonkTyBndrX

zonkTyBndrX :: ZonkEnv -> TyVar -> TcM (ZonkEnv, TyVar)
-- This guarantees to return a TyVar (not a TcTyVar)
-- then we add it to the envt, so all occurrences are replaced
zonkTyBndrX env tv
  = do { ki <- zonkTcTypeToType env (tyVarKind tv)   -- zonk the kind first
       ; let tv' = mkTyVar (tyVarName tv) ki         -- fresh TyVar, same Name
       ; return (extendTyZonkEnv1 env tv', tv') }
-- | Zonk a complete expression starting from the empty environment.
zonkTopExpr :: HsExpr TcId -> TcM (HsExpr Id)
zonkTopExpr = zonkExpr emptyZonkEnv

-- | Located variant of 'zonkTopExpr'.
zonkTopLExpr :: LHsExpr TcId -> TcM (LHsExpr Id)
zonkTopLExpr = zonkLExpr emptyZonkEnv
-- | Zonk all the top-level declarations of a module, returning the
-- zonked pieces together with every Id bound at top level.
zonkTopDecls :: Bag EvBind
             -> LHsBinds TcId
             -> Maybe (Located [LIE RdrName])
             -> NameSet
             -> [LRuleDecl TcId] -> [LVectDecl TcId] -> [LTcSpecPrag] -> [LForeignDecl TcId]
             -> TcM ([Id],
                     Bag EvBind,
                     LHsBinds Id,
                     [LForeignDecl Id],
                     [LTcSpecPrag],
                     [LRuleDecl Id],
                     [LVectDecl Id])
zonkTopDecls ev_binds binds export_ies sig_ns rules vects imp_specs fords
  = do  { (env1, ev_binds') <- zonkEvBinds emptyZonkEnv ev_binds

          -- Warn about missing signatures
          -- Do this only when we have a type to offer
        ; warn_missing_sigs  <- woptM Opt_WarnMissingSigs
        ; warn_only_exported <- woptM Opt_WarnMissingExportedSigs
        ; let export_occs = maybe emptyBag
                                  (listToBag . map (rdrNameOcc . ieName . unLoc) . unLoc)
                                  export_ies
              sig_warn
                | warn_only_exported = topSigWarnIfExported export_occs sig_ns
                | warn_missing_sigs  = topSigWarn sig_ns
                | otherwise          = noSigWarn

        ; (env2, binds') <- zonkRecMonoBinds env1 sig_warn binds
                        -- Top level is implicitly recursive
        ; rules' <- zonkRules env2 rules
        ; vects' <- zonkVects env2 vects
        ; specs' <- zonkLTcSpecPrags env2 imp_specs
        ; fords' <- zonkForeignExports env2 fords
        ; return (zonkEnvIds env2, ev_binds', binds', fords', specs', rules', vects') }
---------------------------------------------
-- | Zonk local (let/where) bindings, returning an extended environment
-- in which the bound Ids are in scope.
zonkLocalBinds :: ZonkEnv -> HsLocalBinds TcId -> TcM (ZonkEnv, HsLocalBinds Id)
zonkLocalBinds env EmptyLocalBinds
  = return (env, EmptyLocalBinds)

zonkLocalBinds _ (HsValBinds (ValBindsIn {}))
  = panic "zonkLocalBinds" -- Not in typechecker output

zonkLocalBinds env (HsValBinds vb@(ValBindsOut binds sigs))
  = do { warn_missing_sigs <- woptM Opt_WarnMissingLocalSigs
       ; let sig_warn | not warn_missing_sigs = noSigWarn
                      | otherwise             = localSigWarn sig_ns
             sig_ns = getTypeSigNames vb
       ; (env1, new_binds) <- go env sig_warn binds
       ; return (env1, HsValBinds (ValBindsOut new_binds sigs)) }
  where
    -- Zonk each (recursive-flag, bind-group) pair in turn, threading env
    go env _ []
      = return (env, [])
    go env sig_warn ((r,b):bs)
      = do { (env1, b')  <- zonkRecMonoBinds env sig_warn b
           ; (env2, bs') <- go env1 sig_warn bs
           ; return (env2, (r,b'):bs') }

zonkLocalBinds env (HsIPBinds (IPBinds binds dict_binds)) = do
    new_binds <- mapM (wrapLocM zonk_ip_bind) binds
    let
        env1 = extendIdZonkEnv env [ n | L _ (IPBind (Right n) _) <- new_binds]
    (env2, new_dict_binds) <- zonkTcEvBinds env1 dict_binds
    return (env2, HsIPBinds (IPBinds new_binds new_dict_binds))
  where
    zonk_ip_bind (IPBind n e)
        = do n' <- mapIPNameTc (zonkIdBndr env) n
             e' <- zonkLExpr env e
             return (IPBind n' e')
---------------------------------------------
-- | Zonk a recursive group of monomorphic bindings.  Uses 'fixM' to tie
-- the knot: the binders of the *zonked* bindings are placed in scope
-- while zonking those same bindings.  This works because the ZonkEnv is
-- only consulted lazily.
zonkRecMonoBinds :: ZonkEnv -> SigWarn -> LHsBinds TcId -> TcM (ZonkEnv, LHsBinds Id)
zonkRecMonoBinds env sig_warn binds
 = fixM (\ ~(_, new_binds) -> do
        { let env1 = extendIdZonkEnv env (collectHsBindsBinders new_binds)
        ; binds' <- zonkMonoBinds env1 sig_warn binds
        ; return (env1, binds') })
---------------------------------------------
type SigWarn = Bool -> [Id] -> TcM ()
    -- Missing-signature warning
    -- The Bool is True for an AbsBinds, False otherwise

-- | A 'SigWarn' that never warns.
noSigWarn :: SigWarn
noSigWarn _ _ = return ()
-- | Warn about top-level binders lacking a signature, but only for the
-- exported ones.
topSigWarnIfExported :: Bag OccName -> NameSet -> SigWarn
topSigWarnIfExported exported sig_ns _ bndrs
  = mapM_ warn_one bndrs
  where
    warn_one = topSigWarnIdIfExported exported sig_ns

-- | Warn about a single binder if, and only if, it is exported.
topSigWarnIdIfExported :: Bag OccName -> NameSet -> Id -> TcM ()
topSigWarnIdIfExported exported sig_ns bndr
  | getOccName bndr `elemBag` exported = topSigWarnId sig_ns bndr
  | otherwise                          = return ()
-- | Warn about every top-level binder in the given set.
topSigWarn :: NameSet -> SigWarn
topSigWarn sig_ns _ ids = mapM_ (topSigWarnId sig_ns) ids

topSigWarnId :: NameSet -> Id -> TcM ()
-- The NameSet is the Ids that *lack* a signature
-- We have to do it this way round because there are
-- lots of top-level bindings that are generated by GHC
-- and that don't have signatures
topSigWarnId sig_ns id
  | idName id `elemNameSet` sig_ns = warnMissingSig msg id
  | otherwise                      = return ()
  where
    msg = ptext (sLit "Top-level binding with no type signature:")
-- | Warn about local binders without signatures; fires only when the
-- Bool argument is True (i.e. for an AbsBinds, see 'SigWarn').
localSigWarn :: NameSet -> SigWarn
localSigWarn sig_ns is_abs_bind ids
  | not is_abs_bind = return ()
  | otherwise       = mapM_ (localSigWarnId sig_ns) ids

localSigWarnId :: NameSet -> Id -> TcM ()
-- NameSet are the Ids that *have* type signatures
localSigWarnId sig_ns id
  | not (isSigmaTy (idType id))    = return ()   -- monomorphic: no warning
  | idName id `elemNameSet` sig_ns = return ()   -- already has a signature
  | otherwise                      = warnMissingSig msg id
  where
    msg = ptext (sLit "Polymorphic local binding with no type signature:")
-- | Emit a missing-signature warning for the given Id, displaying its
-- tidied type after the supplied message.
warnMissingSig :: SDoc -> Id -> TcM ()
warnMissingSig msg id
  = do  { env0 <- tcInitTidyEnv
        ; let (env1, tidy_ty) = tidyOpenType env0 (idType id)
        ; addWarnTcM (env1, mk_msg tidy_ty) }
  where
    mk_msg ty = sep [ msg, nest 2 $ pprPrefixName (idName id) <+> dcolon <+> ppr ty ]
---------------------------------------------
-- | Zonk every binding in a bag of monomorphic bindings.
zonkMonoBinds :: ZonkEnv -> SigWarn -> LHsBinds TcId -> TcM (LHsBinds Id)
zonkMonoBinds env sig_warn = mapBagM (zonk_lbind env sig_warn)

-- | Zonk one located binding.
zonk_lbind :: ZonkEnv -> SigWarn -> LHsBind TcId -> TcM (LHsBind Id)
zonk_lbind env sig_warn = wrapLocM (zonk_bind env sig_warn)
-- | Zonk a single binding.  The AbsBinds case ties a knot with 'fixM'
-- so that the zonked binders are in scope while zonking the payload
-- bindings and exports.
zonk_bind :: ZonkEnv -> SigWarn -> HsBind TcId -> TcM (HsBind Id)
zonk_bind env sig_warn bind@(PatBind { pat_lhs = pat, pat_rhs = grhss, pat_rhs_ty = ty})
  = do  { (_env, new_pat) <- zonkPat env pat            -- Env already extended
        ; sig_warn False (collectPatBinders new_pat)
        ; new_grhss <- zonkGRHSs env zonkLExpr grhss
        ; new_ty    <- zonkTcTypeToType env ty
        ; return (bind { pat_lhs = new_pat, pat_rhs = new_grhss, pat_rhs_ty = new_ty }) }

zonk_bind env sig_warn (VarBind { var_id = var, var_rhs = expr, var_inline = inl })
  = do { new_var  <- zonkIdBndr env var
       ; sig_warn False [new_var]
       ; new_expr <- zonkLExpr env expr
       ; return (VarBind { var_id = new_var, var_rhs = new_expr, var_inline = inl }) }

zonk_bind env sig_warn bind@(FunBind { fun_id = L loc var, fun_matches = ms
                                     , fun_co_fn = co_fn })
  = do { new_var <- zonkIdBndr env var
       ; sig_warn False [new_var]
       ; (env1, new_co_fn) <- zonkCoFn env co_fn
       ; new_ms <- zonkMatchGroup env1 zonkLExpr ms
       ; return (bind { fun_id = L loc new_var, fun_matches = new_ms
                      , fun_co_fn = new_co_fn }) }

zonk_bind env sig_warn (AbsBinds { abs_tvs = tyvars, abs_ev_vars = evs
                                 , abs_ev_binds = ev_binds
                                 , abs_exports = exports
                                 , abs_binds = val_binds })
  = ASSERT( all isImmutableTyVar tyvars )
    do { (env0, new_tyvars)   <- zonkTyBndrsX env tyvars
       ; (env1, new_evs)      <- zonkEvBndrsX env0 evs
       ; (env2, new_ev_binds) <- zonkTcEvBinds_s env1 ev_binds
         -- Knot: the zonked binders of the payload bindings must be in
         -- scope while zonking those bindings and the exports
       ; (new_val_bind, new_exports) <- fixM $ \ ~(new_val_binds, _) ->
         do { let env3 = extendIdZonkEnv env2 (collectHsBindsBinders new_val_binds)
            ; new_val_binds <- zonkMonoBinds env3 noSigWarn val_binds
            ; new_exports   <- mapM (zonkExport env3) exports
            ; return (new_val_binds, new_exports) }
       ; sig_warn True (map abe_poly new_exports)
       ; return (AbsBinds { abs_tvs = new_tyvars, abs_ev_vars = new_evs
                          , abs_ev_binds = new_ev_binds
                          , abs_exports = new_exports, abs_binds = new_val_bind }) }
  where
    zonkExport env (ABE{ abe_wrap = wrap, abe_poly = poly_id
                       , abe_mono = mono_id, abe_prags = prags })
        = do new_poly_id <- zonkIdBndr env poly_id
             (_, new_wrap) <- zonkCoFn env wrap
             new_prags <- zonkSpecPrags env prags
             return (ABE{ abe_wrap = new_wrap, abe_poly = new_poly_id
                        , abe_mono = zonkIdOcc env mono_id
                        , abe_prags = new_prags })

zonk_bind env _sig_warn (PatSynBind bind@(PSB { psb_id = L loc id
                                              , psb_args = details
                                              , psb_def = lpat
                                              , psb_dir = dir }))
  = do { id' <- zonkIdBndr env id
       ; details' <- zonkPatSynDetails env details
       ; (env1, lpat') <- zonkPat env lpat
       ; (_env2, dir') <- zonkPatSynDir env1 dir
       ; return $ PatSynBind $
                  bind { psb_id = L loc id'
                       , psb_args = details'
                       , psb_def = lpat'
                       , psb_dir = dir' } }
-- | Zonk the argument binders of a pattern-synonym declaration.
zonkPatSynDetails :: ZonkEnv
                  -> HsPatSynDetails (Located TcId)
                  -> TcM (HsPatSynDetails (Located Id))
zonkPatSynDetails env details = traverse (wrapLocM (zonkIdBndr env)) details
-- | Zonk the direction of a pattern synonym; only the explicit
-- bidirectional case carries anything that needs zonking.
zonkPatSynDir :: ZonkEnv -> HsPatSynDir TcId -> TcM (ZonkEnv, HsPatSynDir Id)
zonkPatSynDir env dir = case dir of
  Unidirectional           -> return (env, Unidirectional)
  ImplicitBidirectional    -> return (env, ImplicitBidirectional)
  ExplicitBidirectional mg -> do
    mg' <- zonkMatchGroup env zonkLExpr mg
    return (env, ExplicitBidirectional mg')
-- | Zonk SPECIALISE pragma information attached to a binding.
zonkSpecPrags :: ZonkEnv -> TcSpecPrags -> TcM TcSpecPrags
zonkSpecPrags _   IsDefaultMethod = return IsDefaultMethod
zonkSpecPrags env (SpecPrags ps)  = SpecPrags <$> zonkLTcSpecPrags env ps

-- | Zonk a list of located SPECIALISE pragmas.
zonkLTcSpecPrags :: ZonkEnv -> [LTcSpecPrag] -> TcM [LTcSpecPrag]
zonkLTcSpecPrags env = mapM zonk_prag
  where
    zonk_prag (L loc (SpecPrag id co_fn inl))
      = do { (_, co_fn') <- zonkCoFn env co_fn
           ; return (L loc (SpecPrag (zonkIdOcc env id) co_fn' inl)) }
{-
************************************************************************
* *
\subsection[BackSubst-Match-GRHSs]{Match and GRHSs}
* *
************************************************************************
-}
-- | Zonk a match group; the @zBody@ argument says how to zonk the body
-- (expressions vs. arrow commands).
zonkMatchGroup :: ZonkEnv
               -> (ZonkEnv -> Located (body TcId) -> TcM (Located (body Id)))
               -> MatchGroup TcId (Located (body TcId)) -> TcM (MatchGroup Id (Located (body Id)))
zonkMatchGroup env zBody (MG { mg_alts = ms, mg_arg_tys = arg_tys, mg_res_ty = res_ty, mg_origin = origin })
  = do  { ms'      <- mapM (zonkMatch env zBody) ms
        ; arg_tys' <- zonkTcTypeToTypes env arg_tys
        ; res_ty'  <- zonkTcTypeToType env res_ty
        ; return (MG { mg_alts = ms', mg_arg_tys = arg_tys', mg_res_ty = res_ty', mg_origin = origin }) }
-- | Zonk one alternative: patterns first (extending the environment
-- with their binders), then the guarded right-hand sides.
zonkMatch :: ZonkEnv
          -> (ZonkEnv -> Located (body TcId) -> TcM (Located (body Id)))
          -> LMatch TcId (Located (body TcId)) -> TcM (LMatch Id (Located (body Id)))
zonkMatch env zBody (L loc (Match mf pats _ grhss))
  = do  { (env1, new_pats) <- zonkPats env pats
        ; new_grhss <- zonkGRHSs env1 zBody grhss
        ; return (L loc (Match mf new_pats Nothing new_grhss)) }
-------------------------------------------------------------------------
-- | Zonk guarded right-hand sides: the local bindings first (their
-- binders scope over guards and right-hand sides), then each GRHS.
zonkGRHSs :: ZonkEnv
          -> (ZonkEnv -> Located (body TcId) -> TcM (Located (body Id)))
          -> GRHSs TcId (Located (body TcId)) -> TcM (GRHSs Id (Located (body Id)))
zonkGRHSs env zBody (GRHSs grhss binds) = do
    (new_env, new_binds) <- zonkLocalBinds env binds
    let
        zonk_grhs (GRHS guarded rhs)
          = do (env2, new_guarded) <- zonkStmts new_env zonkLExpr guarded
               new_rhs <- zBody env2 rhs
               return (GRHS new_guarded new_rhs)
    new_grhss <- mapM (wrapLocM zonk_grhs) grhss
    return (GRHSs new_grhss new_binds)
{-
************************************************************************
* *
\subsection[BackSubst-HsExpr]{Running a zonkitution over a TypeCheckedExpr}
* *
************************************************************************
-}
-- Zonking expressions: list, located, and plain variants.
zonkLExprs :: ZonkEnv -> [LHsExpr TcId] -> TcM [LHsExpr Id]
zonkLExpr  :: ZonkEnv -> LHsExpr TcId   -> TcM (LHsExpr Id)
zonkExpr   :: ZonkEnv -> HsExpr TcId    -> TcM (HsExpr Id)

zonkLExprs env exprs = mapM (zonkLExpr env) exprs
zonkLExpr  env expr  = wrapLocM (zonkExpr env) expr
-- Main expression-zonking worker: one equation per HsExpr constructor.
-- The catch-all panic at the bottom must stay last.
zonkExpr env (HsVar id)
  = return (HsVar (zonkIdOcc env id))

zonkExpr _ (HsIPVar id)
  = return (HsIPVar id)

zonkExpr env (HsLit (HsRat f ty))
  = do new_ty <- zonkTcTypeToType env ty
       return (HsLit (HsRat f new_ty))

zonkExpr _ (HsLit lit)
  = return (HsLit lit)   -- other literals carry no zonkable type

zonkExpr env (HsOverLit lit)
  = do  { lit' <- zonkOverLit env lit
        ; return (HsOverLit lit') }

zonkExpr env (HsLam matches)
  = do new_matches <- zonkMatchGroup env zonkLExpr matches
       return (HsLam new_matches)

zonkExpr env (HsLamCase arg matches)
  = do new_arg <- zonkTcTypeToType env arg
       new_matches <- zonkMatchGroup env zonkLExpr matches
       return (HsLamCase new_arg new_matches)

zonkExpr env (HsApp e1 e2)
  = do new_e1 <- zonkLExpr env e1
       new_e2 <- zonkLExpr env e2
       return (HsApp new_e1 new_e2)

zonkExpr _ e@(HsRnBracketOut _ _)
  = pprPanic "zonkExpr: HsRnBracketOut" (ppr e)

zonkExpr env (HsTcBracketOut body bs)
  = do bs' <- mapM zonk_b bs
       return (HsTcBracketOut body bs')
  where
    zonk_b (PendingTcSplice n e) = do e' <- zonkLExpr env e
                                      return (PendingTcSplice n e')

zonkExpr _ (HsSpliceE s) = WARN( True, ppr s ) -- Should not happen
                           return (HsSpliceE s)

zonkExpr env (OpApp e1 op fixity e2)
  = do new_e1 <- zonkLExpr env e1
       new_op <- zonkLExpr env op
       new_e2 <- zonkLExpr env e2
       return (OpApp new_e1 new_op fixity new_e2)

zonkExpr env (NegApp expr op)
  = do new_expr <- zonkLExpr env expr
       new_op   <- zonkExpr env op
       return (NegApp new_expr new_op)

zonkExpr env (HsPar e)
  = do new_e <- zonkLExpr env e
       return (HsPar new_e)

zonkExpr env (SectionL expr op)
  = do new_expr <- zonkLExpr env expr
       new_op   <- zonkLExpr env op
       return (SectionL new_expr new_op)

zonkExpr env (SectionR op expr)
  = do new_op   <- zonkLExpr env op
       new_expr <- zonkLExpr env expr
       return (SectionR new_op new_expr)

zonkExpr env (ExplicitTuple tup_args boxed)
  = do { new_tup_args <- mapM zonk_tup_arg tup_args
       ; return (ExplicitTuple new_tup_args boxed) }
  where
    zonk_tup_arg (L l (Present e)) = do { e' <- zonkLExpr env e
                                        ; return (L l (Present e')) }
    zonk_tup_arg (L l (Missing t)) = do { t' <- zonkTcTypeToType env t
                                        ; return (L l (Missing t')) }

zonkExpr env (HsCase expr ms)
  = do new_expr <- zonkLExpr env expr
       new_ms   <- zonkMatchGroup env zonkLExpr ms
       return (HsCase new_expr new_ms)

zonkExpr env (HsIf e0 e1 e2 e3)
  = do { new_e0 <- fmapMaybeM (zonkExpr env) e0   -- rebindable-syntax 'ifThenElse'
       ; new_e1 <- zonkLExpr env e1
       ; new_e2 <- zonkLExpr env e2
       ; new_e3 <- zonkLExpr env e3
       ; return (HsIf new_e0 new_e1 new_e2 new_e3) }

zonkExpr env (HsMultiIf ty alts)
  = do { alts' <- mapM (wrapLocM zonk_alt) alts
       ; ty'   <- zonkTcTypeToType env ty
       ; return $ HsMultiIf ty' alts' }
  where zonk_alt (GRHS guard expr)
          = do { (env', guard') <- zonkStmts env zonkLExpr guard
               ; expr'          <- zonkLExpr env' expr
               ; return $ GRHS guard' expr' }

zonkExpr env (HsLet binds expr)
  = do (new_env, new_binds) <- zonkLocalBinds env binds
       new_expr <- zonkLExpr new_env expr
       return (HsLet new_binds new_expr)

zonkExpr env (HsDo do_or_lc stmts ty)
  = do (_, new_stmts) <- zonkStmts env zonkLExpr stmts
       new_ty <- zonkTcTypeToType env ty
       return (HsDo do_or_lc new_stmts new_ty)

zonkExpr env (ExplicitList ty wit exprs)
  = do new_ty    <- zonkTcTypeToType env ty
       new_wit   <- zonkWit env wit
       new_exprs <- zonkLExprs env exprs
       return (ExplicitList new_ty new_wit new_exprs)
  where zonkWit _ Nothing = return Nothing
        zonkWit env (Just fln) = do new_fln <- zonkExpr env fln
                                    return (Just new_fln)

zonkExpr env (ExplicitPArr ty exprs)
  = do new_ty    <- zonkTcTypeToType env ty
       new_exprs <- zonkLExprs env exprs
       return (ExplicitPArr new_ty new_exprs)

zonkExpr env (RecordCon data_con con_expr rbinds)
  = do  { new_con_expr <- zonkExpr env con_expr
        ; new_rbinds   <- zonkRecFields env rbinds
        ; return (RecordCon data_con new_con_expr new_rbinds) }

zonkExpr env (RecordUpd expr rbinds cons in_tys out_tys)
  = do  { new_expr    <- zonkLExpr env expr
        ; new_in_tys  <- mapM (zonkTcTypeToType env) in_tys
        ; new_out_tys <- mapM (zonkTcTypeToType env) out_tys
        ; new_rbinds  <- zonkRecFields env rbinds
        ; return (RecordUpd new_expr new_rbinds cons new_in_tys new_out_tys) }

zonkExpr env (ExprWithTySigOut e ty)
  = do { e' <- zonkLExpr env e
       ; return (ExprWithTySigOut e' ty) }

zonkExpr _ (ExprWithTySig _ _ _) = panic "zonkExpr env:ExprWithTySig"

zonkExpr env (ArithSeq expr wit info)
  = do new_expr <- zonkExpr env expr
       new_wit  <- zonkWit env wit
       new_info <- zonkArithSeq env info
       return (ArithSeq new_expr new_wit new_info)
  where zonkWit _ Nothing = return Nothing
        zonkWit env (Just fln) = do new_fln <- zonkExpr env fln
                                    return (Just new_fln)

zonkExpr env (PArrSeq expr info)
  = do new_expr <- zonkExpr env expr
       new_info <- zonkArithSeq env info
       return (PArrSeq new_expr new_info)

zonkExpr env (HsSCC src lbl expr)
  = do new_expr <- zonkLExpr env expr
       return (HsSCC src lbl new_expr)

zonkExpr env (HsTickPragma src info expr)
  = do new_expr <- zonkLExpr env expr
       return (HsTickPragma src info new_expr)

-- hdaume: core annotations
zonkExpr env (HsCoreAnn src lbl expr)
  = do new_expr <- zonkLExpr env expr
       return (HsCoreAnn src lbl new_expr)

-- arrow notation extensions
zonkExpr env (HsProc pat body)
  = do  { (env1, new_pat) <- zonkPat env pat
        ; new_body <- zonkCmdTop env1 body
        ; return (HsProc new_pat new_body) }

-- StaticPointers extension
zonkExpr env (HsStatic expr)
  = HsStatic <$> zonkLExpr env expr

zonkExpr env (HsWrap co_fn expr)
  = do (env1, new_co_fn) <- zonkCoFn env co_fn
       new_expr <- zonkExpr env1 expr
       return (HsWrap new_co_fn new_expr)

zonkExpr _ (HsUnboundVar v)
  = return (HsUnboundVar v)

zonkExpr _ expr = pprPanic "zonkExpr" (ppr expr)
-------------------------------------------------------------------------
-- Zonking arrow commands.
zonkLCmd :: ZonkEnv -> LHsCmd TcId -> TcM (LHsCmd Id)
zonkCmd  :: ZonkEnv -> HsCmd TcId  -> TcM (HsCmd Id)

zonkLCmd env cmd = wrapLocM (zonkCmd env) cmd
-- One equation per HsCmd constructor.
zonkCmd env (HsCmdCast co cmd)
  = do { co'  <- zonkTcCoToCo env co
       ; cmd' <- zonkCmd env cmd
       ; return (HsCmdCast co' cmd') }

zonkCmd env (HsCmdArrApp e1 e2 ty ho rl)
  = do new_e1 <- zonkLExpr env e1
       new_e2 <- zonkLExpr env e2
       new_ty <- zonkTcTypeToType env ty
       return (HsCmdArrApp new_e1 new_e2 new_ty ho rl)

zonkCmd env (HsCmdArrForm op fixity args)
  = do new_op   <- zonkLExpr env op
       new_args <- mapM (zonkCmdTop env) args
       return (HsCmdArrForm new_op fixity new_args)

zonkCmd env (HsCmdApp c e)
  = do new_c <- zonkLCmd env c
       new_e <- zonkLExpr env e
       return (HsCmdApp new_c new_e)

zonkCmd env (HsCmdLam matches)
  = do new_matches <- zonkMatchGroup env zonkLCmd matches
       return (HsCmdLam new_matches)

zonkCmd env (HsCmdPar c)
  = do new_c <- zonkLCmd env c
       return (HsCmdPar new_c)

zonkCmd env (HsCmdCase expr ms)
  = do new_expr <- zonkLExpr env expr
       new_ms   <- zonkMatchGroup env zonkLCmd ms
       return (HsCmdCase new_expr new_ms)

zonkCmd env (HsCmdIf eCond ePred cThen cElse)
  = do { new_eCond <- fmapMaybeM (zonkExpr env) eCond
       ; new_ePred <- zonkLExpr env ePred
       ; new_cThen <- zonkLCmd env cThen
       ; new_cElse <- zonkLCmd env cElse
       ; return (HsCmdIf new_eCond new_ePred new_cThen new_cElse) }

zonkCmd env (HsCmdLet binds cmd)
  = do (new_env, new_binds) <- zonkLocalBinds env binds
       new_cmd <- zonkLCmd new_env cmd
       return (HsCmdLet new_binds new_cmd)

zonkCmd env (HsCmdDo stmts ty)
  = do (_, new_stmts) <- zonkStmts env zonkLCmd stmts
       new_ty <- zonkTcTypeToType env ty
       return (HsCmdDo new_stmts new_ty)
-- | Zonk a top-level arrow command (the argument of a proc or an
-- arrow-form operator).
zonkCmdTop :: ZonkEnv -> LHsCmdTop TcId -> TcM (LHsCmdTop Id)
zonkCmdTop env cmd = wrapLocM (zonk_cmd_top env) cmd

zonk_cmd_top :: ZonkEnv -> HsCmdTop TcId -> TcM (HsCmdTop Id)
zonk_cmd_top env (HsCmdTop cmd stack_tys ty ids)
  = do new_cmd       <- zonkLCmd env cmd
       new_stack_tys <- zonkTcTypeToType env stack_tys
       new_ty        <- zonkTcTypeToType env ty
       new_ids       <- mapSndM (zonkExpr env) ids
       return (HsCmdTop new_cmd new_stack_tys new_ty new_ids)
-------------------------------------------------------------------------
-- | Zonk an HsWrapper.  The binding forms (WpEvLam, WpTyLam, WpLet)
-- extend the environment, which is why an updated ZonkEnv is returned.
zonkCoFn :: ZonkEnv -> HsWrapper -> TcM (ZonkEnv, HsWrapper)
zonkCoFn env WpHole   = return (env, WpHole)
zonkCoFn env (WpCompose c1 c2) = do { (env1, c1') <- zonkCoFn env c1
                                    ; (env2, c2') <- zonkCoFn env1 c2
                                    ; return (env2, WpCompose c1' c2') }
zonkCoFn env (WpFun c1 c2 t1 t2) = do { (env1, c1') <- zonkCoFn env c1
                                      ; (env2, c2') <- zonkCoFn env1 c2
                                      ; t1' <- zonkTcTypeToType env2 t1
                                      ; t2' <- zonkTcTypeToType env2 t2
                                      ; return (env2, WpFun c1' c2' t1' t2') }
zonkCoFn env (WpCast co)   = do { co' <- zonkTcCoToCo env co
                                ; return (env, WpCast co') }
zonkCoFn env (WpEvLam ev)  = do { (env', ev') <- zonkEvBndrX env ev
                                ; return (env', WpEvLam ev') }
zonkCoFn env (WpEvApp arg) = do { arg' <- zonkEvTerm env arg
                                ; return (env, WpEvApp arg') }
zonkCoFn env (WpTyLam tv)  = ASSERT( isImmutableTyVar tv )
                             do { (env', tv') <- zonkTyBndrX env tv
                                ; return (env', WpTyLam tv') }
zonkCoFn env (WpTyApp ty)  = do { ty' <- zonkTcTypeToType env ty
                                ; return (env, WpTyApp ty') }
zonkCoFn env (WpLet bs)    = do { (env1, bs') <- zonkTcEvBinds env bs
                                ; return (env1, WpLet bs') }
-------------------------------------------------------------------------
-- | Zonk an overloaded literal: its type first, then its witness
-- expression.
zonkOverLit :: ZonkEnv -> HsOverLit TcId -> TcM (HsOverLit Id)
zonkOverLit env lit@(OverLit { ol_witness = witness, ol_type = ty })
  = do { zonked_ty      <- zonkTcTypeToType env ty
       ; zonked_witness <- zonkExpr env witness
       ; return (lit { ol_witness = zonked_witness, ol_type = zonked_ty }) }
-------------------------------------------------------------------------
-- | Zonk the constituent expressions of an arithmetic sequence
-- ([a..], [a,b..], [a..c], [a,b..c]).
zonkArithSeq :: ZonkEnv -> ArithSeqInfo TcId -> TcM (ArithSeqInfo Id)
zonkArithSeq env info = case info of
    From e -> do
      e' <- zonk e
      return (From e')
    FromThen e1 e2 -> do
      e1' <- zonk e1
      e2' <- zonk e2
      return (FromThen e1' e2')
    FromTo e1 e2 -> do
      e1' <- zonk e1
      e2' <- zonk e2
      return (FromTo e1' e2')
    FromThenTo e1 e2 e3 -> do
      e1' <- zonk e1
      e2' <- zonk e2
      e3' <- zonk e3
      return (FromThenTo e1' e2' e3')
  where
    zonk = zonkLExpr env
-------------------------------------------------------------------------
-- | Zonk a list of statements left to right, threading the environment
-- so that binders of earlier statements scope over later ones.
zonkStmts :: ZonkEnv
          -> (ZonkEnv -> Located (body TcId) -> TcM (Located (body Id)))
          -> [LStmt TcId (Located (body TcId))] -> TcM (ZonkEnv, [LStmt Id (Located (body Id))])
zonkStmts env _ [] = return (env, [])
zonkStmts env zBody (s:ss) = do { (env1, s')  <- wrapLocSndM (zonkStmt env zBody) s
                                ; (env2, ss') <- zonkStmts env1 zBody ss
                                ; return (env2, s' : ss') }
-- | Zonk a single statement, returning an environment extended with any
-- binders the statement brings into scope for subsequent statements.
zonkStmt :: ZonkEnv
         -> (ZonkEnv -> Located (body TcId) -> TcM (Located (body Id)))
         -> Stmt TcId (Located (body TcId)) -> TcM (ZonkEnv, Stmt Id (Located (body Id)))
zonkStmt env _ (ParStmt stmts_w_bndrs mzip_op bind_op)
  = do  { new_stmts_w_bndrs <- mapM zonk_branch stmts_w_bndrs
        ; let new_binders = [b | ParStmtBlock _ bs _ <- new_stmts_w_bndrs, b <- bs]
              env1 = extendIdZonkEnv env new_binders
        ; new_mzip <- zonkExpr env1 mzip_op
        ; new_bind <- zonkExpr env1 bind_op
        ; return (env1, ParStmt new_stmts_w_bndrs new_mzip new_bind) }
  where
    -- Each parallel branch is zonked independently, starting from env
    zonk_branch (ParStmtBlock stmts bndrs return_op)
       = do { (env1, new_stmts) <- zonkStmts env zonkLExpr stmts
            ; new_return <- zonkExpr env1 return_op
            ; return (ParStmtBlock new_stmts (zonkIdOccs env1 bndrs) new_return) }

zonkStmt env zBody (RecStmt { recS_stmts = segStmts, recS_later_ids = lvs, recS_rec_ids = rvs
                            , recS_ret_fn = ret_id, recS_mfix_fn = mfix_id, recS_bind_fn = bind_id
                            , recS_later_rets = later_rets, recS_rec_rets = rec_rets
                            , recS_ret_ty = ret_ty })
  = do { new_rvs <- zonkIdBndrs env rvs
       ; new_lvs <- zonkIdBndrs env lvs
       ; new_ret_ty  <- zonkTcTypeToType env ret_ty
       ; new_ret_id  <- zonkExpr env ret_id
       ; new_mfix_id <- zonkExpr env mfix_id
       ; new_bind_id <- zonkExpr env bind_id
       ; let env1 = extendIdZonkEnv env new_rvs
       ; (env2, new_segStmts) <- zonkStmts env1 zBody segStmts
        -- Zonk the ret-expressions in an envt that
        -- has the polymorphic bindings in the envt
       ; new_later_rets <- mapM (zonkExpr env2) later_rets
       ; new_rec_rets   <- mapM (zonkExpr env2) rec_rets
       ; return (extendIdZonkEnv env new_lvs,     -- Only the lvs are needed
                 RecStmt { recS_stmts = new_segStmts, recS_later_ids = new_lvs
                         , recS_rec_ids = new_rvs, recS_ret_fn = new_ret_id
                         , recS_mfix_fn = new_mfix_id, recS_bind_fn = new_bind_id
                         , recS_later_rets = new_later_rets
                         , recS_rec_rets = new_rec_rets, recS_ret_ty = new_ret_ty }) }

zonkStmt env zBody (BodyStmt body then_op guard_op ty)
  = do new_body  <- zBody env body
       new_then  <- zonkExpr env then_op
       new_guard <- zonkExpr env guard_op
       new_ty    <- zonkTcTypeToType env ty
       return (env, BodyStmt new_body new_then new_guard new_ty)

zonkStmt env zBody (LastStmt body ret_op)
  = do new_body <- zBody env body
       new_ret  <- zonkExpr env ret_op
       return (env, LastStmt new_body new_ret)

zonkStmt env _ (TransStmt { trS_stmts = stmts, trS_bndrs = binderMap
                          , trS_by = by, trS_form = form, trS_using = using
                          , trS_ret = return_op, trS_bind = bind_op, trS_fmap = liftM_op })
  = do { (env', stmts') <- zonkStmts env zonkLExpr stmts
       ; binderMap' <- mapM (zonkBinderMapEntry env') binderMap
       ; by'        <- fmapMaybeM (zonkLExpr env') by
       ; using'     <- zonkLExpr env using
       ; return_op' <- zonkExpr env' return_op
       ; bind_op'   <- zonkExpr env' bind_op
       ; liftM_op'  <- zonkExpr env' liftM_op
       ; let env'' = extendIdZonkEnv env' (map snd binderMap')
       ; return (env'', TransStmt { trS_stmts = stmts', trS_bndrs = binderMap'
                                  , trS_by = by', trS_form = form, trS_using = using'
                                  , trS_ret = return_op', trS_bind = bind_op', trS_fmap = liftM_op' }) }
  where
    zonkBinderMapEntry env (oldBinder, newBinder) = do
        let oldBinder' = zonkIdOcc env oldBinder
        newBinder' <- zonkIdBndr env newBinder
        return (oldBinder', newBinder')

zonkStmt env _ (LetStmt binds)
  = do (env1, new_binds) <- zonkLocalBinds env binds
       return (env1, LetStmt new_binds)

zonkStmt env zBody (BindStmt pat body bind_op fail_op)
  = do  { new_body <- zBody env body
        ; (env1, new_pat) <- zonkPat env pat
        ; new_bind <- zonkExpr env bind_op
        ; new_fail <- zonkExpr env fail_op
        ; return (env1, BindStmt new_pat new_body new_bind new_fail) }
-------------------------------------------------------------------------
-- | Zonk the fields of a record construction/update.  The dot-dot count
-- carries no types, so it is passed through unchanged.
zonkRecFields :: ZonkEnv -> HsRecordBinds TcId -> TcM (HsRecordBinds TcId)
zonkRecFields env (HsRecFields fields dotdot) = do
    zonked <- mapM zonk_field fields
    return (HsRecFields zonked dotdot)
  where
    -- Zonk one located field: binder and argument expression.
    zonk_field (L loc fld) = do
        fld_id  <- wrapLocM (zonkIdBndr env) (hsRecFieldId fld)
        fld_arg <- zonkLExpr env (hsRecFieldArg fld)
        return (L loc (fld { hsRecFieldId  = fld_id
                           , hsRecFieldArg = fld_arg }))
-------------------------------------------------------------------------
-- | Apply a monadic function to the Right alternative only; implicit-parameter
-- names (the Left alternative) pass through untouched.
mapIPNameTc :: (a -> TcM b) -> Either (Located HsIPName) a
            -> TcM (Either (Located HsIPName) b)
mapIPNameTc _ (Left ipname) = return (Left ipname)
mapIPNameTc f (Right thing) = fmap Right (f thing)
{-
************************************************************************
* *
\subsection[BackSubst-Pats]{Patterns}
* *
************************************************************************
-}
zonkPat :: ZonkEnv -> OutPat TcId -> TcM (ZonkEnv, OutPat Id)
-- Extend the environment as we go, because it's possible for one
-- pattern to bind something that is used in another (inside or
-- to the right).  The real work is done on the un-located pattern.
zonkPat env = wrapLocSndM (zonk_pat env)
-- | Zonk one pattern (the workhorse behind 'zonkPat').  Returns the zonked
-- pattern together with an environment extended by any term variables the
-- pattern binds, so later patterns and expressions see the zonked binders.
zonk_pat :: ZonkEnv -> Pat TcId -> TcM (ZonkEnv, Pat Id)

zonk_pat env (ParPat p)
  = do  { (env', p') <- zonkPat env p
        ; return (env', ParPat p') }

zonk_pat env (WildPat ty)
  = do  { ty' <- zonkTcTypeToType env ty
        ; return (env, WildPat ty') }

zonk_pat env (VarPat v)
  = do  { v' <- zonkIdBndr env v
        ; return (extendIdZonkEnv1 env v', VarPat v') }

zonk_pat env (LazyPat pat)
  = do  { (env', pat') <- zonkPat env pat
        ; return (env', LazyPat pat') }

zonk_pat env (BangPat pat)
  = do  { (env', pat') <- zonkPat env pat
        ; return (env', BangPat pat') }

zonk_pat env (AsPat (L loc v) pat)
  = do  { v' <- zonkIdBndr env v
          -- The as-bound variable scopes over the sub-pattern
        ; (env', pat') <- zonkPat (extendIdZonkEnv1 env v') pat
        ; return (env', AsPat (L loc v') pat') }

zonk_pat env (ViewPat expr pat ty)
  = do  { expr' <- zonkLExpr env expr
        ; (env', pat') <- zonkPat env pat
        ; ty' <- zonkTcTypeToType env ty
        ; return (env', ViewPat expr' pat' ty') }

zonk_pat env (ListPat pats ty Nothing)
  = do  { ty' <- zonkTcTypeToType env ty
        ; (env', pats') <- zonkPats env pats
        ; return (env', ListPat pats' ty' Nothing) }

-- Overloaded-list pattern: zonk the witness expression and its type too
zonk_pat env (ListPat pats ty (Just (ty2,wit)))
  = do  { wit' <- zonkExpr env wit
        ; ty2' <- zonkTcTypeToType env ty2
        ; ty' <- zonkTcTypeToType env ty
        ; (env', pats') <- zonkPats env pats
        ; return (env', ListPat pats' ty' (Just (ty2',wit'))) }

zonk_pat env (PArrPat pats ty)
  = do  { ty' <- zonkTcTypeToType env ty
        ; (env', pats') <- zonkPats env pats
        ; return (env', PArrPat pats' ty') }

zonk_pat env (TuplePat pats boxed tys)
  = do  { tys' <- mapM (zonkTcTypeToType env) tys
        ; (env', pats') <- zonkPats env pats
        ; return (env', TuplePat pats' boxed tys') }

-- Constructor pattern: thread the env through type binders, evidence
-- binders, evidence bindings, the wrapper, and finally the arguments.
zonk_pat env p@(ConPatOut { pat_arg_tys = tys, pat_tvs = tyvars
                          , pat_dicts = evs, pat_binds = binds
                          , pat_args = args, pat_wrap = wrapper })
  = ASSERT( all isImmutableTyVar tyvars )
    do  { new_tys <- mapM (zonkTcTypeToType env) tys
        ; (env0, new_tyvars) <- zonkTyBndrsX env tyvars
          -- Must zonk the existential variables, because their
          -- /kind/ may itself need zonking.
          -- cf typecheck/should_compile/tc221.hs
        ; (env1, new_evs) <- zonkEvBndrsX env0 evs
        ; (env2, new_binds) <- zonkTcEvBinds env1 binds
        ; (env3, new_wrapper) <- zonkCoFn env2 wrapper
        ; (env', new_args) <- zonkConStuff env3 args
        ; return (env', p { pat_arg_tys = new_tys,
                            pat_tvs = new_tyvars,
                            pat_dicts = new_evs,
                            pat_binds = new_binds,
                            pat_args = new_args,
                            pat_wrap = new_wrapper}) }

zonk_pat env (LitPat lit) = return (env, LitPat lit)

zonk_pat env (SigPatOut pat ty)
  = do  { ty' <- zonkTcTypeToType env ty
        ; (env', pat') <- zonkPat env pat
        ; return (env', SigPatOut pat' ty') }

zonk_pat env (NPat (L l lit) mb_neg eq_expr)
  = do  { lit' <- zonkOverLit env lit
        ; mb_neg' <- fmapMaybeM (zonkExpr env) mb_neg
        ; eq_expr' <- zonkExpr env eq_expr
        ; return (env, NPat (L l lit') mb_neg' eq_expr') }

-- n+k pattern: e1/e2 are zonked in the incoming env; only the returned
-- env is extended with the binder n'
zonk_pat env (NPlusKPat (L loc n) (L l lit) e1 e2)
  = do  { n' <- zonkIdBndr env n
        ; lit' <- zonkOverLit env lit
        ; e1' <- zonkExpr env e1
        ; e2' <- zonkExpr env e2
        ; return (extendIdZonkEnv1 env n',
                  NPlusKPat (L loc n') (L l lit') e1' e2') }

zonk_pat env (CoPat co_fn pat ty)
  = do  { (env', co_fn') <- zonkCoFn env co_fn
        ; (env'', pat') <- zonkPat env' (noLoc pat)   -- noLoc/unLoc because CoPat holds an un-located pattern
        ; ty' <- zonkTcTypeToType env'' ty
        ; return (env'', CoPat co_fn' (unLoc pat') ty') }

-- Any other pattern form reaching this stage is a compiler bug
zonk_pat _ pat = pprPanic "zonk_pat" (ppr pat)
---------------------------
-- | Zonk the argument patterns of a data-constructor pattern
-- (prefix, infix, or record syntax).
zonkConStuff :: ZonkEnv
             -> HsConDetails (OutPat TcId) (HsRecFields id (OutPat TcId))
             -> TcM (ZonkEnv,
                     HsConDetails (OutPat Id) (HsRecFields id (OutPat Id)))
zonkConStuff env (PrefixCon pats)
  = do  { (env', pats') <- zonkPats env pats
        ; return (env', PrefixCon pats') }

zonkConStuff env (InfixCon p1 p2)
  = do  { (env1, p1') <- zonkPat env p1
        ; (env', p2') <- zonkPat env1 p2
        ; return (env', InfixCon p1' p2') }

zonkConStuff env (RecCon (HsRecFields rpats dd))
  = do  { (env', pats') <- zonkPats env (map (hsRecFieldArg . unLoc) rpats)
          -- zonkPats preserves list order, so the zipWith re-attaches each
          -- zonked argument to its original field
        ; let rpats' = zipWith (\(L l rp) p' -> L l (rp { hsRecFieldArg = p' }))
                               rpats pats'
        ; return (env', RecCon (HsRecFields rpats' dd)) }
        -- Field selectors have declared types; hence no zonking
-- Field selectors have declared types; hence no zonking
---------------------------
-- | Zonk a list of patterns left-to-right, threading the environment so
-- binders in earlier patterns are in scope while zonking later ones.
zonkPats :: ZonkEnv -> [OutPat TcId] -> TcM (ZonkEnv, [OutPat Id])
zonkPats env []       = return (env, [])
zonkPats env (p : ps) = do
    (env_p, p')   <- zonkPat env p
    (env_ps, ps') <- zonkPats env_p ps
    return (env_ps, p' : ps')
{-
************************************************************************
* *
\subsection[BackSubst-Foreign]{Foreign exports}
* *
************************************************************************
-}
-- | Zonk a list of located foreign declarations (exports need work;
-- imports pass through -- see 'zonkForeignExport').
zonkForeignExports :: ZonkEnv -> [LForeignDecl TcId] -> TcM [LForeignDecl Id]
zonkForeignExports env = mapM (wrapLocM (zonkForeignExport env))
-- | Zonk one foreign declaration.  Only the exported Id occurrence needs
-- zonking; foreign imports are returned untouched.
zonkForeignExport :: ZonkEnv -> ForeignDecl TcId -> TcM (ForeignDecl Id)
zonkForeignExport env (ForeignExport i _hs_ty co spec) =
  -- NOTE(review): the source-syntax type (_hs_ty) is deliberately dropped
  -- and replaced by 'undefined' -- presumably nothing consults that field
  -- after type checking; confirm before relying on it downstream.
  return (ForeignExport (fmap (zonkIdOcc env) i) undefined co spec)
zonkForeignExport _ for_imp
  = return for_imp          -- Foreign imports don't need zonking
-- | Zonk a list of located RULE pragmas.
zonkRules :: ZonkEnv -> [LRuleDecl TcId] -> TcM [LRuleDecl Id]
zonkRules env = mapM (wrapLocM (zonkRule env))
-- | Zonk a RULE pragma.  Unbound type/kind variables free on the LHS are
-- collected in a mutable set via 'zonkTvCollecting' (instead of being
-- zapped to a default type) and turned into extra rule binders.
-- See Note [Zonking the LHS of a RULE].
zonkRule :: ZonkEnv -> RuleDecl TcId -> TcM (RuleDecl Id)
zonkRule env (HsRule name act (vars{-::[RuleBndr TcId]-}) lhs fv_lhs rhs fv_rhs)
  = do { unbound_tkv_set <- newMutVar emptyVarSet
       ; let env_rule = setZonkType env (zonkTvCollecting unbound_tkv_set)
              -- See Note [Zonking the LHS of a RULE]

       ; (env_inside, new_bndrs) <- mapAccumLM zonk_bndr env_rule vars

       ; new_lhs <- zonkLExpr env_inside lhs
       ; new_rhs <- zonkLExpr env_inside rhs

         -- Read back every tyvar/kindvar the LHS zonk discovered
       ; unbound_tkvs <- readMutVar unbound_tkv_set

       ; let final_bndrs :: [LRuleBndr Var]
             final_bndrs = map (noLoc . RuleBndr . noLoc)
                               (varSetElemsKvsFirst unbound_tkvs)
                           ++ new_bndrs
             -- Collected vars go first; the helper's name suggests kind
             -- variables precede type variables -- TODO confirm in VarSet

       ; return $
         HsRule name act final_bndrs new_lhs fv_lhs new_rhs fv_rhs }
  where
   zonk_bndr env (L l (RuleBndr (L loc v)))
      = do { (env', v') <- zonk_it env v
           ; return (env', L l (RuleBndr (L loc v'))) }
   zonk_bndr _ (L _ (RuleBndrSig {})) = panic "zonk_bndr RuleBndrSig"

   zonk_it env v
     | isId v     = do { v' <- zonkIdBndr env v
                       ; return (extendIdZonkEnv1 env v', v') }
     | otherwise  = ASSERT( isImmutableTyVar v)
                    zonkTyBndrX env v
                    -- DV: used to be return (env,v) but that is plain
                    -- wrong because we may need to go inside the kind
                    -- of v and zonk there!
-- | Zonk a list of located vectorisation declarations.
zonkVects :: ZonkEnv -> [LVectDecl TcId] -> TcM [LVectDecl Id]
zonkVects env decls = mapM (wrapLocM (zonkVect env)) decls
-- | Zonk one vectorisation declaration.  The source ("In") forms should
-- be gone by this stage; meeting one is a compiler bug, hence the panics.
zonkVect :: ZonkEnv -> VectDecl TcId -> TcM (VectDecl Id)
zonkVect env (HsVect s var rhs) = do
    var' <- wrapLocM (zonkIdBndr env) var
    rhs' <- zonkLExpr env rhs
    return $ HsVect s var' rhs'
zonkVect env (HsNoVect s var) = do
    var' <- wrapLocM (zonkIdBndr env) var
    return $ HsNoVect s var'
zonkVect _env (HsVectTypeOut s t rt) = return $ HsVectTypeOut s t rt
zonkVect _    (HsVectTypeIn _ _ _ _) = panic "TcHsSyn.zonkVect: HsVectTypeIn"
zonkVect _env (HsVectClassOut c)     = return $ HsVectClassOut c
zonkVect _    (HsVectClassIn _ _)    = panic "TcHsSyn.zonkVect: HsVectClassIn"
zonkVect _env (HsVectInstOut i)      = return $ HsVectInstOut i
zonkVect _    (HsVectInstIn _)       = panic "TcHsSyn.zonkVect: HsVectInstIn"
{-
************************************************************************
* *
Constraints and evidence
* *
************************************************************************
-}
-- | Zonk an evidence term.  Id occurrences are renamed via the environment;
-- embedded types and coercions are zonked recursively.
zonkEvTerm :: ZonkEnv -> EvTerm -> TcM EvTerm
zonkEvTerm env (EvId v)           = ASSERT2( isId v, ppr v )
                                    return (EvId (zonkIdOcc env v))
zonkEvTerm env (EvCoercion co)    = do { co' <- zonkTcCoToCo env co
                                       ; return (EvCoercion co') }
zonkEvTerm env (EvCast tm co)     = do { tm' <- zonkEvTerm env tm
                                       ; co' <- zonkTcCoToCo env co
                                         -- mkEvCast (not EvCast) so a Refl
                                         -- coercion can collapse the cast
                                       ; return (mkEvCast tm' co') }
zonkEvTerm env (EvTupleSel tm n)  = do { tm' <- zonkEvTerm env tm
                                       ; return (EvTupleSel tm' n) }
zonkEvTerm env (EvTupleMk tms)    = do { tms' <- mapM (zonkEvTerm env) tms
                                       ; return (EvTupleMk tms') }
zonkEvTerm _   (EvLit l)          = return (EvLit l)   -- no types inside

zonkEvTerm env (EvTypeable ev) =
  fmap EvTypeable $
  case ev of
    EvTypeableTyCon tc ks    -> return (EvTypeableTyCon tc ks)
    EvTypeableTyApp t1 t2    -> do e1 <- zonk t1
                                   e2 <- zonk t2
                                   return (EvTypeableTyApp e1 e2)
    EvTypeableTyLit t        -> EvTypeableTyLit `fmap` zonkTcTypeToType env t
  where
  -- Zonk an (evidence, type) pair as used by the Typeable cases
  zonk (ev,t) = do ev' <- zonkEvTerm env ev
                   t'  <- zonkTcTypeToType env t
                   return (ev',t')

zonkEvTerm env (EvCallStack cs)
  = case cs of
      EvCsEmpty -> return (EvCallStack cs)
      EvCsTop n l tm -> do { tm' <- zonkEvTerm env tm
                           ; return (EvCallStack (EvCsTop n l tm')) }
      EvCsPushCall n l tm -> do { tm' <- zonkEvTerm env tm
                                ; return (EvCallStack (EvCsPushCall n l tm')) }

zonkEvTerm env (EvSuperClass d n) = do { d' <- zonkEvTerm env d
                                       ; return (EvSuperClass d' n) }
zonkEvTerm env (EvDFunApp df tys tms)
  = do { tys' <- zonkTcTypeToTypes env tys
       ; tms' <- mapM (zonkEvTerm env) tms
       ; return (EvDFunApp (zonkIdOcc env df) tys' tms') }
zonkEvTerm env (EvDelayedError ty msg)
  = do { ty' <- zonkTcTypeToType env ty
       ; return (EvDelayedError ty' msg) }
-- | Zonk several groups of evidence bindings and merge them into a
-- single 'EvBinds' group.
zonkTcEvBinds_s :: ZonkEnv -> [TcEvBinds] -> TcM (ZonkEnv, [TcEvBinds])
zonkTcEvBinds_s env binds_s = do
    (env', bags) <- mapAccumLM zonk_tc_ev_binds env binds_s
    return (env', [EvBinds (unionManyBags bags)])
-- | Zonk one group of evidence bindings, repackaging the resulting bag.
zonkTcEvBinds :: ZonkEnv -> TcEvBinds -> TcM (ZonkEnv, TcEvBinds)
zonkTcEvBinds env binds = do
    (env', bag) <- zonk_tc_ev_binds env binds
    return (env', EvBinds bag)
-- | Dispatch on whether the bindings still live behind a mutable variable.
zonk_tc_ev_binds :: ZonkEnv -> TcEvBinds -> TcM (ZonkEnv, Bag EvBind)
zonk_tc_ev_binds env tc_binds = case tc_binds of
    TcEvBinds var -> zonkEvBindsVar env var
    EvBinds bs    -> zonkEvBinds env bs
-- | Read the bindings out of their mutable cell and zonk them.
zonkEvBindsVar :: ZonkEnv -> EvBindsVar -> TcM (ZonkEnv, Bag EvBind)
zonkEvBindsVar env (EvBindsVar ref _) = do
    bind_map <- readMutVar ref
    zonkEvBinds env (evBindMapBinds bind_map)
-- | Zonk a bag of evidence binds.  'fixM' ties the knot: the environment is
-- extended with the *zonked* binders before the right-hand sides are zonked,
-- so references among the binds resolve to the zonked variables.  The lazy
-- pattern on the result is essential for the fixpoint to terminate.
zonkEvBinds :: ZonkEnv -> Bag EvBind -> TcM (ZonkEnv, Bag EvBind)
zonkEvBinds env binds
  = {-# SCC "zonkEvBinds" #-}
    fixM (\ ~( _, new_binds) -> do
         { let env1 = extendIdZonkEnv env (collect_ev_bndrs new_binds)
         ; binds' <- mapBagM (zonkEvBind env1) binds
         ; return (env1, binds') })
  where
    -- Gather the (zonked) left-hand-side variables of a bag of binds
    collect_ev_bndrs :: Bag EvBind -> [EvVar]
    collect_ev_bndrs = foldrBag add []
    add (EvBind { eb_lhs = var }) vars = var : vars
-- | Zonk one evidence bind: binder first, then (maybe) the RHS term.
zonkEvBind :: ZonkEnv -> EvBind -> TcM EvBind
zonkEvBind env (EvBind { eb_lhs = var, eb_rhs = term, eb_is_given = is_given })
  = do { var' <- {-# SCC "zonkEvBndr" #-} zonkEvBndr env var

         -- Optimise the common case of Refl coercions:
         -- if the zonked binder's type is (ty ~ ty) we can emit Refl
         -- without zonking the RHS at all.
         -- See Note [Optimise coercion zonking]
         -- This has a very big effect on some programs (eg Trac #5030)
       ; term' <- case getEqPredTys_maybe (idType var') of
           Just (r, ty1, ty2) | ty1 `eqType` ty2
                  -> return (EvCoercion (mkTcReflCo r ty1))
           _other -> zonkEvTerm env term

       ; return (EvBind { eb_lhs = var', eb_rhs = term', eb_is_given = is_given }) }
{-
************************************************************************
* *
Zonking types
* *
************************************************************************
Note [Zonking the LHS of a RULE]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to gather the type variables mentioned on the LHS so we can
quantify over them. Example:
data T a = C
foo :: T a -> Int
foo C = 1
{-# RULES "myrule" foo C = 1 #-}
After type checking the LHS becomes (foo a (C a))
and we do not want to zap the unbound tyvar 'a' to (), because
that limits the applicability of the rule. Instead, we
want to quantify over it!
It's easiest to get zonkTvCollecting to gather the free tyvars
here. Attempts to do so earlier are tiresome, because (a) the data
type is big and (b) finding the free type vars of an expression is
necessarily a monadic operation. (Consider /\a -> f @ b, where b is
side-effected to a)
And that in turn is why ZonkEnv carries the function to use for
type variables!
Note [Zonking mutable unbound type or kind variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In zonkTypeZapping, we zonk mutable but unbound type or kind variables to an
arbitrary type. We know if they are unbound even though we don't carry an
environment, because at the binding site for a variable we bind the mutable
var to a fresh immutable one. So the mutable store plays the role of an
environment. If we come across a mutable variable that isn't so bound, it
must be completely free. We zonk the expected kind to make sure we don't get
some unbound meta variable as the kind.
Note that since we have kind polymorphism, zonk_unbound_tyvar will handle both
type and kind variables. Consider the following datatype:
data Phantom a = Phantom Int
The type of Phantom is (forall (k : BOX). forall (a : k). Int). Both `a` and
`k` are unbound variables. We want to zonk this to
(forall (k : AnyK). forall (a : Any AnyK). Int). For that we have to check if
we have a type or a kind variable; for kind variables we just return AnyK (and
not the ill-kinded Any BOX).
Note [Optimise coercion zonking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When optimising evidence binds we may come across situations where
a coercion looks like
cv = ReflCo ty
or cv1 = cv2
where the type 'ty' is big. In such cases it is a waste of time to zonk both
* The variable on the LHS
* The coercion on the RHS
Rather, we can zonk the variable, and if its type is (ty ~ ty), we can just
use Refl on the right, ignoring the actual coercion on the RHS.
This can have a very big effect, because the constraint solver sometimes does go
to a lot of effort to prove Refl! (Eg when solving 10+3 = 10+3; cf Trac #5030)
-}
-- | Zonk an *occurrence* of a type variable.
--   * Skolems and runtime-unknowns are looked up in the environment
--   * Flatten-skolems are zonked through their underlying type
--   * Filled-in meta variables are followed (shortening the indirection)
--   * Unfilled meta variables are handed to the env's unbound-tyvar zonker
zonkTyVarOcc :: ZonkEnv -> TyVar -> TcM TcType
zonkTyVarOcc env@(ZonkEnv zonk_unbound_tyvar tv_env _) tv
  | isTcTyVar tv
  = case tcTyVarDetails tv of
      SkolemTv {}   -> lookup_in_env
      RuntimeUnk {} -> lookup_in_env
      FlatSkol ty   -> zonkTcTypeToType env ty
      MetaTv { mtv_ref = ref }
        -> do { cts <- readMutVar ref
              ; case cts of
                  -- Unfilled: zonk the kind first (it may itself contain
                  -- meta variables), then apply the unbound-tyvar strategy
                  Flexi -> do { kind <- {-# SCC "zonkKind1" #-}
                                        zonkTcTypeToType env (tyVarKind tv)
                              ; zonk_unbound_tyvar (setTyVarKind tv kind) }
                  Indirect ty -> do { zty <- zonkTcTypeToType env ty
                                      -- Small optimisation: shorten out
                                      -- indirect steps so that the old type
                                      -- may be more easily collected.
                                    ; writeMutVar ref (Indirect zty)
                                    ; return zty } }
  | otherwise
  = lookup_in_env
  where
    lookup_in_env    -- Look up in the env just as we do for Ids
      = case lookupVarEnv tv_env tv of
          Nothing  -> return (mkTyVarTy tv)
          Just tv' -> return (mkTyVarTy tv')
-- | Zonk a type-checker type into a final 'Type', resolving all filled-in
-- meta variables and applying the environment's strategy to unbound ones.
zonkTcTypeToType :: ZonkEnv -> TcType -> TcM Type
zonkTcTypeToType env ty
  = go ty
  where
    go (TyConApp tc tys) = do tys' <- mapM go tys
                              return (mkTyConApp tc tys')
                   -- Establish Type invariants
                   -- See Note [Zonking inside the knot] in TcHsType

    go (LitTy n)         = return (LitTy n)

    go (FunTy arg res)   = do arg' <- go arg
                              res' <- go res
                              return (FunTy arg' res')

    go (AppTy fun arg)   = do fun' <- go fun
                              arg' <- go arg
                              return (mkAppTy fun' arg')
                     -- NB the mkAppTy; we might have instantiated a
                     -- type variable to a type constructor, so we need
                     -- to pull the TyConApp to the top.

    -- The two interesting cases!
    go (TyVarTy tv) = zonkTyVarOcc env tv

    go (ForAllTy tv ty) = ASSERT( isImmutableTyVar tv )
                          do { (env', tv') <- zonkTyBndrX env tv
                             ; ty' <- zonkTcTypeToType env' ty
                             ; return (ForAllTy tv' ty') }
-- | Zonk a list of types (see 'zonkTcTypeToType').
zonkTcTypeToTypes :: ZonkEnv -> [TcType] -> TcM [Type]
zonkTcTypeToTypes = mapM . zonkTcTypeToType
-- | Zonk a Core 'Coercion': zonk the types embedded in it and rename any
-- coercion-variable occurrences via the environment.  Uses the 'mk' smart
-- constructors so Refl can propagate upwards.
zonkCoToCo :: ZonkEnv -> Coercion -> TcM Coercion
zonkCoToCo env co
  = go co
  where
    go (Refl r ty)               = mkReflCo r <$> zonkTcTypeToType env ty
    go (TyConAppCo r tc args)    = mkTyConAppCo r tc <$> mapM go args
    go (AppCo co arg)            = mkAppCo <$> go co <*> go arg
    go (AxiomInstCo ax ind args) = AxiomInstCo ax ind <$> mapM go args
    go (UnivCo s r ty1 ty2)      = mkUnivCo s r <$> zonkTcTypeToType env ty1
                                                <*> zonkTcTypeToType env ty2
    go (SymCo co)                = mkSymCo <$> go co
    go (TransCo co1 co2)         = mkTransCo <$> go co1 <*> go co2
    go (NthCo n co)              = mkNthCo n <$> go co
    go (LRCo lr co)              = mkLRCo lr <$> go co
    go (InstCo co arg)           = mkInstCo <$> go co <*> zonkTcTypeToType env arg
    go (SubCo co)                = mkSubCo <$> go co
    go (AxiomRuleCo ax ts cs)    = AxiomRuleCo ax <$> mapM (zonkTcTypeToType env) ts
                                                  <*> mapM go cs

    -- The two interesting cases!
    go (CoVarCo cv)              = return (mkCoVarCo $ zonkIdOcc env cv)
    go (ForAllCo tv co)          = ASSERT( isImmutableTyVar tv )
                                   do { (env', tv') <- zonkTyBndrX env tv
                                      ; co' <- zonkCoToCo env' co
                                      ; return (mkForAllCo tv' co') }
zonkTvCollecting :: TcRef TyVarSet -> UnboundTyVarZonker
-- This variant collects unbound type variables in a mutable variable
-- (used for RULE left-hand sides -- see Note [Zonking the LHS of a RULE]).
-- Works on both types and kinds.
zonkTvCollecting unbound_tv_set tv
  = do { poly_kinds <- xoptM Opt_PolyKinds
         -- Without -XPolyKinds, kind variables default to * instead of
         -- being collected
       ; if isKindVar tv && not poly_kinds then defaultKindVarToStar tv
         else do
       { tv' <- zonkQuantifiedTyVar tv
       ; tv_set <- readMutVar unbound_tv_set
       ; writeMutVar unbound_tv_set (extendVarSet tv_set tv')
       ; return (mkTyVarTy tv') } }
zonkTypeZapping :: UnboundTyVarZonker
-- This variant is used for everything except the LHS of rules.
-- It zaps unbound type variables to an arbitrary inhabitant of their kind
-- (kind variables go to AnyK).  See Note [Zonking mutable unbound type or
-- kind variables].  Works on both types and kinds.
zonkTypeZapping tv
  = do { let ty = if isKindVar tv
                  -- ty is actually a kind, zonk to AnyK
                  then anyKind
                  else anyTypeOfKind (defaultKind (tyVarKind tv))
         -- Record the choice in the mutable cell so later occurrences of
         -- this variable see the same type
       ; writeMetaTyVar tv ty
       ; return ty }
zonkTcCoToCo :: ZonkEnv -> TcCoercion -> TcM TcCoercion
-- NB: zonking often reveals that the coercion is an identity
-- in which case the Refl-ness can propagate up to the top
-- which in turn gives more efficient desugaring.  So it's
-- worth using the 'mk' smart constructors on the RHS
zonkTcCoToCo env co
  = go co
  where
    -- TcLetCo binds evidence, so zonk the binds and continue with the
    -- extended environment
    go (TcLetCo bs co)        = do { (env', bs') <- zonkTcEvBinds env bs
                                   ; co' <- zonkTcCoToCo env' co
                                   ; return (TcLetCo bs' co') }
    go (TcCoVarCo cv)         = return (mkTcCoVarCo (zonkEvVarOcc env cv))
    go (TcRefl r ty)          = do { ty' <- zonkTcTypeToType env ty
                                   ; return (TcRefl r ty') }
    go (TcTyConAppCo r tc cos)
                              = do { cos' <- mapM go cos
                                   ; return (mkTcTyConAppCo r tc cos') }
    go (TcAxiomInstCo ax ind cos)
                              = do { cos' <- mapM go cos
                                   ; return (TcAxiomInstCo ax ind cos') }
    go (TcAppCo co1 co2)      = do { co1' <- go co1; co2' <- go co2
                                   ; return (mkTcAppCo co1' co2') }
    go (TcCastCo co1 co2)     = do { co1' <- go co1; co2' <- go co2
                                   ; return (TcCastCo co1' co2') }
    go (TcPhantomCo ty1 ty2)  = do { ty1' <- zonkTcTypeToType env ty1
                                   ; ty2' <- zonkTcTypeToType env ty2
                                   ; return (TcPhantomCo ty1' ty2') }
    go (TcSymCo co)           = do { co' <- go co; return (mkTcSymCo co') }
    go (TcNthCo n co)         = do { co' <- go co; return (mkTcNthCo n co') }
    go (TcLRCo lr co)         = do { co' <- go co; return (mkTcLRCo lr co') }
    go (TcTransCo co1 co2)    = do { co1' <- go co1; co2' <- go co2
                                   ; return (mkTcTransCo co1' co2') }
    -- The binder must already be immutable; it is not zonked
    go (TcForAllCo tv co)     = ASSERT( isImmutableTyVar tv )
                                do { co' <- go co; return (mkTcForAllCo tv co') }
    go (TcSubCo co)           = do { co' <- go co; return (mkTcSubCo co') }
    go (TcAxiomRuleCo co ts cs) = do { ts' <- zonkTcTypeToTypes env ts
                                     ; cs' <- mapM go cs
                                     ; return (TcAxiomRuleCo co ts' cs')
                                     }
    go (TcCoercion co)        = do { co' <- zonkCoToCo env co
                                   ; return (TcCoercion co') }
| {
"content_hash": "56bf70bac0cc835f5cd1a93e6d78ff24",
"timestamp": "",
"source": "github",
"line_count": 1545,
"max_line_length": 108,
"avg_line_length": 41.393527508090614,
"alnum_prop": 0.569590167779463,
"repo_name": "gcampax/ghc",
"id": "45f384ac01c483b2128effd02e4fd0283a7cdead",
"size": "63953",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "compiler/typecheck/TcHsSyn.hs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "5560"
},
{
"name": "Bison",
"bytes": "227078"
},
{
"name": "C",
"bytes": "2553206"
},
{
"name": "C++",
"bytes": "46638"
},
{
"name": "CSS",
"bytes": "984"
},
{
"name": "DTrace",
"bytes": "3887"
},
{
"name": "Diff",
"bytes": "58857"
},
{
"name": "Emacs Lisp",
"bytes": "734"
},
{
"name": "Game Maker Language",
"bytes": "14164"
},
{
"name": "Gnuplot",
"bytes": "103851"
},
{
"name": "Groff",
"bytes": "3840"
},
{
"name": "HTML",
"bytes": "6144"
},
{
"name": "Haskell",
"bytes": "17461519"
},
{
"name": "Haxe",
"bytes": "218"
},
{
"name": "Logos",
"bytes": "119251"
},
{
"name": "Makefile",
"bytes": "457630"
},
{
"name": "Objective-C",
"bytes": "19601"
},
{
"name": "Objective-C++",
"bytes": "535"
},
{
"name": "Pascal",
"bytes": "113781"
},
{
"name": "Perl",
"bytes": "195424"
},
{
"name": "Perl6",
"bytes": "236021"
},
{
"name": "PostScript",
"bytes": "63"
},
{
"name": "Python",
"bytes": "105062"
},
{
"name": "Shell",
"bytes": "70418"
},
{
"name": "TeX",
"bytes": "667"
}
],
"symlink_target": ""
} |
require 'spec_helper'
describe TournamentSeasonsHelper do
# round_title(winner_bracket, round) derives a human-readable round name from
# the @winner_rounds / @loser_rounds totals assigned to the view context.
describe '#round_title' do
  context 'tournament is single elimination' do
    before :each do
      assign :is_single_elimination, true
      assign :winner_rounds, 5
    end

    # The last four rounds have fixed names; earlier rounds are ordinal.
    context 'is rounds[-1]' do
      it 'returns final' do
        expect(helper.round_title(true, 5)).to be == t('tournament_seasons.general.rounds.final')
      end
    end

    context 'is rounds[-2]' do
      it 'returns semi-final' do
        expect(helper.round_title(true, 4)).to be == t('tournament_seasons.general.rounds.semi_final')
      end
    end

    context 'is rounds[-3]' do
      it 'returns quarter-final' do
        expect(helper.round_title(true, 3)).to be == t('tournament_seasons.general.rounds.quarter_final')
      end
    end

    context 'is rounds[-4]' do
      it 'returns round of 16' do
        expect(helper.round_title(true, 2)).to be == t('tournament_seasons.general.rounds.round_of_16')
      end
    end

    context 'is 1 of 5' do
      it 'returns #{round.ordinalize} Round' do
        expect(helper.round_title(true, 1)).to be == "1st #{t('tournament_seasons.general.round')}"
      end
    end
  end

  context 'tournament is double elimination' do
    before :each do
      assign :is_double_elimination, true
    end

    # Winner bracket: first argument true.
    context 'winner round title' do
      before :each do
        assign :winner_rounds, 6
      end

      context 'is rounds[-1]' do
        it 'returns grand finals' do
          expect(helper.round_title(true, 6)).to be == t('tournament_seasons.general.rounds.grand_finals')
        end
      end

      context 'is rounds[-2]' do
        it 'returns winners finals' do
          expect(helper.round_title(true, 5)).to be == t('tournament_seasons.general.rounds.winners_finals')
        end
      end

      context 'is rounds[-3]' do
        it 'returns semi-final' do
          expect(helper.round_title(true, 4)).to be == t('tournament_seasons.general.rounds.semi_final')
        end
      end

      context 'is rounds[-4]' do
        it 'returns quarter-final' do
          expect(helper.round_title(true, 3)).to be == t('tournament_seasons.general.rounds.quarter_final')
        end
      end

      context 'is rounds[-5]' do
        it 'returns round of 16' do
          expect(helper.round_title(true, 2)).to be == t('tournament_seasons.general.rounds.round_of_16')
        end
      end

      context 'is 1 of 6' do
        it 'returns #{round.ordinalize} Round' do
          expect(helper.round_title(true, 1)).to be == "1st #{t('tournament_seasons.general.round')}"
        end
      end
    end

    # Loser bracket: first argument false.
    # NOTE(review): @loser_rounds is 4 but the "rounds[-1]" example passes
    # round 3 -- presumably the helper counts loser rounds differently;
    # confirm against the helper implementation.
    context 'loser round title' do
      before :each do
        assign :loser_rounds, 4
      end

      context 'is rounds[-1]' do
        it 'returns losers finals' do
          expect(helper.round_title(false, 3)).to be == t('tournament_seasons.general.rounds.losers_finals')
        end
      end

      context 'is rounds[-2]' do
        it 'returns losers semi-finals' do
          expect(helper.round_title(false, 2)).to be == t('tournament_seasons.general.rounds.losers_semi_finals')
        end
      end

      context 'is 1 of 3' do
        it 'returns Losers Round 1' do
          expect(helper.round_title(false, 1)).to be == "#{t('tournament_seasons.general.losers_round')} 1"
        end
      end
    end
  end
end
# @matches is keyed by bracket flag (true = winner bracket), then round
# number, then group; the helper filters a round's matches down to those
# involving the given competitor ids.
describe '#round_matches_for_competitors' do
  it 'returns the matches of the round for a list of competitors' do
    assign :matches, {
      true => {
        1 => {
          1 => [FactoryGirl.build(:tournament_match, id: 1, home_competitor_id: 1, away_competitor_id: 2)]
        },
        2 => {
          2 => [
            FactoryGirl.build(:tournament_match, id: 2, home_competitor_id: 1, away_competitor_id: 2),
            FactoryGirl.build(:tournament_match, id: 3, home_competitor_id: 3, away_competitor_id: 4)
          ],
          3 => [
            FactoryGirl.build(:tournament_match, id: 4, home_competitor_id: 2, away_competitor_id: 1),
            FactoryGirl.build(:tournament_match, id: 5, home_competitor_id: 4, away_competitor_id: 3)
          ]
        }
      }
    }

    # Round 2, competitors 3 & 4: matches 3 and 5 involve them.
    expect(helper.round_matches_for_competitors(true, 2, [3, 4]).map(&:id)).to be == [3, 5]
  end
end
describe '#rowspan_for_round_connector' do
  it 'doubles rowspan of 3 #{round}.times' do
    # 3 * 2^round = 3 * 4 = 12 for round 2
    expect(helper.rowspan_for_round_connector(2)).to be == 12
  end
end
# In round 2, a match slot exists only at every 2nd index of the first
# round's matches (0, 2, 4, ...).
describe '#match_for_round_after_first_one?' do
  it 'returns true if a match can be rendered in bracket for the given first_round_matches_index and round' do
    expect(helper.match_for_round_after_first_one?(0, 2)).to be_truthy
    expect(helper.match_for_round_after_first_one?(1, 2)).to be_falsey
    expect(helper.match_for_round_after_first_one?(2, 2)).to be_truthy
    expect(helper.match_for_round_after_first_one?(3, 2)).to be_falsey
    expect(helper.match_for_round_after_first_one?(4, 2)).to be_truthy
  end
end
describe '#first_round_matches_index_for_last_match_of_round' do
  it 'does what the name says' do
    # Winner bracket round 1 has 4 matches; for round 2 the last match maps
    # back to index 2 of the first round's matches.
    assign :matches, {
      true => {
        1 => { 1 => 4.times.to_a.map{|t| FactoryGirl.build(:tournament_match) } }
      }
    }

    expect(helper.first_round_matches_index_for_last_match_of_round(2)).to be == 2
  end
end
end | {
"content_hash": "b0b75139e0651903de9179118de74ea3",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 115,
"avg_line_length": 34.16867469879518,
"alnum_prop": 0.5714033850493653,
"repo_name": "volontariat/voluntary_competition",
"id": "4541e9405960a37c3d37b6463ef418701dbdc16f",
"size": "5699",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dummy/spec/helpers/tournament_seasons_helper_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "683"
},
{
"name": "CoffeeScript",
"bytes": "1904"
},
{
"name": "HTML",
"bytes": "194239"
},
{
"name": "JavaScript",
"bytes": "633"
},
{
"name": "Ruby",
"bytes": "290825"
}
],
"symlink_target": ""
} |
package org.broadinstitute.hellbender.engine.spark.datasources;
import htsjdk.samtools.BamFileIoUtils;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.cram.build.CramIO;
import htsjdk.samtools.util.IOUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.FileAlreadyExistsException;
import org.apache.hadoop.mapreduce.*;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.parquet.avro.AvroParquetOutputFormat;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.broadcast.Broadcast;
import org.bdgenomics.adam.models.RecordGroupDictionary;
import org.bdgenomics.adam.models.SequenceDictionary;
import org.bdgenomics.formats.avro.AlignmentRecord;
import org.broadinstitute.hellbender.exceptions.GATKException;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.gcs.BucketUtils;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import org.broadinstitute.hellbender.utils.read.GATKRead;
import org.broadinstitute.hellbender.utils.read.GATKReadToBDGAlignmentRecordConverter;
import org.broadinstitute.hellbender.utils.read.HeaderlessSAMRecordCoordinateComparator;
import org.broadinstitute.hellbender.utils.read.ReadsWriteFormat;
import org.broadinstitute.hellbender.utils.spark.SparkUtils;
import org.seqdoop.hadoop_bam.*;
import org.seqdoop.hadoop_bam.util.SAMFileMerger;
import scala.Tuple2;
import java.io.File;
import java.io.IOException;
import java.util.Comparator;
/**
* ReadsSparkSink writes GATKReads to a file. This code lifts from the HadoopGenomics/Hadoop-BAM
* read writing code as well as from bigdatagenomics/adam.
*/
public final class ReadsSparkSink {
private final static Logger logger = LogManager.getLogger(ReadsSparkSink.class);
// Output format class for writing BAM files through saveAsNewAPIHadoopFile. Must be public.
public static class SparkBAMOutputFormat extends KeyIgnoringBAMOutputFormat<NullWritable> {
    // SAM header for the output, passed through a static field -- presumably
    // because the OutputFormat is instantiated by the framework without
    // constructor arguments. NOTE(review): assumes one header per JVM; confirm.
    public static SAMFileHeader bamHeader = null;

    public static void setHeader(final SAMFileHeader header) {
        bamHeader = header;
    }

    @Override
    public RecordWriter<NullWritable, SAMRecordWritable> getRecordWriter(TaskAttemptContext ctx) throws IOException {
        setSAMHeader(bamHeader);
        // use BAM extension for part files since they are valid BAM files
        return getRecordWriter(ctx, getDefaultWorkFile(ctx, BamFileIoUtils.BAM_FILE_EXTENSION));
    }

    @Override
    public void checkOutputSpecs(JobContext job) throws IOException {
        try {
            super.checkOutputSpecs(job);
        } catch (FileAlreadyExistsException e) {
            // delete existing files before overwriting them
            final Path outDir = getOutputPath(job);
            outDir.getFileSystem(job.getConfiguration()).delete(outDir, true);
        }
    }
}
// BAM variant that omits the header from each part file (used when parts
// are later merged under a single header).
public static class SparkHeaderlessBAMOutputFormat extends SparkBAMOutputFormat {
    public SparkHeaderlessBAMOutputFormat() {
        setWriteHeader(false);
    }
}
// Output format class for writing SAM files through saveAsNewAPIHadoopFile. Must be public.
public static class SparkSAMOutputFormat extends KeyIgnoringAnySAMOutputFormat<NullWritable> {
    public SparkSAMOutputFormat() {
        super(SAMFormat.SAM);
    }

    // Header passed via a static field; same caveat as SparkBAMOutputFormat.
    public static SAMFileHeader samHeader = null;

    public static void setHeader(final SAMFileHeader header) {
        samHeader = header;
    }

    @Override
    public RecordWriter<NullWritable, SAMRecordWritable> getRecordWriter(TaskAttemptContext ctx) throws IOException {
        setSAMHeader(samHeader);
        // use SAM extension for part files since they are valid SAM files
        Path out = getDefaultWorkFile(ctx, IOUtil.SAM_FILE_EXTENSION);
        // third argument 'true': write the header into each part file
        return new KeyIgnoringSAMRecordWriter<NullWritable>(out, samHeader, true, ctx);
    }

    @Override
    public void checkOutputSpecs(JobContext job) throws IOException {
        try {
            super.checkOutputSpecs(job);
        } catch (FileAlreadyExistsException e) {
            // delete existing files before overwriting them
            final Path outDir = getOutputPath(job);
            outDir.getFileSystem(job.getConfiguration()).delete(outDir, true);
        }
    }
}
// SAM variant that omits the header from each part file (writeHeader=false
// in the record-writer constructor instead of setWriteHeader, which the
// SAM writer here does not use).
public static class SparkHeaderlessSAMOutputFormat extends SparkSAMOutputFormat {
    @Override
    public RecordWriter<NullWritable, SAMRecordWritable> getRecordWriter(TaskAttemptContext ctx) throws IOException {
        setSAMHeader(samHeader);
        // use SAM extension for part files since they are valid SAM files
        Path out = getDefaultWorkFile(ctx, IOUtil.SAM_FILE_EXTENSION);
        return new KeyIgnoringSAMRecordWriter<NullWritable>(out, samHeader, false, ctx);
    }
}
// Output format class for writing CRAM files through saveAsNewAPIHadoopFile. Must be public.
public static class SparkCRAMOutputFormat extends KeyIgnoringCRAMOutputFormat<NullWritable> {
    // Header passed via a static field; same caveat as SparkBAMOutputFormat.
    public static SAMFileHeader bamHeader = null;

    public static void setHeader(final SAMFileHeader header) {
        bamHeader = header;
    }

    @Override
    public RecordWriter<NullWritable, SAMRecordWritable> getRecordWriter(TaskAttemptContext ctx) throws IOException {
        setSAMHeader(bamHeader);
        // use CRAM extension for part files since they are valid CRAM files
        return getRecordWriter(ctx, getDefaultWorkFile(ctx, CramIO.CRAM_FILE_EXTENSION));
    }

    @Override
    public void checkOutputSpecs(JobContext job) throws IOException {
        try {
            super.checkOutputSpecs(job);
        } catch (FileAlreadyExistsException e) {
            // delete existing files before overwriting them
            Path outDir = getOutputPath(job);
            outDir.getFileSystem(job.getConfiguration()).delete(outDir, true);
        }
    }
}
    // Variant of SparkCRAMOutputFormat that omits the header from each part file.
    public static class SparkHeaderlessCRAMOutputFormat extends SparkCRAMOutputFormat {
        public SparkHeaderlessCRAMOutputFormat() {
            setWriteHeader(false);
        }
    }
    /**
     * writeReads writes rddReads to outputFile with header as the file header, using the
     * default number of reducers and the default parts directory. Convenience overload of
     * the eight-argument writeReads.
     * @param ctx the JavaSparkContext to write.
     * @param outputFile path to the output bam.
     * @param referenceFile path to the reference. required for cram output, otherwise may be null.
     * @param reads reads to write.
     * @param header the header to put at the top of the files
     * @param format should the output be a single file, sharded, ADAM, etc.
     */
    public static void writeReads(
            final JavaSparkContext ctx, final String outputFile, final String referenceFile, final JavaRDD<GATKRead> reads,
            final SAMFileHeader header, ReadsWriteFormat format) throws IOException {
        // numReducers == 0 means "use the default" (see the full overload's javadoc);
        // null parts directory selects the default of outputFile + ".parts/".
        writeReads(ctx, outputFile, referenceFile, reads, header, format, 0, null);
    }
/**
* writeReads writes rddReads to outputFile with header as the file header.
* @param ctx the JavaSparkContext to write.
* @param outputFile path to the output bam.
* @param referenceFile path to the reference. required for cram output, otherwise may be null.
* @param reads reads to write.
* @param header the header to put at the top of the files
* @param format should the output be a single file, sharded, ADAM, etc.
* @param numReducers the number of reducers to use when writing a single file. A value of zero indicates that the default
* should be used.
* @param outputPartsDir directory for temporary files for SINGLE output format, should be null for default value of filename + .output
*/
public static void writeReads(
final JavaSparkContext ctx, final String outputFile, final String referenceFile, final JavaRDD<GATKRead> reads,
final SAMFileHeader header, ReadsWriteFormat format, final int numReducers, final String outputPartsDir) throws IOException {
SAMFormat samOutputFormat = SAMFormat.inferFromFilePath(outputFile);
if (samOutputFormat == null) {
samOutputFormat = SAMFormat.BAM; // default to BAM if output file is a directory
}
String absoluteOutputFile = BucketUtils.makeFilePathAbsolute(outputFile);
String absoluteReferenceFile = referenceFile != null ?
BucketUtils.makeFilePathAbsolute(referenceFile) :
referenceFile;
setHadoopBAMConfigurationProperties(ctx, absoluteOutputFile, absoluteReferenceFile, format);
// The underlying reads are required to be in SAMRecord format in order to be
// written out, so we convert them to SAMRecord explicitly here. If they're already
// SAMRecords, this will effectively be a no-op. The SAMRecords will be headerless
// for efficient serialization.
final JavaRDD<SAMRecord> samReads = reads.map(read -> read.convertToSAMRecord(null));
if (format == ReadsWriteFormat.SINGLE) {
writeReadsSingle(ctx, absoluteOutputFile, absoluteReferenceFile, samOutputFormat, samReads, header, numReducers, outputPartsDir);
} else if (format == ReadsWriteFormat.SHARDED) {
if (outputPartsDir!=null) {
throw new GATKException(String.format("You specified the bam output parts directory %s, but requested a sharded output format which does not use this option",outputPartsDir));
}
saveAsShardedHadoopFiles(ctx, absoluteOutputFile, absoluteReferenceFile, samOutputFormat, samReads, header, true);
} else if (format == ReadsWriteFormat.ADAM) {
if (outputPartsDir!=null) {
throw new GATKException(String.format("You specified the bam output parts directory %s, but requested an ADAM output format which does not use this option",outputPartsDir));
}
writeReadsADAM(ctx, absoluteOutputFile, samReads, header);
}
}
    // Writes the reads in ADAM (Avro records stored as Parquet) format. Any existing
    // output at outputFile is deleted before writing.
    private static void writeReadsADAM(
            final JavaSparkContext ctx, final String outputFile, final JavaRDD<SAMRecord> reads,
            final SAMFileHeader header) throws IOException {
        final SequenceDictionary seqDict = SequenceDictionary.fromSAMSequenceDictionary(header.getSequenceDictionary());
        final RecordGroupDictionary readGroups = RecordGroupDictionary.fromSAMHeader(header);
        final JavaPairRDD<Void, AlignmentRecord> rddAlignmentRecords =
                reads.map(read -> {
                    // Temporarily attach the header for the conversion, then clear it again
                    // so the records stay headerless (they arrive headerless; see writeReads).
                    read.setHeaderStrict(header);
                    AlignmentRecord alignmentRecord = GATKReadToBDGAlignmentRecordConverter.convert(read, seqDict, readGroups);
                    read.setHeaderStrict(null); // Restore the header to its previous state so as not to surprise the caller
                    return alignmentRecord;
                }).mapToPair(alignmentRecord -> new Tuple2<>(null, alignmentRecord));
        // instantiating a Job is necessary here in order to set the Hadoop Configuration...
        final Job job = Job.getInstance(ctx.hadoopConfiguration());
        // ...here, which sets a config property that the AvroParquetOutputFormat needs when writing data. Specifically,
        // we are writing the Avro schema to the Configuration as a JSON string. The AvroParquetOutputFormat class knows
        // how to translate objects in the Avro data model to the Parquet primitives that get written.
        AvroParquetOutputFormat.setSchema(job, AlignmentRecord.getClassSchema());
        deleteHadoopFile(outputFile, ctx.hadoopConfiguration());
        rddAlignmentRecords.saveAsNewAPIHadoopFile(
                outputFile, Void.class, AlignmentRecord.class, AvroParquetOutputFormat.class, job.getConfiguration());
    }
    // Writes the reads as a directory of shard (part) files using the Hadoop output
    // format matching samOutputFormat. Note: referenceFile is not used directly here;
    // the CRAM reference is presumably communicated via the Hadoop configuration
    // (see setHadoopBAMConfigurationProperties) — confirm before removing the parameter.
    private static void saveAsShardedHadoopFiles(
            final JavaSparkContext ctx, final String outputFile, final String referenceFile,
            final SAMFormat samOutputFormat, final JavaRDD<SAMRecord> reads, final SAMFileHeader header,
            final boolean writeHeader) {
        // Set the static header on the driver thread.
        if (samOutputFormat == SAMFormat.CRAM) {
            SparkCRAMOutputFormat.setHeader(header);
        } else if (samOutputFormat == SAMFormat.SAM) {
            SparkSAMOutputFormat.setHeader(header);
        } else {
            SparkBAMOutputFormat.setHeader(header);
        }
        final Broadcast<SAMFileHeader> headerBroadcast = ctx.broadcast(header);
        // The Spark*OutputFormat classes keep the header in a static field, so the broadcast
        // header must also be installed on each executor JVM before partitions are written.
        final JavaRDD<SAMRecord> readsRDD = setHeaderForEachPartition(reads, samOutputFormat, headerBroadcast);
        // The expected format for writing is JavaPairRDD where the key is ignored and the value is SAMRecordWritable.
        final JavaPairRDD<SAMRecord, SAMRecordWritable> rddSamRecordWriteable = pairReadsWithSAMRecordWritables(headerBroadcast, readsRDD);
        rddSamRecordWriteable.saveAsNewAPIHadoopFile(outputFile, SAMRecord.class, SAMRecordWritable.class, getOutputFormat(samOutputFormat, writeHeader), ctx.hadoopConfiguration());
    }
/**
* SparkBAM/CRAMOutputFormat has a static header value which must be set on each executor.
*/
private static JavaRDD<SAMRecord> setHeaderForEachPartition(final JavaRDD<SAMRecord> reads, final SAMFormat samOutputFormat, final Broadcast<SAMFileHeader> headerBroadcast) {
if (samOutputFormat == SAMFormat.CRAM) {
return reads.mapPartitions(readIterator -> {
SparkCRAMOutputFormat.setHeader(headerBroadcast.getValue());
return readIterator;
});
}
else {
return reads.mapPartitions(readIterator -> {
SparkBAMOutputFormat.setHeader(headerBroadcast.getValue());
return readIterator;
});
}
}
private static void writeReadsSingle(
final JavaSparkContext ctx, final String outputFile, final String referenceFile, final SAMFormat samOutputFormat, final JavaRDD<SAMRecord> reads,
final SAMFileHeader header, final int numReducers, final String outputPartsDir) throws IOException {
final JavaRDD<SAMRecord> sortedReads = sortSamRecordsToMatchHeader(reads, header, numReducers);
final String outputPartsDirectory = (outputPartsDir == null)? getDefaultPartsDirectory(outputFile) : outputPartsDir;
saveAsShardedHadoopFiles(ctx, outputPartsDirectory, referenceFile, samOutputFormat, sortedReads, header, false);
logger.info("Finished sorting the bam file and dumping read shards to disk, proceeding to merge the shards into a single file using the master thread");
SAMFileMerger.mergeParts(outputPartsDirectory, outputFile, samOutputFormat, header);
logger.info("Finished merging shards into a single output bam");
}
private static Class<? extends OutputFormat<NullWritable, SAMRecordWritable>> getOutputFormat(final SAMFormat samFormat, final boolean writeHeader) {
if (samFormat == SAMFormat.CRAM) {
return writeHeader ? SparkCRAMOutputFormat.class : SparkHeaderlessCRAMOutputFormat.class;
} else if (samFormat == SAMFormat.SAM) {
return writeHeader ? SparkSAMOutputFormat.class : SparkHeaderlessSAMOutputFormat.class;
} else {
return writeHeader ? SparkBAMOutputFormat.class : SparkHeaderlessBAMOutputFormat.class;
}
}
private static JavaPairRDD<SAMRecord, SAMRecordWritable> pairReadsWithSAMRecordWritables(Broadcast<SAMFileHeader> headerBroadcast, JavaRDD<SAMRecord> records) {
return records.mapToPair(read -> {
read.setHeaderStrict(headerBroadcast.getValue());
final SAMRecordWritable samRecordWritable = new SAMRecordWritable();
samRecordWritable.set(read);
return new Tuple2<>(read, samRecordWritable);
});
}
    // Recursively deletes the given path (file or directory) via the Hadoop FileSystem
    // that owns it; used to clear pre-existing output before writing.
    private static void deleteHadoopFile(String fileToObliterate, Configuration conf) throws IOException {
        final Path pathToDelete = new Path(fileToObliterate);
        pathToDelete.getFileSystem(conf).delete(pathToDelete, true);
    }
/**
* Propagate any values that need to be passed to Hadoop-BAM through configuration properties:
*
* - if the output file is a CRAM file, the reference value, which must be a URI which includes
* a scheme, will also be set
* - if the output file is not CRAM, the reference property is *unset* to prevent Hadoop-BAM
* from passing a stale value through to htsjdk when multiple calls are made serially
* with different outputs but the same Spark context
*/
private static void setHadoopBAMConfigurationProperties(final JavaSparkContext ctx, final String outputName,
final String referenceName, final ReadsWriteFormat format) {
final Configuration conf = ctx.hadoopConfiguration();
if (!IOUtils.isCramFileName(outputName)) { // only set the reference for CRAM output
conf.unset(CRAMInputFormat.REFERENCE_SOURCE_PATH_PROPERTY);
if (format == ReadsWriteFormat.SINGLE && IOUtils.isBamFileName(outputName)) {
conf.setBoolean(BAMOutputFormat.WRITE_SPLITTING_BAI, true);
} else {
conf.setBoolean(BAMOutputFormat.WRITE_SPLITTING_BAI, false);
}
}
else {
if (null == referenceName) {
throw new UserException.MissingReference("A reference is required for CRAM output");
}
else {
if ( ReferenceTwoBitSparkSource.isTwoBit(referenceName)) { // htsjdk can't handle 2bit reference files
throw new UserException("A 2bit file cannot be used as a CRAM file reference");
}
else { // Hadoop-BAM requires the reference to be a URI, including scheme
String referenceURI =
null == new Path(referenceName).toUri().getScheme() ?
"file://" + new File(referenceName).getAbsolutePath() :
referenceName;
conf.set(CRAMInputFormat.REFERENCE_SOURCE_PATH_PROPERTY, referenceURI);
}
}
}
}
/**
* Gets the default parts directory for a given file by appending .parts/ to the end of it
*/
public static String getDefaultPartsDirectory(String file) {
return file + ".parts/";
}
/**
* Sorts the given reads according to the sort order in the header.
* @param reads the reads to sort
* @param header the header specifying the sort order,
* if the header specifies {@link SAMFileHeader.SortOrder#unsorted} or {@link SAMFileHeader.SortOrder#unknown}
* then no sort will be performed
* @param numReducers the number of reducers to use; a value of 0 means use the default number of reducers
* @return a sorted RDD of reads
*/
private static JavaRDD<SAMRecord> sortSamRecordsToMatchHeader(final JavaRDD<SAMRecord> reads, final SAMFileHeader header, final int numReducers) {
final Comparator<SAMRecord> comparator = getSAMRecordComparator(header);
if ( comparator == null ) {
return reads;
} else {
return SparkUtils.sortUsingElementsAsKeys(reads, comparator, numReducers);
}
}
    //Returns the comparator to use or null if no sorting is required.
    private static Comparator<SAMRecord> getSAMRecordComparator(final SAMFileHeader header) {
        switch (header.getSortOrder()){
            // coordinate order needs a comparator that works on headerless SAMRecords
            case coordinate: return new HeaderlessSAMRecordCoordinateComparator(header);
            //duplicate isn't supported because it doesn't work right on headerless SAMRecords
            case duplicate: throw new UserException.UnimplementedFeature("The sort order \"duplicate\" is not supported in Spark.");
            // queryname and unsorted (deliberate fallthrough) delegate to the sort order's
            // own comparator instance
            case queryname:
            case unsorted: return header.getSortOrder().getComparatorInstance();
            default: return null; //NOTE: javac warns if you have this (useless) default BUT it errors out if you remove this default.
        }
    }
}
| {
"content_hash": "6a22164e00b3c96036410b5dce7517af",
"timestamp": "",
"source": "github",
"line_count": 398,
"max_line_length": 192,
"avg_line_length": 52.37688442211055,
"alnum_prop": 0.692890722440756,
"repo_name": "magicDGS/gatk",
"id": "16c70f86367fba1acaa22aa406b0b0490c2102ae",
"size": "20846",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/org/broadinstitute/hellbender/engine/spark/datasources/ReadsSparkSink.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2963"
},
{
"name": "HTML",
"bytes": "12420"
},
{
"name": "Java",
"bytes": "16875256"
},
{
"name": "JavaScript",
"bytes": "133"
},
{
"name": "Python",
"bytes": "575276"
},
{
"name": "R",
"bytes": "39186"
},
{
"name": "Shell",
"bytes": "116902"
},
{
"name": "wdl",
"bytes": "285334"
}
],
"symlink_target": ""
} |
<?php /* Admin home view: static landing markup plus the admin-specific JS bundle.
         base_url() is expected to come from the framework (CodeIgniter-style) — confirm. */ ?>
<section class="my-content adm">
<div class="myContainer">
ADM HOME
</div>
</section>
<script src="<?=base_url("js/my_script-adm.js")?>" ></script>
"content_hash": "a1544cca956b43ea2e5648c99a197291",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 61,
"avg_line_length": 24,
"alnum_prop": 0.5773809523809523,
"repo_name": "andrebonetti/Gerensys_LojaVirtual",
"id": "219c62ab471b5b61285aca8b786d79d8574abf61",
"size": "168",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "application/views/adm/adm_home.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "151"
},
{
"name": "CSS",
"bytes": "65556"
},
{
"name": "HTML",
"bytes": "5905"
},
{
"name": "JavaScript",
"bytes": "575030"
},
{
"name": "PHP",
"bytes": "5890161"
}
],
"symlink_target": ""
} |
<?php
namespace Surfnet\ServiceProviderDashboard\Application\CommandHandler\Entity;
use Psr\Log\LoggerInterface;
use Surfnet\ServiceProviderDashboard\Application\Command\Entity\DeletePublishedProductionEntityCommand;
use Surfnet\ServiceProviderDashboard\Application\CommandHandler\CommandHandler;
use Surfnet\ServiceProviderDashboard\Application\Exception\EntityNotDeletedException;
use Surfnet\ServiceProviderDashboard\Application\Exception\UnableToDeleteEntityException;
use Surfnet\ServiceProviderDashboard\Domain\Repository\DeleteManageEntityRepository;
class DeletePublishedProductionEntityCommandHandler implements CommandHandler
{
    /**
     * Repository that performs the actual delete against Manage.
     *
     * @var DeleteManageEntityRepository
     */
    private $deleteEntityRepository;

    /**
     * @var LoggerInterface
     */
    private $logger;

    public function __construct(
        DeleteManageEntityRepository $deleteEntityRepository,
        LoggerInterface $logger
    ) {
        $this->deleteEntityRepository = $deleteEntityRepository;
        $this->logger = $logger;
    }

    /**
     * Removes a published entity from the Manage production environment.
     *
     * @throws EntityNotDeletedException when the delete call fails or Manage
     *                                   answers with a non-success result
     */
    public function handle(DeletePublishedProductionEntityCommand $command)
    {
        $this->logger->info(
            sprintf(
                'Removing entity with manage id "%s" from production environment',
                $command->getManageId()
            )
        );

        try {
            $response = $this->deleteEntityRepository->delete($command->getManageId(), $command->getProtocol());
        } catch (UnableToDeleteEntityException $e) {
            // Wrap the low-level failure, preserving it as the previous exception.
            throw new EntityNotDeletedException(
                sprintf(
                    'Deleting of entity with manage id "%s" from production environment failed.',
                    $command->getManageId()
                ),
                0,
                $e
            );
        }

        if ($response !== DeleteManageEntityRepository::RESULT_SUCCESS) {
            // Fixed message grammar ("yielded an non success response").
            throw new EntityNotDeletedException('Deleting the entity yielded a non-success response');
        }
    }
}
| {
"content_hash": "acd53f6f0860e8ae468bded8f0a25196",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 112,
"avg_line_length": 33.55,
"alnum_prop": 0.6696472925981123,
"repo_name": "SURFnet/sp-dashboard",
"id": "32afbf4a71d95b1a4e2d7d9cb83c4890dac6fdbc",
"size": "2609",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/Surfnet/ServiceProviderDashboard/Application/CommandHandler/Entity/DeletePublishedProductionEntityCommandHandler.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2042"
},
{
"name": "HTML",
"bytes": "25155"
},
{
"name": "JavaScript",
"bytes": "39063"
},
{
"name": "Jinja",
"bytes": "26207"
},
{
"name": "PHP",
"bytes": "1711717"
},
{
"name": "SCSS",
"bytes": "127506"
},
{
"name": "Shell",
"bytes": "6068"
},
{
"name": "Twig",
"bytes": "75873"
},
{
"name": "TypeScript",
"bytes": "51725"
}
],
"symlink_target": ""
} |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("homework 5_3")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("homework 5_3")]
[assembly: AssemblyCopyright("Copyright © 2016")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("0377fb9e-236b-46a8-8c1b-ddd58aac526a")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| {
"content_hash": "a7a927b8b3a2d77e676a68dadd341006",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 99,
"avg_line_length": 39.138888888888886,
"alnum_prop": 0.7565649396735273,
"repo_name": "AlexanderKiselyov/Homework",
"id": "76e0f7217c82c8ad1d141cf10b65c094f8c61e23",
"size": "1969",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "homework 5_3/homework 5_3/Properties/AssemblyInfo.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2122"
},
{
"name": "C#",
"bytes": "99727"
},
{
"name": "C++",
"bytes": "110695"
}
],
"symlink_target": ""
} |
using System.Collections.Generic;
using System.Linq;
using FluentAssertions;
using JSONAPI.Core;
using JSONAPI.Documents;
using JSONAPI.Documents.Builders;
using JSONAPI.Json;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Moq;
using Newtonsoft.Json.Linq;
namespace JSONAPI.Tests.Documents.Builders
{
    // Unit tests for RegistryDrivenSingleResourceDocumentBuilder: builds a single-resource
    // JSON API document from a mocked resource-type registry and verifies primary data,
    // relationship links/linkage, included ("related") resources, and metadata.
    [TestClass]
    public class RegistryDrivenSingleResourceDocumentBuilderTests
    {
        // Test domain model: Country has a to-one (Continent) and two to-many
        // (Provinces, Cities) relationships; Province has a to-one Capital.
        class Country
        {
            public string Id { get; set; }
            public string Name { get; set; }
            public Continent Continent { get; set; }
            public ICollection<Province> Provinces { get; set; }
            public ICollection<City> Cities { get; set; }
        }
        class Province
        {
            public string Id { get; set; }
            public string Name { get; set; }
            public City Capital { get; set; }
        }
        class City
        {
            public string Id { get; set; }
            public string Name { get; set; }
        }
        class Continent
        {
            public string Id { get; set; }
            public string Name { get; set; }
            // NOTE(review): element type is Province although the property (and the
            // relationship registered below) is named "Countries" — confirm intended.
            public ICollection<Province> Countries { get; set; }
        }
        [TestMethod]
        public void Returns_correct_document_for_resource()
        {
            // Arrange
            var city1 = new City
            {
                Id = "10",
                Name = "Madrid"
            };
            var city2 = new City
            {
                Id = "11",
                Name = "Barcelona"
            };
            var city3 = new City
            {
                Id = "12",
                Name = "Badajoz"
            };
            var province1 = new Province
            {
                Id = "506",
                Name = "Badajoz",
                Capital = city3
            };
            var province2 = new Province
            {
                Id = "507",
                Name = "Cuenca",
                Capital = null // Leaving null to test a null to-one
            };
            var continent = new Continent
            {
                Id = "1",
                Name = "Europe"
            };
            var country = new Country
            {
                Id = "4",
                Name = "Spain",
                Continent = continent,
                Provinces = new List<Province> { province1, province2 },
                Cities = new List<City> { city1, city2, city3 }
            };
            // Country registration
            var countryName =
                new ResourceTypeAttribute(
                    new PrimitiveTypeAttributeValueConverter<string>(typeof(Country).GetProperty("Name")), null, "name");
            var countryCities =
                new ToManyResourceTypeRelationship(typeof(Country).GetProperty("Cities"), "cities", typeof(City), null, null);
            var countryProvinces =
                new ToManyResourceTypeRelationship(typeof(Country).GetProperty("Provinces"), "provinces", typeof(Province), null, null);
            var countryContinent =
                new ToOneResourceTypeRelationship(typeof(Country).GetProperty("Continent"), "continent", typeof(Continent), null, null);
            var countryRegistration = new Mock<IResourceTypeRegistration>(MockBehavior.Strict);
            countryRegistration.Setup(m => m.GetIdForResource(It.IsAny<Country>())).Returns((Country c) => country.Id);
            countryRegistration.Setup(m => m.ResourceTypeName).Returns("countries");
            countryRegistration.Setup(m => m.Attributes).Returns(new[] { countryName });
            countryRegistration
                .Setup(m => m.Relationships)
                .Returns(() => new ResourceTypeRelationship[] { countryCities, countryProvinces, countryContinent });
            // City registration
            var cityName =
                new ResourceTypeAttribute(
                    new PrimitiveTypeAttributeValueConverter<string>(typeof(City).GetProperty("Name")), null, "name");
            var cityRegistration = new Mock<IResourceTypeRegistration>(MockBehavior.Strict);
            cityRegistration.Setup(m => m.ResourceTypeName).Returns("cities");
            cityRegistration.Setup(m => m.GetIdForResource(It.IsAny<City>())).Returns((City c) => c.Id);
            cityRegistration.Setup(m => m.Attributes).Returns(new[] { cityName });
            cityRegistration.Setup(m => m.Relationships).Returns(new ResourceTypeRelationship[] { });
            // Province registration
            var provinceName =
                new ResourceTypeAttribute(
                    new PrimitiveTypeAttributeValueConverter<string>(typeof(Province).GetProperty("Name")), null, "name");
            var provinceCapital = new ToOneResourceTypeRelationship(typeof(Province).GetProperty("Capital"), "capital", typeof(City), null, null);
            var provinceRegistration = new Mock<IResourceTypeRegistration>(MockBehavior.Strict);
            provinceRegistration.Setup(m => m.ResourceTypeName).Returns("provinces");
            provinceRegistration.Setup(m => m.GetIdForResource(It.IsAny<Province>())).Returns((Province c) => c.Id);
            provinceRegistration.Setup(m => m.Attributes).Returns(new[] { provinceName });
            provinceRegistration
                .Setup(m => m.Relationships)
                .Returns(() => new ResourceTypeRelationship[] { provinceCapital });
            // Continent registration
            var continentName =
                new ResourceTypeAttribute(
                    new PrimitiveTypeAttributeValueConverter<string>(typeof(Continent).GetProperty("Name")), null, "name");
            var continentCountries =
                new ToManyResourceTypeRelationship(typeof(Continent).GetProperty("Countries"), "countries", typeof(Country), null, null);
            var continentRegistration = new Mock<IResourceTypeRegistration>(MockBehavior.Strict);
            continentRegistration.Setup(m => m.ResourceTypeName).Returns("continents");
            continentRegistration.Setup(m => m.GetIdForResource(It.IsAny<Continent>())).Returns((Continent c) => c.Id);
            continentRegistration.Setup(m => m.Attributes).Returns(new[] { continentName });
            continentRegistration
                .Setup(m => m.Relationships)
                .Returns(() => new ResourceTypeRelationship[] { continentCountries });
            var mockRegistry = new Mock<IResourceTypeRegistry>(MockBehavior.Strict);
            mockRegistry.Setup(r => r.GetRegistrationForType(typeof(Country))).Returns(countryRegistration.Object);
            mockRegistry.Setup(r => r.GetRegistrationForType(typeof(City))).Returns(cityRegistration.Object);
            mockRegistry.Setup(r => r.GetRegistrationForType(typeof(Province))).Returns(provinceRegistration.Object);
            mockRegistry.Setup(r => r.GetRegistrationForType(typeof(Continent))).Returns(continentRegistration.Object);
            var linkConventions = new DefaultLinkConventions();
            var metadataObject = new JObject();
            metadataObject["baz"] = "qux";
            var metadata = new BasicMetadata(metadataObject);
            // Act: include paths pull provinces (and their capitals) and the continent
            // into the document's related data; cities are linked but not included.
            var documentBuilder = new RegistryDrivenSingleResourceDocumentBuilder(mockRegistry.Object, linkConventions);
            var document = documentBuilder.BuildDocument(country, "http://www.example.com", new[] { "provinces.capital", "continent" }, metadata, null);
            // Assert
            document.PrimaryData.Id.Should().Be("4");
            document.PrimaryData.Type.Should().Be("countries");
            ((string) document.PrimaryData.Attributes["name"]).Should().Be("Spain");
            document.PrimaryData.Relationships.Count.Should().Be(3);
            // "cities" was not in the include paths, so it has links but no linkage.
            var citiesRelationship = document.PrimaryData.Relationships.First();
            citiesRelationship.Key.Should().Be("cities");
            citiesRelationship.Value.SelfLink.Href.Should().Be("http://www.example.com/countries/4/relationships/cities");
            citiesRelationship.Value.RelatedResourceLink.Href.Should().Be("http://www.example.com/countries/4/cities");
            citiesRelationship.Value.Linkage.Should().BeNull();
            var provincesRelationship = document.PrimaryData.Relationships.Skip(1).First();
            provincesRelationship.Key.Should().Be("provinces");
            provincesRelationship.Value.SelfLink.Href.Should().Be("http://www.example.com/countries/4/relationships/provinces");
            provincesRelationship.Value.RelatedResourceLink.Href.Should().Be("http://www.example.com/countries/4/provinces");
            provincesRelationship.Value.Linkage.IsToMany.Should().BeTrue();
            provincesRelationship.Value.Linkage.Identifiers[0].Type.Should().Be("provinces");
            provincesRelationship.Value.Linkage.Identifiers[0].Id.Should().Be("506");
            provincesRelationship.Value.Linkage.Identifiers[1].Type.Should().Be("provinces");
            provincesRelationship.Value.Linkage.Identifiers[1].Id.Should().Be("507");
            var continentRelationship = document.PrimaryData.Relationships.Skip(2).First();
            AssertToOneRelationship(continentRelationship, "continent",
                "http://www.example.com/countries/4/relationships/continent",
                "http://www.example.com/countries/4/continent",
                "continents", "1");
            document.RelatedData.Length.Should().Be(4); // 2 provinces, 1 city, and 1 continent
            var province1RelatedData = document.RelatedData[0];
            province1RelatedData.Id.Should().Be("506");
            province1RelatedData.Attributes["name"].Value<string>().Should().Be("Badajoz");
            province1RelatedData.Type.Should().Be("provinces");
            province1RelatedData.Relationships.Count.Should().Be(1);
            var province1CapitalRelationship = province1RelatedData.Relationships.First();
            AssertToOneRelationship(province1CapitalRelationship, "capital",
                "http://www.example.com/provinces/506/relationships/capital",
                "http://www.example.com/provinces/506/capital",
                "cities", "12");
            var province2RelatedData = document.RelatedData[1];
            province2RelatedData.Id.Should().Be("507");
            province2RelatedData.Type.Should().Be("provinces");
            province2RelatedData.Attributes["name"].Value<string>().Should().Be("Cuenca");
            var province2CapitalRelationship = province2RelatedData.Relationships.First();
            AssertEmptyToOneRelationship(province2CapitalRelationship, "capital",
                "http://www.example.com/provinces/507/relationships/capital",
                "http://www.example.com/provinces/507/capital");
            var city3RelatedData = document.RelatedData[2];
            city3RelatedData.Id.Should().Be("12");
            city3RelatedData.Type.Should().Be("cities");
            city3RelatedData.Attributes["name"].Value<string>().Should().Be("Badajoz");
            var continentRelatedData = document.RelatedData[3];
            continentRelatedData.Id.Should().Be("1");
            continentRelatedData.Type.Should().Be("continents");
            continentRelatedData.Attributes["name"].Value<string>().Should().Be("Europe");
            continentRelatedData.Relationships.Count.Should().Be(1);
            var continentCountriesRelationship = continentRelatedData.Relationships.First();
            continentCountriesRelationship.Key.Should().Be("countries");
            continentCountriesRelationship.Value.SelfLink.Href.Should().Be("http://www.example.com/continents/1/relationships/countries");
            continentCountriesRelationship.Value.RelatedResourceLink.Href.Should().Be("http://www.example.com/continents/1/countries");
            continentCountriesRelationship.Value.Linkage.Should().BeNull();
            ((string) document.Metadata.MetaObject["baz"]).Should().Be("qux");
        }
        [TestMethod]
        public void Returns_correct_document_for_null_resource()
        {
            // Arrange
            var mockRegistry = new Mock<IResourceTypeRegistry>(MockBehavior.Strict);
            var linkConventions = new DefaultLinkConventions();
            // Act
            var documentBuilder = new RegistryDrivenSingleResourceDocumentBuilder(mockRegistry.Object, linkConventions);
            var document = documentBuilder.BuildDocument(null, "http://www.example.com", null, null, null);
            // Assert
            document.PrimaryData.Should().BeNull();
        }
        // Asserts that a relationship entry is a populated to-one: expected key, links,
        // and exactly one linkage identifier with the given type and id.
        private void AssertToOneRelationship(KeyValuePair<string, IRelationshipObject> relationshipPair, string keyName, string selfLink, string relatedResourceLink,
            string linkageType, string linkageId)
        {
            relationshipPair.Key.Should().Be(keyName);
            relationshipPair.Value.SelfLink.Href.Should().Be(selfLink);
            relationshipPair.Value.RelatedResourceLink.Href.Should().Be(relatedResourceLink);
            relationshipPair.Value.Linkage.IsToMany.Should().BeFalse();
            relationshipPair.Value.Linkage.Identifiers.Length.Should().Be(1);
            relationshipPair.Value.Linkage.Identifiers[0].Type.Should().Be(linkageType);
            relationshipPair.Value.Linkage.Identifiers[0].Id.Should().Be(linkageId);
        }
        // Asserts that a relationship entry is a null to-one: expected key and links,
        // with linkage present but containing no identifiers.
        private void AssertEmptyToOneRelationship(KeyValuePair<string, IRelationshipObject> relationshipPair, string keyName, string selfLink, string relatedResourceLink)
        {
            relationshipPair.Key.Should().Be(keyName);
            relationshipPair.Value.SelfLink.Href.Should().Be(selfLink);
            relationshipPair.Value.RelatedResourceLink.Href.Should().Be(relatedResourceLink);
            relationshipPair.Value.Linkage.IsToMany.Should().BeFalse();
            relationshipPair.Value.Linkage.Identifiers.Length.Should().Be(0);
        }
    }
}
| {
"content_hash": "734a30c882e70137dcf99c7280aba833",
"timestamp": "",
"source": "github",
"line_count": 288,
"max_line_length": 170,
"avg_line_length": 48.49305555555556,
"alnum_prop": 0.62666475726765,
"repo_name": "SphtKr/JSONAPI.NET",
"id": "f5afb35330332bbb345eedbc8fbce68b06e3c6c7",
"size": "13968",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "JSONAPI.Tests/Documents/Builders/RegistryDrivenSingleResourceDocumentBuilderTests.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "657398"
}
],
"symlink_target": ""
} |
<?php
  // Admin-only page: inserts a movie submitted via POST into the database.
  // include function files for this application
  require_once('movie_sc_fns.php');

  session_start();

  do_html_header("Adding a movie");

  if (check_admin_user()) {
    if (filled_out($_POST)) {
      $movieid = $_POST['movieid'];
      $title = $_POST['title'];
      $director = $_POST['director'];
      $catid = $_POST['catid'];
      $price = $_POST['price'];
      $description = $_POST['description'];

      // SECURITY FIX: the user-supplied title was echoed into HTML with only
      // stripslashes(), allowing XSS. Escape it for HTML output here; the raw
      // value is still passed to insert_movie(), which is responsible for its
      // own database-level escaping.
      $safe_title = htmlspecialchars(stripslashes($title), ENT_QUOTES, 'UTF-8');

      if(insert_movie($movieid, $title, $director, $catid, $price, $description)) {
        echo "<p>movie <em>".$safe_title."</em> was added to the database.</p>";
      } else {
        echo "<p>movie <em>".$safe_title."</em> could not be added to the database.</p>";
      }
    } else {
      echo "<p>You have not filled out the form.  Please try again.</p>";
    }
    do_html_url("admin.php", "Back to administration menu");
  } else {
    echo "<p>You are not authorised to view this page.</p>";
  }

  do_html_footer();
?>
| {
"content_hash": "331636f182d0a5932cf63f7e57c18d9c",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 96,
"avg_line_length": 28,
"alnum_prop": 0.6006493506493507,
"repo_name": "Sapphirine/Movie-Review-Sentiment-Analysis",
"id": "bf03c80d689d6df4b3c7a574f6609f5b1d4a9745",
"size": "926",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "insert_movie.php",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "47594"
},
{
"name": "Python",
"bytes": "16649"
}
],
"symlink_target": ""
} |
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build cgo
package runtime_test
import (
"runtime"
"strings"
"testing"
)
// TestCgoCrashHandler runs the shared crash-handler check (defined
// elsewhere in this package) with cgo enabled.
func TestCgoCrashHandler(t *testing.T) {
	testCrashHandler(t, true)
}
// TestCgoSignalDeadlock builds and runs cgoSignalDeadlockSource and
// expects it to print OK rather than hang.
func TestCgoSignalDeadlock(t *testing.T) {
	if runtime.GOOS == "windows" && testing.Short() {
		t.Skip("Skipping in short mode") // takes up to 64 seconds
	}
	const want = "OK\n"
	if got := executeTest(t, cgoSignalDeadlockSource, nil); got != want {
		t.Fatalf("expected %q, but got %q", want, got)
	}
}
// TestCgoTraceback runs a program that asks for a goroutine stack dump
// after calling into C, and expects it to finish with OK.
func TestCgoTraceback(t *testing.T) {
	const want = "OK\n"
	if got := executeTest(t, cgoTracebackSource, nil); got != want {
		t.Fatalf("expected %q, but got %q", want, got)
	}
}
// TestCgoExternalThreadPanic panics on a thread created by C code (not
// by the Go runtime) and checks the crash output mentions the panic.
func TestCgoExternalThreadPanic(t *testing.T) {
	csrc := cgoExternalThreadPanicC
	switch runtime.GOOS {
	case "plan9":
		t.Skipf("no pthreads on %s", runtime.GOOS)
	case "windows":
		csrc = cgoExternalThreadPanicC_windows
	}
	got := executeTest(t, cgoExternalThreadPanicSource, nil, "main.c", csrc)
	if want := "panic: BOOM"; !strings.Contains(got, want) {
		t.Fatalf("want failure containing %q. output:\n%s\n", want, got)
	}
}
// Program for TestCgoSignalDeadlock: repeatedly faults and recovers in one
// goroutine while spawning OS-thread-locked goroutines, printing OK if the
// signal machinery never deadlocks and HANG on timeout.
const cgoSignalDeadlockSource = `
package main
import "C"
import (
	"fmt"
	"runtime"
	"time"
)
func main() {
	runtime.GOMAXPROCS(100)
	ping := make(chan bool)
	go func() {
		for i := 0; ; i++ {
			runtime.Gosched()
			select {
			case done := <-ping:
				if done {
					ping <- true
					return
				}
				ping <- true
			default:
			}
			func() {
				defer func() {
					recover()
				}()
				var s *string
				*s = ""
			}()
		}
	}()
	time.Sleep(time.Millisecond)
	for i := 0; i < 64; i++ {
		go func() {
			runtime.LockOSThread()
			select {}
		}()
		go func() {
			runtime.LockOSThread()
			select {}
		}()
		time.Sleep(time.Millisecond)
		ping <- false
		select {
		case <-ping:
		case <-time.After(time.Second):
			fmt.Printf("HANG\n")
			return
		}
	}
	ping <- true
	select {
	case <-ping:
	case <-time.After(time.Second):
		fmt.Printf("HANG\n")
		return
	}
	fmt.Printf("OK\n")
}
`
// Program for TestCgoTraceback: calls into C, then requests a full stack
// dump via runtime.Stack and prints OK.
const cgoTracebackSource = `
package main
/* void foo(void) {} */
import "C"
import (
	"fmt"
	"runtime"
)
func main() {
	C.foo()
	buf := make([]byte, 1)
	runtime.Stack(buf, true)
	fmt.Printf("OK\n")
}
`
// Go half of TestCgoExternalThreadPanic: exports gopanic for the C side
// and blocks forever in main; the C-created thread triggers the panic.
const cgoExternalThreadPanicSource = `
package main
// void start(void);
import "C"
func main() {
	C.start()
	select {}
}
//export gopanic
func gopanic() {
	panic("BOOM")
}
`
// POSIX C half: start() spawns a pthread that calls the exported gopanic.
const cgoExternalThreadPanicC = `
#include <stdlib.h>
#include <stdio.h>
#include <pthread.h>
void gopanic(void);
static void*
die(void* x)
{
	gopanic();
	return 0;
}
void
start(void)
{
	pthread_t t;
	if(pthread_create(&t, 0, die, 0) != 0)
		printf("pthread_create failed\n");
}
`
// Windows C half: same as above but using _beginthreadex.
const cgoExternalThreadPanicC_windows = `
#include <stdlib.h>
#include <stdio.h>
void gopanic(void);
static void*
die(void* x)
{
	gopanic();
	return 0;
}
void
start(void)
{
	if(_beginthreadex(0, 0, die, 0, 0, 0) != 0)
		printf("_beginthreadex failed\n");
}
`
| {
"content_hash": "880d6911f6fe44faa3c100ddde858601",
"timestamp": "",
"source": "github",
"line_count": 196,
"max_line_length": 73,
"avg_line_length": 15.576530612244898,
"alnum_prop": 0.627906976744186,
"repo_name": "wesolows/golang-old",
"id": "972eedc624ee3d192240d0b764cfa517f174a6ef",
"size": "3053",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "src/runtime/crash_cgo_test.go",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "712283"
},
{
"name": "Awk",
"bytes": "3993"
},
{
"name": "C",
"bytes": "4688139"
},
{
"name": "C++",
"bytes": "143458"
},
{
"name": "CSS",
"bytes": "3120"
},
{
"name": "Go",
"bytes": "18289650"
},
{
"name": "JavaScript",
"bytes": "13246"
},
{
"name": "Objective-C",
"bytes": "21732"
},
{
"name": "Perl",
"bytes": "33612"
},
{
"name": "Python",
"bytes": "123232"
},
{
"name": "Shell",
"bytes": "109661"
}
],
"symlink_target": ""
} |
require "berg/repository"
require "main/entities/home_page_featured_item"
module Main
  module Persistence
    module Repositories
      # Repository for the items featured on the home page.
      class HomePageFeaturedItems < Berg::Repository[:home_page_featured_items]
        relations :home_page_featured_items

        # Every featured item, ordered by its configured position and
        # mapped onto the HomePageFeaturedItem entity.
        def listing
          ordered = home_page_featured_items.order(:position)
          ordered.as(Entities::HomePageFeaturedItem)
        end
      end
    end
  end
end
| {
"content_hash": "28f749527d80922e7a20cb313cd02b2b",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 79,
"avg_line_length": 23.88888888888889,
"alnum_prop": 0.6627906976744186,
"repo_name": "icelab/berg",
"id": "06448c1dc7bdb476ee3ab8d71ff2424796e27383",
"size": "430",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/main/lib/main/persistence/repositories/home_page_featured_items.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1619"
},
{
"name": "HTML",
"bytes": "49007"
},
{
"name": "JavaScript",
"bytes": "14449"
},
{
"name": "PLpgSQL",
"bytes": "15973"
},
{
"name": "Ruby",
"bytes": "255937"
}
],
"symlink_target": ""
} |
package gnu.javax.sound.midi.alsa;
import gnu.classpath.Configuration;
import javax.sound.midi.MidiDevice;
import javax.sound.midi.MidiDevice.Info;
import javax.sound.midi.spi.MidiDeviceProvider;
/**
* Provide ALSA MIDI devices.
*
* @author Anthony Green ([email protected])
*
*/
public class AlsaMidiDeviceProvider extends MidiDeviceProvider
{
  /**
   * Abstract base for ALSA specific MIDI device info.  Each concrete
   * subclass knows how to create the MidiDevice it describes.
   *
   * NOTE(review): the arrays returned by the native
   * getInputDeviceInfo_/getOutputDeviceInfo_ methods contain instances of
   * these subclasses, so the native gjsmalsa code presumably constructs
   * them by name — verify before renaming or moving any of them.
   *
   * @author Anthony Green ([email protected])
   *
   */
  private static abstract class AlsaInfo extends Info
  {
    /**
     * Create an ALSA specific MIDI device info object.
     *
     * @param name the device name
     * @param description the device description
     */
    public AlsaInfo(String name, String description)
    {
      super(name, "Alsa", description, "0.0");
    }
    /** Create the MidiDevice described by this info object. */
    abstract MidiDevice getDevice ();
  }
  /**
   * ALSA MIDI Port.  Carries the ALSA client/port identifiers used by the
   * native layer to open the port.
   *
   * @author Anthony Green ([email protected])
   *
   */
  public static abstract class AlsaPortInfo extends AlsaInfo
  {
    long client;
    long port;
    /**
     * Create ALSA MIDI In Port.
     *
     * @param name the device name
     * @param description the device description
     * @param client the client ID
     * @param port the port ID
     */
    public AlsaPortInfo(String name, String description, long client, long port)
    {
      super(name, description);
      this.client = client;
      this.port = port;
    }
  }
  /**
   * ALSA Sequencer specific info.  Always resolves to the shared
   * sequencer device singleton.
   *
   * @author Anthony Green ([email protected])
   *
   */
  private static class AlsaSequencerInfo extends AlsaInfo
  {
    public AlsaSequencerInfo(String name, String description)
    {
      super(name, description);
    }
    MidiDevice getDevice()
    {
      return AlsaMidiSequencerDevice.getInstance();
    }
  }
  /**
   * ALSA MIDI In Port.
   *
   * @author Anthony Green ([email protected])
   *
   */
  private static class AlsaInputPortInfo extends AlsaPortInfo
  {
    public AlsaInputPortInfo(String name, String description, long client, long port)
    {
      super(name, description, client, port);
    }
    MidiDevice getDevice()
    {
      return new AlsaInputPortDevice(this);
    }
  }
  /**
   * ALSA MIDI Out Port.
   *
   * @author Anthony Green ([email protected])
   *
   */
  private static class AlsaOutputPortInfo extends AlsaPortInfo
  {
    public AlsaOutputPortInfo(String name, String description, long client, long port)
    {
      super(name, description, client, port);
    }
    MidiDevice getDevice()
    {
      return new AlsaOutputPortDevice(this);
    }
  }
  // All known ALSA devices: the sequencer at index 0, then every input
  // port, then every output port (filled in by the static initializer).
  private static AlsaInfo[] infos;
  // Implemented by the native gjsmalsa library.
  private static native AlsaInfo[] getInputDeviceInfo_();
  private static native AlsaInfo[] getOutputDeviceInfo_();
  /**
   * Initialize the ALSA system
   */
  private static native void init_();
  static
  {
    if (Configuration.INIT_LOAD_LIBRARY)
      {
        System.loadLibrary("gjsmalsa");
      }
    // Native ALSA setup must run before any device queries below.
    init_();
    AlsaInfo inputs[] = getInputDeviceInfo_();
    AlsaInfo outputs[] = getOutputDeviceInfo_();
    infos = new AlsaInfo[inputs.length + outputs.length + 1];
    infos[0] = new AlsaSequencerInfo ("/dev/snd/seq", "ALSA Sequencer");
    System.arraycopy(inputs, 0, infos, 1, inputs.length);
    System.arraycopy(outputs, 0, infos, 1 + inputs.length, outputs.length);
  }
  public AlsaMidiDeviceProvider()
  {
    // Nothing.
  }
  /* (non-Javadoc)
   * @see javax.sound.midi.spi.MidiDeviceProvider#getDeviceInfo()
   */
  public Info[] getDeviceInfo()
  {
    return infos;
  }
  /* (non-Javadoc)
   * @see javax.sound.midi.spi.MidiDeviceProvider#getDevice(javax.sound.midi.MidiDevice.Info)
   */
  public MidiDevice getDevice(Info info)
  {
    // Linear scan is fine: the device list is tiny and fixed at class load.
    for (int i = 0; i < infos.length; i++)
      {
        if (info.equals(infos[i]))
          {
            return infos[i].getDevice();
          }
      }
    throw new IllegalArgumentException("Don't recognize MIDI device " + info);
  }
}
| {
"content_hash": "f5591017527593e72dabdb432c20c69b",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 93,
"avg_line_length": 21.635359116022098,
"alnum_prop": 0.6424923391215526,
"repo_name": "the-linix-project/linix-kernel-source",
"id": "33181b6d54cda8cd40e01b0ec21a3e11210594ae",
"size": "5688",
"binary": false,
"copies": "153",
"ref": "refs/heads/master",
"path": "gccsrc/gcc-4.7.2/libjava/classpath/gnu/javax/sound/midi/alsa/AlsaMidiDeviceProvider.java",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Ada",
"bytes": "38139979"
},
{
"name": "Assembly",
"bytes": "3723477"
},
{
"name": "Awk",
"bytes": "83739"
},
{
"name": "C",
"bytes": "103607293"
},
{
"name": "C#",
"bytes": "55726"
},
{
"name": "C++",
"bytes": "38577421"
},
{
"name": "CLIPS",
"bytes": "6933"
},
{
"name": "CSS",
"bytes": "32588"
},
{
"name": "Emacs Lisp",
"bytes": "13451"
},
{
"name": "FORTRAN",
"bytes": "4294984"
},
{
"name": "GAP",
"bytes": "13089"
},
{
"name": "Go",
"bytes": "11277335"
},
{
"name": "Haskell",
"bytes": "2415"
},
{
"name": "Java",
"bytes": "45298678"
},
{
"name": "JavaScript",
"bytes": "6265"
},
{
"name": "Matlab",
"bytes": "56"
},
{
"name": "OCaml",
"bytes": "148372"
},
{
"name": "Objective-C",
"bytes": "995127"
},
{
"name": "Objective-C++",
"bytes": "436045"
},
{
"name": "PHP",
"bytes": "12361"
},
{
"name": "Pascal",
"bytes": "40318"
},
{
"name": "Perl",
"bytes": "358808"
},
{
"name": "Python",
"bytes": "60178"
},
{
"name": "SAS",
"bytes": "1711"
},
{
"name": "Scilab",
"bytes": "258457"
},
{
"name": "Shell",
"bytes": "2610907"
},
{
"name": "Tcl",
"bytes": "17983"
},
{
"name": "TeX",
"bytes": "1455571"
},
{
"name": "XSLT",
"bytes": "156419"
}
],
"symlink_target": ""
} |
module Locomotive
  module Steam
    module Middlewares

      # Rack middleware that caches whole HTTP responses of cacheable
      # GET/HEAD requests in Rails.cache, keyed by site, path and query.
      class Cache

        attr_reader :app

        # Only safe, idempotent verbs may be served from the cache.
        CACHEABLE_REQUEST_METHODS = %w(GET HEAD).freeze

        def initialize(app)
          @app = app
        end

        # Serve from the cache when allowed, otherwise pass the request on.
        def call(env)
          if cacheable?(env)
            fetch_cached_response(env)
          else
            app.call(env)
          end
        end

        private

        # Return the cached Rack response for this request, generating and
        # storing it on a cache miss.
        def fetch_cached_response(env)
          key = cache_key(env)

          if marshaled = Rails.cache.read(key)
            Marshal.load(marshaled)
          else
            app.call(env).tap do |response|
              Rails.cache.write(key, marshal(response))
            end
          end
        end

        # Serialize the [status, headers, body] triplet for storage.
        # Headers whose name contains anything besides digits, upper-case
        # letters and underscores, or whose value is not string-like, are
        # dropped first (they may not marshal cleanly).
        #
        # BUGFIX: the previous implementation used `headers.dup.reject!`.
        # Hash#reject! returns nil when nothing is rejected, so a header
        # hash that needed no filtering was cached as nil and broke every
        # subsequent cache hit. The non-destructive `reject` always
        # returns a hash.
        def marshal(response)
          code, headers, body = response
          safe_headers = headers.reject { |key, val| key =~ /[^0-9A-Z_]/ || !val.respond_to?(:to_str) }
          Marshal.dump([code, safe_headers, body])
        end

        # Cacheable when the verb is safe, live editing is off, and both
        # the site and the page have caching enabled.
        def cacheable?(env)
          CACHEABLE_REQUEST_METHODS.include?(env['REQUEST_METHOD']) &&
            !env['steam.live_editing'] &&
            env['steam.site'].try(:cache_enabled) &&
            env['steam.page'].try(:cache_enabled)
        end

        # MD5 digest over site id + last modification time + path + query,
        # so any site update naturally invalidates previous entries.
        def cache_key(env)
          site, path, query = env['steam.site'], env['PATH_INFO'], env['QUERY_STRING']
          key = "site/#{site._id}/#{site.last_modified_at.to_i}/page/#{path}/#{query}"
          Digest::MD5.hexdigest(key)
        end

      end

    end
  end
end
| {
"content_hash": "c4e834329742f32e73129e9ef7ea18ee",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 104,
"avg_line_length": 23.822580645161292,
"alnum_prop": 0.5179417738659445,
"repo_name": "ipmobiletech/locomotivecms-engine",
"id": "b7232c0083111ced6d18109011835f658c7331a4",
"size": "1477",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/locomotive/steam/middlewares/cache.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "95989"
},
{
"name": "CoffeeScript",
"bytes": "58225"
},
{
"name": "Cucumber",
"bytes": "179565"
},
{
"name": "HTML",
"bytes": "63115"
},
{
"name": "JavaScript",
"bytes": "3542"
},
{
"name": "Ruby",
"bytes": "691422"
},
{
"name": "Shell",
"bytes": "70"
}
],
"symlink_target": ""
} |
package io.jboot.test.seata.commons;
import com.jfinal.plugin.activerecord.IBean;
import io.jboot.db.annotation.Table;
import io.jboot.db.model.JbootModel;
// Active-record style model for the `seata_stock` table, used by the
// Seata distributed-transaction tests. Columns are resolved dynamically
// by JbootModel at runtime; none are mapped explicitly here.
@Table(tableName = "seata_stock", primaryKey = "ID")
public class Stock extends JbootModel<Stock> implements IBean {
    /**
     * Serialization marker; required because JbootModel is serializable.
     */
    private static final long serialVersionUID = 1L;
}
| {
"content_hash": "63c13aef021c35c52140cfc75ca692d7",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 63,
"avg_line_length": 23.933333333333334,
"alnum_prop": 0.7158774373259053,
"repo_name": "yangfuhai/jboot",
"id": "ffcacdf64b0dbd752e2ee48c73ca505246ae3f09",
"size": "359",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/java/io/jboot/test/seata/commons/Stock.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "10366"
},
{
"name": "Java",
"bytes": "2394166"
}
],
"symlink_target": ""
} |
// Visual Studio Shared Project
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Input;
namespace Microsoft.VisualStudioTools.Wpf {
    /// <summary>
    /// Shared, stateless converter singletons for WPF bindings, all built
    /// on <c>LambdaConverter</c>. Grouped below by purpose: numeric
    /// scaling, null checks, bool-to-visibility, collection emptiness,
    /// multi-binding aggregation, and min/max.
    /// </summary>
    public static class Converters {
        // Multiply a double by a fixed factor.
        public static IValueConverter Scale055 { get; } = LambdaConverter.Create<double>(x => x * 0.55);
        public static IValueConverter Scale122 { get; } = LambdaConverter.Create<double>(x => x * 1.22);
        public static IValueConverter Scale155 { get; } = LambdaConverter.Create<double>(x => x * 1.55);
        public static IValueConverter Scale190 { get; } = LambdaConverter.Create<double>(x => x * 1.90);
        // Join a sequence of strings with ", ".
        public static IValueConverter StringJoin { get; } = LambdaConverter.Create<IEnumerable<string>>(x => string.Join(", ", x));
        // Null checks mapped to bool or Visibility.
        public static IValueConverter NullIsTrue { get; } = LambdaConverter.Create<object>(x => x == null);
        public static IValueConverter NullIsFalse { get; } = LambdaConverter.Create<object>(x => x != null);
        public static IValueConverter NullIsCollapsed { get; } = LambdaConverter.Create<object>(x => x == null ? Visibility.Collapsed : Visibility.Visible);
        public static IValueConverter NullIsNotCollapsed { get; } = LambdaConverter.Create<object>(x => x == null ? Visibility.Visible : Visibility.Collapsed);
        // Bool mapped to Visibility (Collapsed frees the layout slot,
        // Hidden keeps it).
        public static IValueConverter TrueIsCollapsed { get; } = LambdaConverter.Create<bool>(x => x ? Visibility.Collapsed : Visibility.Visible);
        public static IValueConverter TrueIsNotCollapsed { get; } = LambdaConverter.Create<bool>(x => x ? Visibility.Visible : Visibility.Collapsed);
        public static IValueConverter FalseIsCollapsed { get; } = LambdaConverter.Create<bool>(x => !x ? Visibility.Collapsed : Visibility.Visible);
        public static IValueConverter FalseIsNotCollapsed { get; } = LambdaConverter.Create<bool>(x => !x ? Visibility.Visible : Visibility.Collapsed);
        public static IValueConverter TrueIsHidden { get; } = LambdaConverter.Create<bool>(x => x ? Visibility.Hidden : Visibility.Visible);
        public static IValueConverter TrueIsNotHidden { get; } = LambdaConverter.Create<bool>(x => x ? Visibility.Visible : Visibility.Hidden);
        public static IValueConverter FalseIsHidden { get; } = LambdaConverter.Create<bool>(x => !x ? Visibility.Hidden : Visibility.Visible);
        public static IValueConverter FalseIsNotHidden { get; } = LambdaConverter.Create<bool>(x => !x ? Visibility.Visible : Visibility.Hidden);
        public static IValueConverter Not { get; } = LambdaConverter.Create<bool>(x => !x);
        // Collection emptiness (null-or-empty vs. strictly empty) mapped
        // to bool or Visibility.
        public static IValueConverter NullOrEmptyIsTrue { get; } = LambdaConverter.Create<IEnumerable>(x => x == null || !x.GetEnumerator().MoveNext());
        public static IValueConverter NullOrEmptyIsFalse { get; } = LambdaConverter.Create<IEnumerable>(x => x != null && x.GetEnumerator().MoveNext());
        public static IValueConverter NullOrEmptyIsCollapsed { get; } = LambdaConverter.Create<IEnumerable>(x => x == null || !x.GetEnumerator().MoveNext() ? Visibility.Collapsed : Visibility.Visible);
        public static IValueConverter NullOrEmptyIsNotCollapsed { get; } = LambdaConverter.Create<IEnumerable>(x => x == null || !x.GetEnumerator().MoveNext() ? Visibility.Visible : Visibility.Collapsed);
        public static IValueConverter EmptyIsCollapsed { get; } = LambdaConverter.Create<IEnumerable>(x => x != null && !x.GetEnumerator().MoveNext() ? Visibility.Collapsed : Visibility.Visible);
        public static IValueConverter EmptyIsNotCollapsed { get; } = LambdaConverter.Create<IEnumerable>(x => x != null && !x.GetEnumerator().MoveNext() ? Visibility.Visible : Visibility.Collapsed);
        // Misc single-value conversions.
        public static IValueConverter TrueIsCrossCursor { get; } = LambdaConverter.Create<bool>(x => x ? Cursors.Cross : Cursors.Arrow);
        public static IValueConverter TrueIsBold { get; } = LambdaConverter.Create<bool>(x => x ? FontWeights.Bold : FontWeights.Normal);
        // Multi-binding boolean aggregation (Any / All), optionally mapped
        // to Visibility.
        public static IMultiValueConverter Any { get; } = LambdaConverter.CreateMulti<bool>(args => args.Any(x => x));
        public static IMultiValueConverter AnyIsHidden { get; } = LambdaConverter.CreateMulti<bool>(args => args.Any(x => x) ? Visibility.Hidden : Visibility.Visible);
        public static IMultiValueConverter AnyIsNotHidden { get; } = LambdaConverter.CreateMulti<bool>(args => args.Any(x => x) ? Visibility.Visible : Visibility.Hidden);
        public static IMultiValueConverter AnyIsCollapsed { get; } = LambdaConverter.CreateMulti<bool>(args => args.Any(x => x) ? Visibility.Collapsed : Visibility.Visible);
        public static IMultiValueConverter AnyIsNotCollapsed { get; } = LambdaConverter.CreateMulti<bool>(args => args.Any(x => x) ? Visibility.Visible : Visibility.Collapsed);
        public static IMultiValueConverter All { get; } = LambdaConverter.CreateMulti<bool>(args => args.All(x => x));
        public static IMultiValueConverter AllIsHidden { get; } = LambdaConverter.CreateMulti<bool>(args => args.All(x => x) ? Visibility.Hidden : Visibility.Visible);
        public static IMultiValueConverter AllIsNotHidden { get; } = LambdaConverter.CreateMulti<bool>(args => args.All(x => x) ? Visibility.Visible : Visibility.Hidden);
        public static IMultiValueConverter AllIsCollapsed { get; } = LambdaConverter.CreateMulti<bool>(args => args.All(x => x) ? Visibility.Collapsed : Visibility.Visible);
        public static IMultiValueConverter AllIsNotCollapsed { get; } = LambdaConverter.CreateMulti<bool>(args => args.All(x => x) ? Visibility.Visible : Visibility.Collapsed);
        // Numeric aggregation over multi-bindings.
        public static IMultiValueConverter Max { get; } = LambdaConverter.CreateMulti<double>(x => x.Max());
        public static IMultiValueConverter Min { get; } = LambdaConverter.CreateMulti<double>(x => x.Min());
        // args: (actual width, threshold) -> scrollbar visibility.
        public static IMultiValueConverter ShowScrollbarForMinWidth { get; } = LambdaConverter.Create<double, double>((x, y) => x < y ? ScrollBarVisibility.Auto : ScrollBarVisibility.Disabled);
    }
}
| {
"content_hash": "7d56f5187369798f09516a362d03ebac",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 204,
"avg_line_length": 99.42647058823529,
"alnum_prop": 0.724744860227777,
"repo_name": "int19h/PTVS",
"id": "a010605dc9ea2bf3b452eb87690416ab1b6226d7",
"size": "6763",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Common/Product/SharedProject/Wpf/LambdaConverter/Converters.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP.NET",
"bytes": "109"
},
{
"name": "Batchfile",
"bytes": "7975"
},
{
"name": "C",
"bytes": "21444"
},
{
"name": "C#",
"bytes": "11297254"
},
{
"name": "C++",
"bytes": "175131"
},
{
"name": "CSS",
"bytes": "4109"
},
{
"name": "HTML",
"bytes": "213660"
},
{
"name": "JavaScript",
"bytes": "44401"
},
{
"name": "PowerShell",
"bytes": "18157"
},
{
"name": "Pug",
"bytes": "2807"
},
{
"name": "Python",
"bytes": "620501"
},
{
"name": "Rich Text Format",
"bytes": "260880"
},
{
"name": "Smarty",
"bytes": "3663"
},
{
"name": "Tcl",
"bytes": "24968"
},
{
"name": "Vim Snippet",
"bytes": "17303"
}
],
"symlink_target": ""
} |
<?php
namespace Contentinum\View\Helper\News;
use ContentinumComponents\Images\CalculateResize;
class Categories extends Images
{
const VIEW_TEMPLATE = 'newsactual';
    /**
     * Render the news category view.
     *
     * Builds one article per news entry: publish date, optional author,
     * optional toolbar, headline, optional teaser image (resized on the
     * fly) and teaser text with a "read more" link, then wraps the whole
     * list in the configured wrapper template.
     *
     * @param array  $entries  module data; reads modulContent[news],
     *                         modulFormat and modulLink
     * @param object $template view templates; plugins.<templateKey> is used
     * @param mixed  $media    not referenced in this method — TODO confirm
     *                         whether callers still need to pass it
     * @return string rendered HTML
     */
    public function __invoke($entries, $template, $media)
    {
        // Template key defaults to "newsactual" but may be overridden per
        // module via modulFormat.
        $templateKey = static::VIEW_TEMPLATE;
        if (isset($entries['modulFormat'])){
            $templateKey = $entries['modulFormat'];
        }
        $this->setTemplate($template->plugins->$templateKey);
        // Row templates come from properties set up elsewhere (parent
        // class / configuration) — presumably config objects; verify there.
        $labelReadMore = $this->labelReadMore->toArray();
        $publishDate = $this->publishDate->toArray();
        $publishDate['grid']['format']['dateFormat']['attr'] = $this->getDateFormat($entries);
        $filter = new \Zend\Filter\HtmlEntities();
        $newstemplate = $this->news->toArray();
        $newstemplate['grid']['attr']['class'] .= ' categoryview';
        $html = '';
        foreach ($entries['modulContent']['news'] as $entry) {
            $article = '';
            $head = '';
            // First whitespace-separated token of publish_date (the date part).
            // NOTE(review): $lnPublishDate is assigned but the links below use
            // $entry['lnPublishDate'] instead — confirm which is intended.
            $arr = preg_split('/[\s]+/', $entry['publish_date']);
            $lnPublishDate = $arr[0];
            $publishDate['grid']['attr']['datetime'] = $entry['publish_date'];
            $head .= $this->deployRow($publishDate, $entry['publish_date']);
            if (isset($entry['publish_author'])) {
                $head .= $this->deployRow($this->publishAuthor, $entry['publish_author']);
            }
            // Anchor target for deep links to this entry.
            $blogId = 'blog' . $entry['id'];
            if (null !== $this->toolbar) {
                // PDF / Facebook share / mail links for the entry toolbar.
                $links['pdf'] = array(
                    'href' => '/' . $entry['id']
                );
                $links['facebook'] = array(
                    'href' => '?u=' . urlencode($this->view->protocol . '://' . $this->view->host . '/' . $entry['url'] . '/' . $entry['source'] . '/' . $entry['lnPublishDate'] . '#'.$blogId)
                );
                $links['sendmail'] = array(
                    'href' => '/' . $entry['id']
                );
                $head .= $this->view->contenttoolbar($links, $this->toolbar->toArray());
            }
            // "onlyheadline" mode links the headline itself and skips the body.
            if ('onlyheadline' === $entries['modulLink']){
                $head .= $this->deployRow($this->headline, '<a href="/' . $entry['url'] . '/' . $entry['source'] . '/' . $entry['lnPublishDate'] . '#'.$blogId . '">' . $entry['headline'] . '</a>');
            } else {
                $head .= $this->deployRow($this->headline, $entry['headline']);
            }
            $article .= $this->deployRow($this->header, $head);
            if ('onlyheadline' !== $entries['modulLink'] ){
                // Teaser image: only when allowed and the entry has a real
                // media record (id 1 appears to mean "no media" — confirm).
                if ('nopictures' !== $entries['modulLink'] && 1 !== (int) $entry['web_medias_id'] && 'no' !== $this->displayimage) {
                    if ('mediateaserright' == $entry['htmlwidgets']) {
                        $mediaTemplate = $this->mediateaserright->toArray();
                    } else {
                        $mediaTemplate = $this->mediateaserleft->toArray();
                    }
                    $setSize = array(
                        'landscape' => $this->teaserLandscapeSize,
                        'portrait' => $this->teaserPortraitSize
                    );
                    $mediaMetas = $this->setConvertparams($entry['media_metas'], true);
                    $img = '<img src="' . $entry['media_link'] . '"';
                    // Pick the target size per orientation; fall back to the
                    // landscape size when no portrait size is configured.
                    // NOTE(review): $styleAttr is set but never used below.
                    if (is_array($setSize) && isset($setSize['landscape']) && false !== $setSize['landscape']) {
                        $landscape = $setSize['landscape'];
                        $styleAttr = ' landscape';
                        if (isset($setSize['portrait']) && false !== $setSize['portrait']) {
                            $styleAttr = ' portrait';
                            $portrait = $setSize['portrait'];
                        } else {
                            $styleAttr = ' portrait';
                            $portrait = $landscape;
                        }
                    } else {
                        $portrait = $landscape = $setSize;
                    }
                    // Compute width/height attributes from the file on disk;
                    // recompute with the portrait target if needed.
                    $resize = new CalculateResize($landscape);
                    $resize->setFile(DOCUMENT_ROOT . DS . $entry['media_link']);
                    $resize->getNewSize();
                    if ('portrait' == $resize->getFormat()) {
                        $resize->setTarget($portrait);
                        $resize->getNewSize();
                    }
                    $img .= ' ' . $resize->getHtmlString();
                    if (isset($mediaMetas['alt'])) {
                        $img .= ' alt="' . $mediaMetas['alt'] . '"';
                    }
                    if (false !== ($title = $this->hasValue($mediaMetas, 'title'))) {
                        $img .= ' title="' . $title . '"';
                    }
                    $img .= ' />';
                    $mediaTemplate['row']['attr']['class'] .= ' teaser-imageitem-size';
                    // With a caption, the image is prepended to the caption
                    // row; otherwise the image is the row content itself.
                    if (false !== ($caption = $this->hasValue($mediaMetas, 'caption'))) {
                        $mediaTemplate['row']['content:before'] = $img;
                        $images = $this->deployRow($mediaTemplate, $caption);
                    } else {
                        $images = $this->deployRow(array(
                            'grid' => $mediaTemplate['row']
                        ), $img);
                    }
                    $article .= $images;
                }
                $labelReadMore["grid"]["attr"]['href'] = '/' . $entry['url'] . '/' . $entry['source'] . '/' . $entry['lnPublishDate'] . '#'.$blogId;
                $labelReadMore["grid"]["attr"]['title'] = $entry['label_read_more'] . ' zu ' . $filter->filter($entry['headline']);
                // Prefer an explicit teaser; otherwise truncate the full
                // content at the configured character count (cut back to the
                // last whole word) and append a read-more link.
                if (strlen($entry['content_teaser']) > 1) {
                    $article .= $entry['content_teaser'];
                    $article .= $this->deployRow($labelReadMore, $entry['label_read_more']);
                } else {
                    $content = $entry['content'];
                    if ($entry['number_character_teaser'] > 0 && strlen($content) > $entry['number_character_teaser']) {
                        $content = substr($content, 0, $entry['number_character_teaser']);
                        $content = substr($content, 0, strrpos($content, " "));
                        $content = $content . ' ...</p>';
                        $article .= $content;
                        $article .= $this->deployRow($labelReadMore, $entry['label_read_more']);
                    } else {
                        $article .= $content;
                    }
                }
            }
            $html .= $this->deployRow($newstemplate, $article);
        }
        if (null !== $this->wrapper) {
            $html = $this->deployRow($this->wrapper, $html);
        }
        return $html;
    }
} | {
"content_hash": "0703943762eb29372bcda8ac7258aa50",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 200,
"avg_line_length": 51.91095890410959,
"alnum_prop": 0.3872542551787835,
"repo_name": "jochum-mediaservices/contentinum5.5",
"id": "33ad6c5bb986f480dd637baee708b844440d4afb",
"size": "8857",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "module/Contentinum/src/Contentinum/View/Helper/News/Categories.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1155776"
},
{
"name": "HTML",
"bytes": "519302"
},
{
"name": "JavaScript",
"bytes": "687645"
},
{
"name": "PHP",
"bytes": "4412705"
}
],
"symlink_target": ""
} |
using namespace llvm;
namespace {
// Used to build addressing modes.
struct RISCVAddressingMode {
// The shape of the address.
enum AddrForm {
// base+offset
FormBO
};
AddrForm Form;
// The type of displacement.
enum OffRange {
Off12Only
};
OffRange OffR;
// The parts of the address. The address is equivalent to:
//
// Base + Offset + Index + (IncludesDynAlloc ? ADJDYNALLOC : 0)
SDValue Base;
int64_t Offset;
RISCVAddressingMode(AddrForm form, OffRange offr)
: Form(form), OffR(offr), Base(), Offset(0) {}
void dump() {
errs() << "RISCVAddressingMode " << this << '\n';
errs() << " Base ";
if (Base.getNode() != 0)
Base.getNode()->dump();
else
errs() << "null\n";
errs() << " Offset " << Offset;
}
};
// SelectionDAG instruction selector for the RISCV target.  Most matching
// rules are generated by TableGen (see the #include of RISCVGenDAGISel.inc
// at the bottom of the class); the members here are the hand-written
// address-matching helpers those rules call into.
class RISCVDAGToDAGISel : public SelectionDAGISel {
  const RISCVTargetLowering &Lowering;
  const RISCVSubtarget &Subtarget;
  // Used by RISCVOperands.td to create integer constants.
  inline SDValue getImm(const SDNode *Node, uint64_t Imm) {
    return CurDAG->getTargetConstant(Imm, Node->getValueType(0));
  }
  /// getI32Imm - Return a target constant with the specified value, of type
  /// i32.
  SDValue getI32Imm(unsigned Imm) {
    return CurDAG->getTargetConstant(Imm, MVT::i32);
  }
  // Try to fold more of the base or index of AM into AM, where IsBase
  // selects between the base and index.
  bool expandAddress(RISCVAddressingMode &AM, bool IsBase);
  // Try to describe N in AM, returning true on success.
  bool selectAddress(SDValue N, RISCVAddressingMode &AM);
  // Extract individual target operands from matched address AM.
  void getAddressOperands(const RISCVAddressingMode &AM, EVT VT,
                          SDValue &Base, SDValue &Disp);
  void getAddressOperands(const RISCVAddressingMode &AM, EVT VT,
                          SDValue &Base, SDValue &Disp, SDValue &Index);
  //RISCV
  // Match Addr as base + 12-bit signed constant offset.  Frame indices
  // become TargetFrameIndex nodes; global/external symbols are rejected in
  // non-PIC mode; anything else falls back to (Addr, 0).
  bool selectMemRegAddr(SDValue Addr, SDValue &Base, SDValue &Offset) {
    EVT ValTy = Addr.getValueType();
    // if Address is FI, get the TargetFrameIndex.
    if (FrameIndexSDNode *FIN = dyn_cast<FrameIndexSDNode>(Addr)) {
      Base = CurDAG->getTargetFrameIndex(FIN->getIndex(), ValTy);
      Offset = CurDAG->getTargetConstant(0, ValTy);
      return true;
    }
    if (TM.getRelocationModel() != Reloc::PIC_) {
      if ((Addr.getOpcode() == ISD::TargetExternalSymbol ||
          Addr.getOpcode() == ISD::TargetGlobalAddress))
        return false;
    }
    // Addresses of the form FI+const or FI|const
    if (CurDAG->isBaseWithConstantOffset(Addr)) {
      ConstantSDNode *CN = dyn_cast<ConstantSDNode>(Addr.getOperand(1));
      if (isInt<12>(CN->getSExtValue())) {
        // If the first operand is a FI, get the TargetFI Node
        if (FrameIndexSDNode *FIN = dyn_cast<FrameIndexSDNode>
                                    (Addr.getOperand(0)))
          Base = CurDAG->getTargetFrameIndex(FIN->getIndex(), ValTy);
        else
          Base = Addr.getOperand(0);
        Offset = CurDAG->getTargetConstant(CN->getZExtValue(), ValTy);
        return true;
      }
    }
    //Last case
    Base = Addr;
    Offset = CurDAG->getTargetConstant(0, Addr.getValueType());
    return true;
  }
  // Match Addr as a plain register operand (always succeeds).
  bool selectRegAddr(SDValue Addr, SDValue &Base) {
    //always just register
    Base = Addr;
    //Offset = CurDAG->getTargetConstant(0, Addr.getValueType());
    return true;
  }
#if 0
  //-- DEPRECATING THIS FUNCTION AS IT IS NOT CURRENTLY USED
  // (kept for reference: rewrote uses of "addi $dst, $zero, 0" to the
  // zero register directly)
  bool replaceUsesWithZeroReg(MachineRegisterInfo *MRI,
                              const MachineInstr& MI) {
    unsigned DstReg = 0, ZeroReg = 0;
    // Check if MI is "addiu $dst, $zero, 0" or "daddiu $dst, $zero, 0".
    if ((MI.getOpcode() == RISCV::ADDI) &&
        (MI.getOperand(1).isReg()) && //avoid frame-index
        (MI.getOperand(1).getReg() == RISCV::zero) &&
        (MI.getOperand(2).getImm() == 0)) {
      DstReg = MI.getOperand(0).getReg();
      ZeroReg = RISCV::zero;
    } else if ((MI.getOpcode() == RISCV::ADDI64) &&
               (MI.getOperand(1).isReg()) && //avoid frame-index
               (MI.getOperand(1).getReg() == RISCV::zero_64) &&
               (MI.getOperand(2).getImm() == 0)) {
      DstReg = MI.getOperand(0).getReg();
      ZeroReg = RISCV::zero_64;
    } else if ((MI.getOpcode() == RISCV::ADDIW) &&
               (MI.getOperand(1).isReg()) && //avoid frame-index
               (MI.getOperand(1).getReg() == RISCV::zero_64) &&
               (MI.getOperand(2).getImm() == 0)) {
      DstReg = MI.getOperand(0).getReg();
      ZeroReg = RISCV::zero_64;
    }
    if (!DstReg)
      return false;
    // Replace uses with ZeroReg.
    for (MachineRegisterInfo::use_iterator U = MRI->use_begin(DstReg),
         E = MRI->use_end(); U != E;) {
      MachineOperand &MO = U.getOperand();
      unsigned OpNo = U.getOperandNo();
      MachineInstr *MI = MO.getParent();
      ++U;
      // Do not replace if it is a phi's operand or is tied to def operand.
      if (MI->isPHI() || MI->isRegTiedToDefOperand(OpNo) || MI->isPseudo())
        continue;
      MO.setReg(ZeroReg);
    }
    return true;
  }
#endif
  //End RISCV
  // PC-relative address matching routines used by RISCVOperands.td.
  bool selectPCRelAddress(SDValue Addr, SDValue &Target) {
    if (Addr.getOpcode() == RISCVISD::PCREL_WRAPPER) {
      Target = Addr.getOperand(0);
      return true;
    }
    return false;
  }
  // If Op0 is null, then Node is a constant that can be loaded using:
  //
  //   (Opcode UpperVal LowerVal)
  //
  // If Op0 is nonnull, then Node can be implemented using:
  //
  //   (Opcode (Opcode Op0 UpperVal) LowerVal)
  SDNode *splitLargeImmediate(unsigned Opcode, SDNode *Node, SDValue Op0,
                              uint64_t UpperVal, uint64_t LowerVal);
public:
  RISCVDAGToDAGISel(RISCVTargetMachine &TM, CodeGenOpt::Level OptLevel)
    : SelectionDAGISel(TM, OptLevel),
      Lowering(*TM.getTargetLowering()),
      Subtarget(*TM.getSubtargetImpl()) { }
  // Override MachineFunctionPass.
  const char *getPassName() const override {
    return "RISCV DAG->DAG Pattern Instruction Selection";
  }
  // Override SelectionDAGISel.
  virtual bool runOnMachineFunction(MachineFunction &MF);
  virtual SDNode *Select(SDNode *Node) override;
  virtual void processFunctionAfterISel(MachineFunction &MF);
  virtual bool SelectInlineAsmMemoryOperand(const SDValue &Op,
                                            char ConstraintCode,
                                            std::vector<SDValue> &OutOps)
    override;
  // Include the pieces autogenerated from the target description.
#include "RISCVGenDAGISel.inc"
};
} // end anonymous namespace
// Run normal SelectionDAG instruction selection, then give the target a
// chance to post-process the selected function.  The base-class result is
// captured first so the post-processing always runs.
bool RISCVDAGToDAGISel::runOnMachineFunction(MachineFunction &MF) {
  bool ret = SelectionDAGISel::runOnMachineFunction(MF);
  processFunctionAfterISel(MF);
  return ret;
}
// Factory entry point used by the RISCV target to create this ISel pass.
FunctionPass *llvm::createRISCVISelDag(RISCVTargetMachine &TM,
                                 CodeGenOpt::Level OptLevel) {
  return new RISCVDAGToDAGISel(TM, OptLevel);
}
// Decide whether displacement Val is representable in the offset range
// OffR; the answer must hold both for the instruction described by OffR
// and for any pairing instruction.
static bool selectOffset(RISCVAddressingMode::OffRange OffR, int64_t Val) {
  if (OffR == RISCVAddressingMode::Off12Only)
    return isInt<12>(Val);
  llvm_unreachable("Unhandled offset range");
}
// The base or index of AM is equivalent to Op0 + Op1, where IsBase selects
// between the base and index. Try to fold Op1 into AM's displacement.
// Try to fold the constant Op1 into AM's displacement, making Op0 the new
// base.  Fails (returning false) when the combined displacement no longer
// fits the addressing mode's offset range.  IsBase is currently unused but
// kept for interface parity with the base/index distinction.
static bool expandOffset(RISCVAddressingMode &AM, bool IsBase,
                         SDValue Op0, ConstantSDNode *Op1) {
  int64_t Combined = AM.Offset + Op1->getSExtValue();
  if (!selectOffset(AM.OffR, Combined))
    // We could consider forcing the displacement into a register and
    // using it as an index, but it would need to be carefully tuned.
    return false;
  AM.Base = Op0;
  AM.Offset = Combined;
  return true;
}
// Try to simplify AM.Base by folding one level of an ADD (or an equivalent
// base-plus-constant node) into AM's displacement.  Returns true if a fold
// was performed.  IsBase is threaded through to expandOffset but is
// currently unused there.
bool RISCVDAGToDAGISel::expandAddress(RISCVAddressingMode &AM,
                                      bool IsBase) {
  //SDValue N = IsBase ? AM.Base : AM.Index;
  SDValue N = AM.Base;
  unsigned Opcode = N.getOpcode();
  // Look through a truncation to reach the underlying address computation.
  if (Opcode == ISD::TRUNCATE) {
    N = N.getOperand(0);
    Opcode = N.getOpcode();
  }
  if (Opcode == ISD::ADD || CurDAG->isBaseWithConstantOffset(N)) {
    SDValue Op0 = N.getOperand(0);
    SDValue Op1 = N.getOperand(1);
    unsigned Op0Code = Op0->getOpcode();
    unsigned Op1Code = Op1->getOpcode();
    // Whichever operand is a constant gets folded into the displacement;
    // the other operand becomes the new base.
    if (Op0Code == ISD::Constant)
      return expandOffset(AM, IsBase, Op1, cast<ConstantSDNode>(Op0));
    if (Op1Code == ISD::Constant)
      return expandOffset(AM, IsBase, Op0, cast<ConstantSDNode>(Op1));
  }
  return false;
}
// Return true if an instruction with displacement range DR should be
// used for displacement value Val. selectDisp(DR, Val) must already hold.
// Decide whether an instruction with displacement range OffR should be used
// for displacement Val.  Caller must already have checked selectOffset.
// With the single 12-bit range there is no pairing decision to make, so any
// already-selectable displacement is acceptable.
static bool isValidOffset(RISCVAddressingMode::OffRange OffR, int64_t Val) {
  assert(selectOffset(OffR, Val) && "Invalid displacement");
  if (OffR == RISCVAddressingMode::Off12Only)
    return true;
  llvm_unreachable("Unhandled displacement range");
}
// Return true if Addr is suitable for AM, updating AM if so.
// Return true if Addr is representable by the addressing mode AM, updating
// AM's base/offset fields on success.
bool RISCVDAGToDAGISel::selectAddress(SDValue Addr,
                                      RISCVAddressingMode &AM) {
  // Start out assuming that the address will need to be loaded separately,
  // then try to extend it as much as we can.
  AM.Base = Addr;
  // First try treating the address as a constant.
  // NOTE(review): the empty body below relies solely on expandOffset's side
  // effect of updating AM; confirm no further expansion (e.g. expandAddress)
  // was intended here.
  if (Addr.getOpcode() == ISD::Constant &&
      expandOffset(AM, true, SDValue(), cast<ConstantSDNode>(Addr)))
  { }
  // Reject cases where the other instruction in a pair should be used.
  if (!isValidOffset(AM.OffR, AM.Offset))
    return false;
  DEBUG(AM.dump());
  return true;
}
// Insert a node into the DAG at least before Pos. This will reposition
// the node as needed, and will assign it a node ID that is <= Pos's ID.
// Note that this does *not* preserve the uniqueness of node IDs!
// The selection DAG must no longer depend on their uniqueness when this
// function is used.
// Insert N into the DAG at least before Pos, repositioning it and assigning
// a node ID <= Pos's ID.  Note this does *not* preserve node-ID uniqueness;
// the selection DAG must no longer depend on unique IDs when this is used.
static void insertDAGNode(SelectionDAG *DAG, SDNode *Pos, SDValue N) {
  SDNode *NewNode = N.getNode();
  // Already placed at or before Pos: nothing to do.
  if (NewNode->getNodeId() != -1 &&
      NewNode->getNodeId() <= Pos->getNodeId())
    return;
  DAG->RepositionNode(Pos, NewNode);
  NewNode->setNodeId(Pos->getNodeId());
}
// Decompose the addressing mode AM into the (Base, Offset) operand pair
// expected by the selected instruction patterns, legalizing the base node
// (missing base, frame index, or mismatched width) as needed.
void RISCVDAGToDAGISel::getAddressOperands(const RISCVAddressingMode &AM,
                                           EVT VT, SDValue &Base,
                                           SDValue &Offset) {
  Base = AM.Base;
  if (!Base.getNode())
    // Register 0 means "no base". This is mostly useful for shifts.
    Base = CurDAG->getRegister(0, VT);
  else if (Base.getOpcode() == ISD::FrameIndex) {
    // Lower a FrameIndex to a TargetFrameIndex.
    int64_t FrameIndex = cast<FrameIndexSDNode>(Base)->getIndex();
    Base = CurDAG->getTargetFrameIndex(FrameIndex, VT);
  } else if (Base.getValueType() != VT) {
    // Truncate values from i64 to i32, for shifts.
    assert(VT == MVT::i32 && Base.getValueType() == MVT::i64 &&
           "Unexpected truncation");
    SDLoc SL(Base.getNode());
    SDValue Trunc = CurDAG->getNode(ISD::TRUNCATE, SL, VT, Base);
    // Keep the new truncate positioned before its user in the DAG.
    insertDAGNode(CurDAG, Base.getNode(), Trunc);
    Base = Trunc;
  }
  // Lower the displacement to a TargetConstant.
  Offset = CurDAG->getTargetConstant(AM.Offset, VT);
}
// Materialize a constant that cannot be encoded in one instruction: build
// the upper part (optionally combined with Op0), select it eagerly, then
// combine it with the lower part using Opcode.  Returns the combining node.
SDNode *RISCVDAGToDAGISel::splitLargeImmediate(unsigned Opcode, SDNode *Node,
                                               SDValue Op0, uint64_t UpperVal,
                                               uint64_t LowerVal) {
  EVT VT = Node->getValueType(0);
  SDLoc SL( Node );
  SDValue Upper = CurDAG->getConstant(UpperVal, VT);
  if (Op0.getNode())
    Upper = CurDAG->getNode(Opcode, SL, VT, Op0, Upper);
  // Select the upper portion now so the final node is built on machine ops.
  Upper = SDValue(Select(Upper.getNode()), 0);
  SDValue Lower = CurDAG->getConstant(LowerVal, VT);
  SDValue Or = CurDAG->getNode(Opcode, SL, VT, Upper, Lower);
  return Or.getNode();
}
// Main selection entry point: handle a few RISCV-specific nodes by hand and
// defer everything else to the TableGen-generated matcher (SelectCode).
SDNode *RISCVDAGToDAGISel::Select(SDNode *Node) {
  SDLoc SL( Node );
  // Dump information about the Node being selected
  DEBUG(errs() << "Selecting: "; Node->dump(CurDAG); errs() << "\n");
  // If we have a custom node, we already have selected!
  if (Node->isMachineOpcode()) {
    DEBUG(errs() << "== "; Node->dump(CurDAG); errs() << "\n");
    return 0;
  }
  unsigned Opcode = Node->getOpcode();
  switch (Opcode) {
  case ISD::FrameIndex: {
    // Materialize a frame index as ADDI(frame-slot, 0), using the 64-bit
    // opcode and value type on RV64.
    SDValue imm = CurDAG->getTargetConstant(0, Subtarget.isRV64() ? MVT::i64 : MVT::i32);
    int FI = cast<FrameIndexSDNode>(Node)->getIndex();
    unsigned Opc = Subtarget.isRV64() ? RISCV::ADDI64 : RISCV::ADDI;
    EVT VT = Subtarget.isRV64() ? MVT::i64 : MVT::i32;
    SDValue TFI = CurDAG->getTargetFrameIndex(FI, VT );
    if(Node->hasOneUse()) //don't create a new node just morph this one
      return CurDAG->SelectNodeTo(Node, Opc, VT, TFI, imm);
    return CurDAG->getMachineNode(Opc, SL, VT, TFI, imm);
  }
  }//end special selections
  // Select the default instruction
  SDNode *ResNode = SelectCode(Node);
  DEBUG(errs() << "=> ";
        if (ResNode == NULL || ResNode == Node)
          Node->dump(CurDAG);
        else
          ResNode->dump(CurDAG);
        errs() << "\n";
        );
  return ResNode;
}
// Lower an inline-asm memory operand ('m' constraint) into the Base/Offset
// operand pair used by RISCV memory instructions.  Returns false on success
// per the SelectionDAGISel convention.
bool RISCVDAGToDAGISel::
SelectInlineAsmMemoryOperand(const SDValue &Op,
                             char ConstraintCode,
                             std::vector<SDValue> &OutOps) {
  assert(ConstraintCode == 'm' && "Unexpected constraint code");
  SDValue Base, Offset;
  // NOTE(review): selectMemRegAddr is not visible in this chunk — presumably
  // declared alongside the other address-matching helpers; confirm.
  selectMemRegAddr(Op, Base, Offset);
  OutOps.push_back(Base);
  OutOps.push_back(Offset);
  return false;
}
// Post-selection fixup hook.  The per-instruction processing is currently
// disabled (commented out); the traversal is kept as a placeholder for it.
void RISCVDAGToDAGISel::processFunctionAfterISel(MachineFunction &MF) {
  for (MachineBasicBlock &MBB : MF)
    for (MachineBasicBlock::iterator I = MBB.begin(); I != MBB.end(); ++I) {
      //replaceUsesWithZeroReg(MRI, *I);
    }
}
| {
"content_hash": "5a4271bd6493ac7d8d205b84d28e7a97",
"timestamp": "",
"source": "github",
"line_count": 423,
"max_line_length": 89,
"avg_line_length": 33.59574468085106,
"alnum_prop": 0.634156639223137,
"repo_name": "dededong/goblin-core",
"id": "aaac1fde6ff4cdbc6aa019e72afd6a31ea1fc8b8",
"size": "14970",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "riscv/llvm/3.5/llvm-3.5.0.src/lib/Target/RISCV/RISCVISelDAGToDAG.cpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "1429"
},
{
"name": "Assembly",
"bytes": "37219664"
},
{
"name": "Awk",
"bytes": "1296"
},
{
"name": "Batchfile",
"bytes": "31924"
},
{
"name": "C",
"bytes": "121615973"
},
{
"name": "C#",
"bytes": "12418"
},
{
"name": "C++",
"bytes": "125512310"
},
{
"name": "CMake",
"bytes": "710738"
},
{
"name": "CSS",
"bytes": "43924"
},
{
"name": "Common Lisp",
"bytes": "65656"
},
{
"name": "Cuda",
"bytes": "12393"
},
{
"name": "D",
"bytes": "21699665"
},
{
"name": "DIGITAL Command Language",
"bytes": "53633"
},
{
"name": "DTrace",
"bytes": "8508630"
},
{
"name": "E",
"bytes": "3290"
},
{
"name": "Eiffel",
"bytes": "2314"
},
{
"name": "Elixir",
"bytes": "314"
},
{
"name": "Emacs Lisp",
"bytes": "41146"
},
{
"name": "FORTRAN",
"bytes": "377751"
},
{
"name": "Forth",
"bytes": "4188"
},
{
"name": "GAP",
"bytes": "21991"
},
{
"name": "GDScript",
"bytes": "54941"
},
{
"name": "Gnuplot",
"bytes": "446"
},
{
"name": "Groff",
"bytes": "1974816"
},
{
"name": "HTML",
"bytes": "1118040"
},
{
"name": "JavaScript",
"bytes": "24233"
},
{
"name": "LLVM",
"bytes": "48362057"
},
{
"name": "Lex",
"bytes": "598400"
},
{
"name": "Limbo",
"bytes": "755"
},
{
"name": "M",
"bytes": "2548"
},
{
"name": "Makefile",
"bytes": "6901261"
},
{
"name": "Mathematica",
"bytes": "5497"
},
{
"name": "Matlab",
"bytes": "54444"
},
{
"name": "Mercury",
"bytes": "1222"
},
{
"name": "OCaml",
"bytes": "748821"
},
{
"name": "Objective-C",
"bytes": "4995681"
},
{
"name": "Objective-C++",
"bytes": "1419213"
},
{
"name": "Perl",
"bytes": "881547"
},
{
"name": "Perl6",
"bytes": "80156"
},
{
"name": "PicoLisp",
"bytes": "31994"
},
{
"name": "Pure Data",
"bytes": "22171"
},
{
"name": "Python",
"bytes": "1375992"
},
{
"name": "R",
"bytes": "627855"
},
{
"name": "Rebol",
"bytes": "51929"
},
{
"name": "Scheme",
"bytes": "4296232"
},
{
"name": "Shell",
"bytes": "1994645"
},
{
"name": "Standard ML",
"bytes": "5682"
},
{
"name": "SuperCollider",
"bytes": "734239"
},
{
"name": "Tcl",
"bytes": "2234"
},
{
"name": "TeX",
"bytes": "601780"
},
{
"name": "VimL",
"bytes": "26411"
},
{
"name": "Yacc",
"bytes": "769886"
}
],
"symlink_target": ""
} |
package org.opencb.opencga.storage.hadoop.variant.stats;
import org.opencb.biodata.models.variant.metadata.Aggregation;
import org.opencb.commons.datastore.core.ObjectMap;
import org.opencb.commons.datastore.core.QueryOptions;
import org.opencb.opencga.storage.core.exceptions.StorageEngineException;
import org.opencb.opencga.storage.core.metadata.VariantStorageMetadataManager;
import org.opencb.opencga.storage.core.metadata.models.StudyMetadata;
import org.opencb.opencga.storage.core.variant.VariantStorageOptions;
import org.opencb.opencga.storage.core.variant.adaptors.VariantQueryException;
import org.opencb.opencga.storage.core.variant.stats.VariantStatisticsManager;
import org.opencb.opencga.storage.hadoop.variant.adaptors.VariantHadoopDBAdaptor;
import org.opencb.opencga.storage.hadoop.variant.executors.MRExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
/**
* Created on 14/11/17.
*
* @author Jacobo Coll <[email protected]>
*/
public class HadoopMRVariantStatisticsManager extends VariantStatisticsManager {

    // Access to the Hadoop/HBase variant tables and their metadata manager.
    private final VariantHadoopDBAdaptor dbAdaptor;
    private final Logger logger = LoggerFactory.getLogger(HadoopMRVariantStatisticsManager.class);
    // Executor used to launch the VariantStatsDriver MapReduce job.
    private final MRExecutor mrExecutor;
    // Base configuration; per-call options are layered on top of a copy of these.
    private final ObjectMap baseOptions;

    public HadoopMRVariantStatisticsManager(VariantHadoopDBAdaptor dbAdaptor, MRExecutor mrExecutor, ObjectMap options) {
        this.dbAdaptor = dbAdaptor;
        this.mrExecutor = mrExecutor;
        baseOptions = options;
    }

    /**
     * Calculates variant statistics for the given cohorts of a study by
     * launching the {@link VariantStatsDriver} MapReduce job.
     *
     * @param study        study name or id; must exist in the metadata.
     * @param cohorts      cohorts to calculate statistics for.
     * @param inputOptions extra options, merged over (and overriding) the base options.
     * @throws IOException            if the MapReduce execution fails with an I/O error.
     * @throws StorageEngineException on storage-layer errors (e.g. pre/post-calculation checks).
     */
    @Override
    public void calculateStatistics(String study, List<String> cohorts, QueryOptions inputOptions)
            throws IOException, StorageEngineException {
        QueryOptions options = new QueryOptions(baseOptions);
        if (inputOptions != null) {
            options.putAll(inputOptions);
        }
        VariantStorageMetadataManager metadataManager = dbAdaptor.getMetadataManager();
        StudyMetadata sm = metadataManager.getStudyMetadata(study);
        if (sm == null) {
            throw VariantQueryException.studyNotFound(study);
        }
        // Aggregated studies: propagate the aggregation type and its mapping
        // properties to the MapReduce mapper via the options.
        if (isAggregated(sm, options)) {
            Aggregation aggregation = getAggregation(sm, options);
            VariantStatsMapper.setAggregation(options, aggregation);
            VariantStatsMapper.setAggregationMappingProperties(options, VariantStatisticsManager.getAggregationMappingProperties(options));
//            throw new StorageEngineException("Unsupported calculate aggregated statistics with map-reduce. Please, use "
//                    + HadoopVariantStorageEngine.STATS_LOCAL + '=' + true);
        }
        boolean overwriteStats = options.getBoolean(VariantStorageOptions.STATS_OVERWRITE.key(), false);

//
//        DefaultVariantStatisticsManager.checkAndUpdateStudyConfigurationCohorts(sc, cohorts.stream()
//                .collect(Collectors.toMap(c -> c, c -> Collections.emptySet())), null, updateStats, overwriteStats);
//        dbAdaptor.getStudyConfigurationManager().updateStudyConfiguration(sc, options);
        preCalculateStats(metadataManager, sm, cohorts, overwriteStats, options);

        options.put(VariantStatsDriver.COHORTS, cohorts);
        options.remove(VariantStatsDriver.OUTPUT);

        boolean error = false;
        try {
            String[] args = VariantStatsDriver.buildArgs(
                    dbAdaptor.getTableNameGenerator().getArchiveTableName(sm.getId()),
                    dbAdaptor.getTableNameGenerator().getVariantTableName(),
                    sm.getId(), Collections.emptyList(), options);
            mrExecutor.run(VariantStatsDriver.class, args, "Calculate stats of cohorts " + cohorts);
        } catch (Exception e) {
            error = true;
            throw e;
        } finally {
            // Always tell the metadata layer whether the job failed, even
            // when rethrowing the exception.
            postCalculateStats(metadataManager, sm, cohorts, error);
        }
        dbAdaptor.updateStatsColumns(sm);
    }
}
| {
"content_hash": "c239b95487906b04ea155cd514f3ba0b",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 139,
"avg_line_length": 46.43023255813954,
"alnum_prop": 0.7275231655396944,
"repo_name": "opencb/opencga",
"id": "6efa3f3fadc6a1c2fe65e1c14cb46a5638b59db6",
"size": "3993",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "opencga-storage/opencga-storage-hadoop/opencga-storage-hadoop-core/src/main/java/org/opencb/opencga/storage/hadoop/variant/stats/HadoopMRVariantStatisticsManager.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "13743"
},
{
"name": "HTML",
"bytes": "1001"
},
{
"name": "Java",
"bytes": "18854348"
},
{
"name": "JavaScript",
"bytes": "631302"
},
{
"name": "Jupyter Notebook",
"bytes": "6290799"
},
{
"name": "Makefile",
"bytes": "3567"
},
{
"name": "Mustache",
"bytes": "9851"
},
{
"name": "Python",
"bytes": "599379"
},
{
"name": "R",
"bytes": "430511"
},
{
"name": "Shell",
"bytes": "131224"
},
{
"name": "Smarty",
"bytes": "2215"
}
],
"symlink_target": ""
} |
package gov.nara.nwts.ftapp.filter;
/**
* Filter for AV files
* @author TBrady
*
*/
public class AVFileTestFilter extends DefaultFileTestFilter {

    /** Regular expression matching common audio/video file extensions. */
    private static final String AV_SUFFIX_PATTERN =
            ".*(\\.wav|\\.mov|\\.mp3|\\.avi|\\.dpx|\\.mxf)$";

    /** @return regular expression matching the supported AV file suffixes */
    public String getSuffix() {
        return AV_SUFFIX_PATTERN;
    }

    /** @return true, since {@link #getSuffix()} yields a regular expression */
    public boolean isReSuffix() {
        return true;
    }

    /** @return display name of this filter */
    public String getName() {
        return "AV Files";
    }
}
| {
"content_hash": "bfd051508554add96f3d2af18646bfa5",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 61,
"avg_line_length": 20.055555555555557,
"alnum_prop": 0.6149584487534626,
"repo_name": "Georgetown-University-Libraries/File-Analyzer",
"id": "3f3c2ceb97d72abf97b6346e946278eb00c863ef",
"size": "361",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/src/main/gov/nara/nwts/ftapp/filter/AVFileTestFilter.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "10840"
},
{
"name": "Java",
"bytes": "517007"
},
{
"name": "XSLT",
"bytes": "43920"
}
],
"symlink_target": ""
} |
require "set"
require_relative "thor/base"
class Bundler::Thor
  class << self
    # Allows for custom "Command" package naming.
    #
    # === Parameters
    # name<String>
    # options<Hash>
    #
    def package_name(name, _ = {})
      # An empty or nil name clears the package name.
      @package_name = name.nil? || name == "" ? nil : name
    end

    # Sets the default command when thor is executed without an explicit command to be called.
    #
    # ==== Parameters
    # meth<Symbol>:: name of the default command
    #
    def default_command(meth = nil)
      if meth
        @default_command = meth == :none ? "help" : meth.to_s
      else
        @default_command ||= from_superclass(:default_command, "help")
      end
    end
    alias_method :default_task, :default_command

    # Registers another Bundler::Thor subclass as a command.
    #
    # ==== Parameters
    # klass<Class>:: Bundler::Thor subclass to register
    # command<String>:: Subcommand name to use
    # usage<String>:: Short usage for the subcommand
    # description<String>:: Description for the subcommand
    def register(klass, subcommand_name, usage, description, options = {})
      if klass <= Bundler::Thor::Group
        desc usage, description, options
        define_method(subcommand_name) { |*args| invoke(klass, args) }
      else
        desc usage, description, options
        subcommand subcommand_name, klass
      end
    end

    # Defines the usage and the description of the next command.
    #
    # ==== Parameters
    # usage<String>
    # description<String>
    # options<String>
    #
    def desc(usage, description, options = {})
      if options[:for]
        # Update an already-defined command instead of the next one.
        command = find_and_refresh_command(options[:for])
        command.usage = usage if usage
        command.description = description if description
      else
        @usage = usage
        @desc = description
        @hide = options[:hide] || false
      end
    end

    # Defines the long description of the next command.
    #
    # ==== Parameters
    # long description<String>
    #
    def long_desc(long_description, options = {})
      if options[:for]
        command = find_and_refresh_command(options[:for])
        command.long_description = long_description if long_description
      else
        @long_desc = long_description
      end
    end

    # Maps an input to a command. If you define:
    #
    #   map "-T" => "list"
    #
    # Running:
    #
    #   thor -T
    #
    # Will invoke the list command.
    #
    # ==== Parameters
    # Hash[String|Array => Symbol]:: Maps the string or the strings in the array to the given command.
    #
    def map(mappings = nil, **kw)
      @map ||= from_superclass(:map, {})

      # Accept mappings both as a positional hash and as keyword arguments.
      if mappings && !kw.empty?
        mappings = kw.merge!(mappings)
      else
        mappings ||= kw
      end
      if mappings
        mappings.each do |key, value|
          if key.respond_to?(:each)
            key.each { |subkey| @map[subkey] = value }
          else
            @map[key] = value
          end
        end
      end
      @map
    end

    # Declares the options for the next command to be declared.
    #
    # ==== Parameters
    # Hash[Symbol => Object]:: The hash key is the name of the option and the value
    # is the type of the option. Can be :string, :array, :hash, :boolean, :numeric
    # or :required (string). If you give a value, the type of the value is used.
    #
    def method_options(options = nil)
      @method_options ||= {}
      build_options(options, @method_options) if options
      @method_options
    end

    alias_method :options, :method_options

    # Adds an option to the set of method options. If :for is given as option,
    # it allows you to change the options from a previous defined command.
    #
    #   def previous_command
    #     # magic
    #   end
    #
    #   method_option :foo => :bar, :for => :previous_command
    #
    #   def next_command
    #     # magic
    #   end
    #
    # ==== Parameters
    # name<Symbol>:: The name of the argument.
    # options<Hash>:: Described below.
    #
    # ==== Options
    # :desc     - Description for the argument.
    # :required - If the argument is required or not.
    # :default  - Default value for this argument. It cannot be required and have default values.
    # :aliases  - Aliases for this option.
    # :type     - The type of the argument, can be :string, :hash, :array, :numeric or :boolean.
    # :banner   - String to show on usage notes.
    # :hide     - If you want to hide this option from the help.
    #
    def method_option(name, options = {})
      scope = if options[:for]
        find_and_refresh_command(options[:for]).options
      else
        method_options
      end

      build_option(name, options, scope)
    end
    alias_method :option, :method_option

    # Prints help information for the given command.
    #
    # ==== Parameters
    # shell<Bundler::Thor::Shell>
    # command_name<String>
    #
    def command_help(shell, command_name)
      meth = normalize_command_name(command_name)
      command = all_commands[meth]
      handle_no_command_error(meth) unless command

      shell.say "Usage:"
      shell.say "  #{banner(command).split("\n").join("\n  ")}"
      shell.say
      class_options_help(shell, nil => command.options.values)
      if command.long_description
        shell.say "Description:"
        shell.print_wrapped(command.long_description, :indent => 2)
      else
        shell.say command.description
      end
    end
    alias_method :task_help, :command_help

    # Prints help information for this class.
    #
    # ==== Parameters
    # shell<Bundler::Thor::Shell>
    #
    def help(shell, subcommand = false)
      list = printable_commands(true, subcommand)
      # Include commands from any nested Thor classes as well.
      Bundler::Thor::Util.thor_classes_in(self).each do |klass|
        list += klass.printable_commands(false)
      end
      list.sort! { |a, b| a[0] <=> b[0] }

      if defined?(@package_name) && @package_name
        shell.say "#{@package_name} commands:"
      else
        shell.say "Commands:"
      end

      shell.print_table(list, :indent => 2, :truncate => true)
      shell.say
      class_options_help(shell)
    end

    # Returns commands ready to be printed.
    def printable_commands(all = true, subcommand = false)
      (all ? all_commands : commands).map do |_, command|
        next if command.hidden?
        item = []
        item << banner(command, false, subcommand)
        item << (command.description ? "# #{command.description.gsub(/\s+/m, ' ')}" : "")
        item
      end.compact
    end
    alias_method :printable_tasks, :printable_commands

    # Names of the subcommands registered on this class (inherited from the
    # superclass when not yet defined here).
    def subcommands
      @subcommands ||= from_superclass(:subcommands, [])
    end
    alias_method :subtasks, :subcommands

    # Map of subcommand name (String) to the Thor class handling it.
    def subcommand_classes
      @subcommand_classes ||= {}
    end

    # Registers +subcommand_class+ under +subcommand+ and defines a method of
    # the same name that forwards invocation (including --help handling and
    # parent class options) to that class.
    def subcommand(subcommand, subcommand_class)
      subcommands << subcommand.to_s
      subcommand_class.subcommand_help subcommand
      subcommand_classes[subcommand.to_s] = subcommand_class

      define_method(subcommand) do |*args|
        args, opts = Bundler::Thor::Arguments.split(args)
        invoke_args = [args, opts, {:invoked_via_subcommand => true, :class_options => options}]
        # Translate a trailing --help/-h into invoking the subcommand's help.
        invoke_args.unshift "help" if opts.delete("--help") || opts.delete("-h")
        invoke subcommand_class, *invoke_args
      end
      subcommand_class.commands.each do |_meth, command|
        command.ancestor_name = subcommand
      end
    end
    alias_method :subtask, :subcommand

    # Extend check unknown options to accept a hash of conditions.
    #
    # === Parameters
    # options<Hash>: A hash containing :only and/or :except keys
    def check_unknown_options!(options = {})
      @check_unknown_options ||= {}
      options.each do |key, value|
        if value
          @check_unknown_options[key] = Array(value)
        else
          @check_unknown_options.delete(key)
        end
      end
      @check_unknown_options
    end

    # Overwrite check_unknown_options? to take subcommands and options into account.
    def check_unknown_options?(config) #:nodoc:
      options = check_unknown_options
      return false unless options

      command = config[:current_command]
      return true unless command

      name = command.name

      if subcommands.include?(name)
        false
      elsif options[:except]
        !options[:except].include?(name.to_sym)
      elsif options[:only]
        options[:only].include?(name.to_sym)
      else
        true
      end
    end

    # Stop parsing of options as soon as an unknown option or a regular
    # argument is encountered.  All remaining arguments are passed to the command.
    # This is useful if you have a command that can receive arbitrary additional
    # options, and where those additional options should not be handled by
    # Bundler::Thor.
    #
    # ==== Example
    #
    # To better understand how this is useful, let's consider a command that calls
    # an external command.  A user may want to pass arbitrary options and
    # arguments to that command.  The command itself also accepts some options,
    # which should be handled by Bundler::Thor.
    #
    #   class_option "verbose",  :type => :boolean
    #   stop_on_unknown_option! :exec
    #   check_unknown_options!  :except => :exec
    #
    #   desc "exec", "Run a shell command"
    #   def exec(*args)
    #     puts "diagnostic output" if options[:verbose]
    #     Kernel.exec(*args)
    #   end
    #
    # Here +exec+ can be called with +--verbose+ to get diagnostic output,
    # e.g.:
    #
    #   $ thor exec --verbose echo foo
    #   diagnostic output
    #   foo
    #
    # But if +--verbose+ is given after +echo+, it is passed to +echo+ instead:
    #
    #   $ thor exec echo --verbose foo
    #   --verbose foo
    #
    # ==== Parameters
    # Symbol ...:: A list of commands that should be affected.
    def stop_on_unknown_option!(*command_names)
      stop_on_unknown_option.merge(command_names)
    end

    # True when +command+ was registered via stop_on_unknown_option!.
    def stop_on_unknown_option?(command) #:nodoc:
      command && stop_on_unknown_option.include?(command.name.to_sym)
    end

    # Disable the check for required options for the given commands.
    # This is useful if you have a command that does not need the required options
    # to work, like help.
    #
    # ==== Parameters
    # Symbol ...:: A list of commands that should be affected.
    def disable_required_check!(*command_names)
      disable_required_check.merge(command_names)
    end

    # True when +command+ was registered via disable_required_check!.
    def disable_required_check?(command) #:nodoc:
      command && disable_required_check.include?(command.name.to_sym)
    end

    # Emits a deprecation warning unless silenced via the
    # THOR_SILENCE_DEPRECATION environment variable.
    def deprecation_warning(message) #:nodoc:
      unless ENV['THOR_SILENCE_DEPRECATION']
        warn "Deprecation warning: #{message}\n" +
             'You can silence deprecations warning by setting the environment variable THOR_SILENCE_DEPRECATION.'
      end
    end

  protected

    # Set of command names (Symbols) affected by stop_on_unknown_option!.
    def stop_on_unknown_option #:nodoc:
      @stop_on_unknown_option ||= Set.new
    end

    # help command has the required check disabled by default.
    def disable_required_check #:nodoc:
      @disable_required_check ||= Set.new([:help])
    end

    # The method responsible for dispatching given the args.
    def dispatch(meth, given_args, given_opts, config) #:nodoc: # rubocop:disable MethodLength
      meth ||= retrieve_command_name(given_args)
      command = all_commands[normalize_command_name(meth)]

      if !command && config[:invoked_via_subcommand]
        # We're a subcommand and our first argument didn't match any of our
        # commands. So we put it back and call our default command.
        given_args.unshift(meth)
        command = all_commands[normalize_command_name(default_command)]
      end

      if command
        args, opts = Bundler::Thor::Options.split(given_args)

        if stop_on_unknown_option?(command) && !args.empty?
          # given_args starts with a non-option, so we treat everything as
          # ordinary arguments
          args.concat opts
          opts.clear
        end
      else
        # Unknown command: fall back to a dynamic command so method_missing
        # style handling (or a proper error) can take over.
        args = given_args
        opts = nil
        command = dynamic_command_class.new(meth)
      end

      opts = given_opts || opts || []
      config[:current_command] = command
      config[:command_options] = command.options

      instance = new(args, opts, config)
      yield instance if block_given?
      args = instance.args
      # Everything beyond the declared class arguments is handed to the command.
      trailing = args[Range.new(arguments.size, -1)]
      instance.invoke_command(command, trailing || [])
    end

    # The banner for this class. You can customize it if you are invoking the
    # thor class by another ways which is not the Bundler::Thor::Runner. It receives
    # the command that is going to be invoked and a boolean which indicates if
    # the namespace should be displayed as arguments.
    #
    def banner(command, namespace = nil, subcommand = false)
      $thor_runner ||= false
      command.formatted_usage(self, $thor_runner, subcommand).split("\n").map do |formatted_usage|
        "#{basename} #{formatted_usage}"
      end.join("\n")
    end

    def baseclass #:nodoc:
      Bundler::Thor
    end

    def dynamic_command_class #:nodoc:
      Bundler::Thor::DynamicCommand
    end

    # Builds a Command from the usage/description accumulated by desc and
    # friends, or warns when a public method is defined without them.
    def create_command(meth) #:nodoc:
      @usage ||= nil
      @desc ||= nil
      @long_desc ||= nil
      @hide ||= nil

      if @usage && @desc
        base_class = @hide ? Bundler::Thor::HiddenCommand : Bundler::Thor::Command
        commands[meth] = base_class.new(meth, @desc, @long_desc, @usage, method_options)
        # Reset the accumulated state for the next command definition.
        @usage, @desc, @long_desc, @method_options, @hide = nil
        true
      elsif all_commands[meth] || meth == "method_missing"
        true
      else
        puts "[WARNING] Attempted to create command #{meth.inspect} without usage or description. " \
             "Call desc if you want this method to be available as command or declare it inside a " \
             "no_commands{} block. Invoked from #{caller[1].inspect}."
        false
      end
    end
    alias_method :create_task, :create_command

    # Once a command class is initialized, method options become class options.
    def initialize_added #:nodoc:
      class_options.merge!(method_options)
      @method_options = nil
    end

    # receives a (possibly nil) command name and returns a name that is in
    # the commands hash. In addition to normalizing aliases, this logic
    # will determine if a shortened command is an unambiguous substring of
    # a command or alias.
    #
    # +normalize_command_name+ also converts names like +animal-prison+
    # into +animal_prison+.
    def retrieve_command_name(args) #:nodoc:
      meth = args.first.to_s unless args.empty?
      args.shift if meth && (map[meth] || meth !~ /^\-/)
    end
    alias_method :retrieve_task_name, :retrieve_command_name

    def normalize_command_name(meth) #:nodoc:
      return default_command.to_s.tr("-", "_") unless meth

      possibilities = find_command_possibilities(meth)
      raise AmbiguousTaskError, "Ambiguous command #{meth} matches [#{possibilities.join(', ')}]" if possibilities.size > 1

      if possibilities.empty?
        meth ||= default_command
      elsif map[meth]
        meth = map[meth]
      else
        meth = possibilities.first
      end

      meth.to_s.tr("-", "_") # treat foo-bar as foo_bar
    end
    alias_method :normalize_task_name, :normalize_command_name

    # this is the logic that takes the command name passed in by the user
    # and determines whether it is an unambiguous substrings of a command or
    # alias name.
    def find_command_possibilities(meth)
      len = meth.to_s.length
      possibilities = all_commands.merge(map).keys.select { |n| meth == n[0, len] }.sort
      unique_possibilities = possibilities.map { |k| map[k] || k }.uniq

      if possibilities.include?(meth)
        [meth]
      elsif unique_possibilities.size == 1
        unique_possibilities
      else
        possibilities
      end
    end
    alias_method :find_task_possibilities, :find_command_possibilities

    # Defines a help command on a subcommand class so that `parent sub help`
    # behaves like `parent help sub`.
    def subcommand_help(cmd)
      desc "help [COMMAND]", "Describe subcommands or one specific subcommand"
      class_eval "
        def help(command = nil, subcommand = true); super; end
"
    end
    alias_method :subtask_help, :subcommand_help
  end

  include Bundler::Thor::Base

  map HELP_MAPPINGS => :help

  desc "help [COMMAND]", "Describe available commands or one specific command"
  # Instance-level help: delegates either to a subcommand class's help or to
  # the class-level help for this class / a single command.
  def help(command = nil, subcommand = false)
    if command
      if self.class.subcommands.include? command
        self.class.subcommand_classes[command].help(shell, true)
      else
        self.class.command_help(shell, command)
      end
    else
      self.class.help(shell, subcommand)
    end
  end
end
| {
"content_hash": "187b476cfc159d4a47bb985e67c4d99c",
"timestamp": "",
"source": "github",
"line_count": 524,
"max_line_length": 123,
"avg_line_length": 31.982824427480917,
"alnum_prop": 0.6217554746703264,
"repo_name": "bundler/bundler",
"id": "01c0b2f83c8f68401e50c4209d7d75999a2d8d41",
"size": "16759",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lib/bundler/vendor/thor/lib/thor.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "1848561"
},
{
"name": "Shell",
"bytes": "131"
}
],
"symlink_target": ""
} |
<?php
class Documents_DetailView_Model extends Vtiger_DetailView_Model {

	/**
	 * Function to get the detail view links (links and widgets)
	 * @param <array> $linkParams - parameters which will be used to calculate the params
	 * @return <array> - array of link models in the format as below
	 *                   array('linktype'=>list of link models);
	 */
	public function getDetailViewLinks($linkParams) {
		$currentUserModel = Users_Privileges_Model::getCurrentUserPrivilegesModel();
		$linkModelList = parent::getDetailViewLinks($linkParams);
		$recordModel = $this->getRecord();

		// An internal ('I') file can be downloaded or emailed only when it is
		// active and actually has a file name.
		$hasInternalFile = $recordModel->get('filestatus')
			&& $recordModel->get('filename')
			&& $recordModel->get('filelocationtype') === 'I';

		if ($hasInternalFile) {
			$linkModelList['DETAILVIEW'][] = Vtiger_Link_Model::getInstanceFromValues(array(
				'linktype' => 'DETAILVIEW',
				'linklabel' => 'LBL_DOWNLOAD_FILE',
				'linkurl' => $recordModel->getDownloadFileURL(),
				'linkicon' => ''
			));
		}

		// Integrity check is offered regardless of file location/state.
		$linkModelList['DETAILVIEW'][] = Vtiger_Link_Model::getInstanceFromValues(array(
			'linktype' => 'DETAILVIEW',
			'linklabel' => 'LBL_CHECK_FILE_INTEGRITY',
			'linkurl' => $recordModel->checkFileIntegrityURL(),
			'linkicon' => ''
		));

		if ($hasInternalFile) {
			$emailModuleModel = Vtiger_Module_Model::getInstance('Emails');
			// Only users with access to the Emails module may send attachments.
			if ($currentUserModel->hasModulePermission($emailModuleModel->getId())) {
				$linkModelList['DETAILVIEW'][] = Vtiger_Link_Model::getInstanceFromValues(array(
					'linktype' => 'DETAILVIEW',
					'linklabel' => 'LBL_EMAIL_FILE_AS_ATTACHMENT',
					'linkurl' => "javascript:Documents_Detail_Js.triggerSendEmail('". ZEND_JSON::encode(array($recordModel->getId())) ."')",
					'linkicon' => ''
				));
			}
		}

		return $linkModelList;
	}
}
| {
"content_hash": "bbda8f86991718606e02c89b30d54f6c",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 126,
"avg_line_length": 37.80769230769231,
"alnum_prop": 0.6703967446592065,
"repo_name": "basiljose1/byjcrm",
"id": "f4a74de54f8d01710ddf20b7257e0ef537fa5f57",
"size": "2492",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "modules/Documents/models/DetailView.php",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "30817"
},
{
"name": "ApacheConf",
"bytes": "1321"
},
{
"name": "Batchfile",
"bytes": "20387"
},
{
"name": "C",
"bytes": "492107"
},
{
"name": "C++",
"bytes": "18023"
},
{
"name": "CSS",
"bytes": "1199491"
},
{
"name": "CoffeeScript",
"bytes": "1232"
},
{
"name": "Groff",
"bytes": "60690"
},
{
"name": "HTML",
"bytes": "1498811"
},
{
"name": "JavaScript",
"bytes": "4770826"
},
{
"name": "Makefile",
"bytes": "8221"
},
{
"name": "PHP",
"bytes": "39287363"
},
{
"name": "Perl",
"bytes": "50950"
},
{
"name": "Ruby",
"bytes": "1074"
},
{
"name": "Shell",
"bytes": "53700"
},
{
"name": "Smarty",
"bytes": "1908263"
},
{
"name": "XSLT",
"bytes": "27654"
},
{
"name": "Yacc",
"bytes": "14820"
}
],
"symlink_target": ""
} |
module Azure::MixedReality::Mgmt::V2019_02_28_preview
  module Models
    #
    # Model object holding the two access keys of a Spatial Anchors account.
    #
    class SpatialAnchorsAccountKeys

      include MsRestAzure

      # @return [String] value of primary key.
      attr_accessor :primary_key

      # @return [String] value of secondary key.
      attr_accessor :secondary_key

      #
      # Mapper for SpatialAnchorsAccountKeys class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        # Both keys share the same shape: an optional, read-only string that
        # only differs in its wire (serialized) name.
        string_key = lambda do |wire_name|
          {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: wire_name,
            type: {
              name: 'String'
            }
          }
        end

        {
          client_side_validation: true,
          required: false,
          serialized_name: 'SpatialAnchorsAccountKeys',
          type: {
            name: 'Composite',
            class_name: 'SpatialAnchorsAccountKeys',
            model_properties: {
              primary_key: string_key.call('primaryKey'),
              secondary_key: string_key.call('secondaryKey')
            }
          }
        }
      end
    end
  end
end
| {
"content_hash": "a7c6c1f49bea971c9e4b02a50a1390e0",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 64,
"avg_line_length": 26.074074074074073,
"alnum_prop": 0.4921875,
"repo_name": "Azure/azure-sdk-for-ruby",
"id": "a76f84d62e4c2d244b658b7482b415287e07e123",
"size": "1572",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "management/azure_mgmt_mixedreality/lib/2019-02-28-preview/generated/azure_mgmt_mixedreality/models/spatial_anchors_account_keys.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "345216400"
},
{
"name": "Shell",
"bytes": "305"
}
],
"symlink_target": ""
} |
package io.aos.address;
import static org.junit.Assert.assertNotNull;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import org.junit.Ignore;
import org.junit.Test;
import org.xbill.DNS.DClass;
import org.xbill.DNS.ExtendedResolver;
import org.xbill.DNS.Message;
import org.xbill.DNS.Name;
import org.xbill.DNS.Record;
import org.xbill.DNS.Resolver;
import org.xbill.DNS.ReverseMap;
import org.xbill.DNS.Section;
import org.xbill.DNS.Type;
@Ignore
public class AddressDnsTest {

    /** Resolves the loopback address and prints the host name it maps to. */
    @Test
    public void test1() throws UnknownHostException {
        InetAddress loopback = InetAddress.getByName("127.0.0.1");
        System.out.println(loopback.getHostName());
    }

    /** Same lookup as test1, but also prints how long the resolution took. */
    @Test
    public void test2() throws UnknownHostException {
        long startMillis = System.currentTimeMillis();
        InetAddress loopback = InetAddress.getByName("127.0.0.1");
        System.out.println(loopback.getHostName());
        long endMillis = System.currentTimeMillis();
        System.out.println((endMillis - startMillis) + " ms");
    }

    /** Reverse-resolves two fixed IPs via dnsjava and prints the elapsed time. */
    @Test
    public void test3() throws IOException {
        long startMillis = System.currentTimeMillis();
        System.out.println(lookupPtrRecord("192.222.1.13"));
        System.out.println(lookupPtrRecord("208.201.239.36"));
        long endMillis = System.currentTimeMillis();
        System.out.println((endMillis - startMillis) + " ms");
    }

    /**
     * Performs a reverse (PTR) DNS lookup for the given IPv4 address.
     *
     * @param hostIp dotted-quad IP address to reverse-resolve
     * @return the text form of the first PTR answer record
     * @throws IOException if the DNS query cannot be sent or answered
     */
    private String lookupPtrRecord(String hostIp) throws IOException {
        Resolver resolver = new ExtendedResolver();
        Name reverseName = ReverseMap.fromAddress(hostIp);
        Record question = Record.newRecord(reverseName, Type.PTR, DClass.IN);
        Message response = resolver.send(Message.newQuery(question));
        Record[] answers = response.getSectionArray(Section.ANSWER);
        assertNotNull(answers[0].rdataToString());
        return answers[0].rdataToString();
    }
}
| {
"content_hash": "6f1b7fcfc86576d855d5b3c46a7c3dd7",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 68,
"avg_line_length": 30.967741935483872,
"alnum_prop": 0.6796875,
"repo_name": "XClouded/t4f-core",
"id": "0faea3e4ae6674f62b039c316175758662375e64",
"size": "3108",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "java/io/src/test/java/io/aos/address/AddressDnsTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package com.yueny.demo.capture;
import org.junit.After;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.AbstractJUnit4SpringContextTests;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath*:config/config-test.xml" })
@ActiveProfiles("dev")
public class BaseBizTest extends AbstractJUnit4SpringContextTests {

    /** Logger bound to the runtime subclass so log lines carry the concrete test's name. */
    protected final Logger log = LoggerFactory.getLogger(getClass());

    /** Writes a start marker to the log before each test method runs. */
    @Before
    public void setUp() throws Exception {
        log.info("测试开始");
    }

    /** Writes an end marker to the log after each test method finishes. */
    @After
    public void tearDown() throws Exception {
        log.info("测试结束");
    }
}
| {
"content_hash": "c1b4247898655983352949cd0ea661d0",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 80,
"avg_line_length": 30.93103448275862,
"alnum_prop": 0.7792642140468228,
"repo_name": "yueny/pra",
"id": "6c8f33f98b2d2475320339236da68411b37a4d00",
"size": "913",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fileReader/src/test/java/com/yueny/demo/capture/BaseBizTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "721477"
},
{
"name": "FreeMarker",
"bytes": "16071"
},
{
"name": "HTML",
"bytes": "72010"
},
{
"name": "Java",
"bytes": "1399374"
},
{
"name": "JavaScript",
"bytes": "824745"
},
{
"name": "Shell",
"bytes": "5967"
}
],
"symlink_target": ""
} |
FROM balenalib/orbitty-tx2-alpine:3.12-build
# Alpine 3.12 build-variant base image for the Orbitty TX2 (aarch64).
# remove several traces of python
RUN apk del python*
# http://bugs.python.org/issue19846
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8
# Import the GPG keys used below to verify the dbus-python release signature.
# key 63C7CC90: public key "Simon McVittie <[email protected]>" imported
# key 3372DCFA: public key "Donald Stufft (dstufft) <[email protected]>" imported
RUN gpg --keyserver keyring.debian.org --recv-keys 4DE8FF2A63C7CC90 \
	&& gpg --keyserver keyserver.ubuntu.com --recv-key 6E3CBCE93372DCFA \
	&& gpg --keyserver keyserver.ubuntu.com --recv-keys 0x52a43a1e4b77b059
# point Python at a system-provided certificate database. Otherwise, we might hit CERTIFICATE_VERIFY_FAILED.
# https://www.python.org/dev/peps/pep-0476/#trust-database
ENV SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
# Pinned interpreter / pip / setuptools versions consumed by the install step below.
ENV PYTHON_VERSION 2.7.18
# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'"
ENV PYTHON_PIP_VERSION 21.0.1
ENV SETUPTOOLS_VERSION 56.0.0
# Download balena's prebuilt Python tarball, verify its sha256, unpack it over /,
# bootstrap pip if missing, pin pip/setuptools, then strip tests and bytecode.
RUN set -x \
	&& curl -SLO "http://resin-packages.s3.amazonaws.com/python/v$PYTHON_VERSION/Python-$PYTHON_VERSION.linux-alpine-aarch64-openssl1.1.tar.gz" \
	&& echo "99099d8226533b505cb3ec47c1b948d3d8b0ce575a23a925405983ebf3792bce Python-$PYTHON_VERSION.linux-alpine-aarch64-openssl1.1.tar.gz" | sha256sum -c - \
	&& tar -xzf "Python-$PYTHON_VERSION.linux-alpine-aarch64-openssl1.1.tar.gz" --strip-components=1 \
	&& rm -rf "Python-$PYTHON_VERSION.linux-alpine-aarch64-openssl1.1.tar.gz" \
	&& if [ ! -e /usr/local/bin/pip ]; then : \
		&& curl -SLO "https://raw.githubusercontent.com/pypa/get-pip/430ba37776ae2ad89f794c7a43b90dc23bac334c/get-pip.py" \
		&& echo "19dae841a150c86e2a09d475b5eb0602861f2a5b7761ec268049a662dbd2bd0c get-pip.py" | sha256sum -c - \
		&& python get-pip.py \
		&& rm get-pip.py \
	; fi \
	&& pip install --no-cache-dir --upgrade --force-reinstall pip=="$PYTHON_PIP_VERSION" setuptools=="$SETUPTOOLS_VERSION" \
	&& find /usr/local \
		\( -type d -a -name test -o -name tests \) \
		-o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \
		-exec rm -rf '{}' + \
	&& cd / \
	&& rm -rf /usr/src/python ~/.cache
# install "virtualenv", since the vast majority of users of this image will want it
RUN pip install --no-cache-dir virtualenv
ENV PYTHON_DBUS_VERSION 1.2.8
# install dbus-python dependencies
RUN apk add --no-cache \
	dbus-dev \
	dbus-glib-dev
# install dbus-python
# Signature is verified against the keys imported above; the inner
# PYTHON_VERSION override passes only "major.minor" to ./configure.
RUN set -x \
	&& mkdir -p /usr/src/dbus-python \
	&& curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz" -o dbus-python.tar.gz \
	&& curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz.asc" -o dbus-python.tar.gz.asc \
	&& gpg --verify dbus-python.tar.gz.asc \
	&& tar -xzC /usr/src/dbus-python --strip-components=1 -f dbus-python.tar.gz \
	&& rm dbus-python.tar.gz* \
	&& cd /usr/src/dbus-python \
	&& PYTHON_VERSION=$(expr match "$PYTHON_VERSION" '\([0-9]*\.[0-9]*\)') ./configure \
	&& make -j$(nproc) \
	&& make install -j$(nproc) \
	&& cd / \
	&& rm -rf /usr/src/dbus-python
# https://github.com/docker-library/python/issues/147
ENV PYTHONIOENCODING UTF-8
# set PYTHONPATH to point to dist-packages
ENV PYTHONPATH /usr/lib/python2.7/site-packages:$PYTHONPATH
# Write the Python 2 end-of-life notice into the balena messages directory.
# Fix: the target filename was truncated ("python-deprecation-warnin");
# sibling message files in this image use full names (e.g. "image-info").
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'As of January 1st, 2020, Python 2 was end-of-life, we will change the latest tag for Balenalib Python base image to Python 3.x and drop support for Python 2 soon. So after 1st July, 2020, all the balenalib Python latest tag will point to the latest Python 3 version and no changes, or fixes will be made to balenalib Python 2 base image. If you are using Python 2 for your application, please upgrade to Python 3 before 1st July.' > /.balena/messages/python-deprecation-warning
# Placeholder CMD; downstream images are expected to override it.
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
# Fetch and run balena's Python stack smoke test, then delete the script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
  && echo "Running test-stack@python" \
  && chmod +x [email protected] \
  && bash [email protected] \
  && rm -rf [email protected]
# Record human-readable image details for the `balena-info` command.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Alpine Linux 3.12 \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v2.7.18, Pip v21.0.1, Setuptools v56.0.0 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
# Replace /bin/sh with a shim that prints image info before handing off to the
# real (busybox) shell; the original shell stays available as /bin/sh.real.
RUN echo $'#!/bin/bash\nbalena-info\nbusybox ln -sf /bin/busybox /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& ln -f /bin/sh /bin/sh.real \
	&& ln -f /bin/sh-shim /bin/sh
"content_hash": "3b38c0df9b35f6c241838787648f1317",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 716,
"avg_line_length": 57.17391304347826,
"alnum_prop": 0.7203422053231939,
"repo_name": "nghiant2710/base-images",
"id": "3061066616c69c893375fb6912fb49f495523fd1",
"size": "5281",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "balena-base-images/python/orbitty-tx2/alpine/3.12/2.7.18/build/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "144558581"
},
{
"name": "JavaScript",
"bytes": "16316"
},
{
"name": "Shell",
"bytes": "368690"
}
],
"symlink_target": ""
} |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.skyframe;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe;
import java.util.Map;
import javax.annotation.Nullable;
/**
 * A graph that exposes its entries and structure, for use by classes that must traverse it.
 *
 * <p>Certain graph implementations can throw {@link InterruptedException} when trying to retrieve
 * node entries. Such exceptions should not be caught locally -- they should be allowed to propagate
 * up.
 */
@ThreadSafe
public interface QueryableGraph {
  /**
   * Returns the node with the given {@code key}, or {@code null} if the node does not exist.
   *
   * @param requestor if non-{@code null}, the node on behalf of which {@code key} is being
   *     requested.
   * @param reason the reason the node is being requested.
   */
  @Nullable
  NodeEntry get(@Nullable SkyKey requestor, Reason reason, SkyKey key) throws InterruptedException;

  /**
   * Fetches all the given nodes. Returns a map {@code m} such that, for all {@code k} in {@code
   * keys}, {@code m.get(k).equals(e)} iff {@code get(k) == e} and {@code e != null}, and {@code
   * !m.containsKey(k)} iff {@code get(k) == null}.
   *
   * @param requestor if non-{@code null}, the node on behalf of which the given {@code keys} are
   *     being requested.
   * @param reason the reason the nodes are being requested.
   */
  Map<SkyKey, ? extends NodeEntry> getBatch(
      @Nullable SkyKey requestor, Reason reason, Iterable<SkyKey> keys) throws InterruptedException;

  /**
   * Examines all the given keys. Returns an iterable of keys whose corresponding nodes are
   * currently available to be fetched.
   *
   * <p>Note: An unavailable node does not mean it is not in the graph. It only means it's not ready
   * to be fetched immediately.
   *
   * @param reason the reason the nodes are being requested.
   */
  Iterable<SkyKey> getCurrentlyAvailableNodes(Iterable<SkyKey> keys, Reason reason);

  /**
   * The reason that a node is being looked up in the Skyframe graph.
   *
   * <p>Alternate graph implementations may wish to make use of this information.
   */
  enum Reason {
    /**
     * The node is being fetched in order to see if it needs to be evaluated or because it was just
     * evaluated, but *not* because it was just requested during evaluation of a SkyFunction
     * (see {@link #DEP_REQUESTED}).
     */
    PRE_OR_POST_EVALUATION,

    /**
     * The node is being looked up as part of the prefetch step before evaluation of a SkyFunction.
     */
    PREFETCH,

    /**
     * The node is being fetched because it is about to be evaluated, but *not* because it was just
     * requested during evaluation of a SkyFunction (see {@link #DEP_REQUESTED}).
     */
    EVALUATION,

    /** The node is being looked up because it was requested during evaluation of a SkyFunction. */
    DEP_REQUESTED,

    /** The node is being looked up during the invalidation phase of Skyframe evaluation. */
    INVALIDATION,

    /** The node is being looked up during the cycle checking phase of Skyframe evaluation. */
    CYCLE_CHECKING,

    /** The node is being looked up so that an rdep can be added to it. */
    RDEP_ADDITION,

    /** The node is being looked up so that an rdep can be removed from it. */
    RDEP_REMOVAL,

    /** The node is being looked up for any graph clean-up effort that may be necessary. */
    CLEAN_UP,

    /** The node is being looked up so it can be enqueued for evaluation or change pruning. */
    ENQUEUING_CHILD,

    /**
     * The node is being looked up so that it can be signaled that a dependency is now complete.
     */
    SIGNAL_DEP,

    /**
     * The node is being looked up as part of the error bubbling phase of fail-fast Skyframe
     * evaluation.
     */
    ERROR_BUBBLING,

    /** The node is being looked up merely for an existence check. */
    EXISTENCE_CHECKING,

    /**
     * The node is being looked up to service {@link WalkableGraph#getValue},
     * {@link WalkableGraph#getException}, {@link WalkableGraph#getMissingAndExceptions}, or
     * {@link WalkableGraph#getSuccessfulValues}.
     */
    WALKABLE_GRAPH_VALUE,

    /** The node is being looked up to service {@link WalkableGraph#getDirectDeps}. */
    WALKABLE_GRAPH_DEPS,

    /** The node is being looked up to service {@link WalkableGraph#getReverseDeps}. */
    WALKABLE_GRAPH_RDEPS,

    /** Some other reason than one of the above. */
    OTHER,
  }
}
| {
"content_hash": "98d351e1dde31a3616b110b65471462e",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 100,
"avg_line_length": 37.065693430656935,
"alnum_prop": 0.6898385191020087,
"repo_name": "kchodorow/bazel",
"id": "07f8fafb896676328e07ae440d404bbfc8dc162c",
"size": "5078",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/main/java/com/google/devtools/build/skyframe/QueryableGraph.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "24765"
},
{
"name": "C++",
"bytes": "807610"
},
{
"name": "HTML",
"bytes": "18163"
},
{
"name": "Java",
"bytes": "20635120"
},
{
"name": "JavaScript",
"bytes": "6860"
},
{
"name": "Makefile",
"bytes": "248"
},
{
"name": "PowerShell",
"bytes": "7559"
},
{
"name": "Protocol Buffer",
"bytes": "118919"
},
{
"name": "Python",
"bytes": "292233"
},
{
"name": "Shell",
"bytes": "767720"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Class LINE\LINEBot\Event\MessageEvent\LocationMessage | line-bot-sdk-php</title>
<link rel="stylesheet" href="resources/style.css?c2f33731c1948fbed7c333554678bfa68d4817da">
</head>
<body>
<div id="left">
<div id="menu">
<a href="index.html" title="Overview"><span>Overview</span></a>
<div id="groups">
<h3>Namespaces</h3>
<ul>
<li class="active">
<a href="namespace-LINE.html">
LINE<span></span>
</a>
<ul>
<li class="active">
<a href="namespace-LINE.LINEBot.html">
LINEBot<span></span>
</a>
<ul>
<li>
<a href="namespace-LINE.LINEBot.Constant.html">
Constant </a>
</li>
<li class="active">
<a href="namespace-LINE.LINEBot.Event.html">
Event<span></span>
</a>
<ul>
<li class="active">
<a href="namespace-LINE.LINEBot.Event.MessageEvent.html">
MessageEvent </a>
</li>
<li>
<a href="namespace-LINE.LINEBot.Event.Parser.html">
Parser </a>
</li>
</ul></li>
<li>
<a href="namespace-LINE.LINEBot.Exception.html">
Exception </a>
</li>
<li>
<a href="namespace-LINE.LINEBot.HTTPClient.html">
HTTPClient </a>
</li>
<li>
<a href="namespace-LINE.LINEBot.ImagemapActionBuilder.html">
ImagemapActionBuilder </a>
</li>
<li>
<a href="namespace-LINE.LINEBot.MessageBuilder.html">
MessageBuilder<span></span>
</a>
<ul>
<li>
<a href="namespace-LINE.LINEBot.MessageBuilder.Imagemap.html">
Imagemap </a>
</li>
<li>
<a href="namespace-LINE.LINEBot.MessageBuilder.TemplateBuilder.html">
TemplateBuilder </a>
</li>
</ul></li>
<li>
<a href="namespace-LINE.LINEBot.TemplateActionBuilder.html">
TemplateActionBuilder </a>
</li>
</ul></li></ul></li>
</ul>
</div>
<hr>
<div id="elements">
<h3>Classes</h3>
<ul>
<li><a href="class-LINE.LINEBot.Event.MessageEvent.AudioMessage.html">AudioMessage</a></li>
<li><a href="class-LINE.LINEBot.Event.MessageEvent.FileMessage.html">FileMessage</a></li>
<li><a href="class-LINE.LINEBot.Event.MessageEvent.ImageMessage.html">ImageMessage</a></li>
<li class="active"><a href="class-LINE.LINEBot.Event.MessageEvent.LocationMessage.html">LocationMessage</a></li>
<li><a href="class-LINE.LINEBot.Event.MessageEvent.StickerMessage.html">StickerMessage</a></li>
<li><a href="class-LINE.LINEBot.Event.MessageEvent.TextMessage.html">TextMessage</a></li>
<li><a href="class-LINE.LINEBot.Event.MessageEvent.UnknownMessage.html">UnknownMessage</a></li>
<li><a href="class-LINE.LINEBot.Event.MessageEvent.VideoMessage.html">VideoMessage</a></li>
</ul>
</div>
</div>
</div>
<div id="splitter"></div>
<div id="right">
<div id="rightInner">
<form id="search">
<input type="hidden" name="cx" value="">
<input type="hidden" name="ie" value="UTF-8">
<input type="text" name="q" class="text" placeholder="Search">
</form>
<div id="navigation">
<ul>
<li>
<a href="index.html" title="Overview"><span>Overview</span></a>
</li>
<li>
<a href="namespace-LINE.LINEBot.Event.MessageEvent.html" title="Summary of LINE\LINEBot\Event\MessageEvent"><span>Namespace</span></a>
</li>
<li class="active">
<span>Class</span> </li>
</ul>
<ul>
</ul>
<ul>
</ul>
</div>
<div id="content" class="class">
<h1>Class LocationMessage</h1>
<div class="description">
<p>A class that represents the message event of location.</p>
</div>
<dl class="tree">
<dd style="padding-left:0px">
<a href="class-LINE.LINEBot.Event.BaseEvent.html"><span>LINE\LINEBot\Event\BaseEvent</span></a>
</dd>
<dd style="padding-left:30px">
<img src="resources/inherit.png" alt="Extended by">
<a href="class-LINE.LINEBot.Event.MessageEvent.html"><span>LINE\LINEBot\Event\MessageEvent</span></a>
</dd>
<dd style="padding-left:60px">
<img src="resources/inherit.png" alt="Extended by">
<b><span>LINE\LINEBot\Event\MessageEvent\LocationMessage</span></b>
</dd>
</dl>
<div class="info">
<b>Namespace:</b> <a href="namespace-LINE.html">LINE</a>\<a href="namespace-LINE.LINEBot.html">LINEBot</a>\<a href="namespace-LINE.LINEBot.Event.html">Event</a>\<a href="namespace-LINE.LINEBot.Event.MessageEvent.html">MessageEvent</a><br>
<b>Package:</b> LINE\LINEBot\Event\MessageEvent<br>
<b>Located at</b> <a href="source-class-LINE.LINEBot.Event.MessageEvent.LocationMessage.html#23-79" title="Go to source code">LINEBot/Event/MessageEvent/LocationMessage.php</a>
<br>
</div>
<table class="summary methods" id="methods">
<caption>Methods summary</caption>
<tr data-order="__construct" id="___construct">
<td class="attributes"><code>
public
</code>
</td>
<td class="name"><div>
<a class="anchor" href="#___construct">#</a>
<code><a href="source-class-LINE.LINEBot.Event.MessageEvent.LocationMessage.html#30-38" title="Go to source code">__construct</a>( <span>array <var>$event</var></span> )</code>
<div class="description short">
<p>LocationMessage constructor.</p>
</div>
<div class="description detailed hidden">
<p>LocationMessage constructor.</p>
<h4>Parameters</h4>
<div class="list"><dl>
<dt><var>$event</var></dt>
<dd></dd>
</dl></div>
<h4>Overrides</h4>
<div class="list"><code><a href="class-LINE.LINEBot.Event.MessageEvent.html#___construct">LINE\LINEBot\Event\MessageEvent::__construct()</a></code></div>
</div>
</div></td>
</tr>
<tr data-order="getTitle" id="_getTitle">
<td class="attributes"><code>
public
string
</code>
</td>
<td class="name"><div>
<a class="anchor" href="#_getTitle">#</a>
<code><a href="source-class-LINE.LINEBot.Event.MessageEvent.LocationMessage.html#40-48" title="Go to source code">getTitle</a>( )</code>
<div class="description short">
<p>Returns title of the location message.</p>
</div>
<div class="description detailed hidden">
<p>Returns title of the location message.</p>
<h4>Returns</h4>
<div class="list">
string
</div>
</div>
</div></td>
</tr>
<tr data-order="getAddress" id="_getAddress">
<td class="attributes"><code>
public
string
</code>
</td>
<td class="name"><div>
<a class="anchor" href="#_getAddress">#</a>
<code><a href="source-class-LINE.LINEBot.Event.MessageEvent.LocationMessage.html#50-58" title="Go to source code">getAddress</a>( )</code>
<div class="description short">
<p>Returns address of the location message.</p>
</div>
<div class="description detailed hidden">
<p>Returns address of the location message.</p>
<h4>Returns</h4>
<div class="list">
string
</div>
</div>
</div></td>
</tr>
<tr data-order="getLatitude" id="_getLatitude">
<td class="attributes"><code>
public
float
</code>
</td>
<td class="name"><div>
<a class="anchor" href="#_getLatitude">#</a>
<code><a href="source-class-LINE.LINEBot.Event.MessageEvent.LocationMessage.html#60-68" title="Go to source code">getLatitude</a>( )</code>
<div class="description short">
<p>Returns latitude of the location message.</p>
</div>
<div class="description detailed hidden">
<p>Returns latitude of the location message.</p>
<h4>Returns</h4>
<div class="list">
float
</div>
</div>
</div></td>
</tr>
<tr data-order="getLongitude" id="_getLongitude">
<td class="attributes"><code>
public
float
</code>
</td>
<td class="name"><div>
<a class="anchor" href="#_getLongitude">#</a>
<code><a href="source-class-LINE.LINEBot.Event.MessageEvent.LocationMessage.html#70-78" title="Go to source code">getLongitude</a>( )</code>
<div class="description short">
<p>Returns longitude of the location message.</p>
</div>
<div class="description detailed hidden">
<p>Returns longitude of the location message.</p>
<h4>Returns</h4>
<div class="list">
float
</div>
</div>
</div></td>
</tr>
</table>
<table class="summary inherited">
<caption>Methods inherited from <a href="class-LINE.LINEBot.Event.MessageEvent.html#methods">LINE\LINEBot\Event\MessageEvent</a></caption>
<tr>
<td><code>
<a href="class-LINE.LINEBot.Event.MessageEvent.html#_getMessageId">getMessageId()</a>,
<a href="class-LINE.LINEBot.Event.MessageEvent.html#_getMessageType">getMessageType()</a>
</code></td>
</tr>
</table>
<table class="summary inherited">
<caption>Methods inherited from <a href="class-LINE.LINEBot.Event.BaseEvent.html#methods">LINE\LINEBot\Event\BaseEvent</a></caption>
<tr>
<td><code>
<a href="class-LINE.LINEBot.Event.BaseEvent.html#_getEventSourceId">getEventSourceId()</a>,
<a href="class-LINE.LINEBot.Event.BaseEvent.html#_getGroupId">getGroupId()</a>,
<a href="class-LINE.LINEBot.Event.BaseEvent.html#_getReplyToken">getReplyToken()</a>,
<a href="class-LINE.LINEBot.Event.BaseEvent.html#_getRoomId">getRoomId()</a>,
<a href="class-LINE.LINEBot.Event.BaseEvent.html#_getTimestamp">getTimestamp()</a>,
<a href="class-LINE.LINEBot.Event.BaseEvent.html#_getType">getType()</a>,
<a href="class-LINE.LINEBot.Event.BaseEvent.html#_getUserId">getUserId()</a>,
<a href="class-LINE.LINEBot.Event.BaseEvent.html#_isGroupEvent">isGroupEvent()</a>,
<a href="class-LINE.LINEBot.Event.BaseEvent.html#_isRoomEvent">isRoomEvent()</a>,
<a href="class-LINE.LINEBot.Event.BaseEvent.html#_isUnknownEvent">isUnknownEvent()</a>,
<a href="class-LINE.LINEBot.Event.BaseEvent.html#_isUserEvent">isUserEvent()</a>
</code></td>
</tr>
</table>
<table class="summary inherited">
<caption>Properties inherited from <a href="class-LINE.LINEBot.Event.MessageEvent.html#properties">LINE\LINEBot\Event\MessageEvent</a></caption>
<tr>
<td><code>
<a href="class-LINE.LINEBot.Event.MessageEvent.html#$message"><var>$message</var></a>
</code></td>
</tr>
</table>
<table class="summary inherited">
<caption>Properties inherited from <a href="class-LINE.LINEBot.Event.BaseEvent.html#properties">LINE\LINEBot\Event\BaseEvent</a></caption>
<tr>
<td><code>
<a href="class-LINE.LINEBot.Event.BaseEvent.html#$event"><var>$event</var></a>
</code></td>
</tr>
</table>
</div>
<div id="footer">
line-bot-sdk-php API documentation generated by <a href="http://apigen.org">ApiGen</a>
</div>
</div>
</div>
<script src="resources/combined.js"></script>
<script src="elementlist.js"></script>
</body>
</html>
| {
"content_hash": "a00fb94df9f0def068a49a574944222c",
"timestamp": "",
"source": "github",
"line_count": 443,
"max_line_length": 240,
"avg_line_length": 24.20316027088036,
"alnum_prop": 0.6418578623391158,
"repo_name": "peatpon/line-bot-sdk",
"id": "63777467328b9982c6c152c42ec14174a1852d0e",
"size": "10722",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "docs/class-LINE.LINEBot.Event.MessageEvent.LocationMessage.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "2847"
},
{
"name": "PHP",
"bytes": "200676"
},
{
"name": "Shell",
"bytes": "1025"
}
],
"symlink_target": ""
} |
this.workbox = this.workbox || {};
this.workbox.strategies = (function (logger_mjs,assert_mjs,cacheNames_mjs,cacheWrapper_mjs,fetchWrapper_mjs,getFriendlyURL_mjs) {
'use strict';
try {
self.workbox.v['workbox:strategies:3.4.1'] = 1;
} catch (e) {} // eslint-disable-line
// Shortens same-origin URLs to their path for friendlier log output;
// cross-origin URLs keep their full href.
const getFriendlyURL = url => {
  // Resolve (possibly relative) input against the current location.
  const parsed = new URL(url, location);
  return parsed.origin === location.origin ? parsed.pathname : parsed.href;
};
var messages = {
  // Header line for a strategy's collapsed log group.
  strategyStart: (strategyName, request) => `Using ${strategyName} to respond to '${getFriendlyURL(request.url)}'`,

  // Logs the final response inside its own collapsed group; no-op when the
  // strategy produced no response.
  printFinalResponse: response => {
    if (!response) {
      return;
    }
    logger_mjs.logger.groupCollapsed(`View the final response here.`);
    logger_mjs.logger.unprefixed.log(response);
    logger_mjs.logger.groupEnd();
  }
};
/*
Copyright 2018 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* An implementation of a [cache-first]{@link https://developers.google.com/web/fundamentals/instant-and-offline/offline-cookbook/#cache-falling-back-to-network}
* request strategy.
*
* A cache first strategy is useful for assets that have been revisioned,
* such as URLs like `/styles/example.a8f5f1.css`, since they
* can be cached for long periods of time.
*
* @memberof workbox.strategies
*/
class CacheFirst {
/**
* @param {Object} options
* @param {string} options.cacheName Cache name to store and retrieve
* requests. Defaults to cache names provided by
* [workbox-core]{@link workbox.core.cacheNames}.
* @param {Array<Object>} options.plugins [Plugins]{@link https://developers.google.com/web/tools/workbox/guides/using-plugins}
* to use in conjunction with this caching strategy.
* @param {Object} options.fetchOptions Values passed along to the
* [`init`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch#Parameters)
* of all fetch() requests made by this strategy.
* @param {Object} options.matchOptions [`CacheQueryOptions`](https://w3c.github.io/ServiceWorker/#dictdef-cachequeryoptions)
*/
constructor(options = {}) {
this._cacheName = cacheNames_mjs.cacheNames.getRuntimeName(options.cacheName);
this._plugins = options.plugins || [];
this._fetchOptions = options.fetchOptions || null;
this._matchOptions = options.matchOptions || null;
}
/**
* This method will perform a request strategy and follows an API that
* will work with the
* [Workbox Router]{@link workbox.routing.Router}.
*
* @param {Object} input
* @param {FetchEvent} input.event The fetch event to run this strategy
* against.
* @return {Promise<Response>}
*/
handle({ event }) {
var _this = this;
return babelHelpers.asyncToGenerator(function* () {
{
assert_mjs.assert.isInstance(event, FetchEvent, {
moduleName: 'workbox-strategies',
className: 'CacheFirst',
funcName: 'handle',
paramName: 'event'
});
}
return _this.makeRequest({
event,
request: event.request
});
})();
}
/**
* This method can be used to perform a make a standalone request outside the
* context of the [Workbox Router]{@link workbox.routing.Router}.
*
* See "[Advanced Recipes](https://developers.google.com/web/tools/workbox/guides/advanced-recipes#make-requests)"
* for more usage information.
*
* @param {Object} input
* @param {Request|string} input.request Either a
* [`Request`]{@link https://developer.mozilla.org/en-US/docs/Web/API/Request}
* object, or a string URL, corresponding to the request to be made.
* @param {FetchEvent} [input.event] If provided, `event.waitUntil()` will be
* called automatically to extend the service worker's lifetime.
* @return {Promise<Response>}
*/
    makeRequest({ event, request }) {
        var _this2 = this;
        return babelHelpers.asyncToGenerator(function* () {
            // Messages accumulated here are emitted as one collapsed console
            // group at the end (dev-build logging only).
            const logs = [];
            // Convenience: accept a URL string and normalize it to a Request.
            if (typeof request === 'string') {
                request = new Request(request);
            }
            {
                assert_mjs.assert.isInstance(request, Request, {
                    moduleName: 'workbox-strategies',
                    className: 'CacheFirst',
                    funcName: 'makeRequest',
                    paramName: 'request'
                });
            }
            // Cache-first: consult the configured cache before the network.
            let response = yield cacheWrapper_mjs.cacheWrapper.match(_this2._cacheName, request, _this2._matchOptions, _this2._plugins);
            let error;
            if (!response) {
                {
                    logs.push(`No response found in the '${_this2._cacheName}' cache. ` + `Will respond with a network request.`);
                }
                try {
                    // Cache miss: fetch from the network (which also writes
                    // the response back to the cache).
                    response = yield _this2._getFromNetwork(request, event);
                } catch (err) {
                    // Hold the error until after logging; re-thrown below.
                    error = err;
                }
                {
                    if (response) {
                        logs.push(`Got response from network.`);
                    } else {
                        logs.push(`Unable to get a response from the network.`);
                    }
                }
            } else {
                {
                    logs.push(`Found a cached response in the '${_this2._cacheName}' cache.`);
                }
            }
            {
                logger_mjs.logger.groupCollapsed(messages.strategyStart('CacheFirst', request));
                for (let log of logs) {
                    logger_mjs.logger.log(log);
                }
                messages.printFinalResponse(response);
                logger_mjs.logger.groupEnd();
            }
            if (error) {
                // Don't swallow error as we'll want it to throw and enable catch
                // handlers in router.
                throw error;
            }
            return response;
        })();
    }
/**
* Handles the network and cache part of CacheFirst.
*
* @param {Request} request
* @param {FetchEvent} [event]
* @return {Promise<Response>}
*
* @private
*/
    _getFromNetwork(request, event) {
        var _this3 = this;
        return babelHelpers.asyncToGenerator(function* () {
            // Fetch via the plugin-aware wrapper; when an event is supplied its
            // preloadResponse (navigation preload) may be used by the wrapper.
            const response = yield fetchWrapper_mjs.fetchWrapper.fetch(request, _this3._fetchOptions, _this3._plugins, event ? event.preloadResponse : undefined);
            // Keep the service worker while we put the request to the cache
            const responseClone = response.clone();
            const cachePutPromise = cacheWrapper_mjs.cacheWrapper.put(_this3._cacheName, request, responseClone, _this3._plugins);
            if (event) {
                try {
                    // Extend the SW lifetime until the cache write finishes.
                    event.waitUntil(cachePutPromise);
                } catch (error) {
                    {
                        logger_mjs.logger.warn(`Unable to ensure service worker stays alive when ` + `updating cache for '${getFriendlyURL_mjs.getFriendlyURL(event.request.url)}'.`);
                    }
                }
            }
            // Return the network response without awaiting the cache put.
            return response;
        })();
    }
}
/*
Copyright 2018 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* An implementation of a
* [cache-only]{@link https://developers.google.com/web/fundamentals/instant-and-offline/offline-cookbook/#cache-only}
* request strategy.
*
* This class is useful if you want to take advantage of any [Workbox plugins]{@link https://developers.google.com/web/tools/workbox/guides/using-plugins}.
*
* @memberof workbox.strategies
*/
class CacheOnly {
    /**
     * @param {Object} options
     * @param {string} options.cacheName Cache name to store and retrieve
     * requests. Defaults to cache names provided by
     * [workbox-core]{@link workbox.core.cacheNames}.
     * @param {Array<Object>} options.plugins [Plugins]{@link https://developers.google.com/web/tools/workbox/guides/using-plugins}
     * to use in conjunction with this caching strategy.
     * @param {Object} options.matchOptions [`CacheQueryOptions`](https://w3c.github.io/ServiceWorker/#dictdef-cachequeryoptions)
     */
    constructor(options = {}) {
        // Resolve the concrete runtime cache name (workbox default when unset).
        this._cacheName = cacheNames_mjs.cacheNames.getRuntimeName(options.cacheName);
        this._plugins = options.plugins || [];
        this._matchOptions = options.matchOptions || null;
    }
    /**
     * This method will perform a request strategy and follows an API that
     * will work with the
     * [Workbox Router]{@link workbox.routing.Router}.
     *
     * @param {Object} input
     * @param {FetchEvent} input.event The fetch event to run this strategy
     * against.
     * @return {Promise<Response>}
     */
    handle({ event }) {
        var _this = this;
        return babelHelpers.asyncToGenerator(function* () {
            {
                assert_mjs.assert.isInstance(event, FetchEvent, {
                    moduleName: 'workbox-strategies',
                    className: 'CacheOnly',
                    funcName: 'handle',
                    paramName: 'event'
                });
            }
            // Delegate to makeRequest so router- and standalone-usage share
            // one code path.
            return _this.makeRequest({
                event,
                request: event.request
            });
        })();
    }
    /**
     * This method can be used to perform a make a standalone request outside the
     * context of the [Workbox Router]{@link workbox.routing.Router}.
     *
     * See "[Advanced Recipes](https://developers.google.com/web/tools/workbox/guides/advanced-recipes#make-requests)"
     * for more usage information.
     *
     * @param {Object} input
     * @param {Request|string} input.request Either a
     * [`Request`]{@link https://developer.mozilla.org/en-US/docs/Web/API/Request}
     * object, or a string URL, corresponding to the request to be made.
     * @param {FetchEvent} [input.event] If provided, `event.waitUntil()` will be
     * called automatically to extend the service worker's lifetime.
     * @return {Promise<Response>}
     */
    makeRequest({ event, request }) {
        var _this2 = this;
        return babelHelpers.asyncToGenerator(function* () {
            // Convenience: accept a URL string and normalize it to a Request.
            if (typeof request === 'string') {
                request = new Request(request);
            }
            {
                assert_mjs.assert.isInstance(request, Request, {
                    moduleName: 'workbox-strategies',
                    className: 'CacheOnly',
                    funcName: 'makeRequest',
                    paramName: 'request'
                });
            }
            // Cache-only: never touches the network; resolves with undefined
            // on a cache miss.
            const response = yield cacheWrapper_mjs.cacheWrapper.match(_this2._cacheName, request, _this2._matchOptions, _this2._plugins);
            {
                logger_mjs.logger.groupCollapsed(messages.strategyStart('CacheOnly', request));
                if (response) {
                    logger_mjs.logger.log(`Found a cached response in the '${_this2._cacheName}'` + ` cache.`);
                    messages.printFinalResponse(response);
                } else {
                    logger_mjs.logger.log(`No response found in the '${_this2._cacheName}' cache.`);
                }
                logger_mjs.logger.groupEnd();
            }
            return response;
        })();
    }
}
/*
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
var cacheOkAndOpaquePlugin = {
    /**
     * Decides whether a fetched response may be written to the cache.
     * A response is cacheable when it is successful (`response.ok`, i.e.
     * status in the 2xx range) or opaque (status 0, a no-CORS cross-origin
     * response).
     *
     * @param {Object} input
     * @param {Response} input.response The response under consideration.
     * @return {Response|null} The response itself when cacheable, otherwise
     * `null` to veto the cache write.
     *
     * @private
     */
    cacheWillUpdate: ({ response }) => {
        const isCacheable = response.ok || response.status === 0;
        return isCacheable ? response : null;
    }
};
/*
Copyright 2018 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* An implementation of a
* [network first]{@link https://developers.google.com/web/fundamentals/instant-and-offline/offline-cookbook/#network-falling-back-to-cache}
* request strategy.
*
* By default, this strategy will cache responses with a 200 status code as
* well as [opaque responses]{@link https://developers.google.com/web/tools/workbox/guides/handle-third-party-requests}.
* Opaque responses are are cross-origin requests where the response doesn't
* support [CORS]{@link https://enable-cors.org/}.
*
* @memberof workbox.strategies
*/
class NetworkFirst {
    /**
     * @param {Object} options
     * @param {string} options.cacheName Cache name to store and retrieve
     * requests. Defaults to cache names provided by
     * [workbox-core]{@link workbox.core.cacheNames}.
     * @param {Array<Object>} options.plugins [Plugins]{@link https://developers.google.com/web/tools/workbox/guides/using-plugins}
     * to use in conjunction with this caching strategy.
     * @param {Object} options.fetchOptions Values passed along to the
     * [`init`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch#Parameters)
     * of all fetch() requests made by this strategy.
     * @param {Object} options.matchOptions [`CacheQueryOptions`](https://w3c.github.io/ServiceWorker/#dictdef-cachequeryoptions)
     * @param {number} options.networkTimeoutSeconds If set, any network requests
     * that fail to respond within the timeout will fallback to the cache.
     *
     * This option can be used to combat
     * "[lie-fi]{@link https://developers.google.com/web/fundamentals/performance/poor-connectivity/#lie-fi}"
     * scenarios.
     */
    constructor(options = {}) {
        this._cacheName = cacheNames_mjs.cacheNames.getRuntimeName(options.cacheName);
        if (options.plugins) {
            // Guarantee a cacheWillUpdate plugin is present: if the caller did
            // not supply one, prepend the default ok/opaque filter.
            let isUsingCacheWillUpdate = options.plugins.some(plugin => !!plugin.cacheWillUpdate);
            this._plugins = isUsingCacheWillUpdate ? options.plugins : [cacheOkAndOpaquePlugin, ...options.plugins];
        } else {
            // No plugins passed in, use the default plugin.
            this._plugins = [cacheOkAndOpaquePlugin];
        }
        this._networkTimeoutSeconds = options.networkTimeoutSeconds;
        {
            if (this._networkTimeoutSeconds) {
                assert_mjs.assert.isType(this._networkTimeoutSeconds, 'number', {
                    moduleName: 'workbox-strategies',
                    className: 'NetworkFirst',
                    funcName: 'constructor',
                    paramName: 'networkTimeoutSeconds'
                });
            }
        }
        this._fetchOptions = options.fetchOptions || null;
        this._matchOptions = options.matchOptions || null;
    }
    /**
     * This method will perform a request strategy and follows an API that
     * will work with the
     * [Workbox Router]{@link workbox.routing.Router}.
     *
     * @param {Object} input
     * @param {FetchEvent} input.event The fetch event to run this strategy
     * against.
     * @return {Promise<Response>}
     */
    handle({ event }) {
        var _this = this;
        return babelHelpers.asyncToGenerator(function* () {
            {
                assert_mjs.assert.isInstance(event, FetchEvent, {
                    moduleName: 'workbox-strategies',
                    className: 'NetworkFirst',
                    funcName: 'handle',
                    paramName: 'event'
                });
            }
            // Delegate to makeRequest so router- and standalone-usage share
            // one code path.
            return _this.makeRequest({
                event,
                request: event.request
            });
        })();
    }
    /**
     * This method can be used to perform a make a standalone request outside the
     * context of the [Workbox Router]{@link workbox.routing.Router}.
     *
     * See "[Advanced Recipes](https://developers.google.com/web/tools/workbox/guides/advanced-recipes#make-requests)"
     * for more usage information.
     *
     * @param {Object} input
     * @param {Request|string} input.request Either a
     * [`Request`]{@link https://developer.mozilla.org/en-US/docs/Web/API/Request}
     * object, or a string URL, corresponding to the request to be made.
     * @param {FetchEvent} [input.event] If provided, `event.waitUntil()` will be
     * called automatically to extend the service worker's lifetime.
     * @return {Promise<Response>}
     */
    makeRequest({ event, request }) {
        var _this2 = this;
        return babelHelpers.asyncToGenerator(function* () {
            const logs = [];
            // Convenience: accept a URL string and normalize it to a Request.
            if (typeof request === 'string') {
                request = new Request(request);
            }
            {
                // Fixed: this assertion previously mis-reported
                // funcName 'handle' / paramName 'makeRequest'.
                assert_mjs.assert.isInstance(request, Request, {
                    moduleName: 'workbox-strategies',
                    className: 'NetworkFirst',
                    funcName: 'makeRequest',
                    paramName: 'request'
                });
            }
            const promises = [];
            let timeoutId;
            // When a timeout is configured, race the network fetch against a
            // timer that falls back to the cache.
            if (_this2._networkTimeoutSeconds) {
                const { id, promise } = _this2._getTimeoutPromise(request, logs);
                timeoutId = id;
                promises.push(promise);
            }
            const networkPromise = _this2._getNetworkPromise(timeoutId, event, request, logs);
            promises.push(networkPromise);
            // Promise.race() will resolve as soon as the first promise resolves.
            let response = yield Promise.race(promises);
            // If Promise.race() resolved with null, it might be due to a network
            // timeout + a cache miss. If that were to happen, we'd rather wait until
            // the networkPromise resolves instead of returning null.
            // Note that it's fine to await an already-resolved promise, so we don't
            // have to check to see if it's still "in flight".
            if (!response) {
                response = yield networkPromise;
            }
            {
                logger_mjs.logger.groupCollapsed(messages.strategyStart('NetworkFirst', request));
                for (let log of logs) {
                    logger_mjs.logger.log(log);
                }
                messages.printFinalResponse(response);
                logger_mjs.logger.groupEnd();
            }
            return response;
        })();
    }
    /**
     * Builds a promise that resolves with the cached response once the
     * configured network timeout elapses.
     *
     * @param {Request} request
     * @param {Array} logs A reference to the logs array
     * @return {Promise<Response>}
     *
     * @private
     */
    _getTimeoutPromise(request, logs) {
        var _this3 = this;
        let timeoutId;
        const timeoutPromise = new Promise(resolve => {
            const onNetworkTimeout = (() => {
                var _ref = babelHelpers.asyncToGenerator(function* () {
                    {
                        logs.push(`Timing out the network response at ` + `${_this3._networkTimeoutSeconds} seconds.`);
                    }
                    resolve((yield _this3._respondFromCache(request)));
                });
                return function onNetworkTimeout() {
                    return _ref.apply(this, arguments);
                };
            })();
            // The executor runs synchronously, so timeoutId is assigned before
            // the surrounding return statement reads it.
            timeoutId = setTimeout(onNetworkTimeout, this._networkTimeoutSeconds * 1000);
        });
        return {
            promise: timeoutPromise,
            id: timeoutId
        };
    }
    /**
     * Fetches from the network, cancels the pending timeout (if any) and
     * either caches the response or falls back to the cache on failure.
     *
     * @param {number} timeoutId
     * @param {FetchEvent|null} event
     * @param {Request} request
     * @param {Array} logs A reference to the logs Array.
     * @return {Promise<Response>}
     *
     * @private
     */
    _getNetworkPromise(timeoutId, event, request, logs) {
        var _this4 = this;
        return babelHelpers.asyncToGenerator(function* () {
            let error;
            let response;
            try {
                response = yield fetchWrapper_mjs.fetchWrapper.fetch(request, _this4._fetchOptions, _this4._plugins, event ? event.preloadResponse : undefined);
            } catch (err) {
                error = err;
            }
            // The network answered (or failed) before the timeout fired, so the
            // fallback timer is no longer needed.
            if (timeoutId) {
                clearTimeout(timeoutId);
            }
            {
                if (response) {
                    logs.push(`Got response from network.`);
                } else {
                    logs.push(`Unable to get a response from the network. Will respond ` + `with a cached response.`);
                }
            }
            if (error || !response) {
                response = yield _this4._respondFromCache(request);
                {
                    if (response) {
                        logs.push(`Found a cached response in the '${_this4._cacheName}'` + ` cache.`);
                    } else {
                        logs.push(`No response found in the '${_this4._cacheName}' cache.`);
                    }
                }
            } else {
                // Keep the service worker alive while we put the request in the cache
                const responseClone = response.clone();
                const cachePut = cacheWrapper_mjs.cacheWrapper.put(_this4._cacheName, request, responseClone, _this4._plugins);
                if (event) {
                    try {
                        // The event has been responded to so we can keep the SW alive to
                        // respond to the request
                        event.waitUntil(cachePut);
                    } catch (err) {
                        {
                            logger_mjs.logger.warn(`Unable to ensure service worker stays alive when ` + `updating cache for '${getFriendlyURL_mjs.getFriendlyURL(event.request.url)}'.`);
                        }
                    }
                }
            }
            return response;
        })();
    }
    /**
     * Used if the network timeouts or fails to make the request.
     *
     * @param {Request} request The fetchEvent request to match in the cache
     * @return {Promise<Object>}
     *
     * @private
     */
    _respondFromCache(request) {
        return cacheWrapper_mjs.cacheWrapper.match(this._cacheName, request, this._matchOptions, this._plugins);
    }
}
/*
Copyright 2018 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* An implementation of a
* [network-only]{@link https://developers.google.com/web/fundamentals/instant-and-offline/offline-cookbook/#network-only}
* request strategy.
*
* This class is useful if you want to take advantage of any [Workbox plugins]{@link https://developers.google.com/web/tools/workbox/guides/using-plugins}.
*
* @memberof workbox.strategies
*/
class NetworkOnly {
    /**
     * @param {Object} options
     * @param {string} options.cacheName Cache name to store and retrieve
     * requests. Defaults to cache names provided by
     * [workbox-core]{@link workbox.core.cacheNames}.
     * @param {Array<Object>} options.plugins [Plugins]{@link https://developers.google.com/web/tools/workbox/guides/using-plugins}
     * to use in conjunction with this caching strategy.
     * @param {Object} options.fetchOptions Values passed along to the
     * [`init`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch#Parameters)
     * of all fetch() requests made by this strategy.
     */
    constructor(options = {}) {
        // Resolve the concrete runtime cache name (workbox default when unset).
        this._cacheName = cacheNames_mjs.cacheNames.getRuntimeName(options.cacheName);
        this._plugins = options.plugins || [];
        this._fetchOptions = options.fetchOptions || null;
    }
    /**
     * This method will perform a request strategy and follows an API that
     * will work with the
     * [Workbox Router]{@link workbox.routing.Router}.
     *
     * @param {Object} input
     * @param {FetchEvent} input.event The fetch event to run this strategy
     * against.
     * @return {Promise<Response>}
     */
    handle({ event }) {
        var _this = this;
        return babelHelpers.asyncToGenerator(function* () {
            {
                assert_mjs.assert.isInstance(event, FetchEvent, {
                    moduleName: 'workbox-strategies',
                    className: 'NetworkOnly',
                    funcName: 'handle',
                    paramName: 'event'
                });
            }
            // Delegate to makeRequest so router- and standalone-usage share
            // one code path.
            return _this.makeRequest({
                event,
                request: event.request
            });
        })();
    }
    /**
     * This method can be used to perform a make a standalone request outside the
     * context of the [Workbox Router]{@link workbox.routing.Router}.
     *
     * See "[Advanced Recipes](https://developers.google.com/web/tools/workbox/guides/advanced-recipes#make-requests)"
     * for more usage information.
     *
     * @param {Object} input
     * @param {Request|string} input.request Either a
     * [`Request`]{@link https://developer.mozilla.org/en-US/docs/Web/API/Request}
     * object, or a string URL, corresponding to the request to be made.
     * @param {FetchEvent} [input.event] If provided, `event.waitUntil()` will be
     * called automatically to extend the service worker's lifetime.
     * @return {Promise<Response>}
     */
    makeRequest({ event, request }) {
        var _this2 = this;
        return babelHelpers.asyncToGenerator(function* () {
            // Convenience: accept a URL string and normalize it to a Request.
            if (typeof request === 'string') {
                request = new Request(request);
            }
            {
                // Fixed: this assertion previously mis-reported funcName 'handle'.
                assert_mjs.assert.isInstance(request, Request, {
                    moduleName: 'workbox-strategies',
                    className: 'NetworkOnly',
                    funcName: 'makeRequest',
                    paramName: 'request'
                });
            }
            let error;
            let response;
            try {
                // Network-only: never consults or updates the cache.
                response = yield fetchWrapper_mjs.fetchWrapper.fetch(request, _this2._fetchOptions, _this2._plugins, event ? event.preloadResponse : undefined);
            } catch (err) {
                // Hold the error until after logging; re-thrown below.
                error = err;
            }
            {
                logger_mjs.logger.groupCollapsed(messages.strategyStart('NetworkOnly', request));
                if (response) {
                    logger_mjs.logger.log(`Got response from network.`);
                } else {
                    logger_mjs.logger.log(`Unable to get a response from the network.`);
                }
                messages.printFinalResponse(response);
                logger_mjs.logger.groupEnd();
            }
            // If there was an error thrown, re-throw it to ensure the Routers
            // catch handler is triggered.
            if (error) {
                throw error;
            }
            return response;
        })();
    }
}
/*
Copyright 2018 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* An implementation of a
* [stale-while-revalidate]{@link https://developers.google.com/web/fundamentals/instant-and-offline/offline-cookbook/#stale-while-revalidate}
* request strategy.
*
* Resources are requested from both the cache and the network in parallel.
* The strategy will respond with the cached version if available, otherwise
* wait for the network response. The cache is updated with the network response
* with each successful request.
*
* By default, this strategy will cache responses with a 200 status code as
* well as [opaque responses]{@link https://developers.google.com/web/tools/workbox/guides/handle-third-party-requests}.
* Opaque responses are are cross-origin requests where the response doesn't
* support [CORS]{@link https://enable-cors.org/}.
*
* @memberof workbox.strategies
*/
class StaleWhileRevalidate {
    /**
     * @param {Object} options
     * @param {string} options.cacheName Cache name to store and retrieve
     * requests. Defaults to cache names provided by
     * [workbox-core]{@link workbox.core.cacheNames}.
     * @param {Array<Object>} options.plugins [Plugins]{@link https://developers.google.com/web/tools/workbox/guides/using-plugins}
     * to use in conjunction with this caching strategy.
     * @param {Object} options.fetchOptions Values passed along to the
     * [`init`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch#Parameters)
     * of all fetch() requests made by this strategy.
     * @param {Object} options.matchOptions [`CacheQueryOptions`](https://w3c.github.io/ServiceWorker/#dictdef-cachequeryoptions)
     */
    constructor(options = {}) {
        this._cacheName = cacheNames_mjs.cacheNames.getRuntimeName(options.cacheName);
        // (Removed a dead `this._plugins = options.plugins || [];` assignment
        // that was immediately overwritten by the branch below.)
        if (options.plugins) {
            // Guarantee a cacheWillUpdate plugin is present: if the caller did
            // not supply one, prepend the default ok/opaque filter.
            let isUsingCacheWillUpdate = options.plugins.some(plugin => !!plugin.cacheWillUpdate);
            this._plugins = isUsingCacheWillUpdate ? options.plugins : [cacheOkAndOpaquePlugin, ...options.plugins];
        } else {
            // No plugins passed in, use the default plugin.
            this._plugins = [cacheOkAndOpaquePlugin];
        }
        this._fetchOptions = options.fetchOptions || null;
        this._matchOptions = options.matchOptions || null;
    }
    /**
     * This method will perform a request strategy and follows an API that
     * will work with the
     * [Workbox Router]{@link workbox.routing.Router}.
     *
     * @param {Object} input
     * @param {FetchEvent} input.event The fetch event to run this strategy
     * against.
     * @return {Promise<Response>}
     */
    handle({ event }) {
        var _this = this;
        return babelHelpers.asyncToGenerator(function* () {
            {
                assert_mjs.assert.isInstance(event, FetchEvent, {
                    moduleName: 'workbox-strategies',
                    className: 'StaleWhileRevalidate',
                    funcName: 'handle',
                    paramName: 'event'
                });
            }
            // Delegate to makeRequest so router- and standalone-usage share
            // one code path.
            return _this.makeRequest({
                event,
                request: event.request
            });
        })();
    }
    /**
     * This method can be used to perform a make a standalone request outside the
     * context of the [Workbox Router]{@link workbox.routing.Router}.
     *
     * See "[Advanced Recipes](https://developers.google.com/web/tools/workbox/guides/advanced-recipes#make-requests)"
     * for more usage information.
     *
     * @param {Object} input
     * @param {Request|string} input.request Either a
     * [`Request`]{@link https://developer.mozilla.org/en-US/docs/Web/API/Request}
     * object, or a string URL, corresponding to the request to be made.
     * @param {FetchEvent} [input.event] If provided, `event.waitUntil()` will be
     * called automatically to extend the service worker's lifetime.
     * @return {Promise<Response>}
     */
    makeRequest({ event, request }) {
        var _this2 = this;
        return babelHelpers.asyncToGenerator(function* () {
            const logs = [];
            // Convenience: accept a URL string and normalize it to a Request.
            if (typeof request === 'string') {
                request = new Request(request);
            }
            {
                // Fixed: this assertion previously mis-reported funcName 'handle'.
                assert_mjs.assert.isInstance(request, Request, {
                    moduleName: 'workbox-strategies',
                    className: 'StaleWhileRevalidate',
                    funcName: 'makeRequest',
                    paramName: 'request'
                });
            }
            // Kick off the revalidation fetch immediately; it runs regardless
            // of whether the cache lookup below hits.
            const fetchAndCachePromise = _this2._getFromNetwork(request, event);
            let response = yield cacheWrapper_mjs.cacheWrapper.match(_this2._cacheName, request, _this2._matchOptions, _this2._plugins);
            if (response) {
                {
                    logs.push(`Found a cached response in the '${_this2._cacheName}'` + ` cache. Will update with the network response in the background.`);
                }
                if (event) {
                    try {
                        // Keep the SW alive while the background update completes.
                        event.waitUntil(fetchAndCachePromise);
                    } catch (error) {
                        {
                            logger_mjs.logger.warn(`Unable to ensure service worker stays alive when ` + `updating cache for '${getFriendlyURL_mjs.getFriendlyURL(event.request.url)}'.`);
                        }
                    }
                }
            } else {
                {
                    logs.push(`No response found in the '${_this2._cacheName}' cache. ` + `Will wait for the network response.`);
                }
                // Cache miss: block on the already-started network request.
                response = yield fetchAndCachePromise;
            }
            {
                logger_mjs.logger.groupCollapsed(messages.strategyStart('StaleWhileRevalidate', request));
                for (let log of logs) {
                    logger_mjs.logger.log(log);
                }
                messages.printFinalResponse(response);
                logger_mjs.logger.groupEnd();
            }
            return response;
        })();
    }
    /**
     * Fetches from the network and writes a clone of the response into the
     * configured cache.
     *
     * @param {Request} request
     * @param {FetchEvent} [event]
     * @return {Promise<Response>}
     *
     * @private
     */
    _getFromNetwork(request, event) {
        var _this3 = this;
        return babelHelpers.asyncToGenerator(function* () {
            const response = yield fetchWrapper_mjs.fetchWrapper.fetch(request, _this3._fetchOptions, _this3._plugins, event ? event.preloadResponse : undefined);
            const cachePutPromise = cacheWrapper_mjs.cacheWrapper.put(_this3._cacheName, request, response.clone(), _this3._plugins);
            if (event) {
                try {
                    // Extend the SW lifetime until the cache write finishes.
                    event.waitUntil(cachePutPromise);
                } catch (error) {
                    {
                        logger_mjs.logger.warn(`Unable to ensure service worker stays alive when ` + `updating cache for '${getFriendlyURL_mjs.getFriendlyURL(event.request.url)}'.`);
                    }
                }
            }
            return response;
        })();
    }
}
/*
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Frozen namespace object exposing the strategy classes as the module's
// public API (merged into the default export below).
var publicAPI = /*#__PURE__*/Object.freeze({
    CacheFirst,
    CacheOnly,
    NetworkFirst,
    NetworkOnly,
    StaleWhileRevalidate
});
/*
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* @function workbox.strategies.cacheFirst
* @param {workbox.strategies.StrategyOptions} options
*/
/**
* @function workbox.strategies.cacheOnly
* @param {workbox.strategies.StrategyOptions} options
*/
/**
* @function workbox.strategies.networkFirst
* @param {workbox.strategies.StrategyOptions} options
*/
/**
* @function workbox.strategies.networkOnly
* @param {workbox.strategies.StrategyOptions} options
*/
/**
* @function workbox.strategies.staleWhileRevalidate
* @param {workbox.strategies.StrategyOptions} options
*/
// Maps the lowerCamelCase convenience-factory names to their strategy classes.
const mapping = {
    cacheFirst: CacheFirst,
    cacheOnly: CacheOnly,
    networkFirst: NetworkFirst,
    networkOnly: NetworkOnly,
    staleWhileRevalidate: StaleWhileRevalidate
};
const defaultExport = {};
Object.keys(mapping).forEach(keyName => {
    // Each factory builds a fresh strategy instance from the given options.
    defaultExport[keyName] = (options = {}) => {
        const StrategyClass = mapping[keyName];
        // Shallow-copy the options so the constructor never holds a reference
        // to (or is affected by later mutation of) the caller's object.
        // (The original `Object.assign(options)` was a no-op: with a single
        // argument, Object.assign returns that same object unchanged.)
        return new StrategyClass(Object.assign({}, options));
    };
});
/*
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const finalExport = Object.assign(defaultExport, publicAPI);
return finalExport;
}(workbox.core._private,workbox.core._private,workbox.core._private,workbox.core._private,workbox.core._private,workbox.core._private));
//# sourceMappingURL=workbox-strategies.dev.js.map
| {
"content_hash": "32d8bf42d690f309530ecc840d277758",
"timestamp": "",
"source": "github",
"line_count": 1086,
"max_line_length": 174,
"avg_line_length": 35.451197053407,
"alnum_prop": 0.6292987012987012,
"repo_name": "barrabinfc/react-iching",
"id": "e14c1ac71a660cf8b8b29a9ac6fe850361b0f9f3",
"size": "39109",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/assets/wb-assets/workbox-v3.4.1/workbox-strategies.dev.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "442477"
},
{
"name": "HTML",
"bytes": "38996"
},
{
"name": "Java",
"bytes": "257278"
},
{
"name": "JavaScript",
"bytes": "60716"
},
{
"name": "Makefile",
"bytes": "1296"
},
{
"name": "Objective-C",
"bytes": "138266"
},
{
"name": "Python",
"bytes": "343649"
},
{
"name": "Swift",
"bytes": "217951"
}
],
"symlink_target": ""
} |
// Window id of the frame's toolbar.
const int ID_TOOLBAR = 500;
// Default style bits used when (re)creating the toolbar.
static const long TOOLBAR_STYLE = wxTB_FLAT | wxTB_DOCKABLE | wxTB_TEXT;
// Command ids for the frame's menus and toolbar controls.
enum
{
    IDM_VIEW_TOOLBARSHOW = 200,
    // File menu items
    IDM_FILE_SAVE,
    IDM_FILE_OPEN,
    // Edit menu items
    IDM_EDIT_UNDO,
    IDM_EDIT_COPY,
    IDM_EDIT_PASTE,
    IDM_EDIT_CUT,
    IDM_EDIT_DELETE,
    // View menu items
    IDM_VIEWMENU_CALLBACK1,
    IDM_VIEWMENU_CALLBACK2,
    IDM_VIEWMENU_CALLBACK3,
    IDM_VIEW_COLOUR,
    IDM_VIEWMENU_STOPRENDER,
    IDM_VIEWMENU_STARTRENDER,
    IDM_VIEWMENU_CHANGERESOLUTION,
    IDM_VIEWMENU_PROPERTY,
    IDM_TOOLBAR_OTHER_1,
    IDM_TOOLBAR_OTHER_2,
    IDM_TOOLBAR_OTHER_3,
    // Tool menu items
    IDM_TOOLMENU_CREATEMAP,
    IDM_TOOLMENU_CHANGELIGHT,
    IDM_TOOLMENU_CALLBACK1,
    IDM_TOOLMENU_CALLBACK2,
    IDM_TOOLMENU_CALLBACK3,
    IDM_TOOLMENU_CALLBACK4,
    IDM_TOOLMENU_CALLBACK5,
    IDM_TOOLMENU_CALLBACK6,
    IDM_TOOLMENU_CALLBACK7,
    // Id of the renderer-selection combo box placed on the toolbar.
    ID_COMBO = 1000
};
// Static event table binding frame events and command ids to TFrame handlers.
BEGIN_EVENT_TABLE(TFrame, wxFrame)
// Frame lifecycle events.
EVT_SIZE(TFrame::OnSize)
EVT_CLOSE(TFrame::OnClose)
// File menu.
EVT_MENU(IDM_FILE_OPEN, TFrame::OnFileOpen)
EVT_MENU(IDM_FILE_SAVE, TFrame::OnFileSave)
EVT_MENU(wxID_EXIT, TFrame::OnQuit)
// Edit menu.
EVT_MENU(IDM_EDIT_UNDO, TFrame::OnUndo)
EVT_MENU(IDM_EDIT_COPY, TFrame::OnCopy)
EVT_MENU(IDM_EDIT_PASTE, TFrame::OnPaste)
EVT_MENU(IDM_EDIT_CUT, TFrame::OnCut)
EVT_MENU(IDM_EDIT_DELETE, TFrame::OnDelete)
EVT_MENU(wxID_HELP, TFrame::OnAbout)
// View menu.
EVT_MENU(IDM_VIEW_TOOLBARSHOW, TFrame::OnShowToolbar)
EVT_MENU_RANGE(IDM_VIEWMENU_STOPRENDER, IDM_VIEWMENU_PROPERTY, TFrame::OnStopRender)
// Tool menu.
EVT_MENU(IDM_TOOLMENU_CREATEMAP, TFrame::OnCreateMap)
EVT_MENU(IDM_TOOLMENU_CHANGELIGHT, TFrame::OnDeletePrint)
EVT_MENU(IDM_TOOLMENU_CALLBACK1, TFrame::OnInsertPrint)
EVT_MENU(IDM_TOOLMENU_CALLBACK2, TFrame::OnToggleHelp)
EVT_MENU(IDM_TOOLMENU_CALLBACK3, TFrame::OnToggleSearch)
EVT_MENU_RANGE(IDM_TOOLMENU_CALLBACK4, IDM_TOOLMENU_CALLBACK6,TFrame::OnToggleRadioBtn)
EVT_MENU(IDM_TOOLMENU_CALLBACK7, TFrame::OnChangeToolTip)
EVT_MENU_RANGE(IDM_VIEWMENU_CALLBACK1, IDM_VIEWMENU_CALLBACK3,TFrame::OnToolbarStyle)
EVT_MENU(IDM_VIEW_COLOUR, TFrame::OnBackgroundColour)
// Toolbar interaction. NOTE(review): the wxID_ANY entries act as catch-alls
// and must stay after the specific EVT_MENU entries above — event tables are
// matched top to bottom.
EVT_MENU(wxID_ANY, TFrame::OnToolLeftClick)
EVT_COMBOBOX(ID_COMBO, TFrame::OnCombo)
EVT_TOOL_RCLICKED(wxID_ANY, TFrame::OnToolRightClick)
EVT_TOOL_DROPDOWN(wxID_ANY, TFrame::OnToolDropdown)
// UI-update handlers (enable/disable and check state).
EVT_UPDATE_UI(wxID_COPY, TFrame::OnUpdateCopyAndCut)
EVT_UPDATE_UI(wxID_CUT, TFrame::OnUpdateCopyAndCut)
EVT_UPDATE_UI_RANGE(IDM_TOOLMENU_CALLBACK4,IDM_TOOLMENU_CALLBACK6,TFrame::OnUpdateToggleRadioBtn)
END_EVENT_TABLE()
// Destroys the current toolbar (if any) and creates a new one whose style
// reflects the frame's current settings (position, tooltips, text layout).
void TFrame::RecreateToolbar()
{
    wxToolBarBase *toolBar = GetToolBar();
    // Start from the old toolbar's style so user-visible flags are preserved.
    long style = toolBar ? toolBar->GetWindowStyle() : TOOLBAR_STYLE;
    // If the search tool was previously removed from the toolbar, re-attach it
    // so that `delete toolBar` below destroys it as well.
    if (toolBar && m_searchTool && m_searchTool->GetToolBar() == NULL)
    {
        toolBar->AddTool(m_searchTool);
    }
    m_searchTool = NULL;
    delete toolBar;
    SetToolBar(NULL);
    // Clear all orientation/layout bits before applying the current position.
    style &= ~(wxTB_HORIZONTAL | wxTB_VERTICAL | wxTB_BOTTOM | wxTB_RIGHT | wxTB_HORZ_LAYOUT);
    switch( m_toolbarPosition )
    {
        case TOOLBAR_LEFT:
            style |= wxTB_LEFT;
            break;
        case TOOLBAR_TOP:
            style |= wxTB_TOP;
            break;
        case TOOLBAR_RIGHT:
            style |= wxTB_RIGHT;
            break;
        case TOOLBAR_BOTTOM:
            style |= wxTB_BOTTOM;
            break;
    }
    // Apply the tooltip preference.
    if ( m_showTooltips )
        style &= ~wxTB_NO_TOOLTIPS;
    else
        style |= wxTB_NO_TOOLTIPS;
    // Text-beside-icon layout only makes sense when both text and icons show.
    if ( style & wxTB_TEXT && !(style & wxTB_NOICONS) && m_horzText )
        style |= wxTB_HORZ_LAYOUT;
    toolBar = CreateToolBar(style, ID_TOOLBAR);
    PopulateToolbar(toolBar);
}
void TFrame::PopulateToolbar(wxToolBarBase* toolBar)
{
int w = 64;
int h = 64;
toolBar->SetToolBitmapSize(wxSize(w, h));
wxBitmap NEW(wxT("Resource\\Editor\\New.png"), wxBITMAP_TYPE_PNG);
wxBitmap NEWEX(NEW.ConvertToImage().Scale(w, h));
toolBar->AddTool(wxID_NEW, wxT("н¨"), NEWEX);
wxBitmap OPEN(wxT("Resource\\Editor\\Open.png"), wxBITMAP_TYPE_PNG);
wxBitmap OPENEX(OPEN.ConvertToImage().Scale(w, h));
toolBar->AddTool(wxID_OPEN, wxT("´ò¿ª"), OPENEX);
wxBitmap SAVE(wxT("Resource\\Editor\\Save.png"), wxBITMAP_TYPE_PNG);
wxBitmap SAVEEX(SAVE.ConvertToImage().Scale(w, h));
toolBar->AddTool(wxID_SAVE, wxT("±£´æ"), SAVEEX);
wxBitmap COPY(wxT("Resource\\Editor\\Copy.png"), wxBITMAP_TYPE_PNG);
wxBitmap COPYEX(COPY.ConvertToImage().Scale(w, h));
toolBar->AddTool(wxID_COPY, wxT("¸´ÖÆ"), COPYEX);
wxBitmap CUT(wxT("Resource\\Editor\\Cut.png"), wxBITMAP_TYPE_PNG);
wxBitmap CUTEX(CUT.ConvertToImage().Scale(w, h));
toolBar->AddTool(wxID_CUT, wxT("¼ôÇÐ"), CUTEX);
wxBitmap PASTE(wxT("Resource\\Editor\\Paint.png"), wxBITMAP_TYPE_PNG);
wxBitmap PASTEEX(PASTE.ConvertToImage().Scale(w, h));
toolBar->AddTool(wxID_PASTE, wxT("Õ³Ìù"), PASTEEX);
wxBitmap PRINT(wxT("Resource\\Editor\\tool.png"), wxBITMAP_TYPE_PNG);
wxBitmap PRINTEX(PRINT.ConvertToImage().Scale(w, h));
toolBar->AddTool(wxID_PRINT, wxT("¹¤¾ßÏä"), PRINTEX);
if ( !toolBar->IsVertical() )
{
wxComboBox *combo = new wxComboBox(toolBar, ID_COMBO, wxEmptyString, wxDefaultPosition, wxSize(100,-1) );
combo->Append(wxT("Direct3D 9"));
combo->Append(wxT("Direct3D 10"));
combo->Append(wxT("Direct3D 11"));
combo->Append(wxT("OpenGL 2.0"));
combo->Append(wxT("OpenGL 3.2"));
toolBar->AddControl(combo, wxT(""));
}
toolBar->Realize();
toolBar->SetRows(toolBar->IsVertical() ? toolBar->GetToolsCount() / m_rows: m_rows);
}
// Main editor frame: builds the menu bar, status bar, toolbar, the embedded
// render view and a log window, and redirects wxLog output to the latter.
TFrame::TFrame(wxFrame* parent,const wxString& title):wxFrame(parent,wxID_ANY,title,wxDefaultPosition, wxDefaultSize, wxDEFAULT_FRAME_STYLE|wxCLIP_CHILDREN|wxNO_FULL_REPAINT_ON_RESIZE)
{
    wxBoxSizer* mainsizer;
    mainsizer = new wxBoxSizer(wxVERTICAL);
    wxPanel* container = new wxPanel(this, wxID_ANY);
    container->SetSizer(mainsizer);
    // Default toolbar appearance flags consumed by RecreateToolbar().
    m_smallToolbar = true;
    m_horzText = false;
    m_useCustomDisabled = false;
    m_showTooltips = true;
    m_searchTool = NULL;
    m_rows = 1;
    CreateStatusBar();
    wxIcon icon;
    icon.CopyFromBitmap(wxImage(wxT("Resource\\Editor\\Editor.ico")));
    this->SetIcon(icon);
    // Embedded 3D render view; initial size is fixed, final layout is
    // handled by the sizer below.
    RenderSection = new TRenderSection((wxFrame *)container);
    RenderSection->SetSize(wxSize(1024,800));
    // Multi-line text control that receives wxLog output.
    m_LogWin = new wxTextCtrl(container, 105, wxEmptyString, wxPoint(0, 900), wxSize(600, 60), wxTE_MULTILINE);
    wxLogTextCtrl* logger = new wxLogTextCtrl(m_LogWin);
    // SetActiveTarget() is a static member called via the instance; the
    // previous log target is kept in m_LogOld.
    // NOTE(review): m_LogOld is never restored in the visible code — confirm.
    m_LogOld = logger->SetActiveTarget(logger);
    logger->DisableTimestamp();
    // "File" menu. (Menu label literals use the file's non-UTF-8 encoding.)
    wxMenu *FileMenu = new wxMenu;
    FileMenu->Append(IDM_FILE_OPEN, wxT("´ò¿ª\tCtrl-O"));
    FileMenu->Append(IDM_FILE_SAVE,wxT("&±£´æ\tCtrl-S"),wxT("±£´æ"));
    FileMenu->AppendSeparator();
    FileMenu->Append(wxID_EXIT, wxT("Í˳ö"), wxT("Í˳ö±à¼Æ÷") );
    // "Edit" menu.
    wxMenu *EditMenu = new wxMenu;
    EditMenu->Append(IDM_EDIT_UNDO, wxT("³·Ïú\tCtrl-Z"));
    EditMenu->Append(IDM_EDIT_COPY, wxT("¸´ÖÆ\tCtrl-C"));
    EditMenu->Append(IDM_EDIT_PASTE, wxT("Õ³Ìù\tCtrl-P"));
    EditMenu->Append(IDM_EDIT_CUT, wxT("¼ôÇÐ\tCtrl-X"));
    EditMenu->Append(IDM_EDIT_DELETE, wxT("ɾ³ý\tCtrl-D"));
    // "View" menu.
    wxMenu *ViewMenu = new wxMenu;
    ViewMenu->AppendSeparator();
    ViewMenu->AppendCheckItem(IDM_VIEW_TOOLBARSHOW, wxT("ÏÔʾ¹¤¾ßÀ¸"));
    ViewMenu->Append(IDM_VIEW_COLOUR, wxT("Ñ¡Ôñ±³¾°ÑÕÉ«"));
    ViewMenu->Append(IDM_VIEWMENU_STARTRENDER, wxT("Çå³ý³¡¾°"));
    ViewMenu->Append(IDM_VIEWMENU_STOPRENDER, wxT("Í£Ö¹äÖȾ"));
    ViewMenu->Append(IDM_VIEWMENU_CHANGERESOLUTION, wxT("Ð޸ķֱæÂÊ"));
    ViewMenu->Append(IDM_VIEWMENU_PROPERTY, wxT("¶ÔÏóÊôÐÔ"));
    ViewMenu->Append(IDM_VIEWMENU_CALLBACK1, wxT("ÊÓͼ±£Áô²Ëµ¥"));
    ViewMenu->Append(IDM_VIEWMENU_CALLBACK2, wxT("ÊÓͼ±£Áô²Ëµ¥"));
    ViewMenu->Append(IDM_VIEWMENU_CALLBACK3, wxT("ÊÓͼ±£Áô²Ëµ¥"));
    ViewMenu->AppendSeparator();
    // "Tool" menu; CALLBACK1..7 are reserved placeholder entries.
    wxMenu *ToolMenu = new wxMenu;
    ToolMenu->AppendSeparator();
    ToolMenu->Append(IDM_TOOLMENU_CREATEMAP, wxT("&´´½¨µØͼ"));
    ToolMenu->Append(IDM_TOOLMENU_CHANGELIGHT, wxT("&Ð޸ĹâÕÕ"));
    ToolMenu->Append(IDM_TOOLMENU_CALLBACK1, wxT("¹¤¾ß±£Áô²Ëµ¥"));
    ToolMenu->Append(IDM_TOOLMENU_CALLBACK2, wxT("¹¤¾ß±£Áô²Ëµ¥"));
    ToolMenu->Append(IDM_TOOLMENU_CALLBACK3, wxT("¹¤¾ß±£Áô²Ëµ¥"));
    ToolMenu->AppendSeparator();
    ToolMenu->Append(IDM_TOOLMENU_CALLBACK4, wxT("¹¤¾ß±£Áô²Ëµ¥"));
    ToolMenu->Append(IDM_TOOLMENU_CALLBACK5, wxT("¹¤¾ß±£Áô²Ëµ¥"));
    ToolMenu->Append(IDM_TOOLMENU_CALLBACK6, wxT("¹¤¾ß±£Áô²Ëµ¥"));
    ToolMenu->Append(IDM_TOOLMENU_CALLBACK7, wxT("¹¤¾ß±£Áô²Ëµ¥"));
    ToolMenu->AppendSeparator();
    // "Help" menu.
    wxMenu* HelpMenu = new wxMenu;
    HelpMenu->Append(wxID_HELP, wxT("&¹ØÓÚ"), wxT("ÓÎÏ·±à¼Æ÷ÐÅÏ¢"));
    wxMenuBar* menuBar = new wxMenuBar(wxMB_DOCKABLE);
    menuBar->Append(FileMenu, wxT("&Îļþ"));
    menuBar->Append(EditMenu, wxT("&±à¼"));
    menuBar->Append(ViewMenu, wxT("&ÊÓͼ"));
    menuBar->Append(ToolMenu, wxT("&¹¤¾ß"));
    menuBar->Append(HelpMenu, wxT("&°ïÖú"));
    this->SetMenuBar(menuBar);
    m_toolbarPosition = TOOLBAR_TOP;
    RecreateToolbar();
    // Render view gets 9/10 of the vertical space, the log window the rest.
    mainsizer->Add(RenderSection,9, wxEXPAND,3);
    mainsizer->Add(m_LogWin, 1, wxEXPAND,3);
    this->SetSize(1440,900);
    this->Fit();
    this->Center();
}
// Destructor: re-attach a detached search tool (if any) so the toolbar
// destroys it together with its other tools instead of leaking it.
TFrame::~TFrame()
{
    // Guard GetToolBar(): the toolbar may have been deleted via
    // OnShowToolbar(), in which case AddTool would dereference NULL.
    wxToolBar* tbar = GetToolBar();
    if ( m_searchTool && !m_searchTool->GetToolBar() && tbar )
    {
        tbar->AddTool(m_searchTool);
    }
}
// Stub — no manual child layout is performed; sizers handle everything.
void TFrame::LayoutChildren()
{
}
// Resize handler: run the default processing first, then notify the editor
// backend of the geometry change with a zero command code.
void TFrame::OnSize(wxSizeEvent& event)
{
    event.Skip();

    int resizeNotice = 0;
    SendEditorCommand(&resizeNotice, sizeof(resizeNotice));
}
// View->"show toolbar" toggle: create the toolbar if absent, otherwise
// destroy it.
void TFrame::OnShowToolbar(wxCommandEvent& WXUNUSED(event))
{
    wxToolBar *tbar = GetToolBar();
    if (!tbar ){
        RecreateToolbar();
    }
    else{
        delete tbar;
        // Clear the frame's toolbar pointer, as RecreateToolbar() does.
        // Without this, GetToolBar() keeps returning the freed object and
        // the next toggle would delete it a second time (use-after-free).
        SetToolBar(NULL);
    }
    printf("OnShowToolbar\n");
}
// File->Save handler: stub — only prints a trace for now.
void TFrame::OnFileSave(wxCommandEvent& WXUNUSED(event))
{
    printf("±£´æ\n");
}
// Tool->"create map": sends command code 1 plus the render window's native
// handle to the editor backend.
// NOTE(review): GetHWND() is truncated to int — on 64-bit builds a window
// handle may not fit in 32 bits; confirm the backend protocol.
void TFrame::OnCreateMap(wxCommandEvent& event)
{
    int Buffer[2]={1,(int)RenderSection->GetHWND()};
    SendEditorCommand((void*)Buffer,sizeof(int)*2);
}
// View menu range handler: sends command code 2 to the editor backend.
void TFrame::OnStopRender(wxCommandEvent& event)
{
    int command = 2;
    SendEditorCommand(&command, sizeof(command));
}
// File->Exit handler: close the frame (force-close, bypassing veto).
void TFrame::OnQuit(wxCommandEvent& WXUNUSED(event))
{
    Close(true);
}
// Help->About handler: shows a simple message box.
void TFrame::OnAbout(wxCommandEvent& event)
{
    (void)wxMessageBox(wxT("3DÓÎÏ·±à¼Æ÷"), wxT("¹ØÓÚ"));
}
// Tool left-click (wired as EVT_MENU(wxID_ANY, ...)): stub.
void TFrame::OnToolLeftClick(wxCommandEvent& event)
{
}
// Tool right-click (wired as EVT_TOOL_RCLICKED(wxID_ANY, ...)): stub.
void TFrame::OnToolRightClick(wxCommandEvent& event)
{
}
// Renderer-selection combo changed: echo the chosen entry to the status bar.
void TFrame::OnCombo(wxCommandEvent& event)
{
    wxLogStatus(wxT("äÖȾÒýÇæ:%s"), event.GetString().c_str());
}
// Stub — no implementation yet.
void TFrame::DoEnablePrint()
{
}
// Stub — no implementation yet.
void TFrame::DoDeletePrint()
{
}
// Stub — no implementation yet.
void TFrame::DoToggleHelp()
{
}
// Tool menu CALLBACK3 handler: stub, traces only.
void TFrame::OnToggleSearch(wxCommandEvent& WXUNUSED(event))
{
    printf("OnToggleSearch\n");
}
// UI-update handler for the Copy/Cut items: currently a no-op.
void TFrame::OnUpdateCopyAndCut(wxUpdateUIEvent& event)
{
}
// UI-update handler for the horizontal-text toggle: stub, traces only.
void TFrame::OnUpdateToggleHorzText(wxUpdateUIEvent& event)
{
    printf("OnUpdateToggleHorzText\n");
}
// Tool menu CALLBACK7 handler: changes the tooltip of the "New" button.
void TFrame::OnChangeToolTip(wxCommandEvent& WXUNUSED(event))
{
    // The toolbar can be absent (toggled off in OnShowToolbar), so guard
    // against dereferencing a NULL pointer.
    wxToolBar* tbar = GetToolBar();
    if (tbar)
    {
        tbar->SetToolShortHelp(wxID_NEW, wxT("New toolbar button"));
    }
}
// View menu CALLBACK1..3 range handler: stub, traces only.
void TFrame::OnToolbarStyle(wxCommandEvent& event)
{
    printf("OnToolbarStyle\n");
}
// View->"choose background colour": opens a colour picker seeded with the
// toolbar's background colour.
void TFrame::OnBackgroundColour(wxCommandEvent& WXUNUSED(event))
{
    // The toolbar can be absent (toggled off in OnShowToolbar); fall back
    // to the frame's own background colour instead of dereferencing NULL.
    wxToolBar* tbar = GetToolBar();
    wxColour initial = tbar ? tbar->GetBackgroundColour()
                            : GetBackgroundColour();
    wxColour col = wxGetColourFromUser(this, initial, "Ñ¡Ôñ±³¾°ÑÕÉ«");
    // NOTE(review): the chosen colour is currently discarded — presumably it
    // should be applied somewhere; confirm intended behaviour.
    wxUnusedVar(col);
}
// File->Open handler: lets the user pick an editor file. Only the chosen
// path is stored (m_pathBmp) — nothing is loaded here.
void TFrame::OnFileOpen(wxCommandEvent& WXUNUSED(event))
{
    m_pathBmp = wxLoadFileSelector("Editor File", "");
}
// Tool menu CALLBACK1 handler: stub — no implementation yet.
void TFrame::OnInsertPrint(wxCommandEvent& WXUNUSED(event))
{
}
// Radio-style entries CALLBACK4..6 toggled: stub, traces only.
void TFrame::OnToggleRadioBtn(wxCommandEvent& event)
{
    printf("OnToggleRadioBtn\n");
}
// Dropdown arrow clicked on a tool: no custom menu, defer to default
// handling via Skip().
void TFrame::OnToolDropdown(wxCommandEvent& event)
{
    event.Skip();
}
// Editor menu callbacks (Edit-menu command handlers)
// Edit->Undo: stub, traces only.
void TFrame::OnUndo(wxCommandEvent& event)
{
    printf("OnUndo\n");
}
// Edit->Copy: stub, traces only.
void TFrame::OnCopy(wxCommandEvent& event)
{
    printf("OnCopy\n");
}
// Edit->Paste: stub, traces only.
void TFrame::OnPaste(wxCommandEvent& event)
{
    printf("OnPaste\n");
}
// Edit->Cut: stub, traces only.
void TFrame::OnCut(wxCommandEvent& event)
{
    printf("OnCut\n");
}
// Edit->Delete: stub, traces only.
void TFrame::OnDelete(wxCommandEvent& event)
{
    printf("OnDelete\n");
}
// Frame close handler: sends command code 5 ("quit", per the trace below)
// to the editor backend, then runs the default wxFrame close processing.
void TFrame::OnClose(wxCloseEvent& event)
{
    printf("Send Quit Message!!!\n");
    int Buffer=5;
    SendEditorCommand(&Buffer,sizeof(int));
    wxFrame::OnCloseWindow(event);
}
| {
"content_hash": "842d3749de7343c37766ce3977593c7e",
"timestamp": "",
"source": "github",
"line_count": 442,
"max_line_length": 184,
"avg_line_length": 28.38914027149321,
"alnum_prop": 0.6585910105196047,
"repo_name": "NaughtyCode/Editor",
"id": "9eeb9196e474dc310ee4193d0f8d9235fd3dabe7",
"size": "12591",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "netcode/TFrame.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5754"
},
{
"name": "C++",
"bytes": "68290"
}
],
"symlink_target": ""
} |
// Package entry point: re-export each helper so consumers can import
// everything from a single module.
export {find} from "./find"
export {bind} from "./bind"
export {defer} from "./defer"
export {filter} from "./filter"
export {expand} from "./expand"
export {flatten} from "./flatten"
// Logging exposes several names; re-export them all.
export * from "./logging"
| {
"content_hash": "bf0b37819441e6bcc905556e52f51c22",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 33,
"avg_line_length": 30,
"alnum_prop": 0.6714285714285714,
"repo_name": "bucaran/fly-util",
"id": "7e8c0a13bd60a1cab7191dda21ec7a11b3d086d5",
"size": "210",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "9306"
}
],
"symlink_target": ""
} |
#pragma once
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <v4r/common/normal_estimator.h>
#include <v4r/core/macros.h>
namespace v4r
{

/**
 * @author Thomas Faeulhammer ([email protected])
 * @date July, 2015
 * @brief collection of methods for normal computation of point clouds
 */
DEPRECATED(
template<typename PointT>
V4R_EXPORTS
void computeNormals(const typename pcl::PointCloud<PointT>::ConstPtr &cloud,
                    pcl::PointCloud<pcl::Normal>::Ptr &normals,
                    int method=2, float radius=0.02f) );

/**
 * @brief creates a normal estimator selected by a numeric method id and
 *        configured from string parameters.
 * NOTE(review): valid method ids and parameter formats are defined in
 * normal_estimator.h (not visible here) — confirm before use.
 */
template<typename PointT>
typename NormalEstimator<PointT>::Ptr
initNormalEstimator(int method, std::vector<std::string> &params);

}
| {
"content_hash": "95d24c8980e129639e7444efb1501656",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 76,
"avg_line_length": 23.193548387096776,
"alnum_prop": 0.6981919332406119,
"repo_name": "ThoMut/v4r",
"id": "9c3ce83eb254a0e9709c763411fb719c744a5d9a",
"size": "1998",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/common/include/v4r/common/normals.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "6915"
},
{
"name": "C++",
"bytes": "4416785"
},
{
"name": "CMake",
"bytes": "187357"
},
{
"name": "Objective-C",
"bytes": "255"
},
{
"name": "Shell",
"bytes": "5165"
}
],
"symlink_target": ""
} |
-- Widens FUNDING_SOURCE_TYPE_CODE to VARCHAR2(3) on both tables while
-- preserving data and the foreign-key relationship between them.

-- Drop the FK first so the referenced PK column can be rebuilt.
ALTER TABLE PROTOCOL_FUNDING_SOURCE DROP CONSTRAINT FK_PROTOCOL_FUNDING_SOURCE2;
ALTER TABLE PROTOCOL_FUNDING_SOURCE MODIFY (FUNDING_SOURCE_TYPE_CODE VARCHAR2(3));
-- These must be run together to avoid errors --
-- ALTER TABLE FUNDING_SOURCE_TYPE MODIFY (FUNDING_SOURCE_TYPE_CODE VARCHAR2(3)); --
-- The column is rebuilt through a temporary column (copy out, drop,
-- re-add at the new size, copy back) instead of a direct MODIFY.
ALTER TABLE FUNDING_SOURCE_TYPE
ADD (FUNDING_SOURCE_TYPE_CODE_TEMP VARCHAR2(3));
UPDATE FUNDING_SOURCE_TYPE
SET FUNDING_SOURCE_TYPE_CODE_TEMP = FUNDING_SOURCE_TYPE_CODE;
ALTER TABLE FUNDING_SOURCE_TYPE
DROP COLUMN FUNDING_SOURCE_TYPE_CODE;
ALTER TABLE FUNDING_SOURCE_TYPE
ADD (FUNDING_SOURCE_TYPE_CODE VARCHAR2(3));
UPDATE FUNDING_SOURCE_TYPE
SET FUNDING_SOURCE_TYPE_CODE = FUNDING_SOURCE_TYPE_CODE_TEMP;
ALTER TABLE FUNDING_SOURCE_TYPE
DROP COLUMN FUNDING_SOURCE_TYPE_CODE_TEMP;
-- Restore the primary key and the foreign key dropped above.
ALTER TABLE FUNDING_SOURCE_TYPE
ADD PRIMARY KEY (FUNDING_SOURCE_TYPE_CODE);
ALTER TABLE PROTOCOL_FUNDING_SOURCE
ADD CONSTRAINT FK_PROTOCOL_FUNDING_SOURCE2 FOREIGN KEY (FUNDING_SOURCE_TYPE_CODE)
REFERENCES FUNDING_SOURCE_TYPE (FUNDING_SOURCE_TYPE_CODE);
"content_hash": "2314f90d4ac32bfd8780a7cb991cbbac",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 84,
"avg_line_length": 45.91304347826087,
"alnum_prop": 0.7859848484848485,
"repo_name": "blackcathacker/kc.preclean",
"id": "92ccafac571d2a6127a3ef29a75b310c5f49a067",
"size": "1056",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "coeus-db/coeus-db-sql/src/main/resources/org/kuali/coeus/coeus-sql/RELEASE-SCRIPTS/KC-RELEASE-3_1_SP2-SCRIPT/ORACLE/TABLES/KC_TBL_FUNDING_SOURCE_TYPE.sql",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "96034"
},
{
"name": "Java",
"bytes": "27623677"
},
{
"name": "JavaScript",
"bytes": "749782"
},
{
"name": "Perl",
"bytes": "1278"
},
{
"name": "Scheme",
"bytes": "8283377"
},
{
"name": "Shell",
"bytes": "69314"
},
{
"name": "XSLT",
"bytes": "20298494"
}
],
"symlink_target": ""
} |
#ifndef __G_MARKUP_H__
#define __G_MARKUP_H__
#include <stdarg.h>
#include <glib/gerror.h>
G_BEGIN_DECLS
/* Error codes reported by the GMarkup parser (domain: G_MARKUP_ERROR). */
typedef enum
{
  G_MARKUP_ERROR_BAD_UTF8,
  G_MARKUP_ERROR_EMPTY,
  G_MARKUP_ERROR_PARSE,
  /* These three are primarily intended for specific GMarkupParser
   * implementations to set.
   */
  G_MARKUP_ERROR_UNKNOWN_ELEMENT,
  G_MARKUP_ERROR_UNKNOWN_ATTRIBUTE,
  G_MARKUP_ERROR_INVALID_CONTENT
} GMarkupError;

#define G_MARKUP_ERROR g_markup_error_quark ()

GQuark g_markup_error_quark (void);
/* Flags passed to g_markup_parse_context_new() to tune parsing. */
typedef enum
{
  G_MARKUP_DO_NOT_USE_THIS_UNSUPPORTED_FLAG = 1 << 0,
  G_MARKUP_TREAT_CDATA_AS_TEXT              = 1 << 1
} GMarkupParseFlags;

/* Opaque parse-context handle and the user callback table (below). */
typedef struct _GMarkupParseContext GMarkupParseContext;
typedef struct _GMarkupParser GMarkupParser;
/* User-supplied callback table invoked by a GMarkupParseContext while
 * parsing (see g_markup_parse_context_new()). */
struct _GMarkupParser
{
  /* Called for open tags <foo bar="baz"> */
  void (*start_element)  (GMarkupParseContext *context,
                          const gchar         *element_name,
                          const gchar        **attribute_names,
                          const gchar        **attribute_values,
                          gpointer             user_data,
                          GError             **error);

  /* Called for close tags </foo> */
  void (*end_element)    (GMarkupParseContext *context,
                          const gchar         *element_name,
                          gpointer             user_data,
                          GError             **error);

  /* Called for character data */
  /* text is not nul-terminated */
  void (*text)           (GMarkupParseContext *context,
                          const gchar         *text,
                          gsize                text_len,
                          gpointer             user_data,
                          GError             **error);

  /* Called for strings that should be re-saved verbatim in this same
   * position, but are not otherwise interpretable.  At the moment
   * this includes comments and processing instructions.
   */
  /* text is not nul-terminated. */
  void (*passthrough)    (GMarkupParseContext *context,
                          const gchar         *passthrough_text,
                          gsize                text_len,
                          gpointer             user_data,
                          GError             **error);

  /* Called on error, including one set by other
   * methods in the vtable. The GError should not be freed.
   */
  void (*error)          (GMarkupParseContext *context,
                          GError              *error,
                          gpointer             user_data);
};
/* Context lifecycle: create with a parser vtable, feed text incrementally
 * with parse(), finish with end_parse(), then free. */
GMarkupParseContext *g_markup_parse_context_new   (const GMarkupParser *parser,
                                                   GMarkupParseFlags    flags,
                                                   gpointer             user_data,
                                                   GDestroyNotify       user_data_dnotify);
void                 g_markup_parse_context_free  (GMarkupParseContext *context);
gboolean             g_markup_parse_context_parse (GMarkupParseContext *context,
                                                   const gchar         *text,
                                                   gssize               text_len,
                                                   GError             **error);
gboolean             g_markup_parse_context_end_parse (GMarkupParseContext *context,
                                                       GError             **error);
/* Name of the element currently being processed. */
G_CONST_RETURN gchar *g_markup_parse_context_get_element (GMarkupParseContext *context);

/* For user-constructed error messages, has no precise semantics */
void                 g_markup_parse_context_get_position (GMarkupParseContext *context,
                                                          gint                *line_number,
                                                          gint                *char_number);

/* useful when saving */
gchar* g_markup_escape_text (const gchar *text,
                             gssize       length);

gchar *g_markup_printf_escaped  (const char *format,
                                 ...) G_GNUC_PRINTF (1, 2);
gchar *g_markup_vprintf_escaped (const char *format,
                                 va_list     args);

G_END_DECLS

#endif /* __G_MARKUP_H__ */
| {
"content_hash": "3f6c9e436563bacb15e99b660424d7a8",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 92,
"avg_line_length": 38.794642857142854,
"alnum_prop": 0.4915995397008055,
"repo_name": "relokin/parsec",
"id": "b272d5ca7653eb43395d7d1c92cfd3d829fa6e11",
"size": "5182",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "pkgs/libs/glib/src/glib/gmarkup.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Ada",
"bytes": "89080"
},
{
"name": "Assembly",
"bytes": "2965111"
},
{
"name": "Awk",
"bytes": "142"
},
{
"name": "C",
"bytes": "78031615"
},
{
"name": "C#",
"bytes": "54113"
},
{
"name": "C++",
"bytes": "14801114"
},
{
"name": "CLIPS",
"bytes": "6933"
},
{
"name": "CSS",
"bytes": "20961"
},
{
"name": "Emacs Lisp",
"bytes": "9437"
},
{
"name": "FORTRAN",
"bytes": "6058"
},
{
"name": "Java",
"bytes": "291"
},
{
"name": "JavaScript",
"bytes": "37584"
},
{
"name": "Logos",
"bytes": "108920"
},
{
"name": "Lua",
"bytes": "9"
},
{
"name": "Objective-C",
"bytes": "362901"
},
{
"name": "PHP",
"bytes": "20640"
},
{
"name": "Pascal",
"bytes": "40318"
},
{
"name": "Perl",
"bytes": "2133525"
},
{
"name": "Pike",
"bytes": "1350"
},
{
"name": "Prolog",
"bytes": "3350"
},
{
"name": "Python",
"bytes": "871836"
},
{
"name": "Rebol",
"bytes": "106436"
},
{
"name": "Ruby",
"bytes": "1237"
},
{
"name": "Scheme",
"bytes": "4249"
},
{
"name": "Shell",
"bytes": "3229646"
},
{
"name": "Tcl",
"bytes": "2809"
},
{
"name": "VimL",
"bytes": "7550"
},
{
"name": "XSLT",
"bytes": "167372"
},
{
"name": "eC",
"bytes": "4568"
}
],
"symlink_target": ""
} |
using namespace cocos2d;
// Destructor: releases the CCArray containers created in init().
HelloSimple::~HelloSimple()
{
    if (_targets)
    {
        _targets->release();
        _targets = NULL;
    }
    if (_projectiles)
    {
        _projectiles->release();
        _projectiles = NULL;
    }
    // cpp don't need to call super dealloc
    // virtual destructor will do this
}
// Constructor: member initialisation only; real setup happens in init().
HelloSimple::HelloSimple()
:_targets(NULL)
,_projectiles(NULL)
,_projectilesDestroyed(0)
{
}
// Builds the scene wrapper for this layer. Both objects are autoreleased;
// if the scene cannot be created NULL is returned, and if only the layer
// fails the bare scene is returned without a child (as before).
CCScene* HelloSimple::scene()
{
    CCScene* scene = CCScene::create();
    if (scene == NULL)
    {
        return NULL;
    }

    HelloSimple* layer = HelloSimple::create();
    if (layer != NULL)
    {
        scene->addChild(layer);
    }

    return scene;
}
// on "init" you need to initialize your instance
// Sets up the whole game layer: close button, player sprite, the one-second
// target spawner, touch input, the sprite bookkeeping arrays and the
// per-frame update loop.
bool HelloSimple::init()
{
    bool bRet = false;
    do
    {
        //////////////////////////////////////////////////////////////////////////
        // super init first
        //////////////////////////////////////////////////////////////////////////
        CC_BREAK_IF(! CCLayerColor::initWithColor( ccc4(255,255,255,255) ) );
        //////////////////////////////////////////////////////////////////////////
        // add your codes below...
        //////////////////////////////////////////////////////////////////////////
        // 1. Add a menu item with "X" image, which is clicked to quit the program.
        // Create a "close" menu item with close icon, it's an auto release object.
        CCMenuItemImage *pCloseItem = CCMenuItemImage::create(
            "CloseNormal.png",
            "CloseSelected.png",
            this,
            menu_selector(HelloSimple::menuCloseCallback));
        CC_BREAK_IF(! pCloseItem);
        // Place the menu item bottom-right conner.
        CCSize visibleSize = CCDirector::sharedDirector()->getVisibleSize();
        CCPoint origin = CCDirector::sharedDirector()->getVisibleOrigin();
        pCloseItem->setPosition(ccp(origin.x + visibleSize.width - pCloseItem->getContentSize().width/2,
                                origin.y + pCloseItem->getContentSize().height/2));
        // Create a menu with the "close" menu item, it's an auto release object.
        CCMenu* pMenu = CCMenu::create(pCloseItem, NULL);
        pMenu->setPosition(CCPointZero);
        CC_BREAK_IF(! pMenu);
        // Add the menu to HelloSimple layer as a child layer.
        this->addChild(pMenu, 1);
        /////////////////////////////
        // 2. add your codes below...
        // Player sprite anchored at the vertical middle of the left edge.
        CCSprite *player = CCSprite::create("Player.png", CCRectMake(0, 0, 27, 40) );
        player->setPosition( ccp(origin.x + player->getContentSize().width/2,
            origin.y + visibleSize.height/2) );
        this->addChild(player);
        // Spawn one target per second via gameLogic().
        this->schedule( schedule_selector(HelloSimple::gameLogic), 1.0 );
        this->setTouchEnabled(true);
        // Bookkeeping arrays (retained here, released in the destructor).
        _targets = new CCArray;
        _projectiles = new CCArray;
        // use updateGame instead of update, otherwise it will conflit with SelectorProtocol::update
        // see http://www.cocos2d-x.org/boards/6/topics/1478
        this->schedule( schedule_selector(HelloSimple::updateGame) );
        //  CocosDenshion::SimpleAudioEngine::sharedEngine()->playBackgroundMusic("background-music-aac.wav", true);
        bRet = true;
    } while (0);
    return bRet;
}
// Close-button callback: ends the application (except on WinRT/WP8, which
// do not implement a close button — a message box is shown instead).
void HelloSimple::menuCloseCallback(CCObject* pSender)
{
    // "close" menu item clicked
#if (CC_TARGET_PLATFORM == CC_PLATFORM_WINRT) || (CC_TARGET_PLATFORM == CC_PLATFORM_WP8)
    CCMessageBox("You pressed the close button. Windows Store Apps do not implement a close button.", "Alert");
#else
    CCDirector::sharedDirector()->end();
#endif
}
// cpp with cocos2d-x
// Spawns one enemy sprite just off the right edge at a random height and
// moves it across the screen at a random speed; when the move finishes,
// spriteMoveFinished() handles the "target escaped" case.
void HelloSimple::addTarget()
{
    CCSprite *target = CCSprite::create("Target.png", CCRectMake(0,0,27,40) );
    // Determine where to spawn the target along the Y axis
    CCSize winSize = CCDirector::sharedDirector()->getVisibleSize();
    float minY = target->getContentSize().height/2;
    float maxY = winSize.height - target->getContentSize().height/2;
    int rangeY = (int)(maxY - minY);
    // srand( TimGetTicks() );
    int actualY = ( rand() % rangeY ) + (int)minY;
    // Create the target slightly off-screen along the right edge,
    // and along a random position along the Y axis as calculated
    target->setPosition(
        ccp(winSize.width + (target->getContentSize().width/2),
            CCDirector::sharedDirector()->getVisibleOrigin().y + actualY) );
    this->addChild(target);
    // Determine speed of the target: crossing takes 2-3 seconds.
    int minDuration = (int)2.0;
    int maxDuration = (int)4.0;
    int rangeDuration = maxDuration - minDuration;
    // srand( TimGetTicks() );
    int actualDuration = ( rand() % rangeDuration ) + minDuration;
    // Create the actions: move fully past the left edge, then notify us.
    CCFiniteTimeAction* actionMove = CCMoveTo::create( (float)actualDuration,
        ccp(0 - target->getContentSize().width/2, actualY) );
    CCFiniteTimeAction* actionMoveDone = CCCallFuncN::create( this,
        callfuncN_selector(HelloSimple::spriteMoveFinished));
    target->runAction( CCSequence::create(actionMove, actionMoveDone, NULL) );
    // Add to targets array; tag 1 marks the sprite as a target for
    // spriteMoveFinished().
    target->setTag(1);
    _targets->addObject(target);
}
// Completion callback shared by target and projectile move actions.
// The sprite's tag distinguishes them: 1 = target (set in addTarget),
// 2 = projectile (set in ccTouchesEnded).
void HelloSimple::spriteMoveFinished(CCNode* sender)
{
    CCSprite *sprite = (CCSprite *)sender;
    this->removeChild(sprite, true);
    if (sprite->getTag() == 1)  // target
    {
        _targets->removeObject(sprite);
        // A target crossed the whole screen without being hit: game over.
        GameOverScene *gameOverScene = GameOverScene::create();
        gameOverScene->getLayer()->getLabel()->setString("You Lose :[");
        CCDirector::sharedDirector()->replaceScene(gameOverScene);
    }
    else if (sprite->getTag() == 2) // projectile
    {
        _projectiles->removeObject(sprite);
    }
}
void HelloSimple::gameLogic(float dt)
{
this->addTarget();
}
// cpp with cocos2d-x
// Touch handler: fires a projectile from the left edge toward the touch
// point, extended so it leaves the screen on the right at constant speed.
void HelloSimple::ccTouchesEnded(CCSet* touches, CCEvent* event)
{
    // Choose one of the touches to work with
    CCTouch* touch = (CCTouch*)( touches->anyObject() );
    CCPoint location = touch->getLocation();
    CCLog("++++++++after  x:%f, y:%f", location.x, location.y);
    // Set up initial location of projectile
    CCSize winSize = CCDirector::sharedDirector()->getVisibleSize();
    CCPoint origin = CCDirector::sharedDirector()->getVisibleOrigin();
    CCSprite *projectile = CCSprite::create("Projectile.png", CCRectMake(0, 0, 20, 20));
    projectile->setPosition( ccp(origin.x+20, origin.y+winSize.height/2) );
    // Determinie offset of location to projectile
    float offX = location.x - projectile->getPosition().x;
    float offY = location.y - projectile->getPosition().y;
    // Bail out if we are shooting down or backwards
    if (offX <= 0) return;
    // Ok to add now - we've double checked position
    this->addChild(projectile);
    // Determine where we wish to shoot the projectile to: extend the touch
    // direction until just past the right screen edge.
    float realX = origin.x+winSize.width + (projectile->getContentSize().width/2);
    float ratio = offY / offX;
    float realY = (realX * ratio) + projectile->getPosition().y;
    CCPoint realDest = ccp(realX, realY);
    // Determine the length of how far we're shooting
    float offRealX = realX - projectile->getPosition().x;
    float offRealY = realY - projectile->getPosition().y;
    float length = sqrtf((offRealX * offRealX) + (offRealY*offRealY));
    float velocity = 480/1; // 480pixels/1sec
    float realMoveDuration = length/velocity;
    // Move projectile to actual endpoint; spriteMoveFinished() cleans up.
    projectile->runAction( CCSequence::create(
        CCMoveTo::create(realMoveDuration, realDest),
        CCCallFuncN::create(this,
                            callfuncN_selector(HelloSimple::spriteMoveFinished)),
        NULL) );
    // Add to projectiles array; tag 2 marks the sprite as a projectile.
    projectile->setTag(2);
    _projectiles->addObject(projectile);
    // CocosDenshion::SimpleAudioEngine::sharedEngine()->playEffect("pew-pew-lei.wav");
}
// Per-frame collision pass: tests every projectile against every target
// (O(projectiles x targets) AABB checks). Hits are collected into temporary
// arrays first so that _projectiles/_targets are not mutated while being
// iterated; destroying 5 targets wins the game.
// NOTE(review): _projectilesDestroyed actually counts destroyed *targets*
// despite its name.
void HelloSimple::updateGame(float dt)
{
    CCArray *projectilesToDelete = new CCArray;
    CCObject* it = NULL;
    CCObject* jt = NULL;
    // for (it = _projectiles->begin(); it != _projectiles->end(); it++)
    CCARRAY_FOREACH(_projectiles, it)
    {
        CCSprite *projectile = dynamic_cast<CCSprite*>(it);
        // Axis-aligned bounding box of the projectile, centred on its position.
        CCRect projectileRect = CCRectMake(
            projectile->getPosition().x - (projectile->getContentSize().width/2),
            projectile->getPosition().y - (projectile->getContentSize().height/2),
            projectile->getContentSize().width,
            projectile->getContentSize().height);
        CCArray* targetsToDelete =new CCArray;
        // for (jt = _targets->begin(); jt != _targets->end(); jt++)
        CCARRAY_FOREACH(_targets, jt)
        {
            CCSprite *target = dynamic_cast<CCSprite*>(jt);
            CCRect targetRect = CCRectMake(
                target->getPosition().x - (target->getContentSize().width/2),
                target->getPosition().y - (target->getContentSize().height/2),
                target->getContentSize().width,
                target->getContentSize().height);
            // if (CCRect::CCRectIntersectsRect(projectileRect, targetRect))
            if (projectileRect.intersectsRect(targetRect))
            {
                targetsToDelete->addObject(target);
            }
        }
        // Remove hit targets after the scan; count kills toward the win.
        // for (jt = targetsToDelete->begin(); jt != targetsToDelete->end(); jt++)
        CCARRAY_FOREACH(targetsToDelete, jt)
        {
            CCSprite *target = dynamic_cast<CCSprite*>(jt);
            _targets->removeObject(target);
            this->removeChild(target, true);
            _projectilesDestroyed++;
            if (_projectilesDestroyed >= 5)
            {
                GameOverScene *gameOverScene = GameOverScene::create();
                gameOverScene->getLayer()->getLabel()->setString("You Win!");
                CCDirector::sharedDirector()->replaceScene(gameOverScene);
            }
        }
        // A projectile that hit anything is consumed.
        if (targetsToDelete->count() > 0)
        {
            projectilesToDelete->addObject(projectile);
        }
        targetsToDelete->release();
    }
    // for (it = projectilesToDelete->begin(); it != projectilesToDelete->end(); it++)
    CCARRAY_FOREACH(projectilesToDelete, it)
    {
        CCSprite* projectile = dynamic_cast<CCSprite*>(it);
        _projectiles->removeObject(projectile);
        this->removeChild(projectile, true);
    }
    projectilesToDelete->release();
}
// Registers for standard (multi-)touch events so ccTouchesEnded() fires.
void HelloSimple::registerWithTouchDispatcher()
{
    // CCTouchDispatcher::sharedDispatcher()->addTargetedDelegate(this,0,true);
    CCDirector::sharedDirector()->getTouchDispatcher()->addStandardDelegate(this,0);
}
| {
"content_hash": "199578134f2a8069969a0861022ff1b8",
"timestamp": "",
"source": "github",
"line_count": 316,
"max_line_length": 111,
"avg_line_length": 32.46518987341772,
"alnum_prop": 0.6411930987425675,
"repo_name": "linyehui/cocos2dx-game-as-library",
"id": "54f3dd25be901ea5b48008e30aebe04cbf35ebfd",
"size": "10352",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SimpleGame/Classes/HelloSimpleScene.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3062"
},
{
"name": "C#",
"bytes": "1853"
},
{
"name": "C++",
"bytes": "78212"
},
{
"name": "Java",
"bytes": "3822"
},
{
"name": "Objective-C",
"bytes": "21426"
},
{
"name": "Objective-C++",
"bytes": "35435"
},
{
"name": "Shell",
"bytes": "6534"
}
],
"symlink_target": ""
} |
/* Entry point: unlock the VDP once, then loop forever sending a greeting
 * over the BDP link and parking in monitor mode for debug commands. */
void main()
{
    VDPSECURITY(); /* Unlock the VDP if necessary */

    while (1) {
        BDP_WRITE("Hello C world\n", 14); /* Send data to BDB */
        ENTER_MONITOR(); /* Wait in monitor mode for debug commands */
    }
}
| {
"content_hash": "82cedbc56b5907dc86387f46c11d8bd9",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 69,
"avg_line_length": 25.5,
"alnum_prop": 0.6176470588235294,
"repo_name": "retro16/blastsdk",
"id": "73fbf1fbcfa36eb7410b0a0b83b3a46793cf7505",
"size": "222",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "samples/bdphello_c/main.c",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "164887"
},
{
"name": "C",
"bytes": "28904"
},
{
"name": "C++",
"bytes": "7625"
},
{
"name": "GCC Machine Description",
"bytes": "25"
},
{
"name": "Shell",
"bytes": "5270"
}
],
"symlink_target": ""
} |
using System;
using static ValveResourceFormat.CompiledShader.ShaderUtilHelpers;
namespace ValveResourceFormat.CompiledShader
{
public class ConfigMappingDParams
{
public ConfigMappingDParams(ShaderFile shaderfile)
{
GenerateOffsetAndStateLookups(shaderfile);
}
int[] offsets;
int[] nr_states;
private void GenerateOffsetAndStateLookups(ShaderFile shaderFile)
{
if (shaderFile.DBlocks.Count == 0)
{
offsets = Array.Empty<int>();
nr_states = Array.Empty<int>();
return;
}
offsets = new int[shaderFile.DBlocks.Count];
nr_states = new int[shaderFile.DBlocks.Count];
offsets[0] = 1;
nr_states[0] = shaderFile.DBlocks[0].Arg2 + 1;
for (var i = 1; i < shaderFile.DBlocks.Count; i++)
{
nr_states[i] = shaderFile.DBlocks[i].Arg2 + 1;
offsets[i] = offsets[i - 1] * nr_states[i - 1];
}
}
public int[] GetConfigState(long zframeId)
{
var state = new int[nr_states.Length];
for (var i = 0; i < nr_states.Length; i++)
{
state[i] = (int)(zframeId / offsets[i]) % nr_states[i];
}
return state;
}
public void ShowOffsetAndLayersArrays(bool hex = true)
{
ShowIntArray(offsets, 8, "offsets", hex: hex);
ShowIntArray(nr_states, 8, "layers");
}
}
}
| {
"content_hash": "8bb308fc4b374556683b53dcf7573a9d",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 73,
"avg_line_length": 28.618181818181817,
"alnum_prop": 0.5235069885641678,
"repo_name": "SteamDatabase/ValveResourceFormat",
"id": "fb62a639dc940dd3cfa2eb69f6ffe42f94da2979",
"size": "1574",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ValveResourceFormat/CompiledShader/ConfigMappingDParams.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "1534272"
},
{
"name": "GLSL",
"bytes": "32230"
}
],
"symlink_target": ""
} |
This is a small project that I would love to see benefit others. However, it has been developed in my limited spare time; while I will do my best to help anyone who runs into issues, it is not a formally supported project.
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team on Twitter @kevmcdonk. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/
| {
"content_hash": "ef33e3dc64ec18476a5b1fa3fcade923",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 449,
"avg_line_length": 73.47826086956522,
"alnum_prop": 0.8177514792899409,
"repo_name": "kevmcdonk/Mcd79FindNewsImage",
"id": "3e5f6771c96b0c682be4029bcff2c9878e02b615",
"size": "3420",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CODE_OF_CONDUCT.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "4209"
},
{
"name": "CSS",
"bytes": "3463"
},
{
"name": "JavaScript",
"bytes": "527"
},
{
"name": "TypeScript",
"bytes": "21967"
}
],
"symlink_target": ""
} |
'use strict';
jest.dontMock('../../scripts/components/MenuButton.js');
let React = require('react/addons'),
MenuButton = require('../../scripts/components/MenuButton.js');
let TestUtils = React.addons.TestUtils;
describe("MenuButton", () => {
it("renders a menubutton", () => {
let handlerFunc = () => {};
let button = TestUtils.renderIntoDocument(
<MenuButton name="hello" handler={handlerFunc} />
);
expect(TestUtils.isCompositeComponent(button)).toEqual(true);
expect(button.getDOMNode().textContent).toEqual(" hello");
});
it("renders a menubutton with an icon, if specified", () => {
let handlerFunc = () => {};
let button = TestUtils.renderIntoDocument(
<MenuButton icon='styles-hello' name="hello" handler={handlerFunc} />
);
let icon = TestUtils.findRenderedDOMComponentWithTag(button, 'i');
expect(icon.getDOMNode().className).toContain('styles-hello');
expect(button.getDOMNode().textContent).toEqual(" hello");
});
it("executes the handler when clicked", () => {
let handlerFunc = jest.genMockFunction();
let button = TestUtils.renderIntoDocument(
<MenuButton name="hello" handler={handlerFunc} />
);
TestUtils.Simulate.click(button.getDOMNode());
expect(handlerFunc.mock.calls.length).toBe(1);
});
});
| {
"content_hash": "5b6cb1744c10cc4ddaf32525a466c29c",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 81,
"avg_line_length": 37.1578947368421,
"alnum_prop": 0.6225212464589235,
"repo_name": "zupzup/reactgym",
"id": "d8fdc2471d91655dabd4bbc342e8da4901e1b971",
"size": "1412",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "__tests__/components/MenuButton.spec.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12497"
},
{
"name": "HTML",
"bytes": "555"
},
{
"name": "JavaScript",
"bytes": "182632"
}
],
"symlink_target": ""
} |
package example.dropwizard.armeria.services.http;
import com.linecorp.armeria.common.HttpResponse;
import com.linecorp.armeria.common.HttpStatus;
import com.linecorp.armeria.common.MediaType;
import com.linecorp.armeria.server.annotation.Get;
import com.linecorp.armeria.server.annotation.ProducesJson;
import com.linecorp.armeria.server.annotation.ProducesText;
public class HelloService {

    /**
     * Serves {@code GET /hello} with a plain-text body.
     *
     * <p>Both methods in this class map the same path; the {@code @Produces*}
     * annotations let Armeria pick the variant matching the client's
     * {@code Accept} header.
     */
    @Get("/hello")
    @ProducesText
    public String helloText() {
        // Plain-text payload for the client.
        return "Armeria";
    }

    /**
     * Serves {@code GET /hello} with a JSON body.
     */
    @Get("/hello")
    @ProducesJson
    public HttpResponse helloJson() {
        // JSON payload for the client.
        final String body = "{ \"name\": \"Armeria\" }";
        return HttpResponse.of(HttpStatus.OK, MediaType.JSON_UTF_8, body);
    }
}
| {
"content_hash": "45c049bd5edb2eec1c2b42687cba8c96",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 97,
"avg_line_length": 30.92,
"alnum_prop": 0.7153945666235446,
"repo_name": "kojilin/armeria",
"id": "a68fca345a0c7ee3baf5dc8a8f20384828f905fe",
"size": "773",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "examples/dropwizard/src/main/java/example/dropwizard/armeria/services/http/HelloService.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "7197"
},
{
"name": "HTML",
"bytes": "1222"
},
{
"name": "Java",
"bytes": "17111554"
},
{
"name": "JavaScript",
"bytes": "26583"
},
{
"name": "Kotlin",
"bytes": "95340"
},
{
"name": "Less",
"bytes": "35092"
},
{
"name": "Scala",
"bytes": "230968"
},
{
"name": "Shell",
"bytes": "2062"
},
{
"name": "Thrift",
"bytes": "252456"
},
{
"name": "TypeScript",
"bytes": "255034"
}
],
"symlink_target": ""
} |
\newcommand{\Date}{February 14, 2021}
\newcommand{\Title}{Teaching: What Is the Bible?}
\input{../../includes/01-preamble}
\noindent As Seth mentioned, we're going to be spending a few weeks delving into
the idea of being formed by scripture. This series will go for several weeks and
we'll be hearing from Will and Seth as well in later weeks. This week, I've been
asked to talk about why God has given us the Bible. I'll start by looking at
what the Bible is.
\section{The Bible Is a Story}
I'll just spit it out: the Bible is a story. And that's probably something
you've heard several times before and, if you're anything like me, it didn't
mean a whole lot when you heard it. The team at the Bible Project love to talk
about how the Bible is one unified story that points to Jesus, so if you've
watched any of their videos, you've probably heard this quite a few times
before. But what does it mean to say that the Bible is a story?
\subsection{It's One Unified Story and It's Not About Us}
When we say that the Bible is a story, this means that it's a unified whole. You
can't just grab one bit of it and read it and expect it to make sense. Certainly
there are parts of it that make sense by themselves, but even then it's a
limited sort of sense.
It also means that it has the features of a story: it has a plot with a
beginning, a middle, and an end. Genesis tells us about the beginning,
Revelation tells us about the end, and the rest, the vast majority of the Bible,
tells us about the middle, which is the part of the story that we're living in.
This plot is actually retold again and again in miniature throughout the Bible:
God creates, humans rebel, there are consequences, God acts to restore, humans
rebel again, there are consequences again, and this continues so many times.
In addition to being a unified whole and having a beginning, a middle, and an
end, the Bible also has characters. \textit{But}, here's something we all need
to remember: humans are not the main characters of the Bible, people like
Abraham, Moses, David, and Paul are not the \textit{protagonists}, the ones that
drive the plot forward, people are usually the \textit{antagonists}, the ones
who get in the way of the plot moving forward, the ones who create the problems
that the main character has to find ways to fix. \textit{God} is the main
character. He's the one who exists at the beginning, is actively driving the
plot forward through the middle, and is ultimately victorious at the end.
We see throughout the Bible characters cast as new versions of previous
characters. Ezra might be seen as a new Moses, Nehemiah as a new Joshua, John
the Baptist as a new Elijah, Jesus as a new Adam, a new Moses, a new David, and
so on. One of the classic ways the Bible's story is told is the story of God's
rescue of his people from Egypt and Jesus's work is understood through this
story, he brings about the true and final exodus and he institutes a new
Passover with the once-and-for-all sacrifice for sin. In this way, we can see
that God is the main character, this time through Jesus -- Moses simply points
to Jesus, as do David, and Elijah, the whole Bible points to Jesus.
Another way we can know that the Bible is a story is in how many times the story
is retold within it. A great example is found in Nehemiah 9, which we looked at
a few weeks ago (Nehemiah 9:6--37). I won't read the whole thing, but it's like a
recap at the beginning of a new series of a TV show, or when they start a
two-parter with the classic, \enquote{Last time on\dots}. Nehemiah 9 starts with
creation and tells of God's actions through Abraham to the Exodus all the way to
the Babylonian exile from which they had just returned and it ends with a
description of God as a \enquote{mighty and awesome God, keeping covenant and
steadfast love} (Nehemiah 9:32). Nehemiah sees God as the main character in the
story, acting through all these times in all these different ways, and the
conclusion he draws is about God, not the Israelites.
The Bible is God telling us part of his story so that we can know him and what
he's like.
\subsection{It's One Unified Story and It Speaks to Those Who Listen}
There are a few verses that always seem to come up whenever Christians talk
about the Bible. I will not break from this tradition. Will someone read 2
Timothy 3:16--17 for us?
\begin{quote}
All scripture is inspired by God and is useful for teaching, for reproof,
for correction, and for training in righteousness, so that everyone who
belongs to God may be proficient, equipped for every good work.
\end{quote}
This passage tells us a lot about the Bible, right? It tells us how it can help
us in our everyday walk with Jesus.
I read something this week that made me question an assumption I had in
interpreting this verse. It comes down to how I interpreted Paul's phrase
\enquote{all scripture} -- I thought that meant that each individual verse in
Scripture was useful \textit{by itself} for all these things. But when you
examine that idea, it becomes quite ridiculous.
To start with, chapters and verses are divisions that aren't actually part of
the inspired text (they were added later), so if we're going to take that idea
to the extreme, we'd have to say something like each individual \textit{word}
was useful in this way. Words don't often have a lot of meaning by themselves,
so that can't be right.
So, do we instead take the approach that every \textit{book} is useful
\textit{by itself} for all these things that Paul mentioned? The books at least
are units of text that were inspired by God, right? That feels like it's getting
closer to something that might make sense. But we also know that God didn't give
us just one book of the Bible, he gave us the whole thing, in all its beautiful
diversity. Given this, reading a single book by itself without considering the
wider sweep of scripture doesn't seem right either.
Now, I think it's safe to say, that if all you had access to was, say, the
Gospel of Mark, you would find what you needed for salvation in there -- and it
would be very profitable for you to read that and internalise it -- but that's
not what this verse says, this verse isn't talking about something that
\textit{leads to} salvation, I think it's talking about how the Bible helps us
\textit{after}, once we have given our believing loyalty to king Jesus. In verse
15, just before this, Paul says that Timothy already knew the `sacred writings
that [were] able to instruct [him] for salvation' and yet Paul continued to talk
about what else was offered by \textit{all} scripture. Obviously, for Paul, the
Bible doesn't stop being useful for you once you're saved; on the contrary, it
becomes useful in a whole new way because it helps you live out the new life you
then have.
For example, how do we read Leviticus today as followers of Jesus? If I only
read Leviticus without understanding how Jesus read and interpreted it for his
followers, I might get stuck going down quite an unhelpful path. If I only read
Leviticus by itself without having also read Genesis and Exodus, I wouldn't see
the problem it's solving. And if I didn't continue on to read Numbers, I
wouldn't see that the solution provided by Leviticus actually worked. For the
modern reader, without this additional context, we're likely to come away from
Leviticus without seeing how well it tells us of God's rugged commitment to his
people despite all the difficulties that come from a holy God living with an
unholy people. If we read Leviticus without the rest of the Bible, we'd miss
God's revelation of his character that he wanted us to see. And if we ignore
Jesus's reapplication of parts of this text, we'll completely miss the fresh way
that God wants us to live it out. The same goes if we only read Jesus's words
and ignore the earlier parts of God's story; if we do that, we'll miss some of
the power and unexpectedness of Jesus's message.
Let me give you an example from a completely different context. Last year, I had
the pleasure of watching a TV show with Seth, called \textit{The Mandalorian}.
Now, I like \textit{Star Wars} as much as the next guy unless that next guy is
Seth. Seth understood things going on in that show that I never would have
picked up on because he's watched so many of the movies and other TV shows that
have come out in the \textit{Star Wars} universe multiple times and I've only
seen the movies once each. Every time Seth got excited about something, I paused
the show so he could tell me some awesome little Easter egg I'd missed or some
exciting thing that was likely to happen that I wouldn't have picked up on. It
made the experience so much more enjoyable for me. Watching \textit{The
Mandalorian} with Seth is a bit like reading the OT with Jesus and the other
authors of the NT, you get all this extra detail that you wouldn't have
otherwise known and you get so much more out of the experience. If I'd had
access to George Lucas, the writers of \textit{The Mandalorian} as well as a
bunch of people like Seth, I'd be starting to get close to what we have as
Christians when it comes to the Bible.
When we read the Bible, we not only have OT, the NT, what the NT says about the
OT, and what the church and the believing community throughout history has said,
we also have the very author of the text itself with us, personally speaking to
us about what we need to know to get the most out of it. It's a pretty amazing
deal, I must say. But to get everything God has for us, we need to engage with
all of the Bible, not just bits and pieces.
So the thing I realised as I was reading these verses in 2 Timothy may have been
obvious to everyone else, but the point is, this passage is not encouraging us
to look at individual parts of the Bible and look to them \textit{alone} for
teaching, reproof, correction, and training in righteousness, but to immerse
ourselves fully in the whole story of God, so that we can see who \textit{God}
is, where his heart is at, who \textit{we} are, and how we fit into his plan.
This is what I think Paul means by \textit{all scripture}. He means, read the
whole thing, don't stop just with the bits that help you find salvation because
there's so much more God wants to say than that. We do this, Paul says,
\textit{so that} we, who belong to God, may be \enquote{proficient, equipped for
every good work.}
\subsection{It's One Unified Story Written by Many Authors and a Single
Author}
The version that I quoted above, the NRSV, isn't that helpful for what I'm
about to say, so does anyone have a different version that they'd like to read
out?
In the NT times, \enquote{word of God} and \enquote{scripture} were not
necessarily the same thing. \enquote{Scripture} in 2 Timothy 3 translates the
Greek word \textit{graph\={e}}, which means \enquote{drawing, writing, painting,
or scripture}, but in various other places the \textit{word} part of `word of
God' is \textit{logos} (e.g., Hebrews 4). Sometimes \textit{logos} means the
spoken word, sometimes the written scriptures, sometimes Jesus's teaching, and
at other times, it refers to the divine nature and power of Jesus.
\textit{Graph\={e}}, on the other hand, specifically refers to the written
scriptures.
Now, in this verse, we're told that the \textit{written} scriptures are
\enquote{God-breathed} (\textit{theopneustos}, a word Paul seems to have
invented). Paul is saying that the source of our written scriptures is God's
very own breath, which is another way of saying that the written scriptures are,
in fact, also God's word.
This is a common pattern throughout the New Testament, let me give a couple of
examples:
\begin{itemize}
\item Let's read Mark 12:36. Here we have Jesus quoting Psalm 110, saying
that the Holy Spirit declared the content of that psalm through David.
The Holy Spirit declared it! That's God speaking through the Bible.
\begin{quote}
\enquote{David himself, by the Holy Spirit, declared, / \enquote{The
Lord said to my Lord, / \enquote{Sit at my right hand, / until I put
your enemies under your feet.}}}
\end{quote}
\item In John 10:35, Jesus equates the \enquote{word of God}
(\textit{logos}) with \enquote{scripture} (\textit{graph\={e}}). Jesus
is telling us that \textit{scripture} is the \textit{word of God}.
\begin{quote}
If those to whom the word of God came were called
\enquote{gods}---and the scripture cannot be annulled---
\end{quote}
\end{itemize}
A Bible scholar, Ben Witherington III, says that \enquote{ancients did not think
words, and especially divine words, were mere ciphers or sounds. The ancients
believed words partook of the character and quality of the one who spoke them,
especially when talking about God's words.}
This felt like an important concept to grasp, but also pretty hard for me to wrap
my head around and even harder to explain, but I'll try.
Okay, so you know how sometimes it feels like you get to know your favourite
authors even though you have probably never met them and maybe they're not even
alive? I love and have read a lot of C.S. Lewis's books and I feel like if I
heard a quote for the first time that I didn't know was his I'd have a pretty
good chance of guessing he's the author because I know what his writing sounds
like. If I continued to read more and more of his stuff, I might even get to the
point where I could guess what he might say about a topic that he never wrote
anything about. But I can't get too far because I have never met him in real
life and so I don't know his personality apart from what he's written.
The point I'm trying to make is this: scripture has both \textit{many} authors
and a \textit{single} author. And, while we don't get to meet the many human
authors, we do get to meet in an intensely personal way the one divine author,
the Holy Spirit. And in this way we can know God both through reading what he's
written and meeting him personally. And I think as we spend more time reading
the things God says in all the various ways he says them, we will get to know
his voice better. This is something Jesus promises us that we picked up on over
the last few weeks: he tells us that his sheep hear his voice (John 10:27).
So, as we engage more with God through the Bible, we will learn what God's voice
sounds like more and more and we'll know when we're hearing it and when we're
not. So, for example, when Jesus commands his disciples to wash each others'
feet, do we believe that's a command we should follow literally today? If we
know God personally, we'll know how to answer that question. But we won't get to
this point, we won't learn to hear his voice if we don't spend time listening to
him through the scriptures. This could be by reading it ourselves, or it could
be by reading it together as a community, or it could be by listening to it on a
smartphone or in many other ways, the point is not that you have to sit
personally down and read the text day and night along with 50 commentaries, but
that when you read or hear it, you engage with it to build a listening
relationship with God and not simply to understand the written words.
When we read the scriptures we're not just reading marks on a page, when we hear
them read aloud, we're not simply processing vibrations in the air, the
scriptures we have before us are brought to life in our hearts, minds, and souls
by the Spirit of God.
\section{The Bible Is Alive, and It's Fiercely Powerful}
\begin{quote}
We also constantly give thanks to God for this, that when you received the
word of God that you heard from us, you accepted it not as a human word but
as what it really is, God's word, which is also at work in you believers. (1
Thessalonians 2:13)
\end{quote}
I said before that when we read the scriptures we're not simply reading marks on
a page, this verse in 1 Thessalonians 2 reaffirms that truth and makes it clear
that the Bible works not only on individuals, but also in communities.
We have the words of the living God, these words are powerful -- they are
inspired by the Spirit, they are living and active, they are at work in
believers. Now, I've said the words \enquote{living and active}, and that
probably reminds you of another classic verse on the Bible.
\begin{quote}
Indeed, the word of God is living and active, sharper than any two-edged
sword, piercing until it divides soul from spirit, joints from marrow; it is
able to judge the thoughts and intentions of the heart. And before him no
creature is hidden, but all are naked and laid bare to the eyes of the one
to whom we must render account. (Hebrews 4:12--13)
\end{quote}
This verse is remarkable and it's worth pausing for a moment to consider what
it's saying. It's telling us that the word of God -- whether spoken or written --
actually does something to the hearer or reader. \textit{It does something!}
It's not simply that we \textit{consider} or \textit{pay attention to} its words
and perhaps change our minds, though that certainly happens. It's more than
that, it does something powerful and unexpected to the person who listens in
faith. I think this is something the Spirit does, this is part of the Spirit's
work as the promised guide and counsellor. If we are hearing or reading God's
word in \textit{relationship with God through the Spirit}, we'll find ourselves
changed and transformed, we'll find ourselves pierced, our thoughts and
intentions judged, our whole selves laid completely bare before our God and
judge. It's actually quite a sobering thought.
This is another affirmation of what we've been reading -- God's word, in all its
different aspects -- is alive and working in believers and believing communities
throughout the world. It's been this way since the Torah was first given to the
Israelites thousands of years ago and it will be this way at least until Jesus
returns.
\section{So, Why Has God Given Us the Bible?}
The simple answer is, I think, because God wants us to know him, his heart, what
he's done in the world, what he's done for us, and what he's going to do.
\section{Practice}
When we read the Bible, we should keep this in mind -- the Bible is not a text
about \textit{how we should live}, it's a text about \textit{who God is} and its
primary purpose is to be a vehicle through which our relationship with God grows
and matures. As Christians, we don't so much have a relationship with the Bible,
but a relationship with God \textit{through} the Bible.
Read and study Psalm 23 this week by doing two things:
\begin{enumerate}
\item Look up all the cross references, see how it's been used throughout
the rest of God's story
\item Write it in your own words as a prayer to Jesus, the Good Shepherd
\end{enumerate}
\input{../../includes/02-epilogue}
| {
"content_hash": "d62f37c08d448c32d2f9bb848c354156",
"timestamp": "",
"source": "github",
"line_count": 328,
"max_line_length": 81,
"avg_line_length": 57.7469512195122,
"alnum_prop": 0.7667493796526055,
"repo_name": "lovek323/lovek323.github.io",
"id": "408b720c45d16ca531a11e8b4085e7eb4f4cab3e",
"size": "18943",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "latex/05-spiritual-practices-formed-by-scripture/01-what-is-the-bible/01-what-is-the-bible.tex",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9177"
},
{
"name": "HTML",
"bytes": "11185"
},
{
"name": "JavaScript",
"bytes": "4813"
},
{
"name": "Ruby",
"bytes": "130"
},
{
"name": "TeX",
"bytes": "269406"
}
],
"symlink_target": ""
} |
package org.elasticsearch.xpack.security.action.privilege;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction;
import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesRequest;
import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesResponse;
import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore;
import java.util.Collections;
/**
 * Transport action that stores (creates or updates) one or more application
 * privileges in the security index. (The previous doc said "retrieve", but
 * this action handles {@code PutPrivilegesAction}.)
 */
public class TransportPutPrivilegesAction extends HandledTransportAction<PutPrivilegesRequest, PutPrivilegesResponse> {

    /** Store used to persist application privileges in the security index. */
    private final NativePrivilegeStore privilegeStore;

    @Inject
    public TransportPutPrivilegesAction(ActionFilters actionFilters, NativePrivilegeStore privilegeStore,
                                        TransportService transportService) {
        super(PutPrivilegesAction.NAME, transportService, actionFilters, PutPrivilegesRequest::new);
        this.privilegeStore = privilegeStore;
    }

    @Override
    protected void doExecute(Task task, final PutPrivilegesRequest request, final ActionListener<PutPrivilegesResponse> listener) {
        // Nothing to write: answer with an empty response rather than hitting
        // the privilege store.
        if (request.getPrivileges() == null || request.getPrivileges().size() == 0) {
            listener.onResponse(new PutPrivilegesResponse(Collections.emptyMap()));
        } else {
            // Delegate to the native store; wrap the created-privileges result
            // in the response and propagate failures unchanged.
            this.privilegeStore.putPrivileges(request.getPrivileges(), request.getRefreshPolicy(), ActionListener.wrap(
                    created -> listener.onResponse(new PutPrivilegesResponse(created)),
                    listener::onFailure
            ));
        }
    }
}
| {
"content_hash": "091694dc67631c759c075b91a709356c",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 131,
"avg_line_length": 46.54761904761905,
"alnum_prop": 0.767774936061381,
"repo_name": "robin13/elasticsearch",
"id": "61673ab9ac0f42dc89465c656c345d11afdb2494",
"size": "2207",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/privilege/TransportPutPrivilegesAction.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "11082"
},
{
"name": "Batchfile",
"bytes": "14049"
},
{
"name": "Emacs Lisp",
"bytes": "3341"
},
{
"name": "FreeMarker",
"bytes": "45"
},
{
"name": "Groovy",
"bytes": "315863"
},
{
"name": "HTML",
"bytes": "3399"
},
{
"name": "Java",
"bytes": "40107206"
},
{
"name": "Perl",
"bytes": "7271"
},
{
"name": "Python",
"bytes": "54437"
},
{
"name": "Shell",
"bytes": "108937"
}
],
"symlink_target": ""
} |
import sys
from keystone.common import cms
from oslo_config import cfg
from oslo_log import log
from oslo_serialization import jsonutils
from oslo_utils import importutils
from oslo_utils import timeutils
import six
from keystone.common import controller
from keystone.common import dependency
from keystone.common import wsgi
from keystone import config
from keystone.models import token_model
from keystone import exception
from keystone.i18n import _, _LI, _LW
from keystone.resource import controllers as resource_controllers
LOG = log.getLogger(__name__)
CONF = cfg.CONF
# registry of authentication methods
AUTH_METHODS = {}
AUTH_PLUGINS_LOADED = False
def load_auth_methods():
    """Populate the AUTH_METHODS registry from CONF.auth.methods.

    Idempotent: after the first successful call, subsequent calls return
    immediately.

    :raises ValueError: if a plugin loaded by class name has no ``method``
        attribute, or if two plugins request the same method name.
    """
    global AUTH_PLUGINS_LOADED

    if AUTH_PLUGINS_LOADED:
        # Only try and load methods a single time.
        return
    # config.setup_authentication should be idempotent, call it to ensure we
    # have setup all the appropriate configuration options we may need.
    config.setup_authentication()
    for plugin in CONF.auth.methods:
        if '.' in plugin:
            # NOTE(morganfainberg): if '.' is in the plugin name, it should be
            # imported rather than used as a plugin identifier.
            plugin_class = plugin
            driver = importutils.import_object(plugin)
            if not hasattr(driver, 'method'):
                # Interpolate the class name into the message. Previously the
                # class was passed as a second positional argument to
                # ValueError, so the '%s' placeholder was never filled in.
                raise ValueError(_('Cannot load an auth-plugin by class-name '
                                   'without a "method" attribute defined: %s')
                                 % plugin_class)
            LOG.info(_LI('Loading auth-plugins by class-name is deprecated.'))
            plugin_name = driver.method
        else:
            plugin_name = plugin
            plugin_class = CONF.auth.get(plugin)
            driver = importutils.import_object(plugin_class)
        if plugin_name in AUTH_METHODS:
            raise ValueError(_('Auth plugin %(plugin)s is requesting '
                               'previously registered method %(method)s') %
                             {'plugin': plugin_class, 'method': driver.method})
        AUTH_METHODS[plugin_name] = driver
    AUTH_PLUGINS_LOADED = True
def get_auth_method(method_name):
    """Return the registered auth plugin driver for ``method_name``.

    :raises keystone.exception.AuthMethodNotSupported: if no plugin has
        been registered under that name.
    """
    try:
        return AUTH_METHODS[method_name]
    except KeyError:
        raise exception.AuthMethodNotSupported()
class AuthContext(dict):
    """Retrofitting auth_context to reconcile identity attributes.

    The identity attributes must not have conflicting values among the
    auth plug-ins. The only exception is `expires_at`, which is set to its
    earliest value.
    """

    # identity attributes need to be reconciled among the auth plugins
    IDENTITY_ATTRIBUTES = frozenset(['user_id', 'project_id',
                                     'access_token_id', 'domain_id',
                                     'expires_at'])

    def __setitem__(self, key, val):
        # Non-identity attributes, and first-time writes of identity
        # attributes, behave like a plain dict assignment.
        if key not in self.IDENTITY_ATTRIBUTES or key not in self:
            return super(AuthContext, self).__setitem__(key, val)

        current = self[key]
        if key == 'expires_at':
            # 'expires_at' is special-cased: on conflict, keep the
            # earliest (non-None) expiration of the two.
            if current != val:
                LOG.info(_LI('"expires_at" has conflicting values '
                             '%(existing)s and %(new)s. Will use the '
                             'earliest value.'),
                         {'existing': current, 'new': val})
                if current is None or val is None:
                    val = current or val
                else:
                    val = min(current, val)
        elif current != val:
            # Any other identity attribute must agree across plugins.
            msg = _('Unable to reconcile identity attribute %(attribute)s '
                    'as it has conflicting values %(new)s and %(old)s') % (
                        {'attribute': key,
                         'new': val,
                         'old': current})
            raise exception.Unauthorized(msg)
        return super(AuthContext, self).__setitem__(key, val)
# TODO(blk-u): this class doesn't use identity_api directly, but makes it
# available for consumers. Consumers should probably not be getting
# identity_api from this since it's available in global registry, then
# identity_api should be removed from this list.
@dependency.requires('identity_api', 'resource_api')
class AuthInfo(object):
"""Encapsulation of "auth" request."""
@staticmethod
def create(context, auth=None):
auth_info = AuthInfo(context, auth=auth)
auth_info._validate_and_normalize_auth_data()
return auth_info
def __init__(self, context, auth=None):
self.context = context
self.auth = auth
self._scope_data = (None, None, None, None)
# self._scope_data is (domain_id, project_id, unscoped)
# project scope: (None, project_id, None, None)
# domain scope: (domain_id, None, None, None)
# unscoped: (None, None, None, 'unscoped')
    def _assert_project_is_enabled(self, project_ref):
        """Raise Unauthorized if the given project ref is disabled.

        The enabled check is delegated to resource_api; its AssertionError
        is logged and re-raised as Unauthorized, preserving the original
        traceback.
        """
        # ensure the project is enabled
        try:
            self.resource_api.assert_project_enabled(
                project_id=project_ref['id'],
                project=project_ref)
        except AssertionError as e:
            LOG.warning(six.text_type(e))
            # Re-raise as Unauthorized with the original traceback attached.
            six.reraise(exception.Unauthorized, exception.Unauthorized(e),
                        sys.exc_info()[2])
    def _assert_domain_is_enabled(self, domain_ref):
        """Raise Unauthorized if the given domain ref is disabled.

        Mirrors _assert_project_is_enabled: the AssertionError raised by
        resource_api is logged and re-raised as Unauthorized with the
        original traceback preserved.
        """
        try:
            self.resource_api.assert_domain_enabled(
                domain_id=domain_ref['id'],
                domain=domain_ref)
        except AssertionError as e:
            LOG.warning(six.text_type(e))
            # Re-raise as Unauthorized with the original traceback attached.
            six.reraise(exception.Unauthorized, exception.Unauthorized(e),
                        sys.exc_info()[2])
def _lookup_domain(self, domain_info):
domain_id = domain_info.get('id')
domain_name = domain_info.get('name')
domain_ref = None
if not domain_id and not domain_name:
raise exception.ValidationError(attribute='id or name',
target='domain')
try:
if domain_name:
domain_ref = self.resource_api.get_domain_by_name(
domain_name)
else:
domain_ref = self.resource_api.get_domain(domain_id)
except exception.DomainNotFound as e:
LOG.exception(six.text_type(e))
raise exception.Unauthorized(e)
self._assert_domain_is_enabled(domain_ref)
return domain_ref
    def _lookup_project(self, project_info):
        """Resolve a project reference from an id or (name, domain) payload.

        :param project_info: dict with either 'id', or 'name' plus a
            'domain' sub-dict identifying the owning domain
        :returns: the project reference
        :raises keystone.exception.ValidationError: if neither id nor name
            is given, or name is given without a domain
        :raises keystone.exception.Unauthorized: if the project (or its
            domain) does not exist or is disabled
        """
        project_id = project_info.get('id')
        project_name = project_info.get('name')
        project_ref = None
        if not project_id and not project_name:
            raise exception.ValidationError(attribute='id or name',
                                            target='project')
        try:
            if project_name:
                # Project names are only unique within a domain, so lookup
                # by name requires the owning domain.
                if 'domain' not in project_info:
                    raise exception.ValidationError(attribute='domain',
                                                    target='project')
                domain_ref = self._lookup_domain(project_info['domain'])
                project_ref = self.resource_api.get_project_by_name(
                    project_name, domain_ref['id'])
            else:
                project_ref = self.resource_api.get_project(project_id)
                # NOTE(morganfainberg): The _lookup_domain method will raise
                # exception.Unauthorized if the domain isn't found or is
                # disabled.
                self._lookup_domain({'id': project_ref['domain_id']})
        except exception.ProjectNotFound as e:
            raise exception.Unauthorized(e)
        self._assert_project_is_enabled(project_ref)
        return project_ref
def _validate_and_normalize_scope_data(self):
"""Validate and normalize scope data."""
if 'scope' not in self.auth:
return
if sum(['project' in self.auth['scope'],
'domain' in self.auth['scope'],
'unscoped' in self.auth['scope']]) != 1:
raise exception.ValidationError(
attribute='project, domain or unscoped',
target='scope')
if 'unscoped' in self.auth['scope']:
self._scope_data = (None, None, 'unscoped')
return
if 'project' in self.auth['scope']:
project_ref = self._lookup_project(self.auth['scope']['project'])
self._scope_data = (None, project_ref['id'], None)
elif 'domain' in self.auth['scope']:
domain_ref = self._lookup_domain(self.auth['scope']['domain'])
self._scope_data = (domain_ref['id'], None, None)
def _validate_auth_methods(self):
if 'identity' not in self.auth:
raise exception.ValidationError(attribute='identity',
target='auth')
# make sure auth methods are provided
if 'methods' not in self.auth['identity']:
raise exception.ValidationError(attribute='methods',
target='identity')
# make sure all the method data/payload are provided
for method_name in self.get_method_names():
if method_name not in self.auth['identity']:
raise exception.ValidationError(attribute=method_name,
target='identity')
# make sure auth method is supported
for method_name in self.get_method_names():
if method_name not in AUTH_METHODS:
raise exception.AuthMethodNotSupported()
    def _validate_and_normalize_auth_data(self):
        """Make sure "auth" is valid.

        Checks the request body carried an "auth" block, then delegates to
        the method and scope validators, which raise on malformed input.
        """
        # make sure "auth" exist
        if not self.auth:
            raise exception.ValidationError(attribute='auth',
                                            target='request body')
        self._validate_auth_methods()
        self._validate_and_normalize_scope_data()
def get_method_names(self):
"""Returns the identity method names.
:returns: list of auth method names
"""
# Sanitizes methods received in request's body
# Filters out duplicates, while keeping elements' order.
method_names = []
for method in self.auth['identity']['methods']:
if method not in method_names:
method_names.append(method)
return method_names
def get_method_data(self, method):
"""Get the auth method payload.
:returns: auth method payload
"""
if method not in self.auth['identity']['methods']:
raise exception.ValidationError(attribute=method,
target='identity')
return self.auth['identity'][method]
def get_scope(self):
"""Get scope information.
Verify and return the scoping information.
:returns: (domain_id, project_id, unscoped).
If scope to a project, (None, project_id, None)
will be returned.
If scoped to a domain, (domain_id, None, None)
will be returned.
If unscoped, (None, None, 'unscoped') will be
returned.
"""
return self._scope_data
def set_scope(self, domain_id=None, project_id=None, unscoped=None):
"""Set scope information."""
if domain_id and project_id:
msg = _('Scoping to both domain and project is not allowed')
raise ValueError(msg)
self._scope_data = (domain_id, project_id, unscoped)
@dependency.requires('assignment_api', 'catalog_api', 'identity_api',
                     'resource_api', 'token_provider_api')
class Auth(controller.Controller):
    """V3 authentication controller: issues, validates and revokes tokens."""
    # Note(atiwari): From V3 auth controller code we are
    # calling protection() wrappers, so we need to setup
    # the member_name and collection_name attributes of
    # auth controller code.
    # In the absence of these attributes, default 'entity'
    # string will be used to represent the target which is
    # generic. Policy can be defined using 'entity' but it
    # would not reflect the exact entity that is in context.
    # We are defining collection_name = 'tokens' and
    # member_name = 'token' to facilitate policy decisions.
    collection_name = 'tokens'
    member_name = 'token'
    def __init__(self, *args, **kw):
        super(Auth, self).__init__(*args, **kw)
        # Register the configured auth plugins before handling any request.
        config.setup_authentication()
    def authenticate_for_token(self, context, auth=None):
        """Authenticate user and issue a token.

        :param context: request context (environment, query string, ...)
        :param auth: the "auth" block of the request body
        :returns: rendered token response with the token ID stashed in the
            X-Subject-Token header and a 201 status.
        """
        # A '?nocatalog' query parameter lets clients opt out of the
        # (potentially large) service catalog in the response.
        include_catalog = 'nocatalog' not in context['query_string']
        auth_info = AuthInfo.create(context, auth=auth)
        auth_context = AuthContext(extras={},
                                   method_names=[],
                                   bind={})
        self.authenticate(context, auth_info, auth_context)
        self._check_and_set_default_scoping(auth_info, auth_context)
        (domain_id, project_id, unscoped) = auth_info.get_scope()
        # Combine the methods named in the request with any recorded by
        # the plugins themselves during authentication.
        method_names = auth_info.get_method_names()
        method_names += auth_context.get('method_names', [])
        # make sure the list is unique
        method_names = list(set(method_names))
        expires_at = auth_context.get('expires_at')
        # NOTE(morganfainberg): define this here so it is clear what the
        # argument is during the issue_v3_token provider call.
        metadata_ref = None
        token_audit_id = auth_context.get('audit_id')
        (token_id, token_data) = self.token_provider_api.issue_v3_token(
            auth_context['user_id'], method_names, expires_at, project_id,
            domain_id, auth_context, metadata_ref, include_catalog,
            parent_audit_id=token_audit_id)
        return render_token_data_response(token_id, token_data,
                                          created=True)
    def _check_and_set_default_scoping(self, auth_info, auth_context):
        # Hook for applying a default scope.  As written it leaves both
        # explicitly scoped and explicitly unscoped requests untouched and
        # applies no default — presumably default-project scoping was
        # stripped from this fork; confirm before extending.
        (domain_id, project_id, unscoped) = auth_info.get_scope()
        if domain_id or project_id:
            # scope is specified
            return
        # Do not scope if request is for explicitly unscoped token
        if unscoped is not None:
            return
    def authenticate(self, context, auth_info, auth_context):
        """Authenticate user.

        Runs the 'external' (REMOTE_USER based) method first when the
        environment provides one, then every method named in the request.
        Mutates auth_context in place; raises if identity cannot be
        established.
        """
        # The 'external' method allows any 'REMOTE_USER' based authentication
        # In some cases the server can set REMOTE_USER as '' instead of
        # dropping it, so this must be filtered out
        if context['environment'].get('REMOTE_USER'):
            try:
                external = get_auth_method('external')
                external.authenticate(context, auth_info, auth_context)
            except exception.AuthMethodNotSupported:
                # This will happen there is no 'external' plugin registered
                # and the container is performing authentication.
                # The 'kerberos' and 'saml' methods will be used this way.
                # In those cases, it is correct to not register an
                # 'external' plugin; if there is both an 'external' and a
                # 'kerberos' plugin, it would run the check on identity twice.
                LOG.debug("No 'external' plugin is registered.")
            except exception.Unauthorized:
                # If external fails then continue and attempt to determine
                # user identity using remaining auth methods
                LOG.debug("Authorization failed for 'external' auth method.")
        # need to aggregate the results in case two or more methods
        # are specified
        auth_response = {'methods': []}
        for method_name in auth_info.get_method_names():
            method = get_auth_method(method_name)
            resp = method.authenticate(context,
                                       auth_info.get_method_data(method_name),
                                       auth_context)
            if resp:
                # A non-None plugin response means more steps are needed
                # (multi-step auth); collect it for the client.
                auth_response['methods'].append(method_name)
                auth_response[method_name] = resp
        if auth_response["methods"]:
            # authentication continuation required
            raise exception.AdditionalAuthRequired(auth_response)
        if 'user_id' not in auth_context:
            msg = _('User not found')
            raise exception.Unauthorized(msg)
    def _check_subject_token(self, context, protection, *args, **kwargs):
        # Policy-protection callback: describe the subject token (owner,
        # id, domain, scope) as a policy target so rules can reference it.
        target = {}
        if context.get('subject_token_id') is not None:
            ks_token = token_model.KeystoneToken(
                token_id=context['subject_token_id'],
                token_data=self.token_provider_api.validate_token(
                    context['subject_token_id']))
            target.setdefault('token', {})
            target['token']['user_id'] = ks_token.user_id
            target['token']['id'] = ks_token.token_id
            target['token']['domain_id'] = ks_token.user_domain_id
            if ks_token.project_scoped:
                target['token']['scope'] = 'project'
                target['token']['scope_project_id'] = ks_token.project_id
                target['token']['scope_domain_id'] = (ks_token.
                                                      project_domain_id)
            elif ks_token.domain_scoped:
                target['token']['scope'] = 'domain'
                target['token']['scope_domain_id'] = ks_token.domain_id
            else:
                # Unscoped subject tokens cannot be described as a policy
                # target here.
                raise exception.UnsupportedTokenScope()
        return self.check_protection(context, protection, target)
    @controller.protected(callback=_check_subject_token)
    def check_token(self, context):
        """HEAD-style validity check of the subject token."""
        token_id = context.get('subject_token_id')
        token_data = self.token_provider_api.validate_v3_token(
            token_id)
        # NOTE(morganfainberg): The code in
        # ``keystone.common.wsgi.render_response`` will remove the content
        # body.
        return render_token_data_response(token_id, token_data)
    @controller.protected(callback=_check_subject_token)
    def revoke_token(self, context):
        """Revoke the subject token."""
        token_id = context.get('subject_token_id')
        return self.token_provider_api.revoke_token(token_id)
    @controller.protected(callback=_check_subject_token)
    def validate_token(self, context):
        """Validate the subject token and return its data.

        Honours a '?nocatalog' query parameter by stripping the catalog
        from the returned token body.
        """
        token_id = context.get('subject_token_id')
        include_catalog = 'nocatalog' not in context['query_string']
        token_data = self.token_provider_api.validate_v3_token(
            token_id)
        if not include_catalog and 'catalog' in token_data['token']:
            del token_data['token']['catalog']
        return render_token_data_response(token_id, token_data)
    @controller.protected()
    def revocation_list(self, context, auth=None):
        """Return the CMS-signed list of revoked token IDs.

        Only available when token revocation-by-id is enabled; otherwise
        responds 410 Gone.
        """
        if not CONF.token.revoke_by_id:
            raise exception.Gone()
        tokens = self.token_provider_api.list_revoked_tokens()
        for t in tokens:
            expires = t['expires']
            if not (expires and isinstance(expires, six.text_type)):
                # Normalize non-string expiries (datetimes) to ISO 8601
                # text so the list serializes cleanly to JSON.
                t['expires'] = timeutils.isotime(expires)
        data = {'revoked': tokens}
        json_data = jsonutils.dumps(data)
        signed_text = cms.cms_sign_text(json_data,
                                        CONF.signing.certfile,
                                        CONF.signing.keyfile)
        return {'signed': signed_text}
    def _combine_lists_uniquely(self, a, b):
        """Merge two lists of dicts, deduplicating on each item's 'id'."""
        # it's most likely that only one of these will be filled so avoid
        # the combination if possible.
        if a and b:
            return {x['id']: x for x in a + b}.values()
        else:
            return a or b
    @controller.protected()
    def get_auth_projects(self, context):
        """List the projects the authenticated user may scope to."""
        auth_context = self.get_auth_context(context)
        user_id = auth_context.get('user_id')
        user_refs = []
        if user_id:
            try:
                user_refs = self.assignment_api.list_projects_for_user(user_id)
            except exception.UserNotFound:
                # federated users have an id but they don't link to anything
                pass
        group_ids = auth_context.get('group_ids')
        grp_refs = []
        if group_ids:
            grp_refs = self.assignment_api.list_projects_for_groups(group_ids)
        refs = self._combine_lists_uniquely(user_refs, grp_refs)
        return resource_controllers.Project.wrap_collection(context, refs)
    @controller.protected()
    def get_auth_domains(self, context):
        """List the domains the authenticated user may scope to."""
        auth_context = self.get_auth_context(context)
        user_id = auth_context.get('user_id')
        user_refs = []
        if user_id:
            try:
                user_refs = self.assignment_api.list_domains_for_user(user_id)
            except exception.UserNotFound:
                # federated users have an id but they don't link to anything
                pass
        group_ids = auth_context.get('group_ids')
        grp_refs = []
        if group_ids:
            grp_refs = self.assignment_api.list_domains_for_groups(group_ids)
        refs = self._combine_lists_uniquely(user_refs, grp_refs)
        return resource_controllers.Domain.wrap_collection(context, refs)
    @controller.protected()
    def get_auth_catalog(self, context):
        """Return the service catalog for the current project scope.

        Requires a project-scoped token; responds 403 otherwise.
        """
        auth_context = self.get_auth_context(context)
        user_id = auth_context.get('user_id')
        project_id = auth_context.get('scope_project_id')
        if not project_id:
            raise exception.Forbidden(
                _('A project-scoped token is required to produce a service '
                  'catalog.'))
        # The Controller base methods mostly assume that you're returning
        # either a collection or a single element from a collection, neither of
        # which apply to the catalog. Because this is a special case, this
        # re-implements a tiny bit of work done by the base controller (such as
        # self-referential link building) to avoid overriding or refactoring
        # several private methods.
        return {
            'catalog': self.catalog_api.get_v3_catalog(user_id, project_id),
            'links': {'self': self.base_url(context, path='auth/catalog')}
        }
# FIXME(gyee): not sure if it belongs here or keystone.common. Park it here
# for now.
def render_token_data_response(token_id, token_data, created=False):
    """Build the WSGI response that carries a token.

    The token ID travels in the X-Subject-Token header while the token
    data forms the response body.  A freshly issued token yields a 201,
    an existing one a 200.
    """
    status = (201, 'Created') if created else (200, 'OK')
    return wsgi.render_response(body=token_data,
                                status=status,
                                headers=[('X-Subject-Token', token_id)])
| {
"content_hash": "75fe02aa5ed7414e19300cb64fcdc7ba",
"timestamp": "",
"source": "github",
"line_count": 561,
"max_line_length": 79,
"avg_line_length": 41.03030303030303,
"alnum_prop": 0.5816752107046659,
"repo_name": "darren-wang/ks3",
"id": "47c36ada0ce1c8bb7e861d685e7ed49a532661ff",
"size": "23604",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keystone/auth/controllers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "864167"
},
{
"name": "Shell",
"bytes": "4861"
}
],
"symlink_target": ""
} |
@ECHO OFF
REM Command file for Sphinx documentation
REM Honour an externally supplied SPHINXBUILD; otherwise use the
REM sphinx-build found on PATH.
if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
REM All output lands under _build; doctrees are shared between targets.
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
REM Optional paper size (a4/letter) is forwarded to the LaTeX targets.
if NOT "%PAPER%" == "" (
	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)
REM With no argument, fall through to the help text.
if "%1" == "" goto help
if "%1" == "help" (
	:help
	echo.Please use `make ^<target^>` where ^<target^> is one of
	echo.  html       to make standalone HTML files
	echo.  dirhtml    to make HTML files named index.html in directories
	echo.  singlehtml to make a single large HTML file
	echo.  pickle     to make pickle files
	echo.  json       to make JSON files
	echo.  htmlhelp   to make HTML files and a HTML help project
	echo.  qthelp     to make HTML files and a qthelp project
	echo.  devhelp    to make HTML files and a Devhelp project
	echo.  epub       to make an epub
	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
	echo.  text       to make text files
	echo.  man        to make manual pages
	echo.  texinfo    to make Texinfo files
	echo.  gettext    to make PO message catalogs
	echo.  changes    to make an overview over all changed/added/deprecated items
	echo.  xml        to make Docutils-native XML files
	echo.  pseudoxml  to make pseudoxml-XML files for display purposes
	echo.  linkcheck  to check all external links for integrity
	echo.  doctest    to run all doctests embedded in the documentation if enabled
	goto end
)
REM 'clean' wipes everything under the build directory.
if "%1" == "clean" (
	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
	del /q /s %BUILDDIR%\*
	goto end
)
REM Verify sphinx-build can be launched before attempting any real target;
REM errorlevel 9009 is cmd's "command not found".
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)
REM One block per Sphinx builder; each invokes sphinx-build with the shared
REM options and reports where the output ended up.
if "%1" == "html" (
	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
	goto end
)
if "%1" == "dirhtml" (
	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
	goto end
)
if "%1" == "singlehtml" (
	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
	goto end
)
if "%1" == "pickle" (
	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the pickle files.
	goto end
)
if "%1" == "json" (
	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the JSON files.
	goto end
)
if "%1" == "htmlhelp" (
	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
	goto end
)
if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\django-admin2.qhcp
	echo.To view the help file:
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\django-admin2.ghc
	goto end
)
if "%1" == "devhelp" (
	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished.
	goto end
)
if "%1" == "epub" (
	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The epub file is in %BUILDDIR%/epub.
	goto end
)
REM The latexpdf/latexpdfja targets additionally run the generated LaTeX
REM makefile; they require a working LaTeX toolchain on PATH.
if "%1" == "latex" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
	goto end
)
if "%1" == "latexpdf" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf
	cd %BUILDDIR%/..
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
)
if "%1" == "latexpdfja" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf-ja
	cd %BUILDDIR%/..
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
)
if "%1" == "text" (
	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The text files are in %BUILDDIR%/text.
	goto end
)
if "%1" == "man" (
	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The manual pages are in %BUILDDIR%/man.
	goto end
)
if "%1" == "texinfo" (
	%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
	goto end
)
REM gettext uses the i18n option set (no doctree sharing).
if "%1" == "gettext" (
	%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
	goto end
)
if "%1" == "changes" (
	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
	if errorlevel 1 exit /b 1
	echo.
	echo.The overview file is in %BUILDDIR%/changes.
	goto end
)
if "%1" == "linkcheck" (
	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
	if errorlevel 1 exit /b 1
	echo.
	echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
	goto end
)
if "%1" == "doctest" (
	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
	goto end
)
if "%1" == "xml" (
	%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The XML files are in %BUILDDIR%/xml.
	goto end
)
if "%1" == "pseudoxml" (
	%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
	goto end
)
:end
| {
"content_hash": "22b15b1c7303f56aef93bc4cbe1c9149",
"timestamp": "",
"source": "github",
"line_count": 242,
"max_line_length": 79,
"avg_line_length": 27.74793388429752,
"alnum_prop": 0.675204765450484,
"repo_name": "andrewsmedina/django-admin2",
"id": "148211dd280a7465c93f6a98be3b0b6d0ee25fef",
"size": "6715",
"binary": false,
"copies": "5",
"ref": "refs/heads/develop",
"path": "docs/make.bat",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "19219"
},
{
"name": "HTML",
"bytes": "42539"
},
{
"name": "JavaScript",
"bytes": "2540"
},
{
"name": "Python",
"bytes": "229693"
}
],
"symlink_target": ""
} |
package org.apache.nifi.processors.smb;
import static java.util.Arrays.asList;
import static java.util.Arrays.fill;
import static java.util.stream.Collectors.toSet;
import static org.apache.nifi.processor.util.list.AbstractListProcessor.LISTING_STRATEGY;
import static org.apache.nifi.processor.util.list.AbstractListProcessor.RECORD_WRITER;
import static org.apache.nifi.processor.util.list.AbstractListProcessor.REL_SUCCESS;
import static org.apache.nifi.processors.smb.ListSmb.DIRECTORY;
import static org.apache.nifi.processors.smb.ListSmb.FILE_NAME_SUFFIX_FILTER;
import static org.apache.nifi.processors.smb.ListSmb.MINIMUM_AGE;
import static org.apache.nifi.processors.smb.ListSmb.MINIMUM_SIZE;
import static org.apache.nifi.processors.smb.ListSmb.SMB_CLIENT_PROVIDER_SERVICE;
import static org.apache.nifi.services.smb.SmbjClientProviderService.DOMAIN;
import static org.apache.nifi.services.smb.SmbjClientProviderService.HOSTNAME;
import static org.apache.nifi.services.smb.SmbjClientProviderService.PASSWORD;
import static org.apache.nifi.services.smb.SmbjClientProviderService.PORT;
import static org.apache.nifi.services.smb.SmbjClientProviderService.SHARE;
import static org.apache.nifi.services.smb.SmbjClientProviderService.USERNAME;
import static org.apache.nifi.util.TestRunners.newTestRunner;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.MockRecordWriter;
import org.apache.nifi.services.smb.SmbClientProviderService;
import org.apache.nifi.services.smb.SmbListableEntity;
import org.apache.nifi.services.smb.SmbjClientProviderService;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.output.Slf4jLogConsumer;
import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.images.builder.Transferable;
import org.testcontainers.utility.DockerImageName;
public class ListSmbIT {
private final static Integer DEFAULT_SAMBA_PORT = 445;
private final static Logger logger = LoggerFactory.getLogger(ListSmbTest.class);
private final GenericContainer<?> sambaContainer = new GenericContainer<>(DockerImageName.parse("dperson/samba"))
.withExposedPorts(DEFAULT_SAMBA_PORT, 139)
.waitingFor(Wait.forListeningPort())
.withLogConsumer(new Slf4jLogConsumer(logger))
.withCommand("-w domain -u username;password -s share;/folder;;no;no;username;;; -p");
@BeforeEach
public void beforeEach() {
sambaContainer.start();
}
@AfterEach
public void afterEach() {
sambaContainer.stop();
}
@ParameterizedTest
@ValueSource(ints = {4, 50, 45000})
public void shouldFillSizeAttributeProperly(int size) throws Exception {
writeFile("1.txt", generateContentWithSize(size));
final TestRunner testRunner = newTestRunner(ListSmb.class);
testRunner.setProperty(LISTING_STRATEGY, "none");
testRunner.setProperty(MINIMUM_AGE, "0 ms");
SmbjClientProviderService smbjClientProviderService = configureTestRunnerForSambaDockerContainer(testRunner);
testRunner.enableControllerService(smbjClientProviderService);
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 1);
testRunner.getFlowFilesForRelationship(REL_SUCCESS)
.forEach(flowFile -> assertEquals(size, Integer.valueOf(flowFile.getAttribute("size"))));
testRunner.assertValid();
testRunner.disableControllerService(smbjClientProviderService);
}
@Test
public void shouldShowBulletinOnMissingDirectory() throws Exception {
final TestRunner testRunner = newTestRunner(ListSmb.class);
testRunner.setProperty(LISTING_STRATEGY, "none");
testRunner.setProperty(MINIMUM_AGE, "0 ms");
testRunner.setProperty(DIRECTORY, "folderDoesNotExists");
SmbjClientProviderService smbjClientProviderService = configureTestRunnerForSambaDockerContainer(testRunner);
testRunner.enableControllerService(smbjClientProviderService);
testRunner.run();
assertEquals(1, testRunner.getLogger().getErrorMessages().size());
testRunner.assertValid();
testRunner.disableControllerService(smbjClientProviderService);
}
@Test
public void shouldShowBulletinWhenShareIsInvalid() throws Exception {
final TestRunner testRunner = newTestRunner(ListSmb.class);
SmbjClientProviderService smbjClientProviderService = configureTestRunnerForSambaDockerContainer(testRunner);
testRunner.setProperty(smbjClientProviderService, SHARE, "invalid_share");
testRunner.enableControllerService(smbjClientProviderService);
testRunner.run();
assertEquals(1, testRunner.getLogger().getErrorMessages().size());
testRunner.assertValid();
testRunner.disableControllerService(smbjClientProviderService);
}
@Test
public void shouldShowBulletinWhenSMBPortIsInvalid() throws Exception {
final TestRunner testRunner = newTestRunner(ListSmb.class);
final SmbClientProviderService smbClientProviderService =
configureTestRunnerForSambaDockerContainer(testRunner);
testRunner.setProperty(smbClientProviderService, PORT, "1");
testRunner.enableControllerService(smbClientProviderService);
testRunner.run();
assertEquals(1, testRunner.getLogger().getErrorMessages().size());
testRunner.assertValid();
testRunner.disableControllerService(smbClientProviderService);
}
@Test
public void shouldShowBulletinWhenSMBHostIsInvalid() throws Exception {
final TestRunner testRunner = newTestRunner(ListSmb.class);
final SmbClientProviderService smbClientProviderService =
configureTestRunnerForSambaDockerContainer(testRunner);
testRunner.setProperty(smbClientProviderService, HOSTNAME, "this.host.should.not.exists");
testRunner.enableControllerService(smbClientProviderService);
testRunner.run();
assertEquals(1, testRunner.getLogger().getErrorMessages().size());
testRunner.disableControllerService(smbClientProviderService);
}
@Test
public void shouldUseRecordWriterProperly() throws Exception {
final Set<String> testFiles = new HashSet<>(asList(
"1.txt",
"directory/2.txt",
"directory/subdirectory/3.txt",
"directory/subdirectory2/4.txt",
"directory/subdirectory3/5.txt"
));
testFiles.forEach(file -> writeFile(file, generateContentWithSize(4)));
final TestRunner testRunner = newTestRunner(ListSmb.class);
final MockRecordWriter writer = new MockRecordWriter(null, false);
final SimpleRecordSchema simpleRecordSchema = SmbListableEntity.getRecordSchema();
testRunner.addControllerService("writer", writer);
testRunner.enableControllerService(writer);
testRunner.setProperty(LISTING_STRATEGY, "none");
testRunner.setProperty(RECORD_WRITER, "writer");
testRunner.setProperty(MINIMUM_AGE, "0 ms");
SmbjClientProviderService smbjClientProviderService = configureTestRunnerForSambaDockerContainer(testRunner);
testRunner.enableControllerService(smbjClientProviderService);
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 1);
final String result = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0).getContent();
final int identifierColumnIndex = simpleRecordSchema.getFieldNames().indexOf("identifier");
final Set<String> actual = Arrays.stream(result.split("\n"))
.map(row -> row.split(",")[identifierColumnIndex])
.collect(toSet());
assertEquals(testFiles, actual);
testRunner.assertValid();
testRunner.disableControllerService(smbjClientProviderService);
}
@Test
public void shouldWriteFlowFileAttributesProperly() throws Exception {
final Set<String> testFiles = new HashSet<>(asList(
"file_name", "directory/file_name", "directory/subdirectory/file_name"
));
testFiles.forEach(file -> writeFile(file, generateContentWithSize(4)));
final TestRunner testRunner = newTestRunner(ListSmb.class);
final SmbjClientProviderService smbjClientProviderService =
configureTestRunnerForSambaDockerContainer(testRunner);
testRunner.setProperty(LISTING_STRATEGY, "none");
testRunner.setProperty(MINIMUM_AGE, "0 sec");
testRunner.enableControllerService(smbjClientProviderService);
testRunner.run(1);
testRunner.assertTransferCount(REL_SUCCESS, 3);
final Set<Map<String, String>> allAttributes = testRunner.getFlowFilesForRelationship(REL_SUCCESS)
.stream()
.map(MockFlowFile::getAttributes)
.collect(toSet());
final Set<String> identifiers = allAttributes.stream()
.map(attributes -> attributes.get("identifier"))
.collect(toSet());
assertEquals(testFiles, identifiers);
allAttributes.forEach(attribute -> assertEquals(
Stream.of(attribute.get("path"), attribute.get("filename")).filter(s -> !s.isEmpty()).collect(
Collectors.joining("/")),
attribute.get("absolute.path")));
final Set<String> fileNames = allAttributes.stream()
.map(attributes -> attributes.get("filename"))
.collect(toSet());
assertEquals(new HashSet<>(Arrays.asList("file_name")), fileNames);
testRunner.assertValid();
testRunner.disableControllerService(smbjClientProviderService);
}
@Test
public void shouldFilterFilesBySizeCriteria() throws Exception {
final TestRunner testRunner = newTestRunner(ListSmb.class);
final SmbClientProviderService smbClientProviderService =
configureTestRunnerForSambaDockerContainer(testRunner);
testRunner.enableControllerService(smbClientProviderService);
testRunner.setProperty(MINIMUM_AGE, "0 ms");
testRunner.setProperty(LISTING_STRATEGY, "none");
writeFile("1.txt", generateContentWithSize(1));
writeFile("10.txt", generateContentWithSize(10));
writeFile("100.txt", generateContentWithSize(100));
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 3);
testRunner.clearTransferState();
testRunner.setProperty(MINIMUM_SIZE, "10 B");
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 2);
testRunner.clearTransferState();
testRunner.setProperty(MINIMUM_SIZE, "50 B");
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 1);
testRunner.disableControllerService(smbClientProviderService);
}
@Test
public void shouldFilterByGivenSuffix() throws Exception {
final TestRunner testRunner = newTestRunner(ListSmb.class);
final SmbClientProviderService smbClientProviderService =
configureTestRunnerForSambaDockerContainer(testRunner);
testRunner.enableControllerService(smbClientProviderService);
testRunner.setProperty(MINIMUM_AGE, "0 ms");
testRunner.setProperty(FILE_NAME_SUFFIX_FILTER, ".suffix");
testRunner.setProperty(LISTING_STRATEGY, "none");
writeFile("should_list_this", generateContentWithSize(1));
writeFile("should_skip_this.suffix", generateContentWithSize(1));
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 1);
testRunner.disableControllerService(smbClientProviderService);
}
private SmbjClientProviderService configureTestRunnerForSambaDockerContainer(TestRunner testRunner)
throws Exception {
SmbjClientProviderService smbjClientProviderService = new SmbjClientProviderService();
testRunner.addControllerService("connection-pool", smbjClientProviderService);
testRunner.setProperty(SMB_CLIENT_PROVIDER_SERVICE, "connection-pool");
testRunner.setProperty(smbjClientProviderService, HOSTNAME, sambaContainer.getHost());
testRunner.setProperty(smbjClientProviderService, PORT,
String.valueOf(sambaContainer.getMappedPort(DEFAULT_SAMBA_PORT)));
testRunner.setProperty(smbjClientProviderService, USERNAME, "username");
testRunner.setProperty(smbjClientProviderService, PASSWORD, "password");
testRunner.setProperty(smbjClientProviderService, SHARE, "share");
testRunner.setProperty(smbjClientProviderService, DOMAIN, "domain");
return smbjClientProviderService;
}
private String generateContentWithSize(int sizeInBytes) {
byte[] bytes = new byte[sizeInBytes];
fill(bytes, (byte) 1);
return new String(bytes);
}
private void writeFile(String path, String content) {
String containerPath = "/folder/" + path;
sambaContainer.copyFileToContainer(Transferable.of(content), containerPath);
}
} | {
"content_hash": "ae9bb13fef3e3bd31c773e144c899b9f",
"timestamp": "",
"source": "github",
"line_count": 282,
"max_line_length": 117,
"avg_line_length": 48.741134751773046,
"alnum_prop": 0.7297926518734085,
"repo_name": "jfrazee/nifi",
"id": "8ef5aa7ba3a941230afd3b6cbecf7de2fe78af17",
"size": "14546",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "nifi-nar-bundles/nifi-smb-bundle/nifi-smb-processors/src/test/java/org/apache/nifi/processors/smb/ListSmbIT.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "44090"
},
{
"name": "C++",
"bytes": "652"
},
{
"name": "CSS",
"bytes": "286195"
},
{
"name": "Clojure",
"bytes": "3993"
},
{
"name": "Dockerfile",
"bytes": "24505"
},
{
"name": "GAP",
"bytes": "31169"
},
{
"name": "Groovy",
"bytes": "2205715"
},
{
"name": "HTML",
"bytes": "1120188"
},
{
"name": "Handlebars",
"bytes": "38564"
},
{
"name": "Java",
"bytes": "54569943"
},
{
"name": "JavaScript",
"bytes": "4035982"
},
{
"name": "Lua",
"bytes": "983"
},
{
"name": "Mustache",
"bytes": "2438"
},
{
"name": "Python",
"bytes": "26583"
},
{
"name": "Ruby",
"bytes": "23018"
},
{
"name": "SCSS",
"bytes": "22032"
},
{
"name": "Shell",
"bytes": "166750"
},
{
"name": "TypeScript",
"bytes": "1337"
},
{
"name": "XSLT",
"bytes": "7835"
}
],
"symlink_target": ""
} |
package net.schmizz.sshj.common;
/**
 * Reason codes for the SSH_MSG_DISCONNECT message (RFC 4253, section 11.1).
 * Each constant's ordinal is its on-the-wire numeric value, so declaration
 * order must not change.
 */
public enum DisconnectReason {

    UNKNOWN,
    HOST_NOT_ALLOWED_TO_CONNECT,
    PROTOCOL_ERROR,
    KEY_EXCHANGE_FAILED,
    RESERVED,
    MAC_ERROR,
    COMPRESSION_ERROR,
    SERVICE_NOT_AVAILABLE,
    PROTOCOL_VERSION_NOT_SUPPORTED,
    HOST_KEY_NOT_VERIFIABLE,
    CONNECTION_LOST,
    BY_APPLICATION,
    TOO_MANY_CONNECTIONS,
    AUTH_CANCELLED_BY_USER,
    NO_MORE_AUTH_METHODS_AVAILABLE,
    ILLEGAL_USER_NAME;

    /**
     * Map a wire-format reason code to the corresponding constant.
     *
     * @param code numeric code taken from an SSH_MSG_DISCONNECT packet
     * @return the matching reason, or {@code UNKNOWN} for out-of-range codes
     */
    public static DisconnectReason fromInt(int code) {
        final DisconnectReason[] all = values();
        // Valid ordinals are 0..length-1. The previous check used
        // `code > length`, letting code == length through and causing an
        // ArrayIndexOutOfBoundsException on values()[length].
        if (code < 0 || code >= all.length)
            return UNKNOWN;
        return all[code];
    }

    /** @return the wire-format numeric value of this reason */
    public int toInt() {
        return ordinal();
    }
}
| {
"content_hash": "5cd576f703e4267dca707dfb1fb805f9",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 54,
"avg_line_length": 21.285714285714285,
"alnum_prop": 0.6322147651006711,
"repo_name": "ashwanthkumar/sshj",
"id": "5984d4c75db10a1fdab8ed3a556118b8a083daba",
"size": "1350",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/net/schmizz/sshj/common/DisconnectReason.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
# CI base image for i3: Debian sid plus the full build/lint/test toolchain.
# Built once and reused by the Travis jobs so each run doesn't re-resolve apt.
FROM debian:sid

# Skip fsync() during dpkg installs; safe in a throwaway container and much faster.
RUN echo force-unsafe-io > /etc/dpkg/dpkg.cfg.d/docker-apt-speedup

# Paper over occasional network flakiness of some mirrors.
RUN echo 'APT::Acquire::Retries "5";' > /etc/apt/apt.conf.d/80retry

# NOTE: I tried exclusively using gce_debian_mirror.storage.googleapis.com
# instead of httpredir.debian.org, but the results (Fetched 123 MB in 36s (3357
# kB/s)) are not any better than httpredir.debian.org (Fetched 123 MB in 34s
# (3608 kB/s)). Hence, let’s stick with httpredir.debian.org (default) for now.

# Install mk-build-deps (for installing the i3 build dependencies),
# clang and clang-format-3.8 (for checking formatting and building with clang),
# lintian (for checking spelling errors),
# test suite dependencies (for running tests)
# The apt lists are removed in the same layer to keep the image small.
RUN apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
    dpkg-dev devscripts git equivs \
    clang clang-format-3.8 \
    lintian \
    libmodule-install-perl libanyevent-perl libextutils-pkgconfig-perl xcb-proto cpanminus xvfb xserver-xephyr xauth libinline-perl libinline-c-perl libxml-simple-perl libmouse-perl libmousex-nativetraits-perl libextutils-depends-perl perl libtest-deep-perl libtest-exception-perl libxml-parser-perl libtest-simple-perl libtest-fatal-perl libdata-dump-perl libtest-differences-perl libxml-tokeparser-perl libipc-run-perl libxcb-xtest0-dev libx11-xcb-perl libjson-xs-perl x11-xserver-utils && \
    rm -rf /var/lib/apt/lists/*

# Install i3 build dependencies.
# Only debian/control is copied so this layer's cache is invalidated solely by
# dependency changes, not by every source change.
COPY debian/control /usr/src/i3-debian-packaging/control
RUN apt-get update && \
    DEBIAN_FRONTEND=noninteractive mk-build-deps --install --remove --tool 'apt-get --no-install-recommends -y' /usr/src/i3-debian-packaging/control && \
    rm -rf /var/lib/apt/lists/*
| {
"content_hash": "d4af92a119eee7450201f9584e9e6fd7",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 493,
"avg_line_length": 63.285714285714285,
"alnum_prop": 0.7635440180586908,
"repo_name": "cornerman/i3",
"id": "7eafb9fb12dcf6f3750a5012a2af30e8541c0ea9",
"size": "1794",
"binary": false,
"copies": "4",
"ref": "refs/heads/next",
"path": "travis/travis-base.Dockerfile",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1377177"
},
{
"name": "C++",
"bytes": "27846"
},
{
"name": "Makefile",
"bytes": "22870"
},
{
"name": "Objective-C",
"bytes": "3018"
},
{
"name": "Perl",
"bytes": "777705"
},
{
"name": "Shell",
"bytes": "12418"
},
{
"name": "VimL",
"bytes": "519"
}
],
"symlink_target": ""
} |
package strict
import (
"io/ioutil"
"path/filepath"
"testing"
kubeadmapiv1 "k8s.io/kubernetes/cmd/kubeadm/app/apis/kubeadm/v1beta3"
"k8s.io/kubernetes/cmd/kubeadm/app/constants"
"k8s.io/apimachinery/pkg/runtime/schema"
kubeproxyconfigv1alpha1 "k8s.io/kube-proxy/config/v1alpha1"
kubeletconfigv1beta1 "k8s.io/kubelet/config/v1beta1"
)
// TestVerifyUnmarshalStrict is a table-driven test over YAML fixtures in
// testdata/. For each file it builds the GroupVersionKind under test and
// checks that VerifyUnmarshalStrict rejects duplicate/unknown fields and
// unknown GVKs, while accepting well-formed configs.
func TestVerifyUnmarshalStrict(t *testing.T) {
	const (
		pathTestData = "testdata/"
	)

	var testFiles = []struct {
		fileName      string               // fixture under testdata/
		kind          string               // Kind to decode the fixture as
		groupVersion  schema.GroupVersion  // GroupVersion to decode the fixture as
		expectedError bool                 // whether strict unmarshalling must fail
	}{
		// tests with file errors
		{
			fileName:      "invalid_duplicate_field_clustercfg.yaml",
			kind:          constants.InitConfigurationKind,
			groupVersion:  kubeadmapiv1.SchemeGroupVersion,
			expectedError: true,
		},
		{
			fileName:      "invalid_duplicate_field_joincfg.yaml",
			kind:          constants.JoinConfigurationKind,
			groupVersion:  kubeadmapiv1.SchemeGroupVersion,
			expectedError: true,
		},
		{
			fileName:      "invalid_duplicate_field_kubeletcfg.yaml",
			kind:          "KubeletConfiguration",
			groupVersion:  kubeletconfigv1beta1.SchemeGroupVersion,
			expectedError: true,
		},
		{
			fileName:      "invalid_duplicate_field_kubeproxycfg.yaml",
			kind:          "KubeProxyConfiguration",
			groupVersion:  kubeproxyconfigv1alpha1.SchemeGroupVersion,
			expectedError: true,
		},
		{
			fileName:      "invalid_unknown_field_clustercfg.yaml",
			kind:          constants.ClusterConfigurationKind,
			groupVersion:  kubeadmapiv1.SchemeGroupVersion,
			expectedError: true,
		},
		{
			fileName:      "invalid_unknown_field_initcfg.yaml",
			kind:          constants.InitConfigurationKind,
			groupVersion:  kubeadmapiv1.SchemeGroupVersion,
			expectedError: true,
		},
		{
			fileName:      "invalid_unknown_field_joincfg.yaml",
			kind:          constants.JoinConfigurationKind,
			groupVersion:  kubeadmapiv1.SchemeGroupVersion,
			expectedError: true,
		},
		{
			fileName:      "invalid_unknown_field_kubeletcfg.yaml",
			kind:          "KubeletConfiguration",
			groupVersion:  kubeletconfigv1beta1.SchemeGroupVersion,
			expectedError: true,
		},
		{
			fileName:      "invalid_unknown_field_kubeproxycfg.yaml",
			kind:          "KubeProxyConfiguration",
			groupVersion:  kubeproxyconfigv1alpha1.SchemeGroupVersion,
			expectedError: true,
		},
		// test unknown groupVersion and kind
		{
			fileName:      "valid_clustercfg.yaml",
			kind:          constants.ClusterConfigurationKind,
			groupVersion:  schema.GroupVersion{Group: "someGroup", Version: "v1"},
			expectedError: true,
		},
		{
			fileName:      "valid_clustercfg.yaml",
			kind:          "SomeUnknownKind",
			groupVersion:  kubeadmapiv1.SchemeGroupVersion,
			expectedError: true,
		},
		// valid tests
		{
			fileName:      "valid_clustercfg.yaml",
			kind:          constants.ClusterConfigurationKind,
			groupVersion:  kubeadmapiv1.SchemeGroupVersion,
			expectedError: false,
		},
		{
			fileName:      "valid_initcfg.yaml",
			kind:          constants.InitConfigurationKind,
			groupVersion:  kubeadmapiv1.SchemeGroupVersion,
			expectedError: false,
		},
		{
			fileName:      "valid_joincfg.yaml",
			kind:          constants.JoinConfigurationKind,
			groupVersion:  kubeadmapiv1.SchemeGroupVersion,
			expectedError: false,
		},
		{
			fileName:      "valid_kubeletcfg.yaml",
			kind:          "KubeletConfiguration",
			groupVersion:  kubeletconfigv1beta1.SchemeGroupVersion,
			expectedError: false,
		},
		{
			fileName:      "valid_kubeproxycfg.yaml",
			kind:          "KubeProxyConfiguration",
			groupVersion:  kubeproxyconfigv1alpha1.SchemeGroupVersion,
			expectedError: false,
		},
	}

	for _, test := range testFiles {
		t.Run(test.fileName, func(t *testing.T) {
			bytes, err := ioutil.ReadFile(filepath.Join(pathTestData, test.fileName))
			if err != nil {
				t.Fatalf("couldn't read test data: %v", err)
			}
			gvk := schema.GroupVersionKind{
				Group:   test.groupVersion.Group,
				Version: test.groupVersion.Version,
				Kind:    test.kind,
			}
			// Only the presence/absence of an error is asserted, not its text.
			err = VerifyUnmarshalStrict(bytes, gvk)
			if (err != nil) != test.expectedError {
				t.Errorf("expected error %v, got %v, error: %v", err != nil, test.expectedError, err)
			}
		})
	}
}
| {
"content_hash": "2376ee834667adeb9cc40d5131c5d4d8",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 89,
"avg_line_length": 28.877551020408163,
"alnum_prop": 0.6812720848056537,
"repo_name": "Acidburn0zzz/kubernetes",
"id": "3c5924602178fc1f08791d720d5872e056c3c33c",
"size": "4814",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "cmd/kubeadm/app/util/config/strict/strict_test.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "3902"
},
{
"name": "Dockerfile",
"bytes": "52294"
},
{
"name": "Go",
"bytes": "57932175"
},
{
"name": "HTML",
"bytes": "128"
},
{
"name": "Lua",
"bytes": "17200"
},
{
"name": "Makefile",
"bytes": "63177"
},
{
"name": "PowerShell",
"bytes": "149547"
},
{
"name": "Python",
"bytes": "23795"
},
{
"name": "Ruby",
"bytes": "448"
},
{
"name": "Shell",
"bytes": "1742939"
},
{
"name": "sed",
"bytes": "1262"
}
],
"symlink_target": ""
} |
'use strict';
// future work would be to query TSSstatus API to grab this info
// but then the app would hit the interwebs. Could make configurable
// this would allow us to easily determine latest ios version by device
// https://api.ineal.me/tss/status
module.exports = {
LATEST_IOS_VERSION: '12.4.1'
};
| {
"content_hash": "cece1ca98060f1a8abb4de3fcda5d8c9",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 71,
"avg_line_length": 31,
"alnum_prop": 0.7322580645161291,
"repo_name": "ahoog42/ios-triage",
"id": "057a6117741003b8f52da4f227e78b77af5bf8d0",
"size": "310",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ios-versions.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "15617"
},
{
"name": "JavaScript",
"bytes": "49029"
}
],
"symlink_target": ""
} |
var optional = require('./lib/optional')
, http = require('http')
, https = optional('https')
, tls = optional('tls')
, url = require('url')
, util = require('util')
, stream = require('stream')
, qs = require('qs')
, querystring = require('querystring')
, crypto = require('crypto')
, zlib = require('zlib')
, oauth = optional('oauth-sign')
, hawk = optional('hawk')
, aws = optional('aws-sign2')
, httpSignature = optional('http-signature')
, uuid = require('node-uuid')
, mime = require('mime-types')
, tunnel = optional('tunnel-agent')
, _safeStringify = require('json-stringify-safe')
, stringstream = optional('stringstream')
, ForeverAgent = require('forever-agent')
, FormData = optional('form-data')
, cookies = require('./lib/cookies')
, globalCookieJar = cookies.jar()
, copy = require('./lib/copy')
, debug = require('./lib/debug')
, getSafe = require('./lib/getSafe')
, net = require('net')
;
// Serialize `obj` to JSON. Plain JSON.stringify throws on circular
// structures; in that case fall back to json-stringify-safe, which
// replaces cycles with "[Circular]" placeholders.
function safeStringify (obj) {
  try {
    return JSON.stringify(obj)
  } catch (err) {
    return _safeStringify(obj)
  }
}
// Process-wide agent pool shared by all requests that don't supply their own
// `pool` option; keyed by protocol + TLS options (see getAgent).
var globalPool = {}
// Absolute-URI detector: http(s) URLs or the unix-socket pseudo-scheme.
var isUrl = /^https?:|^unix:/
// Hacky fix for pre-0.4.4 https
// Those node versions shipped an `https` module without an Agent class;
// synthesize one from http.Agent and route connections through tls.connect
// so the rest of this file can treat http and https uniformly.
if (https && !https.Agent) {
  https.Agent = function (options) {
    http.Agent.call(this, options)
  }
  util.inherits(https.Agent, http.Agent)
  https.Agent.prototype._getConnection = function (host, port, cb) {
    var s = tls.connect(port, host, this.options, function () {
      // do other checks here?
      if (cb) cb()
    })
    return s
  }
}
// Duck-type check for an fs.ReadStream: besides being readable it exposes
// `path` and `mode`. Returns a truthy/falsy value rather than a strict
// boolean; callers here only use it in boolean context.
function isReadStream (rs) {
  return rs.readable && rs.path && rs.mode;
}
// Base64-encode `str` (interpreted as ASCII, e.g. for Basic auth headers).
// null/undefined/'' all encode to the empty string.
function toBase64 (str) {
  var raw = new Buffer(str || "", "ascii")
  return raw.toString("base64")
}
function md5 (str) {
return crypto.createHash('md5').update(str).digest('hex')
}
// Duplex-stream constructor for a single HTTP(S) request.
// Accepts either a URI string or an options object; all heavy lifting is
// deferred to init(), which is also re-run on redirects.
function Request (options) {
  stream.Stream.call(this)
  this.readable = true
  this.writable = true

  // Shorthand: request('http://...') === request({uri: 'http://...'})
  if (typeof options === 'string') {
    options = {uri:options}
  }

  // Copy caller options onto the instance, but never shadow prototype
  // methods; function-valued options that would collide are dropped so
  // init() doesn't re-apply them either.
  var reserved = Object.keys(Request.prototype)
  for (var i in options) {
    if (reserved.indexOf(i) === -1) {
      this[i] = options[i]
    } else {
      if (typeof options[i] === 'function') {
        delete options[i]
      }
    }
  }

  // Remember whether the caller picked a method explicitly, so piping a
  // source request in later won't silently override it.
  if (options.method) {
    this.explicitMethod = true
  }

  // Tunneling is possible only when not disabled AND tunnel-agent is installed.
  this.canTunnel = options.tunnel !== false && tunnel;

  this.init(options)
}
util.inherits(Request, stream.Stream)
// Set up all request state from `options`. Called from both the constructor
// and again on every redirect; the actual outgoing request is not started
// until start() is called (from write(), end(), or on nextTick).
Request.prototype.init = function (options) {
  var self = this

  if (!options) options = {}
  if (!self.method) self.method = options.method || 'GET'
  self.localAddress = options.localAddress

  debug(options)
  if (!self.pool && self.pool !== false) self.pool = globalPool
  self.dests = self.dests || []
  self.__isRequestRequest = true

  // Protect against double callback: wrap the user callback once and route
  // both 'error' and 'complete' through the guard.
  if (!self._callback && self.callback) {
    self._callback = self.callback
    self.callback = function () {
      if (self._callbackCalled) return // Print a warning maybe?
      self._callbackCalled = true
      self._callback.apply(self, arguments)
    }
    self.on('error', self.callback.bind())
    self.on('complete', self.callback.bind(self, null))
  }

  if (self.url && !self.uri) {
    // People use this property instead all the time so why not just support it.
    self.uri = self.url
    delete self.url
  }

  if (!self.uri) {
    // this will throw if unhandled but is handleable when in a redirect
    return self.emit('error', new Error("options.uri is a required argument"))
  } else {
    if (typeof self.uri == "string") self.uri = url.parse(self.uri)
  }

  if (self.strictSSL === false) {
    self.rejectUnauthorized = false
  }

  if(!self.hasOwnProperty('proxy')) {
    // check for HTTP(S)_PROXY environment variables
    if(self.uri.protocol == "http:") {
        self.proxy = process.env.HTTP_PROXY || process.env.http_proxy || null;
    } else if(self.uri.protocol == "https:") {
        self.proxy = process.env.HTTPS_PROXY || process.env.https_proxy ||
                     process.env.HTTP_PROXY || process.env.http_proxy || null;
    }
  }

  if (self.proxy) {
    if (typeof self.proxy == 'string') self.proxy = url.parse(self.proxy)

    // do the HTTP CONNECT dance using koichik/node-tunnel
    if (http.globalAgent && self.uri.protocol === "https:" && self.canTunnel) {
      var tunnelFn = self.proxy.protocol === "http:"
                   ? tunnel.httpsOverHttp : tunnel.httpsOverHttps

      // BUG FIX: this used to read
      //   (self.uri.port || self.uri.protocol === 'https:' ? 443 : 80)
      // which parses as ((port || isHttps) ? 443 : 80) — i.e. ANY explicit
      // port was reported as 443 in the CONNECT Host header. Parenthesize the
      // ternary so an explicit port wins and 443/80 are only protocol defaults.
      var tunnelOptions = { proxy: { host: self.proxy.hostname
                                   , port: +self.proxy.port
                                   , proxyAuth: self.proxy.auth
                                   , headers: { Host: self.uri.hostname + ':' +
                                        (self.uri.port || (self.uri.protocol === 'https:' ? 443 : 80)) }}
                          , rejectUnauthorized: self.rejectUnauthorized
                          , ca: this.ca
                          , cert:this.cert
                          , key: this.key}

      self.agent = tunnelFn(tunnelOptions)
      self.tunnel = true
    }
  }

  if (!self.uri.pathname) {self.uri.pathname = '/'}

  // BUG FIX: this guard used to read `!self.protocol=='unix:'`, which parses
  // as `(!self.protocol) == 'unix:'` — always false — so the invalid-URI
  // error below could never fire (and it inspected the wrong property).
  // Unix-socket URIs legitimately lack a host; every other scheme needs one.
  if (!self.uri.host && self.uri.protocol !== 'unix:') {
    // Invalid URI: it may generate lot of bad errors, like "TypeError: Cannot call method 'indexOf' of undefined" in CookieJar
    // Detect and reject it as soon as possible
    var faultyUri = url.format(self.uri)
    var message = 'Invalid URI "' + faultyUri + '"'
    if (Object.keys(options).length === 0) {
      // No option ? This can be the sign of a redirect
      // As this is a case where the user cannot do anything (they didn't call request directly with this URL)
      // they should be warned that it can be caused by a redirection (can save some hair)
      message += '. This can be caused by a crappy redirection.'
    }
    self.emit('error', new Error(message))
    return // This error was fatal
  }

  self._redirectsFollowed = self._redirectsFollowed || 0
  self.maxRedirects = (self.maxRedirects !== undefined) ? self.maxRedirects : 10
  self.followRedirect = (self.followRedirect !== undefined) ? self.followRedirect : true
  self.followAllRedirects = (self.followAllRedirects !== undefined) ? self.followAllRedirects : false
  if (self.followRedirect || self.followAllRedirects)
    self.redirects = self.redirects || []

  self.headers = self.headers ? copy(self.headers) : {}

  // Fill in the Host header unless the caller provided one; remember that we
  // set it (setHost) so it can be stripped again in onResponse.
  self.setHost = false
  if (!self.hasHeader('host')) {
    self.setHeader('host', self.uri.hostname)
    if (self.uri.port) {
      // NOTE(review): url.parse() yields `port` as a string, so these strict
      // comparisons against the numbers 80/443 never match and the default
      // port is always appended when present in the URI — confirm intent
      // before changing, since callers may rely on the current header shape.
      if ( !(self.uri.port === 80 && self.uri.protocol === 'http:') &&
           !(self.uri.port === 443 && self.uri.protocol === 'https:') )
      self.setHeader('host', self.getHeader('host') + (':'+self.uri.port) )
    }
    self.setHost = true
  }

  self.jar(self._jar || options.jar)

  if (!self.uri.port) {
    if (self.uri.protocol == 'http:') {self.uri.port = 80}
    else if (self.uri.protocol == 'https:') {self.uri.port = 443}
  }

  // When proxying without a tunnel, the TCP connection goes to the proxy.
  if (self.proxy && !self.tunnel) {
    self.port = self.proxy.port
    self.host = self.proxy.hostname
  } else {
    self.port = self.uri.port
    self.host = self.uri.hostname
  }

  // On ECONNRESET of a kept-alive socket, retry once on a fresh connection.
  self.clientErrorHandler = function (error) {
    if (self._aborted) return
    if (self.req && self.req._reusedSocket && error.code === 'ECONNRESET'
        && self.agent.addRequestNoreuse) {
      self.agent = { addRequest: self.agent.addRequestNoreuse.bind(self.agent) }
      self.start()
      self.req.end()
      return
    }
    if (self.timeout && self.timeoutTimer) {
      clearTimeout(self.timeoutTimer)
      self.timeoutTimer = null
    }
    self.emit('error', error)
  }

  // Route low-level HTTP parser errors to whichever emitter the caller is
  // actually listening on (response.request, response, or the request itself).
  self._parserErrorHandler = function (error) {
    if (this.res) {
      if (this.res.request) {
        this.res.request.emit('error', error)
      } else {
        this.res.emit('error', error)
      }
    } else {
      this._httpMessage.emit('error', error)
    }
  }

  // Everything below actually assembles the outgoing request (path, auth,
  // body, agent) and schedules it; deferred so unix-socket URIs can resolve
  // their socket path first.
  self._buildRequest = function(){
    var self = this;

    if (options.form) {
      self.form(options.form)
    }

    if (options.qs) self.qs(options.qs)

    if (self.uri.path) {
      self.path = self.uri.path
    } else {
      self.path = self.uri.pathname + (self.uri.search || "")
    }

    if (self.path.length === 0) self.path = '/'

    // Auth must happen last in case signing is dependent on other headers
    if (options.oauth) {
      self.oauth(options.oauth)
    }

    if (options.aws) {
      self.aws(options.aws)
    }

    if (options.hawk) {
      self.hawk(options.hawk)
    }

    if (options.httpSignature) {
      self.httpSignature(options.httpSignature)
    }

    if (options.auth) {
      // Accept username/password as aliases of user/pass.
      if (Object.prototype.hasOwnProperty.call(options.auth, 'username')) options.auth.user = options.auth.username
      if (Object.prototype.hasOwnProperty.call(options.auth, 'password')) options.auth.pass = options.auth.password

      self.auth(
        options.auth.user,
        options.auth.pass,
        options.auth.sendImmediately,
        options.auth.bearer
      )
    }

    if (self.gzip && !self.hasHeader('accept-encoding')) {
      self.setHeader('accept-encoding', 'gzip')
    }

    // Credentials embedded in the URI ("user:pass@host") become Basic auth.
    if (self.uri.auth && !self.hasHeader('authorization')) {
      var authPieces = self.uri.auth.split(':').map(function(item){ return querystring.unescape(item) })
      self.auth(authPieces[0], authPieces.slice(1).join(':'), true)
    }
    if (self.proxy && self.proxy.auth && !self.hasHeader('proxy-authorization') && !self.tunnel) {
      self.setHeader('proxy-authorization', "Basic " + toBase64(self.proxy.auth.split(':').map(function(item){ return querystring.unescape(item)}).join(':')))
    }

    // Non-tunneled proxying requires the absolute URI in the request line.
    if (self.proxy && !self.tunnel) self.path = (self.uri.protocol + '//' + self.uri.host + self.path)

    if (options.json) {
      self.json(options.json)
    } else if (options.multipart) {
      self.boundary = uuid()
      self.multipart(options.multipart)
    }

    // Pre-compute content-length for string/Buffer/array bodies.
    if (self.body) {
      var length = 0
      if (!Buffer.isBuffer(self.body)) {
        if (Array.isArray(self.body)) {
          for (var i = 0; i < self.body.length; i++) {
            length += self.body[i].length
          }
        } else {
          self.body = new Buffer(self.body)
          length = self.body.length
        }
      } else {
        length = self.body.length
      }
      if (length) {
        if (!self.hasHeader('content-length')) self.setHeader('content-length', length)
      } else {
        throw new Error('Argument error, options.body.')
      }
    }

    // Pick the transport module: the proxy's protocol when proxying without a
    // tunnel, the target URI's otherwise; unix sockets ride on plain http.
    var protocol = self.proxy && !self.tunnel ? self.proxy.protocol : self.uri.protocol
      , defaultModules = {'http:':http, 'https:':https, 'unix:':http}
      , httpModules = self.httpModules || {}
      ;
    self.httpModule = httpModules[protocol] || defaultModules[protocol]

    if (!self.httpModule) return this.emit('error', new Error("Invalid protocol: " + protocol))

    if (options.ca) self.ca = options.ca

    if (!self.agent) {
      if (options.agentOptions) self.agentOptions = options.agentOptions

      if (options.agentClass) {
        self.agentClass = options.agentClass
      } else if (options.forever) {
        self.agentClass = protocol === 'http:' ? ForeverAgent : ForeverAgent.SSL
      } else {
        self.agentClass = self.httpModule.Agent
      }
    }

    if (self.pool === false) {
      self.agent = false
    } else {
      self.agent = self.agent || self.getAgent()
      if (self.maxSockets) {
        // Don't use our pooling if node has the refactored client
        self.agent.maxSockets = self.maxSockets
      }
      if (self.pool.maxSockets) {
        // Don't use our pooling if node has the refactored client
        self.agent.maxSockets = self.pool.maxSockets
      }
    }

    // Piping a source stream in supplies body, content-type, headers and
    // (unless explicitly set) the method.
    self.on('pipe', function (src) {
      if (self.ntick && self._started) throw new Error("You cannot pipe to this stream after the outbound request has started.")
      self.src = src
      if (isReadStream(src)) {
        if (!self.hasHeader('content-type')) self.setHeader('content-type', mime.lookup(src.path))
      } else {
        if (src.headers) {
          for (var i in src.headers) {
            if (!self.hasHeader(i)) {
              self.setHeader(i, src.headers[i])
            }
          }
        }
        if (self._json && !self.hasHeader('content-type'))
          self.setHeader('content-type', 'application/json')
        if (src.method && !self.explicitMethod) {
          self.method = src.method
        }
      }

      // self.on('pipe', function () {
      //   console.error("You have already piped to this stream. Pipeing twice is likely to break the request.")
      // })
    })

    // Defer body flushing one tick so callers get a chance to pipe into us.
    process.nextTick(function () {
      if (self._aborted) return

      var end = function () {
        if (self._form) {
          self._form.pipe(self)
        }
        if (self.body) {
          if (Array.isArray(self.body)) {
            self.body.forEach(function (part) {
              self.write(part)
            })
          } else {
            self.write(self.body)
          }
          self.end()
        } else if (self.requestBodyStream) {
          console.warn("options.requestBodyStream is deprecated, please pass the request object to stream.pipe.")
          self.requestBodyStream.pipe(self)
        } else if (!self.src) {
          if (self.method !== 'GET' && typeof self.method !== 'undefined') {
            self.setHeader('content-length', 0)
          }
          self.end()
        }
      }

      if (self._form && !self.hasHeader('content-length')) {
        // Before ending the request, we had to compute the length of the whole form, asyncly
        self.setHeaders(self._form.getHeaders())
        self._form.getLength(function (err, length) {
          if (!err) {
            self.setHeader('content-length', length)
          }
          end()
        })
      } else {
        end()
      }

      self.ntick = true
    })

  } // End _buildRequest

  self._handleUnixSocketURI = function(self){
    // Parse URI and extract a socket path (tested as a valid socket using net.connect), and a http style path suffix
    // Thus http requests can be made to a socket using the uri unix://tmp/my.socket/urlpath
    // and a request for '/urlpath' will be sent to the unix socket at /tmp/my.socket
    // NOTE(review): `r` and `response_counter` below are implicit globals —
    // preserved as-is here, but worth scoping with `var` in a follow-up.

    self.unixsocket = true;

    var full_path = self.uri.href.replace(self.uri.protocol+'/', '');

    // Try every path prefix of the URI as a candidate socket, longest first.
    var lookup = full_path.split('/');
    var error_connecting = true;

    var lookup_table = {};
    do { lookup_table[lookup.join('/')]={} } while(lookup.pop())
    for (r in lookup_table){
      try_next(r);
    }

    function try_next(table_row){
      var client = net.connect( table_row );
      client.path = table_row
      client.on('error', function(){ lookup_table[this.path].error_connecting=true; this.end(); });
      client.on('connect', function(){ lookup_table[this.path].error_connecting=false; this.end(); });
      table_row.client = client;
    }

    wait_for_socket_response();

    response_counter = 0;

    function wait_for_socket_response(){
      var detach;
      if('undefined' == typeof setImmediate ) detach = process.nextTick
      else detach = setImmediate;
      detach(function(){
        // counter to prevent infinite blocking waiting for an open socket to be found.
        response_counter++;
        var trying = false;
        for (r in lookup_table){
          //console.log(r, lookup_table[r], lookup_table[r].error_connecting)
          if('undefined' == typeof lookup_table[r].error_connecting)
            trying = true;
        }
        if(trying && response_counter<1000)
          wait_for_socket_response()
        else
          set_socket_properties();
      })
    }

    function set_socket_properties(){
      var host;
      for (r in lookup_table){
        if(lookup_table[r].error_connecting === false){
          host = r
        }
      }
      if(!host){
        self.emit('error', new Error("Failed to connect to any socket in "+full_path))
      }
      var path = full_path.replace(host, '')

      self.socketPath = host
      self.uri.pathname = path
      self.uri.href = path
      self.uri.path = path
      self.host = ''
      self.hostname = ''
      delete self.host
      delete self.hostname

      self._buildRequest();
    }
  }

  // Intercept UNIX protocol requests to change properties to match socket
  if(/^unix:/.test(self.uri.protocol)){
    self._handleUnixSocketURI(self);
  } else {
    self._buildRequest();
  }
}
// Must call this when following a redirect from https to http or vice versa
// Attempts to keep everything as identical as possible, but update the
// httpModule, Tunneling agent, and/or Forever Agent in use.
Request.prototype._updateProtocol = function () {
  var self = this
  var protocol = self.uri.protocol

  if (protocol === 'https:') {
    // previously was doing http, now doing https
    // if it's https, then we might need to tunnel now.
    if (self.proxy && self.canTunnel) {
      self.tunnel = true
      var tunnelFn = self.proxy.protocol === 'http:'
                   ? tunnel.httpsOverHttp : tunnel.httpsOverHttps
      var tunnelOptions = { proxy: { host: self.proxy.hostname
                                   , port: +self.proxy.port
                                   , proxyAuth: self.proxy.auth }
                          , rejectUnauthorized: self.rejectUnauthorized
                          , ca: self.ca }
      self.agent = tunnelFn(tunnelOptions)
      return
    }

    self.httpModule = https
    // Swap the agent class to its https counterpart; a custom agent class we
    // don't recognize is left untouched (nothing safe we can do with it).
    switch (self.agentClass) {
      case ForeverAgent:
        self.agentClass = ForeverAgent.SSL
        break
      case http.Agent:
        self.agentClass = https.Agent
        break
      default:
        // nothing we can do. Just hope for the best.
        return
    }

    // if there's an agent, we need to get a new one.
    if (self.agent) self.agent = self.getAgent()
  } else {
    // previously was doing https, now doing http
    // stop any tunneling.
    if (self.tunnel) self.tunnel = false
    self.httpModule = http
    // Mirror of the https branch: downgrade the agent class when recognized.
    switch (self.agentClass) {
      case ForeverAgent.SSL:
        self.agentClass = ForeverAgent
        break
      case https.Agent:
        self.agentClass = http.Agent
        break
      default:
        // nothing we can do. just hope for the best
        return
    }

    // if there's an agent, then get a new one.
    if (self.agent) {
      self.agent = null
      self.agent = self.getAgent()
    }
  }
}
// Returns the connection agent for this request, pooling agents by a string
// key built from agent class, host/port (old node), and every TLS-relevant
// option — so requests with differing TLS settings never share sockets.
// Falls back to the module's global agent when nothing special is configured.
Request.prototype.getAgent = function () {
  var Agent = this.agentClass
  var options = {}
  if (this.agentOptions) {
    for (var i in this.agentOptions) {
      options[i] = this.agentOptions[i]
    }
  }
  // Collect TLS options that must segregate pooled connections.
  if (this.ca) options.ca = this.ca
  if (this.ciphers) options.ciphers = this.ciphers
  if (this.secureProtocol) options.secureProtocol = this.secureProtocol
  if (this.secureOptions) options.secureOptions = this.secureOptions
  if (typeof this.rejectUnauthorized !== 'undefined') options.rejectUnauthorized = this.rejectUnauthorized

  if (this.cert && this.key) {
    options.key = this.key
    options.cert = this.cert
  }

  var poolKey = ''

  // different types of agents are in different pools
  if (Agent !== this.httpModule.Agent) {
    poolKey += Agent.name
  }

  if (!this.httpModule.globalAgent) {
    // node 0.4.x
    options.host = this.host
    options.port = this.port
    if (poolKey) poolKey += ':'
    poolKey += this.host + ':' + this.port
  }

  // ca option is only relevant if proxy or destination are https
  var proxy = this.proxy
  if (typeof proxy === 'string') proxy = url.parse(proxy)
  var isHttps = (proxy && proxy.protocol === 'https:') || this.uri.protocol === 'https:'
  if (isHttps) {
    // Fold each TLS option into the key so differing TLS configs get
    // distinct agents (and therefore never reuse each other's sockets).
    if (options.ca) {
      if (poolKey) poolKey += ':'
      poolKey += options.ca
    }

    if (typeof options.rejectUnauthorized !== 'undefined') {
      if (poolKey) poolKey += ':'
      poolKey += options.rejectUnauthorized
    }

    if (options.cert)
      poolKey += options.cert.toString('ascii') + options.key.toString('ascii')

    if (options.ciphers) {
      if (poolKey) poolKey += ':'
      poolKey += options.ciphers
    }

    if (options.secureProtocol) {
      if (poolKey) poolKey += ':'
      poolKey += options.secureProtocol
    }

    if (options.secureOptions) {
      if (poolKey) poolKey += ':'
      poolKey += options.secureOptions
    }
  }

  if (this.pool === globalPool && !poolKey && Object.keys(options).length === 0 && this.httpModule.globalAgent) {
    // not doing anything special. Use the globalAgent
    return this.httpModule.globalAgent
  }

  // we're using a stored agent. Make sure it's protocol-specific
  poolKey = this.uri.protocol + poolKey

  // already generated an agent for this setting
  if (this.pool[poolKey]) return this.pool[poolKey]

  return this.pool[poolKey] = new Agent(options)
}
// start() is called once we are ready to send the outgoing HTTP request.
// this is usually called on the first write(), end() or on nextTick()
// Issues the underlying http(s).request, wires up error/drain forwarding,
// and arms both the overall timeout and a per-socket inactivity timeout.
Request.prototype.start = function () {
  var self = this

  if (self._aborted) return

  self._started = true
  self.method = self.method || 'GET'
  self.href = self.uri.href

  // When piping a file in, its size gives us content-length for free.
  if (self.src && self.src.stat && self.src.stat.size && !self.hasHeader('content-length')) {
    self.setHeader('content-length', self.src.stat.size)
  }
  if (self._aws) {
    self.aws(self._aws, true)
  }

  // We have a method named auth, which is completely different from the http.request
  // auth option. If we don't remove it, we're gonna have a bad time.
  var reqOptions = copy(self)
  delete reqOptions.auth

  debug('make request', self.uri.href)
  self.req = self.httpModule.request(reqOptions, self.onResponse.bind(self))

  if (self.timeout && !self.timeoutTimer) {
    // Overall timeout: abort if no response arrives within self.timeout ms.
    self.timeoutTimer = setTimeout(function () {
      self.req.abort()
      var e = new Error("ETIMEDOUT")
      e.code = "ETIMEDOUT"
      self.emit("error", e)
    }, self.timeout)

    // Set additional timeout on socket - in case if remote
    // server freeze after sending headers
    if (self.req.setTimeout) { // only works on node 0.6+
      self.req.setTimeout(self.timeout, function () {
        if (self.req) {
          self.req.abort()
          var e = new Error("ESOCKETTIMEDOUT")
          e.code = "ESOCKETTIMEDOUT"
          self.emit("error", e)
        }
      })
    }
  }

  self.req.on('error', self.clientErrorHandler)
  self.req.on('drain', function() {
    self.emit('drain')
  })
  self.on('end', function() {
    if ( self.req.connection ) self.req.connection.removeListener('error', self._parserErrorHandler)
  })
  self.emit('request', self.req)
}
Request.prototype.onResponse = function (response) {
var self = this
debug('onResponse', self.uri.href, response.statusCode, response.headers)
response.on('end', function() {
debug('response end', self.uri.href, response.statusCode, response.headers)
});
// The check on response.connection is a workaround for browserify.
if (response.connection && response.connection.listeners('error').indexOf(self._parserErrorHandler) === -1) {
response.connection.setMaxListeners(0)
response.connection.once('error', self._parserErrorHandler)
}
if (self._aborted) {
debug('aborted', self.uri.href)
response.resume()
return
}
if (self._paused) response.pause()
// Check that response.resume is defined. Workaround for browserify.
else response.resume && response.resume()
self.response = response
response.request = self
response.toJSON = toJSON
// XXX This is different on 0.10, because SSL is strict by default
if (self.httpModule === https &&
self.strictSSL &&
!response.client.authorized) {
debug('strict ssl error', self.uri.href)
var sslErr = response.client.authorizationError
self.emit('error', new Error('SSL Error: '+ sslErr))
return
}
if (self.setHost && self.hasHeader('host')) delete self.headers[self.hasHeader('host')]
if (self.timeout && self.timeoutTimer) {
clearTimeout(self.timeoutTimer)
self.timeoutTimer = null
}
var targetCookieJar = (self._jar && self._jar.setCookie)?self._jar:globalCookieJar;
var addCookie = function (cookie) {
//set the cookie if it's domain in the href's domain.
try {
targetCookieJar.setCookie(cookie, self.uri.href, {ignoreError: true});
} catch (e) {
self.emit('error', e);
}
}
if (hasHeader('set-cookie', response.headers) && (!self._disableCookies)) {
var headerName = hasHeader('set-cookie', response.headers)
if (Array.isArray(response.headers[headerName])) response.headers[headerName].forEach(addCookie)
else addCookie(response.headers[headerName])
}
var redirectTo = null
if (response.statusCode >= 300 && response.statusCode < 400 && hasHeader('location', response.headers)) {
var location = response.headers[hasHeader('location', response.headers)]
debug('redirect', location)
if (self.followAllRedirects) {
redirectTo = location
} else if (self.followRedirect) {
switch (self.method) {
case 'PATCH':
case 'PUT':
case 'POST':
case 'DELETE':
// Do not follow redirects
break
default:
redirectTo = location
break
}
}
} else if (response.statusCode == 401 && self._hasAuth && !self._sentAuth) {
var authHeader = response.headers[hasHeader('www-authenticate', response.headers)]
var authVerb = authHeader && authHeader.split(' ')[0].toLowerCase()
debug('reauth', authVerb)
switch (authVerb) {
case 'basic':
self.auth(self._user, self._pass, true)
redirectTo = self.uri
break
case 'bearer':
self.auth(null, null, true, self._bearer)
redirectTo = self.uri
break
case 'digest':
// TODO: More complete implementation of RFC 2617.
// - check challenge.algorithm
// - support algorithm="MD5-sess"
// - handle challenge.domain
// - support qop="auth-int" only
// - handle Authentication-Info (not necessarily?)
// - check challenge.stale (not necessarily?)
// - increase nc (not necessarily?)
// For reference:
// http://tools.ietf.org/html/rfc2617#section-3
// https://github.com/bagder/curl/blob/master/lib/http_digest.c
var challenge = {}
var re = /([a-z0-9_-]+)=(?:"([^"]+)"|([a-z0-9_-]+))/gi
for (;;) {
var match = re.exec(authHeader)
if (!match) break
challenge[match[1]] = match[2] || match[3];
}
var ha1 = md5(self._user + ':' + challenge.realm + ':' + self._pass)
var ha2 = md5(self.method + ':' + self.uri.path)
var qop = /(^|,)\s*auth\s*($|,)/.test(challenge.qop) && 'auth'
var nc = qop && '00000001'
var cnonce = qop && uuid().replace(/-/g, '')
var digestResponse = qop ? md5(ha1 + ':' + challenge.nonce + ':' + nc + ':' + cnonce + ':' + qop + ':' + ha2) : md5(ha1 + ':' + challenge.nonce + ':' + ha2)
var authValues = {
username: self._user,
realm: challenge.realm,
nonce: challenge.nonce,
uri: self.uri.path,
qop: qop,
response: digestResponse,
nc: nc,
cnonce: cnonce,
algorithm: challenge.algorithm,
opaque: challenge.opaque
}
authHeader = []
for (var k in authValues) {
if (!authValues[k]) {
//ignore
} else if (k === 'qop' || k === 'nc' || k === 'algorithm') {
authHeader.push(k + '=' + authValues[k])
} else {
authHeader.push(k + '="' + authValues[k] + '"')
}
}
authHeader = 'Digest ' + authHeader.join(', ')
self.setHeader('authorization', authHeader)
self._sentAuth = true
redirectTo = self.uri
break
}
}
if (redirectTo) {
debug('redirect to', redirectTo)
// ignore any potential response body. it cannot possibly be useful
// to us at this point.
if (self._paused) response.resume()
if (self._redirectsFollowed >= self.maxRedirects) {
self.emit('error', new Error("Exceeded maxRedirects. Probably stuck in a redirect loop "+self.uri.href))
return
}
self._redirectsFollowed += 1
if (!isUrl.test(redirectTo)) {
redirectTo = url.resolve(self.uri.href, redirectTo)
}
var uriPrev = self.uri
self.uri = url.parse(redirectTo)
// handle the case where we change protocol from https to http or vice versa
if (self.uri.protocol !== uriPrev.protocol) {
self._updateProtocol()
}
self.redirects.push(
{ statusCode : response.statusCode
, redirectUri: redirectTo
}
)
if (self.followAllRedirects && response.statusCode != 401 && response.statusCode != 307) self.method = 'GET'
// self.method = 'GET' // Force all redirects to use GET || commented out fixes #215
delete self.src
delete self.req
delete self.agent
delete self._started
if (response.statusCode != 401 && response.statusCode != 307) {
// Remove parameters from the previous response, unless this is the second request
// for a server that requires digest authentication.
delete self.body
delete self._form
if (self.headers) {
if (self.hasHeader('host')) delete self.headers[self.hasHeader('host')]
if (self.hasHeader('content-type')) delete self.headers[self.hasHeader('content-type')]
if (self.hasHeader('content-length')) delete self.headers[self.hasHeader('content-length')]
}
}
self.emit('redirect');
self.init()
return // Ignore the rest of the response
} else {
self._redirectsFollowed = self._redirectsFollowed || 0
// Be a good stream and emit end when the response is finished.
// Hack to emit end on close because of a core bug that never fires end
response.on('close', function () {
if (!self._ended) self.response.emit('end')
})
var dataStream
if (self.gzip) {
var contentEncoding = response.headers["content-encoding"] || "identity"
contentEncoding = contentEncoding.trim().toLowerCase()
if (contentEncoding === "gzip") {
dataStream = zlib.createGunzip()
response.pipe(dataStream)
} else {
// Since previous versions didn't check for Content-Encoding header,
// ignore any invalid values to preserve backwards-compatibility
if (contentEncoding !== "identity") {
debug("ignoring unrecognized Content-Encoding " + contentEncoding)
}
dataStream = response
}
} else {
dataStream = response
}
if (self.encoding) {
if (self.dests.length !== 0) {
console.error("Ignoring encoding parameter as this stream is being piped to another stream which makes the encoding option invalid.")
} else if (dataStream.setEncoding) {
dataStream.setEncoding(self.encoding)
} else {
// Should only occur on node pre-v0.9.4 (joyent/node@9b5abe5) with
// zlib streams.
// If/When support for 0.9.4 is dropped, this should be unnecessary.
dataStream = dataStream.pipe(stringstream(self.encoding))
}
}
self.emit('response', response)
self.dests.forEach(function (dest) {
self.pipeDest(dest)
})
dataStream.on("data", function (chunk) {
self._destdata = true
self.emit("data", chunk)
})
dataStream.on("end", function (chunk) {
self._ended = true
self.emit("end", chunk)
})
dataStream.on("close", function () {self.emit("close")})
if (self.callback) {
var buffer = []
var bodyLen = 0
self.on("data", function (chunk) {
buffer.push(chunk)
bodyLen += chunk.length
})
self.on("end", function () {
debug('end event', self.uri.href)
if (self._aborted) {
debug('aborted', self.uri.href)
return
}
if (buffer.length && Buffer.isBuffer(buffer[0])) {
debug('has body', self.uri.href, bodyLen)
var body = new Buffer(bodyLen)
var i = 0
buffer.forEach(function (chunk) {
chunk.copy(body, i, 0, chunk.length)
i += chunk.length
})
if (self.encoding === null) {
response.body = body
} else {
response.body = body.toString(self.encoding)
}
} else if (buffer.length) {
// The UTF8 BOM [0xEF,0xBB,0xBF] is converted to [0xFE,0xFF] in the JS UTC16/UCS2 representation.
// Strip this value out when the encoding is set to 'utf8', as upstream consumers won't expect it and it breaks JSON.parse().
if (self.encoding === 'utf8' && buffer[0].length > 0 && buffer[0][0] === "\uFEFF") {
buffer[0] = buffer[0].substring(1)
}
response.body = buffer.join('')
}
if (self._json) {
try {
response.body = JSON.parse(response.body)
} catch (e) {}
}
debug('emitting complete', self.uri.href)
if(response.body == undefined && !self._json) {
response.body = "";
}
self.emit('complete', response, response.body)
})
}
//if no callback
else{
self.on("end", function () {
if (self._aborted) {
debug('aborted', self.uri.href)
return
}
self.emit('complete', response);
});
}
}
debug('finish init function', self.uri.href)
}
Request.prototype.abort = function () {
  // Set the flag first so in-flight response/data handlers can bail out.
  this._aborted = true

  if (this.req) {
    this.req.abort()
  } else if (this.response) {
    this.response.abort()
  }

  this.emit("abort")
}
Request.prototype.pipeDest = function (dest) {
  // Invoked after the response has been received: mirror the response's
  // metadata (selected headers, status code) onto the destination stream.
  var resp = this.response

  if (dest.headers && !dest.headersSent) {
    // Forward content-type / content-length under their original casing.
    var typeKey = hasHeader('content-type', resp.headers)
    if (typeKey) {
      if (dest.setHeader) dest.setHeader(typeKey, resp.headers[typeKey])
      else dest.headers[typeKey] = resp.headers[typeKey]
    }
    var lengthKey = hasHeader('content-length', resp.headers)
    if (lengthKey) {
      if (dest.setHeader) dest.setHeader(lengthKey, resp.headers[lengthKey])
      else dest.headers[lengthKey] = resp.headers[lengthKey]
    }
  }

  if (dest.setHeader && !dest.headersSent) {
    for (var name in resp.headers) {
      // When gzip decoding is active the piped payload no longer matches the
      // response's Content-Encoding header, so do not propagate it.
      if (!this.gzip || name !== 'content-encoding') {
        dest.setHeader(name, resp.headers[name])
      }
    }
    dest.statusCode = resp.statusCode
  }

  if (this.pipefilter) this.pipefilter(resp, dest)
}
// Composable API
Request.prototype.setHeader = function (name, value, clobber) {
  // Overwrites by default; pass clobber=false to append to an existing
  // header of the same (case-insensitive) name, comma-separated.
  if (clobber === undefined) clobber = true

  var existingKey = this.hasHeader(name)
  if (clobber || !existingKey) {
    this.headers[name] = value
  } else {
    this.headers[existingKey] += ',' + value
  }
  return this
}
Request.prototype.setHeaders = function (headers) {
  // Bulk variant of setHeader(); each entry clobbers any existing value.
  for (var name in headers) {
    this.setHeader(name, headers[name])
  }
  return this
}
Request.prototype.hasHeader = function (header, headers) {
  // Case-insensitive header lookup. Returns the key in its original casing
  // when present, or false otherwise. Searches `headers` if given,
  // this.headers otherwise.
  var keys = Object.keys(headers || this.headers)
  var wanted = header.toLowerCase()
  for (var idx = 0; idx < keys.length; idx++) {
    if (keys[idx].toLowerCase() === wanted) return keys[idx]
  }
  return false
}
var hasHeader = Request.prototype.hasHeader
Request.prototype.qs = function (q, clobber) {
  // Merge `q` into the current querystring; with clobber set, the existing
  // query is discarded and only `q` is used.
  var merged = (!clobber && this.uri.query) ? qs.parse(this.uri.query) : {}
  for (var key in q) {
    merged[key] = q[key]
  }

  var serialized = qs.stringify(merged)
  if (serialized === '') {
    return this
  }

  this.uri = url.parse(this.uri.href.split('?')[0] + '?' + serialized)
  this.url = this.uri
  this.path = this.uri.path
  return this
}
Request.prototype.form = function (form) {
  if (form) {
    // urlencoded body: accept either a pre-encoded string or a plain object.
    this.setHeader('content-type', 'application/x-www-form-urlencoded; charset=utf-8')
    if (typeof form === 'string') {
      this.body = form.toString('utf8')
    } else {
      this.body = qs.stringify(form).toString('utf8')
    }
    return this
  }
  // Called without an argument: switch to multipart form-data and hand the
  // FormData object back so the caller can append parts to it.
  this._form = new FormData()
  return this._form
}
Request.prototype.multipart = function (multipart) {
  var self = this
  self.body = []

  // Make sure the content-type carries our boundary; any type the caller
  // already set is kept, only its parameters are replaced.
  if (!self.hasHeader('content-type')) {
    self.setHeader('content-type', 'multipart/related; boundary=' + self.boundary)
  } else {
    var ctKey = self.hasHeader('content-type')
    self.setHeader(ctKey, self.headers[ctKey].split(';')[0] + '; boundary=' + self.boundary)
  }

  if (!multipart.forEach) throw new Error('Argument error, options.multipart.')

  if (self.preambleCRLF) {
    self.body.push(new Buffer('\r\n'))
  }

  multipart.forEach(function (part) {
    var content = part.body
    if (content == null) throw Error('Body attribute missing in multipart.')
    delete part.body

    // Every remaining property of the part becomes a MIME part header.
    var head = '--' + self.boundary + '\r\n'
    Object.keys(part).forEach(function (name) {
      head += name + ': ' + part[name] + '\r\n'
    })
    head += '\r\n'

    self.body.push(new Buffer(head))
    self.body.push(new Buffer(content))
    self.body.push(new Buffer('\r\n'))
  })

  self.body.push(new Buffer('--' + self.boundary + '--'))
  return self
}
Request.prototype.json = function (val) {
  // Flag the request for JSON response parsing and, depending on the
  // argument, serialize the body.
  if (!this.hasHeader('accept')) this.setHeader('accept', 'application/json')
  this._json = true

  if (typeof val === 'boolean') {
    // json(true): serialize whatever body was already assigned (if object-ish).
    if (typeof this.body === 'object') {
      this.body = safeStringify(this.body)
      if (!this.hasHeader('content-type')) {
        this.setHeader('content-type', 'application/json')
      }
    }
  } else {
    // json(value): the argument itself becomes the serialized body.
    this.body = safeStringify(val)
    if (!this.hasHeader('content-type')) {
      this.setHeader('content-type', 'application/json')
    }
  }
  return this
}
Request.prototype.getHeader = function (name, headers) {
  // Case-insensitive header lookup. Returns the value of the last matching
  // key, or undefined when absent. Searches `headers` if given,
  // this.headers otherwise.
  //
  // Fix: the previous implementation built `new RegExp(name, 'i')` from the
  // header name, which threw a SyntaxError for names containing regex
  // metacharacters (e.g. "(") and could false-positive on patterns such as
  // "a.c" matching key "abc". A plain lower-cased equality comparison has
  // the intended semantics (equal lowercase implies equal length, so the
  // old length guard is subsumed).
  var result
  if (!headers) headers = this.headers
  var wanted = name.toLowerCase()
  Object.keys(headers).forEach(function (key) {
    if (key.toLowerCase() === wanted) result = headers[key]
  })
  return result
}
var getHeader = Request.prototype.getHeader
Request.prototype.auth = function (user, pass, sendImmediately, bearer) {
  // Bearer-token flavour takes precedence over user/pass when supplied.
  if (bearer !== undefined) {
    this._bearer = bearer
    this._hasAuth = true
    if (sendImmediately || typeof sendImmediately == 'undefined') {
      // A function-valued bearer is resolved lazily, at header time.
      if (typeof bearer === 'function') {
        bearer = bearer()
      }
      this.setHeader('authorization', 'Bearer ' + bearer)
      this._sentAuth = true
    }
    return this
  }

  if (typeof user !== 'string' || (pass !== undefined && typeof pass !== 'string')) {
    throw new Error('auth() received invalid user or password')
  }

  this._user = user
  this._pass = pass
  this._hasAuth = true

  // With sendImmediately falsy the Basic header is withheld; the stored
  // credentials are replayed on a 401 challenge instead.
  var credentials = typeof pass !== 'undefined' ? user + ':' + pass : user
  if (sendImmediately || typeof sendImmediately == 'undefined') {
    this.setHeader('authorization', 'Basic ' + toBase64(credentials))
    this._sentAuth = true
  }
  return this
}
Request.prototype.aws = function (opts, now) {
  // Two-phase: without `now` just stash the options; the second phase
  // (now=true) actually signs the request.
  if (!now) {
    this._aws = opts
    return this
  }

  var date = new Date()
  this.setHeader('date', date.toUTCString())

  var auth =
    { key: opts.key
    , secret: opts.secret
    , verb: this.method.toUpperCase()
    , date: date
    , contentType: this.getHeader('content-type') || ''
    , md5: this.getHeader('content-md5') || ''
    , amazonHeaders: aws.canonicalizeHeaders(this.headers)
    }

  // Canonical resource is "/<bucket><path>" with either part optional,
  // falling back to "/" when neither is set.
  if (opts.bucket) {
    auth.resource = '/' + opts.bucket + (this.path || '')
  } else {
    auth.resource = this.path || '/'
  }
  auth.resource = aws.canonicalizeResource(auth.resource)

  this.setHeader('authorization', aws.authorization(auth))
  return this
}
Request.prototype.httpSignature = function (opts) {
  var self = this
  // Adapt this request to the minimal getHeader/setHeader interface that
  // the http-signature signer expects.
  httpSignature.signRequest({
    getHeader: function (header) {
      return getHeader(header, self.headers)
    },
    setHeader: function (header, value) {
      self.setHeader(header, value)
    },
    method: self.method,
    path: self.path
  }, opts)
  debug('httpSignature authorization', self.getHeader('authorization'))
  return self
}
Request.prototype.hawk = function (opts) {
this.setHeader('Authorization', hawk.client.header(this.uri, thi | {
"content_hash": "f9af250ace58c4f161f9eb74a8b59206",
"timestamp": "",
"source": "github",
"line_count": 1299,
"max_line_length": 164,
"avg_line_length": 31.53194765204003,
"alnum_prop": 0.6080810546875,
"repo_name": "wenjoy/homePage",
"id": "dd082d573a4d8f60c36c1cc572a572bb40ce803d",
"size": "40960",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "node_modules/geetest/node_modules/request/node_modules/har-validator/node_modules/standard/node_modules/eslint-plugin-react/node_modules/coveralls/node_modules/request/request.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "330"
},
{
"name": "HTML",
"bytes": "4292"
},
{
"name": "JavaScript",
"bytes": "26625"
}
],
"symlink_target": ""
} |
from django.conf import settings as django_settings
from django.contrib.sites.models import Site
from django.template.defaultfilters import slugify
from django.test import RequestFactory, TestCase
from django.utils import translation
from feincms.extensions.translations import (
translation_set_language,
user_has_language_set,
)
from feincms.module.page.models import Page
class TranslationTestCase(TestCase):
    """Tests for the FeinCMS page translation extension helpers.

    ``setUp`` builds a small English/German page tree; the tests exercise
    ``user_has_language_set`` and ``translation_set_language``.
    """

    def setUp(self):
        # Register a minimal template so pages can be created at all.
        Page.register_templates(
            {
                "key": "base",
                "title": "Standard template",
                "path": "feincms_base.html",
                "regions": (
                    ("main", "Main content area"),
                    ("sidebar", "Sidebar", "inherited"),
                ),
            }
        )
        self.site_1 = Site.objects.all()[0]
        # Create an English and a German page set, then link the German
        # child and parent as translations of their English counterparts.
        en = self.create_default_page_set(language="en")
        de = self.create_default_page_set(language="de", title="Testseite")
        de.translation_of = en
        de.save()
        de.parent.translation_of = en.parent
        de.parent.save()
        self.page_de = de.parent
        self.page_en = en.parent
        if hasattr(translation, "LANGUAGE_SESSION_KEY"):
            self.language_session_key = translation.LANGUAGE_SESSION_KEY
        else:
            # Django 1.6 has no LANGUAGE_SESSION_KEY; the cookie name was
            # used as the session key in that version.
            self.language_session_key = django_settings.LANGUAGE_COOKIE_NAME

    def create_page(self, title="Test page", parent=None, **kwargs):
        """Create and return a single ``Page``; ``kwargs`` override defaults."""
        defaults = {
            "template_key": "base",
            "site": self.site_1,
            "in_navigation": False,
            "active": False,
        }
        defaults.update(kwargs)
        # Bug fix: "slug" must be removed from the merged defaults because it
        # is passed explicitly below.  Previously a caller supplying
        # ``slug=...`` triggered ``TypeError: got multiple values for keyword
        # argument 'slug'`` (the value remained inside **defaults as well).
        slug = defaults.pop("slug", slugify(title))
        return Page.objects.create(
            title=title,
            slug=slug,
            parent=parent,
            **defaults
        )

    def create_default_page_set(self, **kwargs):
        """Create a parent page (from ``kwargs``) with one child; return the child."""
        return self.create_page("Test child page", parent=self.create_page(**kwargs))

    def testPage(self):
        # The extension must add the translation fields to Page and the
        # fixture pages must be linked as set up above.
        page = Page()
        self.assertTrue(hasattr(page, "language"))
        self.assertTrue(hasattr(page, "translation_of"))
        self.assertEqual(self.page_de.translation_of, self.page_en)
        self.assertEqual(self.page_de.original_translation, self.page_en)
        # TODO: add request tests
        # with translation.override('de'):

    def test_user_has_language_set_with_session(self):
        # A language stored in the session counts as "user has language set".
        factory = RequestFactory()
        request = factory.get(self.page_en.get_navigation_url())
        setattr(request, "session", dict())
        request.session[self.language_session_key] = "en"
        self.assertEqual(user_has_language_set(request), True)

    def test_user_has_language_set_with_cookie(self):
        # A language cookie alone also counts.
        factory = RequestFactory()
        request = factory.get(self.page_en.get_navigation_url())
        request.COOKIES[django_settings.LANGUAGE_COOKIE_NAME] = "en"
        self.assertEqual(user_has_language_set(request), True)

    def test_translation_set_language_to_session(self):
        # A non-primary language must be activated and stored in the session.
        factory = RequestFactory()
        request = factory.get(self.page_de.get_navigation_url())
        setattr(request, "session", dict())
        translation_set_language(request, "de")
        self.assertEqual(request.LANGUAGE_CODE, "de")
        self.assertEqual(request.session[self.language_session_key], "de")

    def test_translation_set_language_to_session_primary(self):
        factory = RequestFactory()
        request = factory.get(self.page_en.get_navigation_url())
        setattr(request, "session", dict())
        translation_set_language(request, "en")
        self.assertEqual(request.LANGUAGE_CODE, "en")
        # We avoid setting the translation language to the primary language, so should not be set
        self.assertEqual(
            request.session.get(self.language_session_key, "unset"), "unset"
        )

    def test_translation_set_language_to_cookie(self):
        # Without a session, the helper returns a response carrying the
        # language cookie instead.
        factory = RequestFactory()
        request = factory.get(self.page_en.get_navigation_url())
        response = translation_set_language(request, "en")
        self.assertEqual(request.LANGUAGE_CODE, "en")
        c_key = django_settings.LANGUAGE_COOKIE_NAME
        self.assertEqual(response.cookies[c_key].value, "en")
| {
"content_hash": "ca6da1a000e5d5ebc751478eaff25bf9",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 97,
"avg_line_length": 36.956896551724135,
"alnum_prop": 0.6207137858642408,
"repo_name": "feincms/feincms",
"id": "803c18221e357c231141de2e2ccdb8c884eed0de",
"size": "4287",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tests/testapp/tests/test_extensions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "6217"
},
{
"name": "HTML",
"bytes": "30915"
},
{
"name": "JavaScript",
"bytes": "35874"
},
{
"name": "Python",
"bytes": "427172"
}
],
"symlink_target": ""
} |
<?php namespace App\Http\Requests\Admin;
use App\Http\Requests\Request;
class CreateArticleRequest extends Request {

	/**
	 * Authorize the request.
	 *
	 * No per-request restriction is applied here: every caller is allowed
	 * to submit this form request.
	 *
	 * @return bool
	 */
	public function authorize()
	{
		return true;
	}

	/**
	 * Validation rules applied when creating an article.
	 *
	 * @return array
	 */
	public function rules()
	{
		return [
			'title' => 'required',
		];
	}

}
| {
"content_hash": "55a071d34e6e22f432ee8d409caea538",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 61,
"avg_line_length": 14.655172413793103,
"alnum_prop": 0.6517647058823529,
"repo_name": "fishercom/LayqaCMS",
"id": "cdfedae0370fc2815e6b7e7ae033951672eaff66",
"size": "425",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/Http/Requests/Admin/CreateArticleRequest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "97871"
},
{
"name": "HTML",
"bytes": "2074886"
},
{
"name": "Java",
"bytes": "305368"
},
{
"name": "JavaScript",
"bytes": "2376227"
},
{
"name": "PHP",
"bytes": "1513019"
},
{
"name": "Vue",
"bytes": "561"
}
],
"symlink_target": ""
} |
<?php
/** Zend_Dojo_Form_Element_Dijit */
require_once 'Zend/Dojo/Form/Element/Dijit.php';
/**
* TextBox dijit
*
* @category Zend
* @package Zend_Dojo
* @subpackage Form_Element
* @copyright Copyright (c) 2005-2009 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id: TextBox.php 16204 2009-06-21 18:58:29Z thomas $
*/
class Zend_Dojo_Form_Element_TextBox extends Zend_Dojo_Form_Element_Dijit
{
    /**
     * Use TextBox dijit view helper
     * @var string
     */
    public $helper = 'TextBox';

    /**
     * Retrieve a boolean dijit parameter, defaulting to false when unset.
     *
     * Extracted helper: the four boolean accessors below previously
     * duplicated the same hasDijitParam()/getDijitParam() sequence.
     *
     * @param  string $name Dijit parameter name
     * @return bool
     */
    protected function _getBoolDijitParam($name)
    {
        if (!$this->hasDijitParam($name)) {
            return false;
        }
        return $this->getDijitParam($name);
    }

    /**
     * Set lowercase flag
     *
     * @param  bool $flag
     * @return Zend_Dojo_Form_Element_TextBox
     */
    public function setLowercase($flag)
    {
        $this->setDijitParam('lowercase', (bool) $flag);
        return $this;
    }

    /**
     * Retrieve lowercase flag
     *
     * @return bool
     */
    public function getLowercase()
    {
        return $this->_getBoolDijitParam('lowercase');
    }

    /**
     * Set propercase flag
     *
     * @param  bool $flag
     * @return Zend_Dojo_Form_Element_TextBox
     */
    public function setPropercase($flag)
    {
        $this->setDijitParam('propercase', (bool) $flag);
        return $this;
    }

    /**
     * Retrieve propercase flag
     *
     * @return bool
     */
    public function getPropercase()
    {
        return $this->_getBoolDijitParam('propercase');
    }

    /**
     * Set uppercase flag
     *
     * @param  bool $flag
     * @return Zend_Dojo_Form_Element_TextBox
     */
    public function setUppercase($flag)
    {
        $this->setDijitParam('uppercase', (bool) $flag);
        return $this;
    }

    /**
     * Retrieve uppercase flag
     *
     * @return bool
     */
    public function getUppercase()
    {
        return $this->_getBoolDijitParam('uppercase');
    }

    /**
     * Set trim flag
     *
     * @param  bool $flag
     * @return Zend_Dojo_Form_Element_TextBox
     */
    public function setTrim($flag)
    {
        $this->setDijitParam('trim', (bool) $flag);
        return $this;
    }

    /**
     * Retrieve trim flag
     *
     * @return bool
     */
    public function getTrim()
    {
        return $this->_getBoolDijitParam('trim');
    }

    /**
     * Set maxLength
     *
     * @param  int $length
     * @return Zend_Dojo_Form_Element_TextBox
     */
    public function setMaxLength($length)
    {
        $this->setDijitParam('maxLength', (int) $length);
        return $this;
    }

    /**
     * Retrieve maxLength
     *
     * NOTE: unlike the boolean accessors this intentionally returns the raw
     * parameter (null when unset), preserving the original contract.
     *
     * @return int|null
     */
    public function getMaxLength()
    {
        return $this->getDijitParam('maxLength');
    }
}
| {
"content_hash": "e4fb0e5cfa05c73b3c97fedded6ea338",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 87,
"avg_line_length": 21,
"alnum_prop": 0.542726679712981,
"repo_name": "vrunoa/examen-fadgut",
"id": "b3a907780292ae0ce2e7b0b10529100e40ac13a7",
"size": "3764",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "solucion/library/Zend/Dojo/Form/Element/TextBox.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "12033871"
},
{
"name": "Ruby",
"bytes": "94"
}
],
"symlink_target": ""
} |
package com.iciql.test.models;
import com.iciql.Iciql.IQTable;
import java.util.Arrays;
import java.util.List;
/**
 * This class inherits all its fields from a parent class which has annotated
 * columns. The IQTable annotation of the parent class is ignored and only the
 * IQTable annotation of this class matters. However, this table inherits
 * IQColumns from its super class.
 */
@IQTable(inheritColumns = true, annotationsOnly = false)
public class ProductInheritedAnnotation extends ProductMixedAnnotation {

	public ProductInheritedAnnotation() {
		// public constructor
	}

	private ProductInheritedAnnotation(int productId, String productName, String category, double unitPrice,
			int unitsInStock, String mappedField) {
		super(productId, productName, category, unitPrice, unitsInStock, mappedField);
	}

	/**
	 * Sample rows used by the test suite. Returns a fixed-size list
	 * (Arrays.asList semantics) of ten products.
	 */
	public static List<ProductInheritedAnnotation> getData() {
		final String mapped = "mapped";
		return Arrays.asList(
				new ProductInheritedAnnotation(1, "Chai", "Beverages", 18, 39, mapped),
				new ProductInheritedAnnotation(2, "Chang", "Beverages", 19.0, 17, mapped),
				new ProductInheritedAnnotation(3, "Aniseed Syrup", "Condiments", 10.0, 13, mapped),
				new ProductInheritedAnnotation(4, "Chef Anton's Cajun Seasoning", "Condiments", 22.0, 53, mapped),
				new ProductInheritedAnnotation(5, "Chef Anton's Gumbo Mix", "Condiments", 21.3500, 0, mapped),
				new ProductInheritedAnnotation(6, "Grandma's Boysenberry Spread", "Condiments", 25.0, 120, mapped),
				new ProductInheritedAnnotation(7, "Uncle Bob's Organic Dried Pears", "Produce", 30.0, 15, mapped),
				new ProductInheritedAnnotation(8, "Northwoods Cranberry Sauce", "Condiments", 40.0, 6, mapped),
				new ProductInheritedAnnotation(9, "Mishi Kobe Niku", "Meat/Poultry", 97.0, 29, mapped),
				new ProductInheritedAnnotation(10, "Ikura", "Seafood", 31.0, 31, mapped));
	}
}
| {
"content_hash": "8c1dc936aecc3071b45ca055ccc32d07",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 110,
"avg_line_length": 46.326530612244895,
"alnum_prop": 0.6524229074889868,
"repo_name": "gitblit/iciql",
"id": "c41a3f0f2245ce90983d4a5467a71844f6ead49f",
"size": "2898",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/java/com/iciql/test/models/ProductInheritedAnnotation.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "3048"
},
{
"name": "Java",
"bytes": "754077"
}
],
"symlink_target": ""
} |
package com.hp.autonomy.frontend.find.idol.configuration;
import com.autonomy.aci.client.annotations.IdolAnnotationsProcessorFactory;
import com.autonomy.aci.client.services.AciService;
import com.hp.autonomy.frontend.configuration.ValidationResult;
import com.hp.autonomy.frontend.configuration.Validator;
import com.hp.autonomy.searchcomponents.idol.statsserver.Statistic;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.util.Set;
/**
 * {@link Validator} for {@link StatsServerConfig}: delegates the actual check
 * to the config object's own {@code validate} method, supplying the ACI
 * service, the set of required statistics and a response-processor factory.
 */
@Component
public class StatsServerConfigValidator implements Validator<StatsServerConfig> {
    // Service used to issue ACI requests during validation.
    private final AciService aciService;
    // Factory producing processors for ACI responses.
    private final IdolAnnotationsProcessorFactory processorFactory;

    // Injected by bean name; presumably the statistics StatsServer must
    // expose for the application to work — confirm against the bean definition.
    @Resource(name = "requiredStatistics")
    private Set<Statistic> requiredStatistics;

    @Autowired
    public StatsServerConfigValidator(final AciService aciService, final IdolAnnotationsProcessorFactory processorFactory) {
        this.aciService = aciService;
        this.processorFactory = processorFactory;
    }

    /**
     * Validate the given configuration by delegating to
     * {@code config.validate(aciService, requiredStatistics, processorFactory)}.
     *
     * @param config the StatsServer configuration to check
     * @return the validation result produced by the config object
     */
    @Override
    public ValidationResult<?> validate(final StatsServerConfig config) {
        return config.validate(aciService, requiredStatistics, processorFactory);
    }

    /**
     * @return the configuration class this validator supports
     */
    @Override
    public Class<StatsServerConfig> getSupportedClass() {
        return StatsServerConfig.class;
    }
}
| {
"content_hash": "8c8cf6b8aaf5d76951c36c904032bc21",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 125,
"avg_line_length": 33.58536585365854,
"alnum_prop": 0.7973856209150327,
"repo_name": "ExperisIT-rav/FindExperisIT",
"id": "afc74006c2963ce3becfbd5ac0dc76ceb48f06c3",
"size": "1564",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "idol/src/main/java/com/hp/autonomy/frontend/find/idol/configuration/StatsServerConfigValidator.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "250209"
},
{
"name": "CoffeeScript",
"bytes": "5997"
},
{
"name": "HTML",
"bytes": "53144"
},
{
"name": "Java",
"bytes": "565369"
},
{
"name": "JavaScript",
"bytes": "815020"
},
{
"name": "Ruby",
"bytes": "206"
},
{
"name": "Shell",
"bytes": "4427"
}
],
"symlink_target": ""
} |
<wicket:extend xmlns:wicket="http://www.w3.org/1999/xhtml">
<!-- NOTE(review): "filters" looks like a repeater producing one row per
     filter, each exposing a "container" div for the filter's input widget.
     Semantics inferred from the wicket:ids only - confirm against the
     Java component that binds this markup. -->
<div wicket:id="filters">
<div wicket:id="container" class="input-container"></div>
</div>
</wicket:extend> | {
"content_hash": "ad22a0ad7789327cf231a1532f140845",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 65,
"avg_line_length": 36.6,
"alnum_prop": 0.6502732240437158,
"repo_name": "WeaxMe/Orienteer",
"id": "0b0232524860f1970adc68a415f4e8c53b520338",
"size": "183",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "orienteer-core/src/main/java/org/orienteer/core/component/property/filter/CollectionFilterPanel.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "11931"
},
{
"name": "Dockerfile",
"bytes": "1136"
},
{
"name": "HTML",
"bytes": "53524"
},
{
"name": "Java",
"bytes": "2233417"
},
{
"name": "JavaScript",
"bytes": "216403"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<!--
Template Name: Metronic - Responsive Admin Dashboard Template build with Twitter Bootstrap 3.3.2
Version: 3.2.0
Author: KeenThemes
Website: http://www.keenthemes.com/
Contact: [email protected]
Follow: www.twitter.com/keenthemes
Like: www.facebook.com/keenthemes
Purchase: http://themeforest.net/item/metronic-responsive-admin-dashboard-template/4021469?ref=keenthemes
License: You must have a valid license purchased only from themeforest(the above link) in order to legally use the theme for your project.
-->
<!--[if IE 8]> <html lang="en" class="ie8 no-js"> <![endif]-->
<!--[if IE 9]> <html lang="en" class="ie9 no-js"> <![endif]-->
<!--[if !IE]><!-->
<html lang="en">
<!--<![endif]-->
<!-- BEGIN HEAD -->
<head>
<meta charset="utf-8"/>
<title>Metronic | New User Profile</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta content="width=device-width, initial-scale=1.0" name="viewport"/>
<meta http-equiv="Content-type" content="text/html; charset=utf-8">
<meta content="" name="description"/>
<meta content="" name="author"/>
<!-- BEGIN GLOBAL MANDATORY STYLES -->
<link href="http://fonts.googleapis.com/css?family=Open+Sans:400,300,600,700&subset=all" rel="stylesheet" type="text/css">
<link href="/assets/global/plugins/font-awesome/css/font-awesome.min.css" rel="stylesheet" type="text/css">
<link href="/assets/global/plugins/simple-line-icons/simple-line-icons.min.css" rel="stylesheet" type="text/css">
<link href="/assets/global/plugins/bootstrap/css/bootstrap.min.css" rel="stylesheet" type="text/css">
<!-- NOTE(review): relative path below ("public/css/...") is inconsistent with
     the root-absolute "/assets/..." paths used by every sibling stylesheet;
     it will resolve differently on nested routes - confirm intended base. -->
<link href="public/css/uniform.default.css" rel="stylesheet" type="text/css">
<!-- END GLOBAL MANDATORY STYLES -->
<!-- BEGIN PAGE LEVEL STYLES -->
<link href="/assets/global/plugins/bootstrap-fileinput/bootstrap-fileinput.css" rel="stylesheet" type="text/css"/>
<link href="/assets/admin/pages/css/profile.css" rel="stylesheet" type="text/css"/>
<link href="/assets/admin/pages/css/tasks.css" rel="stylesheet" type="text/css"/>
<!-- END PAGE LEVEL STYLES -->
<!-- BEGIN THEME STYLES -->
<link href="/assets/global/css/components-rounded.css" id="style_components" rel="stylesheet" type="text/css">
<link href="/assets/global/css/plugins.css" rel="stylesheet" type="text/css">
<link href="/assets/admin/layout3/css/layout.css" rel="stylesheet" type="text/css">
<link href="/assets/admin/layout3/css/themes/default.css" rel="stylesheet" type="text/css" id="style_color">
<!-- NOTE(review): "assets/..." below is missing the leading slash that every
     other theme stylesheet uses - likely unintentional; confirm. -->
<link href="assets/admin/layout3/css/custom.css" rel="stylesheet" type="text/css">
<!-- END THEME STYLES -->
<link rel="shortcut icon" href="favicon.ico"/>
</head>
<!-- END HEAD --> | {
"content_hash": "c11b817a279d571e864d64f6733d902b",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 138,
"avg_line_length": 54.4375,
"alnum_prop": 0.7183314198239571,
"repo_name": "Tanginatushar/lab_test",
"id": "a942b65efa13fbbbf1661560986c1bb2bc384542",
"size": "2614",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "resources/views/partials-newuserprofile/header.blade.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "1160"
},
{
"name": "ApacheConf",
"bytes": "2220"
},
{
"name": "CSS",
"bytes": "10962908"
},
{
"name": "CoffeeScript",
"bytes": "337000"
},
{
"name": "Go",
"bytes": "28304"
},
{
"name": "HTML",
"bytes": "24904953"
},
{
"name": "JavaScript",
"bytes": "23674182"
},
{
"name": "PHP",
"bytes": "2276574"
},
{
"name": "Python",
"bytes": "23380"
},
{
"name": "Shell",
"bytes": "2664"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<resources xmlns:android="http://schemas.android.com/apk/res/android">
    <string name="cancel">"Ακύρωση"</string>
    <!-- Fix: corrected Greek typo "χρησιμοποποιήθηκαν" (duplicated syllable)
         to "χρησιμοποιήθηκαν" ("were used"). -->
    <string name="http_post_warning">"Στη σελίδα που αναζητάτε χρησιμοποιήθηκαν πληροφορίες που καταχωρίσατε. Αν επιστρέψετε σε αυτή τη σελίδα ίσως επαναληφθούν ενέργειες που εκτελέσατε. Θέλετε να συνεχίσετε;"</string>
    <string name="http_post_warning_resend">"Συνέχεια"</string>
    <string name="firstrun_signed_in_title">"Έχετε συνδεθεί πλέον στο Chrome."</string>
    <string name="firstrun_signed_in_description">"Οι ανοιχτές καρτέλες, οι σελιδοδείκτες, το ιστορικό, καθώς και άλλα δεδομένα συγχρονίζονται με το Λογαριασμό σας Google."</string>
    <string name="sad_tab_title">"Όπα! Κάτι πήγε στραβά!"</string>
    <string name="sad_tab_message">"Παρουσιάστηκε πρόβλημα κατά την εμφάνιση αυτής της ιστοσελίδας. Για να συνεχίσετε, επιλέξτε \"Επαναφόρτωση\" ή μεταβείτε σε άλλη σελίδα."</string>
    <string name="sad_tab_help_message">"Αν αυτό το μήνυμα εμφανίζεται συχνά, μπορείτε να βρείτε βοήθεια εδώ $1."</string>
    <string name="sad_tab_help_link">"προτάσεις"</string>
    <string name="sad_tab_reload_label">"Επαναφόρτωση"</string>
    <string name="options_startup_pages_placeholder">"Εισαγωγή διεύθυνσης URL:"</string>
</resources>
| {
"content_hash": "37efe36e556890d98e863ef97a73c99f",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 216,
"avg_line_length": 90.21428571428571,
"alnum_prop": 0.7711797307996833,
"repo_name": "kevin-smets/android-chromium",
"id": "ec7cdbe0bb2db9f4abf1cf515d3bd8bc8f2968a9",
"size": "1765",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "chromium/chrome/src/main/res/values-el/generated_resources.xml",
"mode": "33188",
"license": "bsd-2-clause",
"language": [],
"symlink_target": ""
} |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>posix::stream_descriptor_service::construct</title>
<link rel="stylesheet" href="../../../../../doc/src/boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.76.1">
<link rel="home" href="../../../boost_asio.html" title="Boost.Asio">
<link rel="up" href="../posix__stream_descriptor_service.html" title="posix::stream_descriptor_service">
<link rel="prev" href="close.html" title="posix::stream_descriptor_service::close">
<link rel="next" href="destroy.html" title="posix::stream_descriptor_service::destroy">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr>
<td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../boost.png"></td>
<td align="center"><a href="../../../../../index.html">Home</a></td>
<td align="center"><a href="../../../../../libs/libraries.htm">Libraries</a></td>
<td align="center"><a href="http://www.boost.org/users/people.html">People</a></td>
<td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td>
<td align="center"><a href="../../../../../more/index.htm">More</a></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="close.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../posix__stream_descriptor_service.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="destroy.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
<div class="section">
<div class="titlepage"><div><div><h4 class="title">
<a name="boost_asio.reference.posix__stream_descriptor_service.construct"></a><a class="link" href="construct.html" title="posix::stream_descriptor_service::construct">posix::stream_descriptor_service::construct</a>
</h4></div></div></div>
<p>
<a class="indexterm" name="idp90260880"></a>
Construct a new stream descriptor
implementation.
</p>
<pre class="programlisting"><span class="keyword">void</span> <span class="identifier">construct</span><span class="special">(</span>
<span class="identifier">implementation_type</span> <span class="special">&</span> <span class="identifier">impl</span><span class="special">);</span>
</pre>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2003-2015 Christopher M.
Kohlhoff<p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
</p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="close.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../posix__stream_descriptor_service.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="destroy.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
</body>
</html>
| {
"content_hash": "0f93ee0589c750754c789a5416f0e7d7",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 442,
"avg_line_length": 67.28846153846153,
"alnum_prop": 0.6396113175192912,
"repo_name": "nicecapj/crossplatfromMmorpgServer",
"id": "3640249b9e0ab288eb0e7de84aaa97c8a900db48",
"size": "3499",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "ThirdParty/boost_1_61_0/libs/asio/doc/html/boost_asio/reference/posix__stream_descriptor_service/construct.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "223360"
},
{
"name": "Batchfile",
"bytes": "33694"
},
{
"name": "C",
"bytes": "3967798"
},
{
"name": "C#",
"bytes": "2093216"
},
{
"name": "C++",
"bytes": "197077824"
},
{
"name": "CMake",
"bytes": "203207"
},
{
"name": "CSS",
"bytes": "427824"
},
{
"name": "CWeb",
"bytes": "174166"
},
{
"name": "Cuda",
"bytes": "52444"
},
{
"name": "DIGITAL Command Language",
"bytes": "6246"
},
{
"name": "Emacs Lisp",
"bytes": "7822"
},
{
"name": "Fortran",
"bytes": "1856"
},
{
"name": "Go",
"bytes": "8549"
},
{
"name": "HTML",
"bytes": "234971359"
},
{
"name": "IDL",
"bytes": "14"
},
{
"name": "Java",
"bytes": "3809828"
},
{
"name": "JavaScript",
"bytes": "1112586"
},
{
"name": "Lex",
"bytes": "1231"
},
{
"name": "M4",
"bytes": "135271"
},
{
"name": "Makefile",
"bytes": "1266088"
},
{
"name": "Max",
"bytes": "36857"
},
{
"name": "Objective-C",
"bytes": "2928243"
},
{
"name": "Objective-C++",
"bytes": "3527"
},
{
"name": "PHP",
"bytes": "59372"
},
{
"name": "Perl",
"bytes": "38649"
},
{
"name": "Perl6",
"bytes": "2053"
},
{
"name": "Protocol Buffer",
"bytes": "1576976"
},
{
"name": "Python",
"bytes": "3257634"
},
{
"name": "QML",
"bytes": "593"
},
{
"name": "QMake",
"bytes": "16692"
},
{
"name": "Rebol",
"bytes": "354"
},
{
"name": "Roff",
"bytes": "5189"
},
{
"name": "Ruby",
"bytes": "97584"
},
{
"name": "Shell",
"bytes": "787152"
},
{
"name": "Swift",
"bytes": "20519"
},
{
"name": "Tcl",
"bytes": "1172"
},
{
"name": "TeX",
"bytes": "32117"
},
{
"name": "Vim script",
"bytes": "3759"
},
{
"name": "XSLT",
"bytes": "552736"
},
{
"name": "Yacc",
"bytes": "19623"
}
],
"symlink_target": ""
} |
/**
* @author Lianna Eeftinck / https://github.com/Leeft
*/
/*
* Helper classes to generate a texture map for the text labels
*
* Based on: http://www.blackpawn.com/texts/lightmaps/default.html
*/
SCMAP.Knapsack = function ( canvas ) {
    // Backing canvas shared by all packed images
    this.canvas = canvas;
    // Root of the binary partition tree, covering the whole canvas area
    var root = new SCMAP.Knapsack.Node( this );
    root.rectangle = new SCMAP.Knapsack.Rectangle( 0, 0, canvas.width - 1, canvas.height - 1 );
    this.rootNode = root;
};
SCMAP.Knapsack.prototype = {
    constructor: SCMAP.Knapsack,

    // Try to fit the given image into the node tree.
    // On success the found node is claimed (tagged with an image id) and
    // returned; when no space is left, null is returned.
    insert: function insert( image ) {
        var node = this.rootNode.insert( image );
        if ( node ) {
            node.claim();
        }
        return node;
    }
};
SCMAP.Knapsack.Rectangle = function ( left, top, right, bottom ) {
    // Coerce every coordinate to a whole number; non-numbers become 0
    var toInt = function ( v ) { return typeof v === 'number' ? Math.floor( v ) : 0; };
    this.left = toInt( left );
    this.top = toInt( top );
    this.right = toInt( right );
    this.bottom = toInt( bottom );
};
SCMAP.Knapsack.Rectangle.prototype = {
    // BUG FIX: `constructor` previously pointed at SCMAP.Knapsack.Node
    // (copy-paste slip); it must reference the Rectangle constructor itself.
    constructor: SCMAP.Knapsack.Rectangle,

    // Horizontal centre of the rectangle (integer midpoint minus 0.5)
    Xcentre: function Xcentre() {
        return Math.floor( ( ( this.right - this.left ) / 2 ) + this.left ) - 0.5;
    },

    // Vertical centre of the rectangle (integer midpoint minus 0.5)
    Ycentre: function Ycentre() {
        return Math.floor( ( ( this.bottom - this.top ) / 2 ) + this.top ) - 0.5;
    },

    // Width in pixels (right - left)
    width: function width() {
        return( this.right - this.left );
    },

    // Height in pixels (bottom - top)
    height: function height() {
        return( this.bottom - this.top );
    }
};
SCMAP.Knapsack.Node = function ( knapsack ) {
    // Owning knapsack and its backing canvas
    this.knapsack = knapsack;
    this.canvas = knapsack.canvas;
    // Children of the binary partition (both null while this is a leaf)
    this.leftChild = this.rightChild = null;
    // Canvas area covered by this node (assigned when the parent splits)
    this.rectangle = null;
    // Id of the claimed image, and the texture mapped onto this node
    this.imageID = null;
    this.texture = null;
    this.generateUUID = function generateUUID() {
        return THREE.Math.generateUUID();
    };
};
SCMAP.Knapsack.Node.prototype = {
    constructor: SCMAP.Knapsack.Node,

    // Mark this node as occupied by tagging it with a fresh UUID.
    // NOTE(review): the `image` argument is unused here.
    claim: function claim( image ) {
        this.imageID = this.generateUUID();
    },

    // Free a leaf node: drop its image id and wipe its canvas area.
    // Split (internal) nodes cannot be released and throw.
    release: function release() {
        if ( this.leftChild || this.rightChild ) {
            throw new Error( "Can't release tree nodes" );
        }
        this.imageID = null;
        this.clear();
        return;
    },

    // Erase this node's rectangle on the backing canvas.
    clear: function clear() {
        var ctx = this.canvas.getContext('2d');
        ctx.clearRect( this.rectangle.left, this.rectangle.top, this.rectangle.width() - 1, this.rectangle.height() - 1 );
    },

    // Save the 2d context, clip drawing to this node's rectangle (1px inset)
    // and translate the origin to the rectangle centre.
    // Callers must pair this with restoreContext().
    clipContext: function clipContext() {
        var ctx = this.canvas.getContext('2d');
        ctx.save();
        ctx.beginPath();
        ctx.rect( this.rectangle.left + 1, this.rectangle.top + 1, this.rectangle.width() - 2, this.rectangle.height() - 2 );
        ctx.clip();
        ctx.translate( this.rectangle.Xcentre(), this.rectangle.Ycentre() );
        return ctx;
    },

    // Undo the save() performed by clipContext().
    restoreContext: function restoreContext() {
        var ctx = this.canvas.getContext('2d');
        ctx.restore();
    },

    // Point this node's texture at its share of the canvas via offset/repeat.
    setUV: function setUV() {
        var uvExtremes = this.uvCoordinates();
        this.texture.offset.x = uvExtremes[ 0 ];
        this.texture.offset.y = uvExtremes[ 1 ];
        this.texture.repeat.x = ( uvExtremes[ 2 ] - uvExtremes[ 0 ] );
        this.texture.repeat.y = ( uvExtremes[ 3 ] - uvExtremes[ 1 ] );
    },

    // UV-space extremes [left, bottom, right, top] of this node's rectangle.
    // The vertical axis is flipped (canvas y grows down, UV v grows up).
    uvCoordinates: function uvCoordinates() {
        return [
            this.rectangle.left / this.canvas.width,
            1 - ( this.rectangle.bottom / this.canvas.height ),
            this.rectangle.right / this.canvas.width,
            1 - ( this.rectangle.top / this.canvas.height ),
        ];
    },

    // Recursively find (or create by splitting) a leaf that fits `image`
    // (an object exposing width/height); returns the leaf or null if no room.
    insert: function insert( image ) {
        // if we're not a leaf then
        if ( this.leftChild || this.rightChild )
        {
            // (try inserting into first child)
            var newNode = this.leftChild.insert( image );
            if ( newNode instanceof SCMAP.Knapsack.Node ) {
                return newNode;
            }
            // (no room, insert into second)
            return this.rightChild.insert( image );
        }
        else
        {
            // (if there's already an image here, return)
            if ( this.imageID ) {
                return null;
            }
            // (if we're too small, return)
            if ( ( image.width > this.rectangle.width() ) || ( image.height > this.rectangle.height() ) ) {
                return null;
            }
            // (if we're just right, accept)
            if ( image.width === this.rectangle.width() && image.height === this.rectangle.height() ) {
                return this;
            }
            // (otherwise, gotta split this node and create some kids)
            // NOTE(review): the Node constructor expects a knapsack but gets
            // `this` (a Node); it works because Node also exposes .canvas,
            // yet the child's .knapsack then points at the parent node — confirm.
            this.leftChild = new SCMAP.Knapsack.Node( this );
            this.rightChild = new SCMAP.Knapsack.Node( this );
            // (decide which way to split)
            var dw = this.rectangle.width() - image.width;
            var dh = this.rectangle.height() - image.height;
            if ( dw > dh )
            {
                // Vertical split: left child is exactly image.width wide
                this.leftChild.rectangle = new SCMAP.Knapsack.Rectangle(
                    this.rectangle.left, this.rectangle.top, this.rectangle.left + image.width, this.rectangle.bottom
                );
                this.rightChild.rectangle = new SCMAP.Knapsack.Rectangle(
                    this.rectangle.left + image.width, this.rectangle.top, this.rectangle.right, this.rectangle.bottom
                );
            }
            else
            {
                // Horizontal split: top child is exactly image.height tall
                this.leftChild.rectangle = new SCMAP.Knapsack.Rectangle(
                    this.rectangle.left, this.rectangle.top, this.rectangle.right, this.rectangle.top + image.height
                );
                this.rightChild.rectangle = new SCMAP.Knapsack.Rectangle(
                    this.rectangle.left, this.rectangle.top + image.height, this.rectangle.right, this.rectangle.bottom
                );
            }
            // Recurse into first child we created
            return this.leftChild.insert( image );
        }
    }
};
// EOF
| {
"content_hash": "f99f72dedfec72d9b0e497458562f187",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 166,
"avg_line_length": 32.41951219512195,
"alnum_prop": 0.5925368642792658,
"repo_name": "SakyaStelios/Titan",
"id": "73f55fe694e5a236fcd16f294d0fc01a526a01b4",
"size": "6646",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "js/SCMAP/Knapsack.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16176"
},
{
"name": "HTML",
"bytes": "24277"
},
{
"name": "JavaScript",
"bytes": "742552"
}
],
"symlink_target": ""
} |
package config_service
import (
"strings"
cfgsvc "github.com/Flipkart/config-service/client-go"
"github.com/kelseyhightower/confd/log"
"errors"
"reflect"
"github.com/pquerna/ffjson/ffjson"
"strconv"
)
// Client provides a wrapper around the config-service client.
// NOTE(review): the previous comment said "zookeeper client", but this file
// wraps github.com/Flipkart/config-service — the comment was stale.
type Client struct {
	client *cfgsvc.ConfigServiceClient
}
// BucketListener receives bucket lifecycle callbacks from the config-service
// client and forwards update/delete events to WatchPrefix via watchResp.
type BucketListener struct{
	watchResp chan *watchResponse
	currentIndex uint64
}
// watchResponse carries the outcome of a watch: the next wait index to hand
// back to the caller, or an error (e.g. when the watched bucket was deleted).
type watchResponse struct {
	waitIndex uint64
	err error
}
// Connected is invoked by the config-service client once the bucket
// connection is established; it only logs the event.
func (l *BucketListener) Connected(bucketName string) {
	log.Info("Connected! " + bucketName)
}
// Disconnected is invoked when the bucket connection drops. The error is
// currently ignored; only an informational message is logged.
func (l *BucketListener) Disconnected(bucketName string, err error) {
	log.Info("Disconnected! " + bucketName)
}
// Deleted is invoked when the watched bucket disappears; it wakes the
// watcher with index 0 and an error so the caller can react.
func (l *BucketListener) Deleted(bucketName string) {
	log.Info("deleted " + bucketName)
	l.watchResp <- &watchResponse{
		waitIndex: 0,
		err:       errors.New(bucketName + " was deleted"),
	}
}
// Updated is invoked when the bucket content changes; it wakes the watcher
// with the next wait index and no error.
func (l *BucketListener) Updated(oldBucket *cfgsvc.Bucket, newBucket *cfgsvc.Bucket) {
	next := l.currentIndex + 1
	l.watchResp <- &watchResponse{waitIndex: next, err: nil}
}
// NewConfigClient creates a Client backed by a config-service client.
// NOTE(review): the machines argument is currently unused by the underlying
// client; it is kept for signature compatibility with the other backends.
func NewConfigClient(machines []string) (*Client, error) {
	c, err := cfgsvc.NewConfigServiceClient(50)
	if err != nil {
		// BUG FIX: previously this panicked even though the function has an
		// error return; propagate the error so callers can handle it.
		return nil, err
	}
	return &Client{c}, nil
}
// GetValues resolves configuration values for the requested keys.
//
// Each key has the form "/<bucket>[,<bucket>...]/<key>"; the special key "*"
// expands to every key of the bucket. Slice values are JSON-encoded; scalar
// values are formatted according to their dynamic type.
func (c *Client) GetValues(keys []string) (map[string]string, error) {
	vars := make(map[string]string)

	for _, v := range keys {
		bucketsKey := strings.Split(strings.TrimPrefix(v, "/"), "/")
		if len(bucketsKey) < 2 {
			// Guard against malformed keys instead of panicking on index 1.
			return vars, errors.New("invalid key format: " + v)
		}
		buckets := strings.Split(bucketsKey[0], ",")
		key := bucketsKey[1]

		dynamicBuckets, err := c.getDynamicBuckets(buckets)
		if err != nil {
			return vars, err
		}

		for _, dynamicBucket := range dynamicBuckets {
			var requestedKeys []string
			if key == "*" {
				// When key is "*" collect all keys in the bucket.
				bucketKeys := dynamicBucket.GetKeys()
				requestedKeys = []string{}
				for k := range bucketKeys {
					requestedKeys = append(requestedKeys, k)
				}
			} else {
				requestedKeys = []string{key}
			}

			// Render each requested key's value into vars.
			for _, k := range requestedKeys {
				val := dynamicBucket.GetKeys()[k]
				if val == nil {
					continue
				}

				if reflect.TypeOf(val).Kind() == reflect.Slice {
					data, err := ffjson.Marshal(val)
					if err != nil {
						log.Error("Failed decoding from JSON")
					} else {
						vars[k] = string(data[:])
					}
					continue
				}

				// BUG FIX: the original switch asserted val.(int64) for both
				// int and int64 (panics on int), asserted val.(float64) for
				// float32 (panics), and called strconv.FormatInt with base 64,
				// which is outside the valid 2..36 range and panics at runtime.
				switch value := val.(type) {
				case int:
					vars[k] = strconv.Itoa(value)
				case int64:
					vars[k] = strconv.FormatInt(value, 10)
				case string:
					vars[k] = value
				case bool:
					vars[k] = strconv.FormatBool(value)
				case float32:
					vars[k] = strconv.FormatFloat(float64(value), 'f', -1, 32)
				case float64:
					vars[k] = strconv.FormatFloat(value, 'f', -1, 64)
				}
			}
		}
	}
	return vars, nil
}
// getDynamicBuckets resolves each (whitespace-trimmed) bucket name into a
// DynamicBucket handle, stopping at the first lookup error. On error the
// buckets resolved so far are returned together with the error.
func (c *Client) getDynamicBuckets(buckets []string) ([]*cfgsvc.DynamicBucket, error) {
	var result []*cfgsvc.DynamicBucket
	for _, name := range buckets {
		bucket, err := c.client.GetDynamicBucket(strings.TrimSpace(name))
		if err != nil {
			return result, err
		}
		result = append(result, bucket)
	}
	return result, nil
}
// setupDynamicBucketListeners registers the listener on every bucket.
func setupDynamicBucketListeners(dynamicBuckets []*cfgsvc.DynamicBucket, bucketListener *BucketListener) {
	for _, db := range dynamicBuckets {
		db.AddListeners(bucketListener)
	}
}
// removeDynamicBucketListeners detaches the listener from every bucket.
func removeDynamicBucketListeners(dynamicBuckets []*cfgsvc.DynamicBucket, bucketListener *BucketListener) {
	for _, db := range dynamicBuckets {
		db.RemoveListeners(bucketListener)
	}
}
// WatchPrefix blocks until one of the comma-separated buckets named in
// prefix changes or is deleted, or until stopChan fires.
// A waitIndex of 0 means "first call": it returns immediately with index 1
// so the caller processes current state before waiting for changes.
func (c *Client) WatchPrefix(prefix string, waitIndex uint64, stopChan chan bool) (uint64, error) {
	prefix = strings.TrimPrefix(prefix, "/")
	prefixes := strings.Split(prefix, ",")
	dynamicBuckets, err := c.getDynamicBuckets(prefixes)
	if err != nil {
		return waitIndex, err
	}

	if waitIndex == 0 {
		return waitIndex+1, nil
	} else {
		// Buffered so each bucket listener can post without blocking.
		watchResp := make(chan *watchResponse, len(dynamicBuckets))
		bucketListener := &BucketListener{watchResp: watchResp, currentIndex: waitIndex}
		setupDynamicBucketListeners(dynamicBuckets, bucketListener)

		select {
		case watchResp := <- watchResp:
			// A bucket was updated or deleted; detach and report.
			removeDynamicBucketListeners(dynamicBuckets, bucketListener)
			return watchResp.waitIndex, watchResp.err
		case <-stopChan:
			// Caller requested shutdown of this watch.
			removeDynamicBucketListeners(dynamicBuckets, bucketListener)
			return 0, nil
		}
	}
}
| {
"content_hash": "3b4a4254e7f29a247a058fa2a9ba8ee7",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 107,
"avg_line_length": 26.313253012048193,
"alnum_prop": 0.6913919413919414,
"repo_name": "Flipkart/confd",
"id": "584cba82a6400d302d625f2fe6b216426fc8e02e",
"size": "4368",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/github.com/kelseyhightower/confd/backends/config-service/client.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "62534"
},
{
"name": "Ruby",
"bytes": "7882"
},
{
"name": "Shell",
"bytes": "10169"
}
],
"symlink_target": ""
} |
/**
A subrule is defined as follow:
subrule#test(buffer, offset)
@param buffer {Buffer|String} buffer to look for the match
@param offset {Number} start looking at this offset, if < 0, then no match, return -1
@return {Number} next offset (-1 if no match)
subrule#next(buffer, offset)
called by subrule#test if successful
Required properties:
length {Number} length of the matched data. Set when running the subrule if
the value is non zero.
Special values:
-1: unknown, ignore in infinite loop detection
-2: unknown, check in infinite loop detection
idx {Number} index of the matched pattern if many possible. Default=-1
*/
//TODO special case: loops
var isArray = require('util').isArray
var buffertools = require('buffertools')
/**
  Last subrule: terminal element of every subrule chain.
  Matching always succeeds and consumes nothing (returns the offset as-is).
 */
function lastSubRuleConst () {
  this.length = 0
  this.idx = -1
}
lastSubRuleConst.prototype.test = function (buf, offset) {
  return offset
}
// Single shared instance — subrules default their `next` to it
var lastSubRule = new lastSubRuleConst
/**
  Empty subrule: matches the empty pattern (alias of the terminal subrule)
 */
exports.emptySubRule = lastSubRule
/**
  All subrule: consumes the whole remainder of the buffer.
  NOTE(review): the previous comment claimed length is set to -1 then 1 to
  trick infinite loop detection, but the code sets it to 0 — confirm intent.
 */
function allSubRuleConst () {
  this.length = 0
  this.idx = -1
  this.next = lastSubRule
}
allSubRuleConst.prototype.test = function (buf) {
  // Matches everything: the next offset is the end of the buffer
  return buf.length
}
exports.allSubRule = new allSubRuleConst
/**
 Own-property checker
 @param {Object} o object to check on
 @param {String} prop property to be checked
 @return {Boolean} true when `o` is an object carrying `prop`
 */
var _has = Object.prototype.hasOwnProperty
function has (o, prop) {
  if (typeof o !== 'object') return false
  return _has.call(o, prop)
}
// include("subrules/first/buffer.js")
// First subrule matching a single fixed pattern.
// `buf` holds the pattern as a Buffer, `str` the same pattern as an array of
// char codes (see exports.firstSubRule), so both input types can be tested.
function buffer_firstSubRule (buf, str) {
  // Common properties
  this.idx = -1
  this.length = 1
  this.next = lastSubRule
  // Specific properties
  this.buf = buf
  this.str = str
}
buffer_firstSubRule.prototype.test = function (buf, offset) {
  var isString = typeof buf === 'string'
  var n = isString ? this.str.length : this.buf.length
  // Not enough input left for the pattern
  if (buf.length < offset + n) return -1

  if (isString) {
    for (var p = this.str, i = 0; i < n; i++) {
      if ( buf.charCodeAt(offset+i) !== p[i] ) return -1
    }
  } else {
    for (var p = this.buf, i = 0; i < n; i++) {
      if ( buf[offset+i] !== p[i] ) return -1
    }
  }

  if (this.length > 0) this.length = n
  return this.next.test(buf, offset + n)
}// include("subrules/first/buffer_array.js")
// First subrule matching ONE of several fixed patterns, tried in order.
// `buf` holds the patterns as Buffers, `str` the same patterns as arrays of
// char codes; on success this.idx records the index of the winning pattern.
function buffer_array_firstSubRule (buf, str) {
  // Common properties
  this.idx = -1
  this.length = 1
  this.next = lastSubRule
  // Specific properties
  this.buf = buf
  this.str = str
}
buffer_array_firstSubRule.prototype.test = function (buf, offset) {
  var isString = typeof buf === 'string'
  // Pick the pattern list matching the input type
  var list = isString ? this.str : this.buf
  // Bytes/chars remaining from offset
  var delta = buf.length - offset

  if (isString) {
    nextEntry_String: for (var j = 0, len = list.length; j < len; j++) {
      var p = list[j]
      var n = p.length
      // Pattern longer than the remaining input: try the next one
      if (delta < n) continue
      for (var i = 0; i < n; i++) {
        if ( buf.charCodeAt(offset+i) !== p[i] ) continue nextEntry_String
      }
      if (this.length > 0) this.length = n
      this.idx = j
      return this.next.test(buf, offset + n)
    }
  } else {
    nextEntry_Buffer: for (var j = 0, len = list.length; j < len; j++) {
      var p = list[j]
      var n = p.length
      if (delta < n) continue
      for (var i = 0; i < n; i++) {
        if ( buf[offset+i] !== p[i] ) continue nextEntry_Buffer
      }
      if (this.length > 0) this.length = n
      this.idx = j
      return this.next.test(buf, offset + n)
    }
  }

  // No pattern matched
  return -1
}
// include("subrules/first/buffer_array_loop.js")
// First subrule greedily matching one or more occurrences of ANY pattern in
// the list (`buf`: Buffers, `str`: arrays of char codes).
// NOTE(review): unlike its siblings, this.length is never updated from its
// initial value here — confirm whether that is intended for loop subrules.
function buffer_array_loop_firstSubRule (buf, str) {
  // Common properties
  this.idx = -1
  this.length = 1
  this.next = lastSubRule
  // Specific properties
  this.buf = buf
  this.str = str
}
buffer_array_loop_firstSubRule.prototype.test = function (buf, offset) {
  var isString = typeof buf === 'string'
  var list = isString ? this.str : this.buf
  var bufLen = buf.length
  var len = list.length
  var pos = offset

  if (isString) {
    loop_String: while (pos < bufLen) {
      nextEntry_String: for (var j = 0; j < len; j++) {
        var p = list[j]
        var n = p.length
        // Pattern longer than the remaining input: try the next one
        if (bufLen < pos + n) continue nextEntry_String
        for (var i = 0; i < n; i++) {
          if ( buf.charCodeAt(pos+i) !== p[i] ) continue nextEntry_String
        }
        // Match, try for more
        pos += n
        continue loop_String
      }
      // No match, end of the main loop
      break
    }
  } else {
    loop_Buffer: while (pos < bufLen) {
      nextEntry_Buffer: for (var j = 0; j < len; j++) {
        var p = list[j]
        var n = p.length
        if (bufLen < pos + n) continue nextEntry_Buffer
        for (var i = 0; i < n; i++) {
          if ( buf[pos+i] !== p[i] ) continue nextEntry_Buffer
        }
        // Match, try for more
        pos += n
        continue loop_Buffer
      }
      // No match, end of the main loop
      break
    }
  }

  // At least one match if the offset changed
  return pos > offset ? this.next.test(buf, pos) : -1
}
// include("subrules/first/buffer_loop.js")
// First subrule greedily matching one or more repetitions of a single fixed
// pattern (`buf` as Buffer, `str` as array of char codes).
function buffer_loop_firstSubRule (buf, str) {
  // Common properties
  this.idx = -1
  this.length = 1
  this.next = lastSubRule
  // Specific properties
  this.buf = buf
  this.str = str
}
buffer_loop_firstSubRule.prototype.test = function (buf, offset) {
  var isString = typeof buf === 'string'
  // BUG FIX: the early length check below previously ran before `n` was
  // assigned (var hoisting left it undefined), so `bufLen < offset + NaN`
  // was always false and the guard never fired. Resolve the pattern and its
  // length up front so the guard works as intended.
  var p = isString ? this.str : this.buf
  var n = p.length
  var bufLen = buf.length
  var pos = offset
  if (bufLen < offset + n) return -1

  if (isString) {
    loop_String: while (pos < bufLen) {
      for (var i = 0; i < n; i++) {
        if ( buf.charCodeAt(pos+i) !== p[i] ) break loop_String
      }
      pos += n
    }
  } else {
    loop_Buffer: while (pos < bufLen) {
      for (var i = 0; i < n; i++) {
        if ( buf[pos+i] !== p[i] ) break loop_Buffer
      }
      pos += n
    }
  }

  if (this.length > 0) this.length = n
  // At least one match if the offset changed
  return pos > offset ? this.next.test(buf, pos) : -1
}// include("subrules/first/range_array_loop_object.js")
// First subrule greedily consuming characters that fall into ANY of the
// inclusive [start[i], end[i]] char-code ranges (start/end same length).
function range_array_loop_object_firstSubRule (start, end) {
  // Common properties
  this.idx = -1
  this.length = 1
  this.next = lastSubRule
  // Specific properties
  this.start = start
  this.end = end
}
range_array_loop_object_firstSubRule.prototype.test = function (buf, offset) {
  var isString = typeof buf === 'string'
  var start = this.start
  var end = this.end
  var len = start.length // Same length as this.end
  var pos = offset
  var bufLen = buf.length

  if (isString) {
    loop_String: while (pos < bufLen) {
      for (var i = 0; i < len; i++) {
        var c = buf.charCodeAt(pos)
        if ( c >= start[i] && c <= end[i] ) {
          // Match, try for more
          pos++
          continue loop_String
        }
      }
      // No match, end of the main loop
      break
    }
  } else {
    loop_Buffer: while (pos < bufLen) {
      for (var i = 0; i < len; i++) {
        // BUG FIX: previously read buf[offset] (the fixed starting position)
        // instead of buf[pos], so the loop kept re-testing the first byte
        // while pos advanced.
        var c = buf[pos]
        if ( c >= start[i] && c <= end[i] ) {
          // Match, try for more
          pos++
          continue loop_Buffer
        }
      }
      // No match, end of the main loop
      break
    }
  }

  // At least one match if the offset changed
  return pos > offset ? this.next.test(buf, pos) : -1
}
// include("subrules/first/range_array_object.js")
// First subrule matching ONE character whose code falls into any of the
// inclusive [start[i], end[i]] ranges; this.idx records the matching range.
function range_array_object_firstSubRule (start, end) {
  // Common properties
  this.idx = -1
  this.length = 1
  this.next = lastSubRule
  // Specific properties
  this.start = start
  this.end = end
}
range_array_object_firstSubRule.prototype.test = function (buf, offset) {
  var isString = typeof buf === 'string'
  var start = this.start
  var end = this.end
  var n = start.length // Same length as this.end

  if (isString) {
    for (var i = 0; i < n; i++) {
      var c = buf.charCodeAt(offset)
      if (
        c >= start[i]
      && c <= end[i]
      ) {
        this.idx = i
        return this.next.test(buf, offset + 1)
      }
    }
  } else {
    for (var i = 0; i < n; i++) {
      var c = buf[offset]
      if (
        c >= start[i]
      && c <= end[i]
      ) {
        this.idx = i
        return this.next.test(buf, offset + 1)
      }
    }
  }

  // No range matched the character
  return -1
}// include("subrules/first/range_loop_object.js")
// First subrule greedily consuming characters within the single inclusive
// [start, end] char-code range.
function range_loop_object_firstSubRule (start, end) {
  // Common properties
  this.idx = -1
  this.length = 1
  this.next = lastSubRule
  // Specific properties
  this.start = start
  this.end = end
}
range_loop_object_firstSubRule.prototype.test = function (buf, offset) {
  var isString = typeof buf === 'string'
  var start = this.start
  var end = this.end
  var pos = offset
  var bufLen = buf.length

  if (isString) {
    while (pos < bufLen) {
      var c = buf.charCodeAt(pos)
      // No match, end of the main loop
      if ( c < start || c > end ) break
      // Match, try for more
      pos++
    }
  } else {
    while (pos < bufLen) {
      // BUG FIX: previously read buf[offset] (the fixed starting position)
      // instead of buf[pos], re-testing the same byte while pos advanced.
      var c = buf[pos]
      // No match, end of the main loop
      if ( c < start || c > end ) break
      // Match, try for more
      pos++
    }
  }

  // At least one match if the offset changed
  return pos > offset ? this.next.test(buf, pos) : -1
}// include("subrules/first/range_object.js")
// First subrule matching a single character inside the inclusive
// [start, end] char-code range.
function range_object_firstSubRule (start, end) {
  // Common subrule slots
  this.idx = -1
  this.length = 1
  this.next = lastSubRule
  // Inclusive char-code bounds
  this.start = start
  this.end = end
}
range_object_firstSubRule.prototype.test = function (buf, offset) {
  var code
  if (typeof buf === 'string') {
    code = buf.charCodeAt(offset)
  } else {
    code = buf[offset]
  }
  // Out of range -> fail, otherwise hand over to the next subrule
  if (code < this.start || code > this.end) return -1
  return this.next.test(buf, offset + 1)
}// include("subrules/first/rangeend_array_object.js")
// First subrule matching ONE character whose code is <= one of the end
// bounds; this.idx records the index of the bound that matched.
function rangeend_array_object_firstSubRule (end) {
  // Common properties
  this.idx = -1
  this.length = 1
  this.next = lastSubRule
  // Specific properties
  this.end = end
}
rangeend_array_object_firstSubRule.prototype.test = function (buf, offset) {
  var isString = typeof buf === 'string'
  var end = this.end
  var n = end.length

  if (isString) {
    for (var i = 0; i < n; i++) {
      if (
        buf.charCodeAt(offset) <= end[i]
      ) {
        this.idx = i
        return this.next.test(buf, offset + 1)
      }
    }
  } else {
    for (var i = 0; i < n; i++) {
      // BUG FIX: the Buffer branch previously tested `>= end[i]`, the
      // opposite of the string branch and of range-END semantics.
      if (
        buf[offset] <= end[i]
      ) {
        this.idx = i
        return this.next.test(buf, offset + 1)
      }
    }
  }
  return -1
}// include("subrules/first/rangeend_loop_object.js")
// First subrule greedily consuming characters whose code is <= end.
function rangeend_loop_object_firstSubRule (end) {
  // Common properties
  this.idx = -1
  this.length = 1
  this.next = lastSubRule
  // Specific properties
  this.end = end
}
rangeend_loop_object_firstSubRule.prototype.test = function (buf, offset) {
  var isString = typeof buf === 'string'
  var end = this.end
  var pos = offset
  var bufLen = buf.length

  if (isString) {
    while (pos < bufLen) {
      var c = buf.charCodeAt(pos)
      // No match, end of the main loop
      if ( c > end ) break
      // Match, try for more
      pos++
    }
  } else {
    while (pos < bufLen) {
      // BUG FIX: previously read buf[offset] (the fixed starting position)
      // instead of buf[pos], re-testing the same byte while pos advanced.
      var c = buf[pos]
      // No match, end of the main loop
      if ( c > end ) break
      // Match, try for more
      pos++
    }
  }

  // At least one match if the offset changed
  return pos > offset ? this.next.test(buf, pos) : -1
}// include("subrules/first/rangeend_object.js")
// First subrule matching a single character whose code is <= end.
function rangeend_object_firstSubRule (end) {
  // Common subrule slots
  this.idx = -1
  this.length = 1
  this.next = lastSubRule
  // Inclusive upper bound (char code)
  this.end = end
}
rangeend_object_firstSubRule.prototype.test = function (buf, offset) {
  var code = typeof buf === 'string' ? buf.charCodeAt(offset) : buf[offset]
  return code > this.end ? -1 : this.next.test(buf, offset + 1)
}// include("subrules/first/rangestart_array_object.js")
// First subrule matching a single character whose code is >= one of the
// start bounds; this.idx records the index of the bound that matched.
function rangestart_array_object_firstSubRule (start) {
  // Common subrule slots
  this.idx = -1
  this.length = 1
  this.next = lastSubRule
  // Inclusive lower bounds (char codes)
  this.start = start
}
rangestart_array_object_firstSubRule.prototype.test = function (buf, offset) {
  var code = typeof buf === 'string' ? buf.charCodeAt(offset) : buf[offset]
  var start = this.start
  for (var i = 0, n = start.length; i < n; i++) {
    if (code >= start[i]) {
      this.idx = i
      return this.next.test(buf, offset + 1)
    }
  }
  return -1
}// include("subrules/first/rangestart_loop_object.js")
// First subrule greedily consuming characters whose code is >= start.
function rangestart_loop_object_firstSubRule (start) {
  // Common properties
  this.idx = -1
  this.length = 1
  this.next = lastSubRule
  // Specific properties
  this.start = start
}
rangestart_loop_object_firstSubRule.prototype.test = function (buf, offset) {
  var isString = typeof buf === 'string'
  var start = this.start
  var pos = offset
  var bufLen = buf.length

  if (isString) {
    while (pos < bufLen) {
      var c = buf.charCodeAt(pos)
      // No match, end of the main loop
      if ( c < start ) break
      // Match, try for more
      pos++
    }
  } else {
    while (pos < bufLen) {
      // BUG FIX: previously read buf[offset] (the fixed starting position)
      // instead of buf[pos], re-testing the same byte while pos advanced.
      var c = buf[pos]
      // No match, end of the main loop
      if ( c < start ) break
      // Match, try for more
      pos++
    }
  }

  // At least one match if the offset changed
  return pos > offset ? this.next.test(buf, pos) : -1
}// include("subrules/first/rangestart_object.js")
// First subrule matching a single character whose code is >= start.
function rangestart_object_firstSubRule (start) {
  // Common subrule slots
  this.idx = -1
  this.length = 1
  this.next = lastSubRule
  // Inclusive lower bound (char code)
  this.start = start
}
rangestart_object_firstSubRule.prototype.test = function (buf, offset) {
  var code
  if (typeof buf === 'string') code = buf.charCodeAt(offset)
  else code = buf[offset]
  if (code < this.start) return -1
  return this.next.test(buf, offset + 1)
}
// True when every item of the list shares the type of the first item
function sameTypeArray (list) {
  var first = typeof list[0]
  return list.every( function (item) { return typeof item === first } )
}
/**
 * Return the type tag of a rule item, used by firstSubRule's dispatch.
 * Possible results include: 'function', 'noop' (empty string/array),
 * 'zero', scalar typeof names, 'buffer', '<kind>_object' for range/firstOf
 * specs, '<type>_array' for homogeneous arrays, and error markers such as
 * 'multi types array' or 'invalid_object'.
 * @param {...} item to check
 * @return {String} type
 */
function typeOf (rule) {
  var ruleType = typeof rule

  return ruleType === 'function'
    ? ruleType
    : ruleType !== 'object'
      // Scalars: empty string -> noop, literal 0 -> zero, otherwise typeof
      ? (rule.length === 0
          ? 'noop'
          : (rule === 0
              ? 'zero'
              : ruleType
            )
        )
      : Buffer.isBuffer(rule)
        ? 'buffer'
        : !isArray(rule)
          // Plain objects: classify by their start/end/firstOf properties
          ? ((has(rule, 'start') && has(rule, 'end')
              ? 'range'
              : has(rule, 'start')
                ? 'rangestart'
                : has(rule, 'end')
                  ? 'rangeend'
                  : has(rule, 'firstOf')
                    ? (( (isArray(rule.firstOf) && sameTypeArray(rule.firstOf) )
                        || typeof rule.firstOf === 'string'
                        )
                      && rule.firstOf.length > 1
                      )
                      ? 'firstof'
                      : 'invalid firstof'
                    : 'invalid'
            )
            + '_object'
            )
          // Arrays: must be homogeneous; tag by the first item's type
          : !sameTypeArray(rule)
            ? 'multi types array'
            : ((Buffer.isBuffer( rule[0] )
                ? 'buffer'
                : typeof rule[0]
              )
              + '_array'
              )
}
// Single character -> its UTF-16 char code
function stringCode (ch) {
  return ch.charCodeAt(0)
}
// String -> array of UTF-16 char codes, one per code unit
function toCodes (s) {
  var codes = []
  for (var i = 0; i < s.length; i++) codes.push( s.charCodeAt(i) )
  return codes
}
// Normalise a range spec (string / array of 1-char strings / number) into
// either a single char code or an array of char codes. Unsupported inputs
// collapse to the empty string.
function toRanges (list) {
  if (typeof list === 'string') {
    return list.length === 1
      ? stringCode(list)
      : list.split('').map(stringCode)
  }
  if ( isArray(list) ) return list.map(stringCode)
  //TODO only strings and numbers supported
  return typeof list === 'number' ? list : ''
}
// Build the [buffers, strings] pair used by firstOf rules: the same pattern
// list in both representations, with empty entries filtered out of the
// converted side. Unsupported inputs yield an empty array.
function toFirstOf (list, encoding) {
  var toBuf = function (i) { return new Buffer(i, encoding) }
  var toStr = function (i) { return i.toString(encoding) }
  var notEmpty = function (i) { return i.length > 0 }

  if (typeof list === 'string')
    return [ list.split('').map(toBuf), list.split('') ]

  if (typeof list[0] === 'string')
    return [ list.map(toBuf).filter(notEmpty), list ]

  if ( Buffer.isBuffer(list[0]) )
    return [ list, list.map(toStr).filter(notEmpty) ]

  return []
}
// Factory for the FIRST subrule of a rule chain: dispatches on the rule's
// detected type (see typeOf) and returns the matching subrule instance.
// `loop` selects the greedy (self-repeating) variants when the rule both
// ignores its match and continues on itself with no next rule.
exports.firstSubRule = function (rule, props, encoding, single) {
  if (rule === null || rule === undefined)
    throw new Error('Tokenizer#addRule: Invalid rule ' + rule + ' (function/string/integer/array only)')

  var type = typeOf(rule)
  var loop = single && props.ignore && props.continue[0] === -1 && !props.next[0]
  var isString = false

  switch (type) {
    case 'zero':
      return new zero_SubRule

    case 'noop':
      return new noop_SubRule

    case 'function':
      return new function_SubRule(rule)

    case 'number':
      if (rule < 0)
        throw new Error('SubRule: Number cannot be negative: ' + rule)
      return new number_SubRule(rule)

    // Intentional fallthrough: a string rule is converted to Buffer +
    // char-code form, then handled exactly like a buffer rule.
    case 'string':
      isString = true
    case 'buffer':
      return new (loop
        ? buffer_loop_firstSubRule
        : buffer_firstSubRule
      )(
        isString ? new Buffer(rule, encoding) : rule
      , isString ? toCodes(rule) : toCodes( rule.toString(encoding) )
      )

    // Arrays
    case 'function_array':
      return new function_arraySubRule(rule)

    case 'number_array':
      return new number_arraySubRule(rule)

    // Intentional fallthrough, as for string/buffer above
    case 'string_array':
      isString = true
    case 'buffer_array':
      return new ( loop
        ? buffer_array_loop_firstSubRule
        : buffer_array_firstSubRule
      )(
        isString
          ? rule.map( function (i) { return new Buffer(i, encoding) } )
          : rule
      , isString
          ? rule.map(toCodes)
          : rule.map( function (i) { return toCodes( i.toString(encoding) ) } )
      )

    // {start, end}
    case 'range_object':
      var start = toRanges(rule.start)
      var end = toRanges(rule.end)
      // Force the start or end to scalar when the other side is scalar
      if (typeof start === 'number' && typeof end !== 'number') end = end[0]
      if (typeof end === 'number' && typeof start !== 'number') start = start[0]
      if (typeof start === 'number')
        return new (loop
          ? range_loop_object_firstSubRule
          : range_object_firstSubRule
        )
        (start, end)
      // Multiple ranges: start and end lists must pair up one-to-one
      if (start.length === 0 || start.length !== end.length)
        throw new Error('Tokenizer#addRule: Invalid Range: bad sizes: '
          + ' start=' + start.length
          + ' end=' + end.length
        )
      return new ( loop
        ? range_array_loop_object_firstSubRule
        : range_array_object_firstSubRule
      )
      (start, end)

    case 'rangestart_object':
      var start = toRanges(rule.start)
      if (typeof start === 'number')
        return new (loop
          ? rangestart_loop_object_firstSubRule
          : rangestart_object_firstSubRule
        )
        (start)
      if (start.length === 0)
        throw new Error('Tokenizer#addRule: Invalid Range: empty start')
      return new rangestart_array_object_firstSubRule(start)

    case 'rangeend_object':
      var end = toRanges(rule.end)
      if (typeof end === 'number')
        return new (loop
          ? rangeend_loop_object_firstSubRule
          : rangeend_object_firstSubRule
        )
        (end)
      if (end.length === 0)
        throw new Error('Tokenizer#addRule: Invalid Range: empty end')
      return new rangeend_array_object_firstSubRule(end)

    case 'firstof_object':
      throw new Error('Tokenizer#addRule: firstOf subrule not supported as first subrule')

    default:
      throw new Error('Tokenizer#addRule: Invalid rule ' + type + ' (function/string/integer/array only)')
  }
}
// include("subrules/buffer.js")
function buffer_SubRule (buf, str) {
  // Common subrule state
  this.idx = -1          // single-pattern rule: never updated
  this.length = 1        // updated to the pattern length (see #test)
  this.next = lastSubRule
  this.prev = null
  // The pattern in both representations; one is picked per input type
  this.buf = buf
  this.str = str
}
// Searches for the pattern at or after `start`; on success, hands control
// to the next subrule right after the match. Returns -1 on failure.
buffer_SubRule.prototype.test = function (data, start) {
  var pattern = typeof data === 'string' ? this.str : this.buf
  var size = pattern.length
  if (data.length < start + size) return -1
  var found = data.indexOf(pattern, start)
  if (this.length > 0) this.length = size
  return found < 0 ? -1 : this.next.test(data, found + size)
}// include("subrules/buffer_array.js")
function buffer_arraySubRule (buf, str) {
  // Common subrule state
  this.idx = -1          // index of the matching pattern (set in #test)
  this.length = 1        // updated to the matched pattern length
  this.next = lastSubRule
  this.prev = null
  // The pattern list in both representations; one is picked per input type
  this.buf = buf
  this.str = str
}
// Tries each pattern in list order; the first one found at or after `start`
// wins and control moves to the next subrule right after it.
buffer_arraySubRule.prototype.test = function (data, start) {
  var patterns = typeof data === 'string' ? this.str : this.buf
  var available = data.length - start
  for (var idx = 0; idx < patterns.length; idx++) {
    var pattern = patterns[idx]
    // Pattern cannot fit in the remaining input
    if (available < pattern.length) continue
    var found = data.indexOf(pattern, start)
    if (found < 0) continue
    if (this.length > 0) this.length = pattern.length
    this.idx = idx
    return this.next.test(data, found + pattern.length)
  }
  return -1
}// include("subrules/buffer_escaped.js")
function buffer_escapedSubRule (buf, str, esc) {
  // Common properties
  this.idx = -1
  this.length = 1            // updated to the pattern length on a match
  this.next = lastSubRule
  this.prev = null
  // Specific properties
  this.buf = buf             // pattern as a Buffer
  this.str = str             // pattern as a string
  this.esc = esc.charCodeAt(0)  // escape character code
}
/**
 * Searches for the first occurrence of the pattern that is NOT escaped,
 * i.e. not preceded by an odd number of escape characters, then hands
 * control to the next subrule right after it. Returns -1 otherwise.
 *
 * Fixes over the previous version:
 * - this.length is now updated on every successful match; the old code only
 *   set it when the match was at position 0, unlike the sibling
 *   buffer_SubRule / buffer_escaped_arraySubRule implementations
 * - a trailing escaped occurrence no longer counts as a match; the old code
 *   fell through to the position-0 epilogue and returned next.test(buf, n)
 *   for it
 */
buffer_escapedSubRule.prototype.test = function (buf, offset) {
  var isString = typeof buf === 'string'
  var pattern = isString ? this.str : this.buf
  var n = pattern.length
  var esc = this.esc
  var len = buf.length
  if (len < offset + n) return -1
  while (offset < len) {
    var i = buf.indexOf(pattern, offset)
    if (i < 0) return -1
    // Count the escape characters immediately preceding the match
    // (a match at position 0 has none by construction)
    var esc_num = 0
    if (isString) {
      for (var esc_i = i; esc_i > 0 && buf.charCodeAt(esc_i - 1) === esc; esc_i--) esc_num++
    } else {
      for (var esc_i = i; esc_i > 0 && buf[esc_i - 1] === esc; esc_i--) esc_num++
    }
    if ( (esc_num % 2) === 0 ) {
      // Even number of escapes: the pattern itself is not escaped
      if (this.length > 0) this.length = n
      return this.next.test(buf, i + n)
    }
    // Odd number of escapes: the pattern is escaped, skip this occurrence
    offset = i + 1
  }
  return -1
}// include("subrules/buffer_escaped_array.js")
function buffer_escaped_arraySubRule (buf, str, esc) {
  // Common properties
  this.idx = -1              // index of the matching pattern (set in #test)
  this.length = 1            // updated to the matched pattern length
  this.next = lastSubRule
  this.prev = null
  // Specific properties
  this.buf = buf             // pattern list as Buffers
  this.str = str             // pattern list as strings
  this.esc = esc.charCodeAt(0)  // escape character code
}
/**
 * Multi-pattern, escape-aware search: succeeds on the first pattern (in
 * list order) with an occurrence not preceded by an odd number of escape
 * characters, delegating to the next subrule right after it.
 * NOTE(review): the inner loops advance `offset`, which is shared by the
 * outer per-pattern loop, so each subsequent pattern starts searching where
 * the previous pattern's escaped-occurrence scan stopped instead of at the
 * caller's offset (the `buf.length < offset + n` guard uses the mutated
 * value too) - confirm against the tokenizer test-suite before changing.
 */
buffer_escaped_arraySubRule.prototype.test = function (buf, offset) {
  var isString = typeof buf === 'string'
  var list = isString ? this.str : this.buf
  var len = buf.length
  for (var j = 0, num = list.length; j < num; j++) {
    var p = list[j]
    var n = p.length
    // Pattern cannot fit in the remaining input
    if (buf.length < offset + n) continue
    var i = -1
    if (isString) {
      while (offset < len) {
        i = buf.indexOf(p, offset)
        if (i < 0) break
        // Count the escape characters immediately preceding the match
        for (var esc_i = i, esc_num = 0; esc_i > 0 && buf.charCodeAt(--esc_i) === this.esc; esc_num++) {}
        if ( (esc_num % 2) === 0 ) {
          // Even count: unescaped occurrence -> match
          if (this.length > 0) this.length = n
          this.idx = j
          return this.next.test(buf, i + n)
        }
        // Odd count: escaped, skip this occurrence
        offset = i + 1
      }
    } else {
      while (offset < len) {
        i = buf.indexOf(p, offset)
        if (i < 0) break
        // Count the escape characters immediately preceding the match
        for (var esc_i = i, esc_num = 0; esc_i > 0 && buf[--esc_i] === this.esc; esc_num++) {}
        if ( (esc_num % 2) === 0 ) {
          // Even count: unescaped occurrence -> match
          if (this.length > 0) this.length = n
          this.idx = j
          return this.next.test(buf, i + n)
        }
        // Odd count: escaped, skip this occurrence
        offset = i + 1
      }
    }
  }
  return -1
}// include("subrules/firstof.js")
function firstof_object_SubRule (buf, str) {
  // Common properties
  this.idx = -1              // index of the winning pattern in the list
  this.length = 1            // set to the winning pattern length on a match
  this.next = lastSubRule
  this.prev = null
  // Specific properties: the pattern list as Buffers and as strings,
  // index-aligned (see toFirstOf)
  this.buf = buf
  this.str = str
}
/**
 * Finds the pattern, among the list, that matches EARLIEST in the input.
 * Each time a pattern matches, the search window (_buf) is truncated at
 * that match, so later patterns can only win by matching strictly before
 * it. On success, control moves to the next subrule right after the
 * winning pattern.
 */
firstof_object_SubRule.prototype.test = function (buf, offset) {
  var isString = typeof buf === 'string'
  var list = isString ? this.str : this.buf
  var _buf = buf                // shrinking search window
  var _offset = offset          // non-zero only until the first match
  var pattern                   // current winning pattern
  var idx = -1                  // current winning pattern index
  for (var j = 0, len = list.length; j < len; j++) {
    var p = list[j]
    var i = _buf.indexOf( p, _offset )
    if (i < 0) continue
    pattern = p
    idx = j
    // Truncate the window at the match: it now holds only the prefix
    // before it, forcing later patterns to match earlier to win
    _buf = _buf.slice(_offset, i)
    _offset = 0
  }
  this.idx = idx
  if (idx < 0) return -1
  if (this.length > 0) this.length = pattern.length
  // offset + prefix length + pattern length = position right after the match
  return this.next.test(buf, offset + _buf.length + pattern.length)
}// include("subrules/firstof_escaped.js")
function firstof_escaped_object_SubRule (buf, str, esc) {
  // Common properties
  this.idx = -1              // index of the winning pattern in the list
  this.length = 1            // set to the winning pattern length on a match
  this.next = lastSubRule
  this.prev = null
  // Specific properties
  this.buf = buf             // pattern list as Buffers
  this.str = str             // pattern list as strings
  this.esc = esc.charCodeAt(0)  // escape character code
}
/**
 * Escape-aware variant of firstof_object_SubRule#test: a pattern occurrence
 * preceded by an odd number of escape characters is skipped.
 * NOTE(review): `from` is shared across patterns and never reset, and an
 * escaped occurrence only does `from++` without retrying the SAME pattern,
 * so a later unescaped occurrence of that pattern appears to be missed.
 * `from` also stays relative to the original input while `_buf` shrinks
 * after a match. Confirm against the tokenizer test-suite before changing.
 */
firstof_escaped_object_SubRule.prototype.test = function (buf, offset) {
  var isString = typeof buf === 'string'
  var list = isString ? this.str : this.buf
  var _buf = buf                // shrinking search window
  var _offset = offset          // non-zero only until the first match
  var pattern                   // current winning pattern
  var from = offset             // search start, advanced past escaped matches
  var n = buf.length
  this.idx = -1
  if (isString) {
    for (var j = 0, len = list.length; j < len && from < n; j++) {
      var p = list[j]
      var i = _buf.indexOf( p, from )
      if (i >= 0) {
        // Look for escape char
        for (var esc_i = i, esc_num = 0; esc_i > 0 && _buf.charCodeAt(--esc_i) === this.esc; esc_num++) {}
        if ( (esc_num % 2) === 0 ) {
          // Unescaped: record the win and truncate the window at the match
          pattern = p
          if (this.length > 0) this.length = p.length
          this.idx = j
          _buf = _buf.slice(_offset, i)
          _offset = 0
        } else {
          // Escaped: ignore this match
          from++
        }
      }
    }
  } else {
    for (var j = 0, len = list.length; j < len && from < n; j++) {
      var p = list[j]
      var i = _buf.indexOf( p, from )
      if (i >= 0) {
        // Look for escape char
        for (var esc_i = i, esc_num = 0; esc_i > 0 && _buf[--esc_i] === this.esc; esc_num++) {}
        if ( (esc_num % 2) === 0 ) {
          // Unescaped: record the win and truncate the window at the match
          pattern = p
          if (this.length > 0) this.length = p.length
          this.idx = j
          _buf = _buf.slice(_offset, i)
          _offset = 0
        } else {
          // Escaped: ignore this match
          from++
        }
      }
    }
  }
  if (this.idx < 0) return -1
  // offset + prefix length + pattern length = position right after the match
  return this.next.test(buf, offset + _buf.length + pattern.length)
}// include("subrules/function.js")
function function_SubRule (fn) {
  // Common subrule state
  this.idx = -1
  this.length = -2       // placeholder: actual size known only after fn runs
  this.next = lastSubRule
  this.prev = null
  // User handler: called as fn(buf, offset), must return the number of
  // items consumed, or a negative/non-number value to signal no match
  this.fn = fn
}
// Delegates matching to the user handler; a non-numeric or negative result
// means no match.
function_SubRule.prototype.test = function (data, start) {
  var consumed = this.fn.call(this, data, start)
  if (typeof consumed !== 'number' || consumed < 0) return -1
  if (this.length !== 0) this.length = consumed
  return this.next.test(data, start + consumed)
}// include("subrules/function_array.js")
function function_arraySubRule (list) {
  // Common subrule state
  this.idx = -1          // index of the handler that matched
  this.length = -2       // placeholder: actual size known only after a match
  this.next = lastSubRule
  this.prev = null
  // User handlers, tried in order (same contract as function_SubRule)
  this.list = list
}
// Runs each handler in turn; the first one returning a non-negative number
// wins and its result is the number of items consumed.
function_arraySubRule.prototype.test = function (data, start) {
  var handlers = this.list
  for (var idx = 0, count = handlers.length; idx < count; idx++) {
    var consumed = handlers[idx].call(this, data, start)
    if (typeof consumed !== 'number' || consumed < 0) continue
    this.idx = idx
    if (this.length !== 0) this.length = consumed
    return this.next.test(data, start + consumed)
  }
  return -1
}// include("subrules/noop.js")
function noop_SubRule () {
  // Common subrule state
  this.idx = -1
  this.length = 0        // consumes nothing
  this.next = lastSubRule
  this.prev = null
}
// Always matches and consumes no input: simply forwards to the next subrule.
noop_SubRule.prototype.test = function (data, start) {
  return this.next.test(data, start)
}// include("subrules/number.js")
function number_SubRule (n) {
  // Common subrule state
  this.idx = -1
  this.length = n        // fixed size consumed on a match
  this.next = lastSubRule
  this.prev = null
  // Number of items to consume
  this.n = n
}
// Matches exactly `n` items when that many remain from `start`.
number_SubRule.prototype.test = function (data, start) {
  var end = start + this.n
  if (end > data.length) return -1
  return this.next.test(data, end)
}// include("subrules/number_array.js")
function number_arraySubRule (list) {
  // Common subrule state
  this.idx = -1          // index of the size that matched
  this.length = 1        // updated to the matched size
  this.next = lastSubRule
  this.prev = null
  // Specific properties
  // Zero sizes are handled apart: they only match at the end of input
  this.list = list.filter(function (v) { return v !== 0 })
  this.hasZero = (list.length > this.list.length)
}
// Matches the first size (in list order) that fits in the remaining input;
// an exhausted input matches only when the list contained a zero.
number_arraySubRule.prototype.test = function (data, start) {
  var sizes = this.list
  var available = data.length - start
  if (available === 0) return this.hasZero ? this.next.test(data, start) : -1
  for (var idx = 0, count = sizes.length; idx < count; idx++) {
    var size = sizes[idx]
    if (size > available) continue
    if (this.length > 0) this.length = size
    this.idx = idx
    return this.next.test(data, start + size)
  }
  return -1
}// include("subrules/zero.js")
function zero_SubRule () {
  // Common subrule state
  this.idx = -1
  this.length = 0        // consumes nothing
  this.next = lastSubRule
  this.prev = null
}
// Matches only when the whole input has already been consumed.
zero_SubRule.prototype.test = function (data, start) {
  if (start !== data.length) return -1
  return this.next.test(data, start)
}
/**
 * Builds a non-first subrule from its definition.
 * When props.escape is set, escape-aware variants are instantiated so that
 * pattern occurrences preceded by an odd number of escape characters are
 * skipped.
 *
 * @param {*} rule - subrule definition: function, string, number, Buffer,
 *   array of those, or a {firstOf} object
 * @param {Object} props - rule properties (escape, ...)
 * @param {String} encoding - encoding used for string<->Buffer conversions
 * @return {Object} a subrule instance exposing #test(buf, offset)
 * @throws {Error} on null/undefined rules or unsupported rule types
 */
exports.SubRule = function (rule, props, encoding) {
  if (rule === null || rule === undefined)
    throw new Error('Tokenizer#addRule: Invalid rule ' + rule + ' (function/string/integer/array only)')
  var type = typeOf(rule)
  var isString = false
  switch (type) {
  case 'zero':
    return new zero_SubRule
  case 'noop':
    return new noop_SubRule
  case 'function':
    return new function_SubRule(rule)
  case 'number':
    if (rule < 0)
      throw new Error('SubRule: Number cannot be negative: ' + rule)
    return new number_SubRule(rule)
  // The escape argument is only used by the escaped constructors
  case 'string':
    return new ( props.escape ? buffer_escapedSubRule : buffer_SubRule )
      ( new Buffer(rule, encoding), rule, props.escape )
  case 'buffer':
    return new ( props.escape ? buffer_escapedSubRule : buffer_SubRule )
      ( rule, rule.toString(encoding), props.escape )
  // Arrays
  case 'function_array':
    return new function_arraySubRule(rule)
  case 'number_array':
    return new number_arraySubRule(rule)
  case 'string_array':
    isString = true
    // fall through: string arrays are converted then handled as buffer arrays
  case 'buffer_array':
    return new ( props.escape ? buffer_escaped_arraySubRule : buffer_arraySubRule )
      (
        isString
          ? rule.map( function (i) { return new Buffer(i, encoding) } )
          : rule
      , isString
          ? rule
          : rule.map( function (i) { return i.toString(encoding) } )
      , props.escape
      )
  // {firstof}
  case 'firstof_object':
    var firstof = toFirstOf(rule.firstOf, encoding)
    if (firstof.length === 0)
      throw new Error('Tokenizer#addRule: Invalid firstOf')
    return new ( props.escape ? firstof_escaped_object_SubRule : firstof_object_SubRule)
      ( firstof[0], firstof[1], props.escape )
  default:
    throw new Error('Tokenizer#addRule: Invalid rule ' + type + ' (function/string/integer/array only)')
  }
}
| {
"content_hash": "dcaf78651d1ce69915557f8e445d7e88",
"timestamp": "",
"source": "github",
"line_count": 1276,
"max_line_length": 106,
"avg_line_length": 23.845611285266457,
"alnum_prop": 0.5997305025142143,
"repo_name": "pierrec/node-atok",
"id": "319545a49a2fbf4a10110643f170abf7b133a7cb",
"size": "30427",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/subrule.js",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "297241"
}
],
"symlink_target": ""
} |
# Migration creating the documents table.
# Each document carries a mandatory title plus the standard Rails timestamps.
class CreateDocuments < ActiveRecord::Migration
  def change
    create_table :documents do |t|
      # NOT NULL at the database level: a document cannot exist without a title
      t.string :title, null: false
      # created_at / updated_at, also NOT NULL
      t.timestamps null: false
    end
  end
end
| {
"content_hash": "ff0d520449324a923879fcea18d64927",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 47,
"avg_line_length": 20.11111111111111,
"alnum_prop": 0.6795580110497238,
"repo_name": "bjoernalbers/faxomat",
"id": "9453078113cd72aa0f65c30738d237c171b9f5c3",
"size": "181",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "db/migrate/20160329133325_create_documents.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "781"
},
{
"name": "CoffeeScript",
"bytes": "211"
},
{
"name": "Dockerfile",
"bytes": "658"
},
{
"name": "HTML",
"bytes": "31464"
},
{
"name": "JavaScript",
"bytes": "686"
},
{
"name": "Ruby",
"bytes": "295737"
},
{
"name": "SCSS",
"bytes": "176"
},
{
"name": "Shell",
"bytes": "149"
}
],
"symlink_target": ""
} |
/*
* Qt4 bitcoin GUI.
*
* W.J. van der Laan 2011-2012
* The Bitcoin Developers 2011-2012
*/
#include <QApplication>
#include "bitcoingui.h"
#include "transactiontablemodel.h"
#include "addressbookpage.h"
#include "sendcoinsdialog.h"
#include "signverifymessagedialog.h"
#include "optionsdialog.h"
#include "aboutdialog.h"
#include "clientmodel.h"
#include "walletmodel.h"
#include "editaddressdialog.h"
#include "optionsmodel.h"
#include "transactiondescdialog.h"
#include "addresstablemodel.h"
#include "transactionview.h"
#include "overviewpage.h"
#include "bitcoinunits.h"
#include "guiconstants.h"
#include "askpassphrasedialog.h"
#include "notificator.h"
#include "guiutil.h"
#include "rpcconsole.h"
#include "wallet.h"
#include "init.h"
#include "ui_interface.h"
#include "masternodemanager.h"
#ifdef Q_OS_MAC
#include "macdockiconhandler.h"
#endif
#include <QMenuBar>
#include <QMenu>
#include <QIcon>
#include <QVBoxLayout>
#include <QToolBar>
#include <QStatusBar>
#include <QLabel>
#include <QMessageBox>
#include <QMimeData>
#include <QProgressBar>
#include <QStackedWidget>
#include <QDateTime>
#include <QMovie>
#include <QFileDialog>
#include <QDesktopServices>
#include <QTimer>
#include <QDragEnterEvent>
#include <QUrl>
#include <QMimeData>
#include <QStyle>
#include <QToolButton>
#include <QScrollArea>
#include <QScroller>
#include <QFont>
#include <QFontDatabase>
#include <iostream>
extern bool fOnlyTor;
extern CWallet* pwalletMain;
extern int64_t nLastCoinStakeSearchInterval;
double GetPoSKernelPS();
/*
 * Main window constructor: builds the menus, the left-hand tab toolbar,
 * the tray icon, the central stacked pages and the status widgets, then
 * wires the signal/slot connections between them.
 * NOTE: creation order matters below - createToolBars() instantiates
 * netLabel, which is referenced further down when assembling frameBlocks.
 */
BitcoinGUI::BitcoinGUI(QWidget *parent):
    QMainWindow(parent),
    clientModel(0),
    walletModel(0),
    toolbar(0),
    encryptWalletAction(0),
    changePassphraseAction(0),
    unlockWalletAction(0),
    lockWalletAction(0),
    aboutQtAction(0),
    trayIcon(0),
    notificator(0),
    rpcConsole(0),
    prevBlocks(0),
    nWeight(0)
{
    resize(850+95, 550);
    setWindowTitle(tr("8bit") + " - " + tr("Wallet"));
#ifndef Q_OS_MAC
    qApp->setWindowIcon(QIcon(":icons/bitcoin"));
    setWindowIcon(QIcon(":icons/bitcoin"));
#else
    //setUnifiedTitleAndToolBarOnMac(true);
    QApplication::setAttribute(Qt::AA_DontShowIconsInMenus);
#endif
    // Load the bundled font resource and make it the application default
    QFile res(":/fonts/8bit");
    res.open(QIODevice::ReadOnly);
    QFontDatabase::addApplicationFontFromData(res.readAll());
    //QFont f;
    //f.setFamily("Mozart NBP");
    //f.setPointSize(14);
    qApp->setFont(QFont("Mozart NBP",14));
    // Accept D&D of URIs
    setAcceptDrops(true);
    // Create actions for the toolbar, menu bar and tray/dock icon
    createActions();
    // Create application menu bar
    createMenuBar();
    // Create the toolbars
    createToolBars();
    // Create the tray icon (or setup the dock icon)
    createTrayIcon();
    // Create tabs
    overviewPage = new OverviewPage();
    //overviewWidget = new QWidget();
    //overviewWidget->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);
    //QHBoxLayout *overviewLayout = new QHBoxLayout();
    //overviewLayout->setContentsMargins(0, 0, 0, 0);
    //overviewLayout->setObjectName(QString::fromUtf8("overviewLayout"));
    //overviewWidget->setLayout(overviewLayout);
    //overviewScroll = new QScrollArea(overviewPage);//overviewWidget);
    //overviewScroll->setHorizontalScrollBarPolicy(Qt::ScrollBarAlwaysOff);
    //overviewScroll->setVerticalScrollBarPolicy(Qt::ScrollBarAlwaysOff);
    //overviewScroll->setWidget(overviewPage);
    //overviewScroll->setWidgetResizable(true);
    //QScroller::grabGesture(overviewScroll, QScroller::LeftMouseButtonGesture); //Replace TouchGesture with LeftMouseButtonGesture and it works flawlessly
    //transactionsPage = new QWidget(this);
    //QVBoxLayout *vbox = new QVBoxLayout();
    //transactionView = new TransactionView(this);
    //vbox->addWidget(transactionView);
    //transactionsPage->setLayout(vbox);
    //addressBookPage = new AddressBookPage(AddressBookPage::ForEditing, AddressBookPage::SendingTab);
    //receiveCoinsPage = new AddressBookPage(AddressBookPage::ForEditing, AddressBookPage::ReceivingTab);
    //sendCoinsPage = new SendCoinsDialog(this);
    signVerifyMessageDialog = new SignVerifyMessageDialog(this);
    // Central area: a stacked widget currently holding only the overview page
    centralStackedWidget = new QStackedWidget(this);
    centralStackedWidget->setContentsMargins(0, 0, 0, 0);
    centralStackedWidget->addWidget(overviewPage);
    //centralStackedWidget->addWidget(overviewWidget);
    //centralStackedWidget->addWidget(transactionsPage);
    //centralStackedWidget->addWidget(addressBookPage);
    //centralStackedWidget->addWidget(receiveCoinsPage);
    //centralStackedWidget->addWidget(sendCoinsPage);
    QWidget *centralWidget = new QWidget();
    QVBoxLayout *centralLayout = new QVBoxLayout(centralWidget);
    centralLayout->setContentsMargins(0,0,0,0);
    centralWidget->setContentsMargins(0,0,0,0);
    centralLayout->addWidget(centralStackedWidget);
    setCentralWidget(centralWidget);
    // Create status bar
    statusBar();
    // Disable size grip because it looks ugly and nobody needs it
    //statusBar()->setSizeGripEnabled(false);
    // Status bar notification icons, stacked into the toolbar (not the
    // status bar): encryption, staking, connections and block sync state
    QWidget *frameBlocks = new QWidget();
    frameBlocks->setContentsMargins(0,0,0,0);
    frameBlocks->setSizePolicy(QSizePolicy::Preferred, QSizePolicy::Preferred);
    frameBlocks->setStyleSheet("QWidget { background: none; margin-bottom: 5px; }");
    QHBoxLayout *frameBlocksLayout = new QHBoxLayout(frameBlocks);
    frameBlocksLayout->setContentsMargins(3,0,3,0);
    frameBlocksLayout->setSpacing(3);
    frameBlocksLayout->setAlignment(Qt::AlignHCenter);
    labelEncryptionIcon = new QLabel();
    labelStakingIcon = new QLabel();
    labelConnectionsIcon = new QLabel();
    labelBlocksIcon = new QLabel();
    frameBlocksLayout->addStretch();
    frameBlocksLayout->addWidget(labelEncryptionIcon);
    frameBlocksLayout->addStretch();
    frameBlocksLayout->addWidget(labelStakingIcon);
    frameBlocksLayout->addStretch();
    frameBlocksLayout->addWidget(labelConnectionsIcon);
    frameBlocksLayout->addStretch();
    frameBlocksLayout->addWidget(labelBlocksIcon);
    frameBlocksLayout->addStretch();
    // netLabel was created by createToolBars() above
    frameBlocksLayout->addWidget(netLabel);
    toolbar->addWidget(frameBlocks);
    // Refresh the staking icon every 30s while staking is enabled
    if (GetBoolArg("-staking", true))
    {
        QTimer *timerStakingIcon = new QTimer(labelStakingIcon);
        connect(timerStakingIcon, SIGNAL(timeout()), this, SLOT(updateStakingIcon()));
        timerStakingIcon->start(30 * 1000);
        updateStakingIcon();
    }
    // Progress bar and label for blocks download
    progressBarLabel = new QLabel();
    progressBarLabel->setVisible(false);
    progressBar = new QProgressBar();
    progressBar->setAlignment(Qt::AlignCenter);
    progressBar->setVisible(false);
    if (!fUseBlackTheme)
    {
        // Override style sheet for progress bar for styles that have a segmented progress bar,
        // as they make the text unreadable (workaround for issue #1071)
        // See https://qt-project.org/doc/qt-4.8/gallery.html
        QString curStyle = qApp->style()->metaObject()->className();
        if(curStyle == "QWindowsStyle" || curStyle == "QWindowsXPStyle")
        {
            progressBar->setStyleSheet("QProgressBar { background-color: #e8e8e8; border: 1px solid grey; border-radius: 7px; padding: 1px; text-align: center; } QProgressBar::chunk { background: QLinearGradient(x1: 0, y1: 0, x2: 1, y2: 0, stop: 0 #FF8000, stop: 1 orange); border-radius: 7px; margin: 0px; }");
        }
    }
    statusBar()->addWidget(progressBarLabel);
    statusBar()->addWidget(progressBar);
    //statusBar()->addPermanentWidget(frameBlocks);
    //statusBar()->setObjectName("statusBar");
    //statusBar()->setStyleSheet("#statusBar { background-color: qradialgradient(cx: -0.8, cy: 0, fx: -0.8, fy: 0, radius: 0.6, stop: 0 #404040, stop: 1 #101010); }");
    // Spinner shown while the block chain is catching up
    syncIconMovie = new QMovie(fUseBlackTheme ? ":/movies/update_spinner_black" : ":/movies/update_spinner", "mng", this);
    // Clicking on a transaction on the overview page simply sends you to transaction history page
    connect(overviewPage, SIGNAL(transactionClicked(QModelIndex)), this, SLOT(gotoHistoryPage()));
    //connect(overviewPage, SIGNAL(transactionClicked(QModelIndex)), transactionView, SLOT(focusTransaction(QModelIndex)));
    // Double-clicking on a transaction on the transaction history page shows details
    //connect(transactionView, SIGNAL(doubleClicked(QModelIndex)), transactionView, SLOT(showDetails()));
    rpcConsole = new RPCConsole(this);
    connect(openRPCConsoleAction, SIGNAL(triggered()), rpcConsole, SLOT(show()));
    // prevents an open debug window from becoming stuck/unusable on client shutdown
    connect(quitAction, SIGNAL(triggered()), rpcConsole, SLOT(hide()));
    // Clicking on "Verify Message" in the address book sends you to the verify message tab
    //connect(addressBookPage, SIGNAL(verifyMessage(QString)), this, SLOT(gotoVerifyMessageTab(QString)));
    // Clicking on "Sign Message" in the receive coins page sends you to the sign message tab
    //connect(receiveCoinsPage, SIGNAL(signMessage(QString)), this, SLOT(gotoSignMessageTab(QString)));
    // Start on the overview page
    gotoOverviewPage();
}
// Destructor: hides the tray icon and, on OS X, deletes the global menu bar
// (which is not parented to this window there).
BitcoinGUI::~BitcoinGUI()
{
    if(trayIcon) // Hide tray icon, as deleting will let it linger until quit (on Ubuntu)
        trayIcon->hide();
#ifdef Q_OS_MAC
    delete appMenuBar;
#endif
}
/*
 * Creates every QAction used by the menus, the tab toolbar and the tray
 * icon, and connects each one to its handler slot.
 */
void BitcoinGUI::createActions()
{
    // Page-switching actions: checkable and mutually exclusive via tabGroup
    QActionGroup *tabGroup = new QActionGroup(this);
    overviewAction = new QAction(QIcon(":/icons/overview"), tr("&Dashboard"), this);
    overviewAction->setToolTip(tr("Show general overview of wallet"));
    overviewAction->setCheckable(true);
    overviewAction->setShortcut(QKeySequence(Qt::ALT + Qt::Key_1));
    tabGroup->addAction(overviewAction);
    receiveCoinsAction = new QAction(QIcon(":/icons/receiving_addresses"), tr("&Receive"), this);
    receiveCoinsAction->setToolTip(tr("Show the list of addresses for receiving payments"));
    receiveCoinsAction->setCheckable(true);
    receiveCoinsAction->setShortcut(QKeySequence(Qt::ALT + Qt::Key_2));
    tabGroup->addAction(receiveCoinsAction);
    sendCoinsAction = new QAction(QIcon(":/icons/send"), tr("&Send"), this);
    sendCoinsAction->setToolTip(tr("Send coins to a 8Bit address"));
    sendCoinsAction->setCheckable(true);
    sendCoinsAction->setShortcut(QKeySequence(Qt::ALT + Qt::Key_3));
    tabGroup->addAction(sendCoinsAction);
    historyAction = new QAction(QIcon(":/icons/history"), tr("&Transactions"), this);
    historyAction->setToolTip(tr("Browse transaction history"));
    historyAction->setCheckable(true);
    historyAction->setShortcut(QKeySequence(Qt::ALT + Qt::Key_4));
    tabGroup->addAction(historyAction);
    addressBookAction = new QAction(QIcon(":/icons/address-book"), tr("&Address Book"), this);
    addressBookAction->setToolTip(tr("Edit the list of stored addresses and labels"));
    addressBookAction->setCheckable(true);
    addressBookAction->setShortcut(QKeySequence(Qt::ALT + Qt::Key_5));
    tabGroup->addAction(addressBookAction);
    // No keyboard shortcut for the masternodes page
    masternodeManagerAction = new QAction(QIcon(":/icons/bitcoin"), tr("&Masternodes"), this);
    masternodeManagerAction->setToolTip(tr("Show 8Bit Nodes status and configure your nodes."));
    masternodeManagerAction->setCheckable(true);
    tabGroup->addAction(masternodeManagerAction);
    // Each page action first restores the window, then switches the page
    connect(overviewAction, SIGNAL(triggered()), this, SLOT(showNormalIfMinimized()));
    connect(overviewAction, SIGNAL(triggered()), this, SLOT(gotoOverviewPage()));
    connect(receiveCoinsAction, SIGNAL(triggered()), this, SLOT(showNormalIfMinimized()));
    connect(receiveCoinsAction, SIGNAL(triggered()), this, SLOT(gotoReceiveCoinsPage()));
    connect(sendCoinsAction, SIGNAL(triggered()), this, SLOT(showNormalIfMinimized()));
    connect(sendCoinsAction, SIGNAL(triggered()), this, SLOT(gotoSendCoinsPage()));
    connect(historyAction, SIGNAL(triggered()), this, SLOT(showNormalIfMinimized()));
    connect(historyAction, SIGNAL(triggered()), this, SLOT(gotoHistoryPage()));
    connect(addressBookAction, SIGNAL(triggered()), this, SLOT(showNormalIfMinimized()));
    connect(addressBookAction, SIGNAL(triggered()), this, SLOT(gotoAddressBookPage()));
    connect(masternodeManagerAction, SIGNAL(triggered()), this, SLOT(showNormalIfMinimized()));
    connect(masternodeManagerAction, SIGNAL(triggered()), this, SLOT(gotoMasternodeManagerPage()));
    // Application-level actions (menu bar and tray menu)
    quitAction = new QAction(tr("E&xit"), this);
    quitAction->setToolTip(tr("Quit application"));
    quitAction->setShortcut(QKeySequence(Qt::CTRL + Qt::Key_Q));
    quitAction->setMenuRole(QAction::QuitRole);
    aboutAction = new QAction(tr("&About 8Bit"), this);
    aboutAction->setToolTip(tr("Show information about 8Bit"));
    aboutAction->setMenuRole(QAction::AboutRole);
    aboutQtAction = new QAction(tr("About &Qt"), this);
    aboutQtAction->setToolTip(tr("Show information about Qt"));
    aboutQtAction->setMenuRole(QAction::AboutQtRole);
    optionsAction = new QAction(tr("&Options..."), this);
    optionsAction->setToolTip(tr("Modify configuration options for 8Bit"));
    optionsAction->setMenuRole(QAction::PreferencesRole);
    toggleHideAction = new QAction(QIcon(":/icons/bitcoin"), tr("&Show / Hide"), this);
    // Wallet management actions
    encryptWalletAction = new QAction(tr("&Encrypt Wallet..."), this);
    encryptWalletAction->setToolTip(tr("Encrypt or decrypt wallet"));
    backupWalletAction = new QAction(tr("&Backup Wallet..."), this);
    backupWalletAction->setToolTip(tr("Backup wallet to another location"));
    changePassphraseAction = new QAction(tr("&Change Passphrase..."), this);
    changePassphraseAction->setToolTip(tr("Change the passphrase used for wallet encryption"));
    unlockWalletAction = new QAction(tr("&Unlock Wallet..."), this);
    unlockWalletAction->setToolTip(tr("Unlock wallet"));
    lockWalletAction = new QAction(tr("&Lock Wallet"), this);
    lockWalletAction->setToolTip(tr("Lock wallet"));
    signMessageAction = new QAction(tr("Sign &message..."), this);
    verifyMessageAction = new QAction(tr("&Verify message..."), this);
    exportAction = new QAction(tr("&Export..."), this);
    exportAction->setToolTip(tr("Export the data in the current tab to a file"));
    openRPCConsoleAction = new QAction(tr("&Debug window"), this);
    openRPCConsoleAction->setToolTip(tr("Open debugging and diagnostic console"));
    // Wire the remaining actions to their handlers
    connect(quitAction, SIGNAL(triggered()), qApp, SLOT(quit()));
    connect(aboutAction, SIGNAL(triggered()), this, SLOT(aboutClicked()));
    connect(aboutQtAction, SIGNAL(triggered()), qApp, SLOT(aboutQt()));
    connect(optionsAction, SIGNAL(triggered()), this, SLOT(optionsClicked()));
    connect(toggleHideAction, SIGNAL(triggered()), this, SLOT(toggleHidden()));
    connect(encryptWalletAction, SIGNAL(triggered()), this, SLOT(encryptWallet()));
    connect(backupWalletAction, SIGNAL(triggered()), this, SLOT(backupWallet()));
    connect(changePassphraseAction, SIGNAL(triggered()), this, SLOT(changePassphrase()));
    connect(unlockWalletAction, SIGNAL(triggered()), this, SLOT(unlockWallet()));
    connect(lockWalletAction, SIGNAL(triggered()), this, SLOT(lockWallet()));
    connect(signMessageAction, SIGNAL(triggered()), this, SLOT(gotoSignMessageTab()));
    connect(verifyMessageAction, SIGNAL(triggered()), this, SLOT(gotoVerifyMessageTab()));
}
/*
 * Builds the application menu bar with the File, Settings and Help menus.
 * On OS X a parentless QMenuBar is used so Qt installs it as the global
 * (screen-top) menu bar; elsewhere the window's own menu bar is used.
 */
void BitcoinGUI::createMenuBar()
{
#ifdef Q_OS_MAC
    appMenuBar = new QMenuBar();
#else
    appMenuBar = menuBar();
#endif
    // Configure the menus
    QMenu *file = appMenuBar->addMenu(tr("&File"));
    file->addAction(backupWalletAction);
    file->addAction(exportAction);
    file->addAction(signMessageAction);
    file->addAction(verifyMessageAction);
    file->addSeparator();
    file->addAction(quitAction);
    QMenu *settings = appMenuBar->addMenu(tr("&Settings"));
    settings->addAction(encryptWalletAction);
    settings->addAction(changePassphraseAction);
    settings->addAction(unlockWalletAction);
    settings->addAction(lockWalletAction);
    settings->addSeparator();
    settings->addAction(optionsAction);
    QMenu *help = appMenuBar->addMenu(tr("&Help"));
    help->addAction(openRPCConsoleAction);
    help->addSeparator();
    help->addAction(aboutAction);
    help->addAction(aboutQtAction);
}
// Returns a transparent widget that expands in both directions, used to
// push subsequent toolbar items apart.
static QWidget* makeToolBarSpacer()
{
    QWidget* filler = new QWidget();
    filler->setStyleSheet("QWidget { background: none; }");
    filler->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);
    return filler;
}
/*
 * Builds the vertical, immovable left-hand tab toolbar: an optional themed
 * header (black theme only), one action per page, a stretch spacer and the
 * network label. Every action widget is forced to a fixed 150px width.
 * Also instantiates netLabel, which the constructor later adds to the
 * frameBlocks status area.
 */
void BitcoinGUI::createToolBars()
{
    toolbar = new QToolBar(tr("Tabs toolbar"));
    toolbar->setToolButtonStyle(Qt::ToolButtonTextBesideIcon);
    toolbar->setContextMenuPolicy(Qt::PreventContextMenu);
    //toolbar->setObjectName("tabs");
    //toolbar->setStyleSheet("#tabs { background-color: qradialgradient(cx: -0.8, cy: 0, fx: -0.8, fy: 0, radius: 0.6, stop: 0 #404040, stop: 1 #101010); }");
    //QLabel* header = new QLabel();
    //header->setMinimumSize(48, 48);
    //header->setSizePolicy(QSizePolicy::Fixed, QSizePolicy::Fixed);
    //header->setPixmap(QPixmap(":/icons/bitcoin"));
    //header->setMaximumSize(48,48);
    //header->setScaledContents(true);
    //toolbar->addWidget(header);
    // Black theme: fixed-size header widget showing the logo image
    if (fUseBlackTheme)
    {
        QWidget* header = new QWidget();
        header->setMinimumSize(160, 146);
        header->setSizePolicy(QSizePolicy::Fixed, QSizePolicy::Fixed);
        header->setStyleSheet("QWidget { background-color: rgb(24,26,30); background-repeat: no-repeat; background-image: url(:/images/header); background-position: top center; margin-top:30px; }");
        toolbar->addWidget(header);
        toolbar->addWidget(makeToolBarSpacer());
    }
    //QMenu *toolbarMenu = new QMenu();
    //toolbarMenu->addAction(overviewAction);
    //toolbarMenu->addAction(receiveCoinsAction);
    //toolbarMenu->addAction(sendCoinsAction);
    //toolbarMenu->addAction(historyAction);
    //toolbarMenu->addAction(addressBookAction);
    //toolbarMenu->addAction(masternodeManagerAction);
    //QToolButton* menuToolButton = new QToolButton();
    //menuToolButton->setToolButtonStyle(Qt::ToolButtonIconOnly);
    //menuToolButton->setMenu(toolbarMenu);
    //menuToolButton->setPopupMode(QToolButton::InstantPopup);
    //QAction* menuAction = new QAction(QIcon(":/icons/overview"), tr("&Menu"), this);
    //menuAction->setToolTip(tr("Access 8Bit Wallet Tabs"));
    //menuAction->setCheckable(false);
    //connect(menuAction, SIGNAL(triggered()), this, SLOT(menuToolButton->showMenu()));
    //menuToolButton->setDefaultAction(menuAction);
    //toolbar->addWidget(menuToolButton);
    // One toolbar entry per page
    toolbar->addAction(overviewAction);
    toolbar->addAction(receiveCoinsAction);
    toolbar->addAction(sendCoinsAction);
    toolbar->addAction(historyAction);
    toolbar->addAction(addressBookAction);
    toolbar->addAction(masternodeManagerAction);
    // Network badge; configured in setClientModel(), added to the toolbar
    // by the constructor as part of frameBlocks
    netLabel = new QLabel();
    toolbar->addWidget(makeToolBarSpacer());
    netLabel->setObjectName("netLabel");
    netLabel->setStyleSheet("#netLabel { color: #efefef; }");
    //toolbar->addWidget(netLabel);
    toolbar->setOrientation(Qt::Vertical);
    toolbar->setMovable(false);
    addToolBar(Qt::LeftToolBarArea, toolbar);
    //int w = 0;
    //foreach(QAction *action, toolbar->actions()) {
    //    w = std::max(w, toolbar->widgetForAction(action)->width());
    //}
    // Force a uniform width on all action widgets
    foreach(QAction *action, toolbar->actions()) {
        toolbar->widgetForAction(action)->setFixedWidth(150);
    }
}
/*
 * Attaches the client model (network/block chain state) to the UI.
 * The network badge (netLabel) is configured first, regardless of the
 * model: clearnet mode shows a text label, Tor-only mode shows the onion
 * icon (when the Tor network is not limited).
 * When a model is supplied, applies testnet branding if needed and wires
 * the connection/block-count/message signals to the status widgets.
 */
void BitcoinGUI::setClientModel(ClientModel *clientModel)
{
    if(!fOnlyTor)
        netLabel->setText("CLEARNET");
    else
    {
        if(!IsLimited(NET_TOR))
        {
            // Tor-only: replace the text with a 16x16 onion icon
            netLabel->setMinimumSize(16, 16);
            netLabel->setSizePolicy(QSizePolicy::Fixed, QSizePolicy::Fixed);
            netLabel->setPixmap(QPixmap(":/icons/onion"));
            netLabel->setMaximumSize(16,16);
            netLabel->setScaledContents(true);
        }
    }
    this->clientModel = clientModel;
    if(clientModel)
    {
        // Replace some strings and icons, when using the testnet
        if(clientModel->isTestNet())
        {
            setWindowTitle(windowTitle() + QString(" ") + tr("[testnet]"));
#ifndef Q_OS_MAC
            qApp->setWindowIcon(QIcon(":icons/bitcoin_testnet"));
            setWindowIcon(QIcon(":icons/bitcoin_testnet"));
#else
            MacDockIconHandler::instance()->setIcon(QIcon(":icons/bitcoin_testnet"));
#endif
            if(trayIcon)
            {
                trayIcon->setToolTip(tr("8Bit client") + QString(" ") + tr("[testnet]"));
                trayIcon->setIcon(QIcon(":/icons/toolbar_testnet"));
                toggleHideAction->setIcon(QIcon(":/icons/toolbar_testnet"));
            }
        }
        // Keep up to date with client
        setNumConnections(clientModel->getNumConnections());
        connect(clientModel, SIGNAL(numConnectionsChanged(int)), this, SLOT(setNumConnections(int)));
        setNumBlocks(clientModel->getNumBlocks());
        connect(clientModel, SIGNAL(numBlocksChanged(int)), this, SLOT(setNumBlocks(int)));
        // Receive and report messages from network/worker thread
        connect(clientModel, SIGNAL(message(QString,QString,bool,unsigned int)), this, SLOT(message(QString,QString,bool,unsigned int)));
        overviewPage->setClientModel(clientModel);
        rpcConsole->setClientModel(clientModel);
        //addressBookPage->setOptionsModel(clientModel->getOptionsModel());
        //receiveCoinsPage->setOptionsModel(clientModel->getOptionsModel());
    }
}
// Attaches the wallet model: routes wallet messages, feeds the overview and
// sign/verify pages, and hooks up encryption-status, new-transaction and
// unlock-request signals. Safe to call with a null model.
void BitcoinGUI::setWalletModel(WalletModel *walletModel)
{
    this->walletModel = walletModel;
    if(walletModel)
    {
        // Receive and report messages from wallet thread
        connect(walletModel, SIGNAL(message(QString,QString,bool,unsigned int)), this, SLOT(message(QString,QString,bool,unsigned int)));
        // Put transaction list in tabs
        //transactionView->setModel(walletModel);
        overviewPage->setWalletModel(walletModel);
        //addressBookPage->setModel(walletModel->getAddressTableModel());
        //receiveCoinsPage->setModel(walletModel->getAddressTableModel());
        //sendCoinsPage->setModel(walletModel);
        signVerifyMessageDialog->setModel(walletModel);
        // Reflect the current lock state in the status bar immediately.
        setEncryptionStatus(walletModel->getEncryptionStatus());
        connect(walletModel, SIGNAL(encryptionStatusChanged(int)), this, SLOT(setEncryptionStatus(int)));
        // Balloon pop-up for new transaction
        connect(walletModel->getTransactionTableModel(), SIGNAL(rowsInserted(QModelIndex,int,int)),
                this, SLOT(incomingTransaction(QModelIndex,int,int)));
        // Ask for passphrase if needed
        connect(walletModel, SIGNAL(requireUnlock()), this, SLOT(unlockWallet()));
    }
}
// Builds the system tray icon (non-Mac) or dock menu (Mac), populates the
// shared context menu, and creates the notificator bound to the tray icon.
// NOTE(review): on Mac no QSystemTrayIcon is created here, so the Notificator
// is constructed with whatever trayIcon currently holds — presumably null;
// confirm Notificator tolerates that.
void BitcoinGUI::createTrayIcon()
{
    QMenu *trayIconMenu;
#ifndef Q_OS_MAC
    trayIcon = new QSystemTrayIcon(this);
    trayIconMenu = new QMenu(this);
    trayIcon->setContextMenu(trayIconMenu);
    trayIcon->setToolTip(tr("8Bit client"));
    trayIcon->setIcon(QIcon(":/icons/toolbar"));
    connect(trayIcon, SIGNAL(activated(QSystemTrayIcon::ActivationReason)),
            this, SLOT(trayIconActivated(QSystemTrayIcon::ActivationReason)));
    trayIcon->show();
#else
    // Note: On Mac, the dock icon is used to provide the tray's functionality.
    MacDockIconHandler *dockIconHandler = MacDockIconHandler::instance();
    dockIconHandler->setMainWindow((QMainWindow *)this);
    trayIconMenu = dockIconHandler->dockMenu();
#endif
    // Configuration of the tray icon (or dock icon) icon menu
    trayIconMenu->addAction(toggleHideAction);
    trayIconMenu->addSeparator();
    trayIconMenu->addAction(receiveCoinsAction);
    trayIconMenu->addAction(sendCoinsAction);
    trayIconMenu->addSeparator();
    trayIconMenu->addAction(signMessageAction);
    trayIconMenu->addAction(verifyMessageAction);
    trayIconMenu->addSeparator();
    trayIconMenu->addAction(optionsAction);
    trayIconMenu->addAction(openRPCConsoleAction);
#ifndef Q_OS_MAC // This is built-in on Mac
    trayIconMenu->addSeparator();
    trayIconMenu->addAction(quitAction);
#endif
    notificator = new Notificator(qApp->applicationName(), trayIcon);
}
#ifndef Q_OS_MAC
// Tray-icon click handler: a plain left-click (Trigger) toggles main-window
// visibility; all other activation reasons (context menu, double-click, etc.)
// are ignored.
void BitcoinGUI::trayIconActivated(QSystemTrayIcon::ActivationReason reason)
{
    if(reason != QSystemTrayIcon::Trigger)
        return;
    // Click on system tray icon triggers show/hide of the main window
    toggleHideAction->trigger();
}
#endif
// Opens the modal options dialog; bails out when the models are not ready yet.
void BitcoinGUI::optionsClicked()
{
    if(!clientModel || !clientModel->getOptionsModel())
        return;
    OptionsDialog dlg;
    dlg.setModel(clientModel->getOptionsModel());
    dlg.exec();
}
// Opens the modal "About" dialog.
// NOTE(review): unlike optionsClicked() this does not guard against a null
// clientModel — confirm AboutDialog::setModel tolerates null.
void BitcoinGUI::aboutClicked()
{
    AboutDialog dlg;
    dlg.setModel(clientModel);
    dlg.exec();
}
void BitcoinGUI::setNumConnections(int count)
{
QString icon;
switch(count)
{
case 0: icon = fUseBlackTheme ? ":/icons/black/connect_0" : ":/icons/connect_0"; break;
case 1: case 2: case 3: icon = fUseBlackTheme ? ":/icons/black/connect_1" : ":/icons/connect_1"; break;
case 4: case 5: case 6: icon = fUseBlackTheme ? ":/icons/black/connect_2" : ":/icons/connect_2"; break;
case 7: case 8: case 9: icon = fUseBlackTheme ? ":/icons/black/connect_3" : ":/icons/connect_3"; break;
default: icon = fUseBlackTheme ? ":/icons/black/connect_4" : ":/icons/connect_4"; break;
}
labelConnectionsIcon->setPixmap(QIcon(icon).pixmap(STATUSBAR_ICONSIZE,STATUSBAR_ICONSIZE));
labelConnectionsIcon->setToolTip(tr("%n active connection(s) to 8Bit network", "", count));
}
// Updates the sync-status UI (tooltip, progress bar, block icon) for the
// given best-block count.
// NOTE(review): dereferences clientModel without a null check; relies on the
// numBlocksChanged signal only being connected after setClientModel() stored
// a valid model — confirm no other caller invokes this earlier.
void BitcoinGUI::setNumBlocks(int count)
{
    QString tooltip;
    QDateTime lastBlockDate = clientModel->getLastBlockDate();
    QDateTime currentDate = QDateTime::currentDateTime();
    // Seconds from the genesis block until now — used as the progress range.
    int totalSecs = GetTime() - Params().GenesisBlock().GetBlockTime();
    // Seconds since the best known block was generated.
    int secs = lastBlockDate.secsTo(currentDate);
    tooltip = tr("Processed %1 blocks of transaction history.").arg(count);
    // Set icon state: spinning if catching up, tick otherwise
    if(secs < 90*60)
    {
        // Less than 90 minutes behind counts as fully synced.
        tooltip = tr("Up to date") + QString(".<br>") + tooltip;
        labelBlocksIcon->setPixmap(QIcon(fUseBlackTheme ? ":/icons/black/synced" : ":/icons/synced").pixmap(STATUSBAR_ICONSIZE, STATUSBAR_ICONSIZE));
        overviewPage->showOutOfSyncWarning(false);
        progressBarLabel->setVisible(false);
        progressBar->setVisible(false);
    }
    else
    {
        // Represent time from last generated block in human readable text
        QString timeBehindText;
        const int HOUR_IN_SECONDS = 60*60;
        const int DAY_IN_SECONDS = 24*60*60;
        const int WEEK_IN_SECONDS = 7*24*60*60;
        const int YEAR_IN_SECONDS = 31556952; // Average length of year in Gregorian calendar
        if(secs < 2*DAY_IN_SECONDS)
        {
            timeBehindText = tr("%n hour(s)","",secs/HOUR_IN_SECONDS);
        }
        else if(secs < 2*WEEK_IN_SECONDS)
        {
            timeBehindText = tr("%n day(s)","",secs/DAY_IN_SECONDS);
        }
        else if(secs < YEAR_IN_SECONDS)
        {
            timeBehindText = tr("%n week(s)","",secs/WEEK_IN_SECONDS);
        }
        else
        {
            int years = secs / YEAR_IN_SECONDS;
            int remainder = secs % YEAR_IN_SECONDS;
            timeBehindText = tr("%1 and %2").arg(tr("%n year(s)", "", years)).arg(tr("%n week(s)","", remainder/WEEK_IN_SECONDS));
        }
        progressBarLabel->setText(tr(clientModel->isImporting() ? "Importing blocks..." : "Synchronizing with network..."));
        progressBarLabel->setVisible(true);
        // Progress runs from genesis (0) to "now" (totalSecs); the value is
        // how far along that span the current chain tip is.
        progressBar->setFormat(tr("%1 behind").arg(timeBehindText));
        progressBar->setMaximum(totalSecs);
        progressBar->setValue(totalSecs - secs);
        progressBar->setVisible(true);
        tooltip = tr("Catching up...") + QString("<br>") + tooltip;
        labelBlocksIcon->setMovie(syncIconMovie);
        // Advance the spinner one frame per newly seen block so it visibly moves.
        if(count != prevBlocks)
            syncIconMovie->jumpToNextFrame();
        prevBlocks = count;
        overviewPage->showOutOfSyncWarning(true);
        tooltip += QString("<br>");
        tooltip += tr("Last received block was generated %1 ago.").arg(timeBehindText);
        tooltip += QString("<br>");
        tooltip += tr("Transactions after this will not yet be visible.");
    }
    // Don't word-wrap this (fixed-width) tooltip
    tooltip = QString("<nobr>") + tooltip + QString("</nobr>");
    labelBlocksIcon->setToolTip(tooltip);
    progressBarLabel->setToolTip(tooltip);
    progressBar->setToolTip(tooltip);
    statusBar()->setVisible(true);
}
void BitcoinGUI::message(const QString &title, const QString &message, bool modal, unsigned int style)
{
QString strTitle = tr("8Bit") + " - ";
// Default to information icon
int nMBoxIcon = QMessageBox::Information;
int nNotifyIcon = Notificator::Information;
// Check for usage of predefined title
switch (style) {
case CClientUIInterface::MSG_ERROR:
strTitle += tr("Error");
break;
case CClientUIInterface::MSG_WARNING:
strTitle += tr("Warning");
break;
case CClientUIInterface::MSG_INFORMATION:
strTitle += tr("Information");
break;
default:
strTitle += title; // Use supplied title
}
// Check for error/warning icon
if (style & CClientUIInterface::ICON_ERROR) {
nMBoxIcon = QMessageBox::Critical;
nNotifyIcon = Notificator::Critical;
}
else if (style & CClientUIInterface::ICON_WARNING) {
nMBoxIcon = QMessageBox::Warning;
nNotifyIcon = Notificator::Warning;
}
// Display message
if (modal) {
// Check for buttons, use OK as default, if none was supplied
QMessageBox::StandardButton buttons;
if (!(buttons = (QMessageBox::StandardButton)(style & CClientUIInterface::BTN_MASK)))
buttons = QMessageBox::Ok;
QMessageBox mBox((QMessageBox::Icon)nMBoxIcon, strTitle, message, buttons);
mBox.exec();
}
else
notificator->notify((Notificator::Class)nNotifyIcon, strTitle, message);
}
// Intercepts window-state changes so a minimize can be turned into a
// hide-to-tray when the user enabled that option (non-Mac only).
void BitcoinGUI::changeEvent(QEvent *e)
{
    QMainWindow::changeEvent(e);
#ifndef Q_OS_MAC // Ignored on Mac
    if(e->type() == QEvent::WindowStateChange)
    {
        if(clientModel && clientModel->getOptionsModel()->getMinimizeToTray())
        {
            QWindowStateChangeEvent *wsevt = static_cast<QWindowStateChangeEvent*>(e);
            // Only react to a fresh transition into the minimized state.
            if(!(wsevt->oldState() & Qt::WindowMinimized) && isMinimized())
            {
                // Hide asynchronously — hiding from inside the event handler
                // is deferred via a zero-timeout single shot.
                QTimer::singleShot(0, this, SLOT(hide()));
                e->ignore();
            }
        }
    }
#endif
}
// Quits the application on window close unless the user configured
// minimize-to-tray or minimize-on-close (non-Mac only; Mac apps keep running
// with the window closed).
void BitcoinGUI::closeEvent(QCloseEvent *event)
{
    if(clientModel)
    {
#ifndef Q_OS_MAC // Ignored on Mac
        if(!clientModel->getOptionsModel()->getMinimizeToTray() &&
           !clientModel->getOptionsModel()->getMinimizeOnClose())
        {
            qApp->quit();
        }
#endif
    }
    QMainWindow::closeEvent(event);
}
// Asks the user to confirm paying the required fee for an oversized
// transaction; the answer is written through *payFee.
// Note: when the models are missing this returns early and leaves *payFee
// untouched — callers must initialize it.
void BitcoinGUI::askFee(qint64 nFeeRequired, bool *payFee)
{
    if (!clientModel || !clientModel->getOptionsModel())
        return;
    QString strMessage = tr("This transaction is over the size limit. You can still send it for a fee of %1, "
        "which goes to the nodes that process your transaction and helps to support the network. "
        "Do you want to pay the fee?").arg(BitcoinUnits::formatWithUnit(clientModel->getOptionsModel()->getDisplayUnit(), nFeeRequired));
    QMessageBox::StandardButton retval = QMessageBox::question(
          this, tr("Confirm transaction fee"), strMessage,
          QMessageBox::Yes|QMessageBox::Cancel, QMessageBox::Yes);
    *payFee = (retval == QMessageBox::Yes);
}
// Shows a balloon notification for a newly inserted wallet transaction.
// Suppressed during initial block download to prevent balloon spam.
// Only the first inserted row ('start') is reported; 'end' is unused.
void BitcoinGUI::incomingTransaction(const QModelIndex & parent, int start, int end)
{
    if(!walletModel || !clientModel)
        return;
    TransactionTableModel *ttm = walletModel->getTransactionTableModel();
    // The sign of the amount distinguishes sent (negative) from received.
    qint64 amount = ttm->index(start, TransactionTableModel::Amount, parent)
                    .data(Qt::EditRole).toULongLong();
    if(!clientModel->inInitialBlockDownload())
    {
        // On new transaction, make an info balloon
        // Unless the initial block download is in progress, to prevent balloon-spam
        QString date = ttm->index(start, TransactionTableModel::Date, parent)
                        .data().toString();
        QString type = ttm->index(start, TransactionTableModel::Type, parent)
                        .data().toString();
        QString address = ttm->index(start, TransactionTableModel::ToAddress, parent)
                        .data().toString();
        QIcon icon = qvariant_cast<QIcon>(ttm->index(start,
                            TransactionTableModel::ToAddress, parent)
                        .data(Qt::DecorationRole));
        notificator->notify(Notificator::Information,
                            (amount)<0 ? tr("Sent transaction") :
                                         tr("Incoming transaction"),
                              tr("Date: %1\n"
                                 "Amount: %2\n"
                                 "Type: %3\n"
                                 "Address: %4\n")
                              .arg(date)
                              .arg(BitcoinUnits::formatWithUnit(walletModel->getOptionsModel()->getDisplayUnit(), amount, true))
                              .arg(type)
                              .arg(address), icon);
    }
}
// Removes and destroys every page in the central stacked widget except the
// permanent page at index 0, which is made current first.
void BitcoinGUI::clearWidgets()
{
    centralStackedWidget->setCurrentWidget(centralStackedWidget->widget(0));
    // Valid indices are 0..count()-1, so iterate from count()-1 down to 1.
    // The previous version started at count(), which made widget(i) return
    // null on the first iteration and then called deleteLater() through that
    // null pointer.
    for(int i = centralStackedWidget->count() - 1; i > 0; i--){
        QWidget* widget = centralStackedWidget->widget(i);
        centralStackedWidget->removeWidget(widget);
        // Deferred deletion: the widget may still be processing events.
        widget->deleteLater();
    }
}
// Switches the central widget to a freshly built masternode manager page.
// Export is not available on this page, so the shared export action is
// disabled and detached from any previous receiver.
void BitcoinGUI::gotoMasternodeManagerPage()
{
    masternodeManagerAction->setChecked(true);
    clearWidgets();
    masternodeManagerPage = new MasternodeManager(this);
    centralStackedWidget->addWidget(masternodeManagerPage);
    centralStackedWidget->setCurrentWidget(masternodeManagerPage);
    exportAction->setEnabled(false);
    disconnect(exportAction, SIGNAL(triggered()), 0, 0);
}
// Switches the central widget back to the persistent overview page (the one
// page that survives clearWidgets()) and disables the export action.
void BitcoinGUI::gotoOverviewPage()
{
    overviewAction->setChecked(true);
    clearWidgets();
    centralStackedWidget->setCurrentWidget(overviewPage);
    exportAction->setEnabled(false);
    disconnect(exportAction, SIGNAL(triggered()), 0, 0);
}
// Switches the central widget to a freshly built transaction history page and
// points the shared export action at the new transaction view. The previous
// instance (if any) was destroyed by clearWidgets().
void BitcoinGUI::gotoHistoryPage()
{
    historyAction->setChecked(true);
    clearWidgets();
    transactionsPage = new QWidget(this);
    QVBoxLayout *vbox = new QVBoxLayout();
    transactionView = new TransactionView(this);
    vbox->addWidget(transactionView);
    transactionsPage->setLayout(vbox);
    centralStackedWidget->addWidget(transactionsPage);
    centralStackedWidget->setCurrentWidget(transactionsPage);
    // Double-clicking a row opens the transaction details view.
    connect(transactionView, SIGNAL(doubleClicked(QModelIndex)), transactionView, SLOT(showDetails()));
    transactionView->setModel(this->walletModel);
    // Re-point the shared export action at this page's exporter.
    exportAction->setEnabled(true);
    disconnect(exportAction, SIGNAL(triggered()), 0, 0);
    connect(exportAction, SIGNAL(triggered()), transactionView, SLOT(exportClicked()));
}
// Switches the central widget to a freshly built address book (sending tab)
// and points the shared export action at it.
// NOTE(review): dereferences clientModel and walletModel without null checks —
// confirm this slot cannot fire before both models are set.
void BitcoinGUI::gotoAddressBookPage()
{
    addressBookAction->setChecked(true);
    clearWidgets();
    addressBookPage = new AddressBookPage(AddressBookPage::ForEditing, AddressBookPage::SendingTab);
    addressBookPage->setOptionsModel(this->clientModel->getOptionsModel());
    addressBookPage->setModel(this->walletModel->getAddressTableModel());
    // Clicking on "Verify Message" in the address book sends you to the verify message tab
    connect(addressBookPage, SIGNAL(verifyMessage(QString)), this, SLOT(gotoVerifyMessageTab(QString)));
    centralStackedWidget->addWidget(addressBookPage);
    centralStackedWidget->setCurrentWidget(addressBookPage);
    exportAction->setEnabled(true);
    disconnect(exportAction, SIGNAL(triggered()), 0, 0);
    connect(exportAction, SIGNAL(triggered()), addressBookPage, SLOT(exportClicked()));
}
// Switches the central widget to a freshly built receive-coins page (the
// address book's receiving tab) and points the shared export action at it.
// NOTE(review): dereferences clientModel and walletModel without null checks —
// confirm this slot cannot fire before both models are set.
void BitcoinGUI::gotoReceiveCoinsPage()
{
    receiveCoinsAction->setChecked(true);
    clearWidgets();
    receiveCoinsPage = new AddressBookPage(AddressBookPage::ForEditing, AddressBookPage::ReceivingTab);
    receiveCoinsPage->setOptionsModel(this->clientModel->getOptionsModel());
    receiveCoinsPage->setModel(this->walletModel->getAddressTableModel());
    // Clicking on "Sign Message" in the receive coins page sends you to the sign message tab
    connect(receiveCoinsPage, SIGNAL(signMessage(QString)), this, SLOT(gotoSignMessageTab(QString)));
    centralStackedWidget->addWidget(receiveCoinsPage);
    centralStackedWidget->setCurrentWidget(receiveCoinsPage);
    exportAction->setEnabled(true);
    disconnect(exportAction, SIGNAL(triggered()), 0, 0);
    connect(exportAction, SIGNAL(triggered()), receiveCoinsPage, SLOT(exportClicked()));
}
// Switches the central widget to a freshly built Send Coins page.
// NOTE(review): unlike the other goto*Page methods this one does not call
// clearWidgets(), so repeated visits keep adding pages to the stacked widget;
// the old instances are only destroyed when some other page navigation calls
// clearWidgets(). Verify whether this is intentional.
void BitcoinGUI::gotoSendCoinsPage()
{
    sendCoinsAction->setChecked(true);
    sendCoinsPage = new SendCoinsDialog(this);
    centralStackedWidget->addWidget(sendCoinsPage);
    centralStackedWidget->setCurrentWidget(sendCoinsPage);
    sendCoinsPage->setModel(this->walletModel);
    exportAction->setEnabled(false);
    disconnect(exportAction, SIGNAL(triggered()), 0, 0);
}
// Opens the sign-message tab of the sign/verify dialog, pre-filling the
// address field when one was supplied.
void BitcoinGUI::gotoSignMessageTab(QString addr)
{
    // showTab_SM(true) also show()s the dialog itself.
    signVerifyMessageDialog->showTab_SM(true);
    if(addr.isEmpty())
        return;
    signVerifyMessageDialog->setAddress_SM(addr);
}
// Opens the verify-message tab of the sign/verify dialog, pre-filling the
// address field when one was supplied.
void BitcoinGUI::gotoVerifyMessageTab(QString addr)
{
    // showTab_VM(true) also show()s the dialog itself.
    signVerifyMessageDialog->showTab_VM(true);
    if(addr.isEmpty())
        return;
    signVerifyMessageDialog->setAddress_VM(addr);
}
// Drag-and-drop entry filter: only drags carrying URLs are accepted.
void BitcoinGUI::dragEnterEvent(QDragEnterEvent *event)
{
    if(!event->mimeData()->hasUrls())
        return;
    event->acceptProposedAction();
}
// Handles dropped payment URIs: feeds each one to the send-coins page and
// navigates there if at least one parsed.
// NOTE(review): sendCoinsPage is only assigned in gotoSendCoinsPage() (within
// this file's visible code) and earlier page switches delete it via
// clearWidgets(); confirm it cannot be null or stale when a drop arrives.
void BitcoinGUI::dropEvent(QDropEvent *event)
{
    if(event->mimeData()->hasUrls())
    {
        int nValidUrisFound = 0;
        QList<QUrl> uris = event->mimeData()->urls();
        foreach(const QUrl &uri, uris)
        {
            if (sendCoinsPage->handleURI(uri.toString()))
                nValidUrisFound++;
        }
        // if valid URIs were found
        if (nValidUrisFound)
            gotoSendCoinsPage();
        else
            notificator->notify(Notificator::Warning, tr("URI handling"), tr("URI can not be parsed! This can be caused by an invalid 8Bit address or malformed URI parameters."));
    }
    event->acceptProposedAction();
}
// Handles a payment URI passed in from outside (command line / IPC): brings
// the window up, navigates to a fresh Send Coins page, and forwards the URI
// to it. On parse failure a warning notification is shown.
//
// Fix: the previous version forwarded the URI to sendCoinsPage *before*
// calling gotoSendCoinsPage(). sendCoinsPage may be null (never visited) or
// stale (deleted by clearWidgets() when another page was shown), and
// gotoSendCoinsPage() builds a brand-new page anyway — discarding the
// just-parsed URI. Navigating first makes the URI land on the page that is
// actually displayed.
void BitcoinGUI::handleURI(QString strURI)
{
    showNormalIfMinimized();
    gotoSendCoinsPage();
    // URI has to be valid
    if (!sendCoinsPage->handleURI(strURI))
        notificator->notify(Notificator::Warning, tr("URI handling"), tr("URI can not be parsed! This can be caused by an invalid 8Bit address or malformed URI parameters."));
}
// Reflects the wallet's encryption/lock state in the status-bar padlock icon
// and enables/disables the matching menu actions.
void BitcoinGUI::setEncryptionStatus(int status)
{
    switch(status)
    {
    case WalletModel::Unencrypted:
        // No passphrase set: only "encrypt wallet" makes sense.
        labelEncryptionIcon->setPixmap(QIcon(fUseBlackTheme ? ":/icons/black/lock_open" : ":/icons/lock_open").pixmap(STATUSBAR_ICONSIZE,STATUSBAR_ICONSIZE));
        labelEncryptionIcon->setToolTip(tr("Wallet is <b>not encrypted</b>"));
        changePassphraseAction->setEnabled(false);
        unlockWalletAction->setVisible(false);
        lockWalletAction->setVisible(false);
        encryptWalletAction->setEnabled(true);
        break;
    case WalletModel::Unlocked:
        // Encrypted but currently usable: offer lock + passphrase change.
        labelEncryptionIcon->setPixmap(QIcon(fUseBlackTheme ? ":/icons/black/lock_open" : ":/icons/lock_open").pixmap(STATUSBAR_ICONSIZE,STATUSBAR_ICONSIZE));
        labelEncryptionIcon->setToolTip(tr("Wallet is <b>encrypted</b> and currently <b>unlocked</b>"));
        changePassphraseAction->setEnabled(true);
        unlockWalletAction->setVisible(false);
        lockWalletAction->setVisible(true);
        encryptWalletAction->setEnabled(false); // TODO: decrypt currently not supported
        break;
    case WalletModel::Locked:
        // Encrypted and locked: offer unlock + passphrase change.
        labelEncryptionIcon->setPixmap(QIcon(fUseBlackTheme ? ":/icons/black/lock_closed" : ":/icons/lock_closed").pixmap(STATUSBAR_ICONSIZE,STATUSBAR_ICONSIZE));
        labelEncryptionIcon->setToolTip(tr("Wallet is <b>encrypted</b> and currently <b>locked</b>"));
        changePassphraseAction->setEnabled(true);
        unlockWalletAction->setVisible(true);
        lockWalletAction->setVisible(false);
        encryptWalletAction->setEnabled(false); // TODO: decrypt currently not supported
        break;
    }
}
// Runs the wallet-encryption dialog and refreshes the status-bar padlock
// afterwards. No-op when no wallet model is attached.
void BitcoinGUI::encryptWallet()
{
    if(!walletModel)
        return;
    AskPassphraseDialog dlg(AskPassphraseDialog::Encrypt, this);
    dlg.setModel(walletModel);
    dlg.exec();
    // The dialog may have changed the lock state; re-read it.
    setEncryptionStatus(walletModel->getEncryptionStatus());
}
// Lets the user pick a destination file and copies the wallet data there.
// Shows a warning box when the backup fails.
void BitcoinGUI::backupWallet()
{
    // Guard against a missing wallet model, consistent with encryptWallet(),
    // unlockWallet() and lockWallet(); the call below would otherwise
    // dereference a null pointer.
    if(!walletModel)
        return;
    QString saveDir = QDesktopServices::storageLocation(QDesktopServices::DocumentsLocation);
    QString filename = QFileDialog::getSaveFileName(this, tr("Backup Wallet"), saveDir, tr("Wallet Data (*.dat)"));
    if(!filename.isEmpty()) {
        if(!walletModel->backupWallet(filename)) {
            QMessageBox::warning(this, tr("Backup Failed"), tr("There was an error trying to save the wallet data to the new location."));
        }
    }
}
// Runs the change-passphrase dialog for the attached wallet.
void BitcoinGUI::changePassphrase()
{
    // Guard against a missing wallet model, consistent with encryptWallet(),
    // unlockWallet() and lockWallet(); without it the dialog would be run
    // against a null model.
    if(!walletModel)
        return;
    AskPassphraseDialog dlg(AskPassphraseDialog::ChangePass, this);
    dlg.setModel(walletModel);
    dlg.exec();
}
// Prompts for the wallet passphrase when the wallet is locked. When invoked
// through the menu's unlock action the unlock is for staking only; when
// requested programmatically (requireUnlock signal) it is a full unlock.
void BitcoinGUI::unlockWallet()
{
    if(!walletModel)
        return;
    // Unlock wallet when requested by wallet model
    if(walletModel->getEncryptionStatus() == WalletModel::Locked)
    {
        // sender() distinguishes the menu action from the model's signal.
        AskPassphraseDialog::Mode mode = sender() == unlockWalletAction ?
              AskPassphraseDialog::UnlockStaking : AskPassphraseDialog::Unlock;
        AskPassphraseDialog dlg(mode, this);
        dlg.setModel(walletModel);
        dlg.exec();
    }
}
// Re-locks the wallet immediately; no-op when no wallet model is attached.
void BitcoinGUI::lockWallet()
{
    if(walletModel)
        walletModel->setWalletLocked(true);
}
// Brings the main window to the front: un-hides, restores from minimized, or
// raises an obscured window — whichever applies first. When the window is
// already fully visible and fToggleHidden is set, hides it instead (used by
// the tray toggle).
void BitcoinGUI::showNormalIfMinimized(bool fToggleHidden)
{
    // activateWindow() (sometimes) helps with keyboard focus on Windows
    if (isHidden())
    {
        show();
        activateWindow();
        return;
    }
    if (isMinimized())
    {
        showNormal();
        activateWindow();
        return;
    }
    if (GUIUtil::isObscured(this))
    {
        raise();
        activateWindow();
        return;
    }
    if (fToggleHidden)
        hide();
}
// Tray/dock action handler: show the window if hidden/minimized/obscured,
// hide it otherwise.
void BitcoinGUI::toggleHidden()
{
    showNormalIfMinimized(true);
}
// Recomputes the cached stake weight (nWeight). Uses non-blocking TRY_LOCKs
// so the UI never stalls behind the main/wallet locks; when either lock is
// busy, the previously cached value is simply kept until the next call.
void BitcoinGUI::updateWeight()
{
    if (!pwalletMain)
        return;
    TRY_LOCK(cs_main, lockMain);
    if (!lockMain)
        return;
    TRY_LOCK(pwalletMain->cs_wallet, lockWallet);
    if (!lockWallet)
        return;
    nWeight = pwalletMain->GetStakeWeight();
}
// Refreshes the status-bar staking indicator: when actively staking it shows
// the user's weight, the network weight and an estimated time to the next
// reward; otherwise it explains why staking is off.
void BitcoinGUI::updateStakingIcon()
{
    updateWeight();
    if (nLastCoinStakeSearchInterval && nWeight)
    {
        // Deliberately shadows the member so the /= COIN scaling below does
        // not clobber the cached raw value.
        uint64_t nWeight = this->nWeight;
        uint64_t nNetworkWeight = GetPoSKernelPS();
        // Expected seconds until this wallet finds a stake, proportional to
        // its share of the network weight. nWeight is non-zero here (checked
        // above), so the division is safe.
        unsigned nEstimateTime = GetTargetSpacing(nBestHeight) * nNetworkWeight / nWeight;
        QString text;
        if (nEstimateTime < 60)
        {
            text = tr("%n second(s)", "", nEstimateTime);
        }
        else if (nEstimateTime < 60*60)
        {
            text = tr("%n minute(s)", "", nEstimateTime/60);
        }
        else if (nEstimateTime < 24*60*60)
        {
            text = tr("%n hour(s)", "", nEstimateTime/(60*60));
        }
        else
        {
            text = tr("%n day(s)", "", nEstimateTime/(60*60*24));
        }
        // Scale from base units to whole coins for display.
        nWeight /= COIN;
        nNetworkWeight /= COIN;
        labelStakingIcon->setPixmap(QIcon(fUseBlackTheme ? ":/icons/black/staking_on" : ":/icons/staking_on").pixmap(STATUSBAR_ICONSIZE,STATUSBAR_ICONSIZE));
        labelStakingIcon->setToolTip(tr("Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3").arg(nWeight).arg(nNetworkWeight).arg(text));
    }
    else
    {
        labelStakingIcon->setPixmap(QIcon(fUseBlackTheme ? ":/icons/black/staking_off" : ":/icons/staking_off").pixmap(STATUSBAR_ICONSIZE,STATUSBAR_ICONSIZE));
        // Explain the most specific reason staking is inactive.
        if (pwalletMain && pwalletMain->IsLocked())
            labelStakingIcon->setToolTip(tr("Not staking because wallet is locked"));
        else if (vNodes.empty())
            labelStakingIcon->setToolTip(tr("Not staking because wallet is offline"));
        else if (IsInitialBlockDownload())
            labelStakingIcon->setToolTip(tr("Not staking because wallet is syncing"));
        else if (!nWeight)
            labelStakingIcon->setToolTip(tr("Not staking because you don't have mature coins"));
        else
            labelStakingIcon->setToolTip(tr("Not staking"));
    }
}
// Polled periodically; queues an application quit on the event loop once the
// core has requested shutdown.
void BitcoinGUI::detectShutdown()
{
    if (ShutdownRequested())
        QMetaObject::invokeMethod(QCoreApplication::instance(), "quit", Qt::QueuedConnection);
}
| {
"content_hash": "9297271cebccdaefb42a9ac0fe9b6e38",
"timestamp": "",
"source": "github",
"line_count": 1189,
"max_line_length": 311,
"avg_line_length": 37.8309503784693,
"alnum_prop": 0.6829772570640937,
"repo_name": "8bit-dev/8bit",
"id": "36af0ed5e69e701fcd9720342d74859279dc4110",
"size": "44981",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/qt/bitcoingui.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "51312"
},
{
"name": "C",
"bytes": "38295"
},
{
"name": "C++",
"bytes": "3111936"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "HTML",
"bytes": "50620"
},
{
"name": "Makefile",
"bytes": "11728"
},
{
"name": "NSIS",
"bytes": "5914"
},
{
"name": "Objective-C",
"bytes": "858"
},
{
"name": "Objective-C++",
"bytes": "3517"
},
{
"name": "Python",
"bytes": "54355"
},
{
"name": "QMake",
"bytes": "15303"
},
{
"name": "Shell",
"bytes": "8509"
}
],
"symlink_target": ""
} |
module Dynflow
  # A serializable pointer into the output of another action in an execution
  # plan, optionally narrowed to a nested key path (subkeys).
  class ExecutionPlan::OutputReference < Serializable
    include Algebrick::TypeCheck

    # Recursively walks a Hash/Array structure and replaces every
    # OutputReference found in it with the value it points to.
    def self.dereference(object, persistence)
      case object
      when Hash
        object.each_with_object(Utils.indifferent_hash({})) do |(key, val), acc|
          acc[key] = dereference(val, persistence)
        end
      when Array
        object.map { |item| dereference(item, persistence) }
      when self
        object.dereference(persistence)
      else
        object
      end
    end

    # Recursively walks a Hash/Array structure and turns every hash that
    # represents a serialized OutputReference back into an instance.
    def self.deserialize(value)
      case value
      when Hash
        return new_from_hash(value) if value[:class] == self.to_s
        value.each_with_object(Utils.indifferent_hash({})) do |(key, val), acc|
          acc[key] = deserialize(val)
        end
      when Array
        value.map { |item| deserialize(item) }
      else
        value
      end
    end

    attr_reader :execution_plan_id, :step_id, :action_id, :subkeys

    def initialize(execution_plan_id, step_id, action_id, subkeys = [])
      @execution_plan_id = Type! execution_plan_id, String
      @step_id = Type! step_id, Integer
      @action_id = Type! action_id, Integer
      Type! subkeys, Array
      # Normalize every subkey to a frozen list of strings.
      @subkeys = subkeys.map { |key| Type!(key, String, Symbol).to_s }.freeze
    end

    # Returns a new reference narrowed to a nested key of the referenced output.
    def [](subkey)
      self.class.new(execution_plan_id, step_id, action_id, subkeys + [subkey])
    end

    def to_hash
      recursive_to_hash class: self.class.to_s,
                        execution_plan_id: execution_plan_id,
                        step_id: step_id,
                        action_id: action_id,
                        subkeys: subkeys
    end

    def to_s
      text = "Step(#{step_id}).output"
      text += subkeys.map { |key| "[:#{key}]" }.join('') if subkeys.any?
      text
    end

    alias_method :inspect, :to_s

    # Loads the referenced action from persistence and digs the referenced
    # value out of its output by following the stored subkeys.
    def dereference(persistence)
      action_data = persistence.adapter.load_action(execution_plan_id, action_id)
      subkeys.reduce(action_data[:output]) { |value, key| value.fetch(key) }
    end

    protected

    def self.new_from_hash(hash)
      check_class_matching hash
      new(hash.fetch(:execution_plan_id),
          hash.fetch(:step_id),
          hash.fetch(:action_id),
          hash.fetch(:subkeys))
    end
  end
end
| {
"content_hash": "79ce1094a1c989689d65b3a69a1deedf",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 83,
"avg_line_length": 29.094117647058823,
"alnum_prop": 0.5713708046906592,
"repo_name": "adamruzicka/dynflow",
"id": "4549506aef8e713aa9a25b97912ec23febcd8351",
"size": "2473",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lib/dynflow/execution_plan/output_reference.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1603"
},
{
"name": "HTML",
"bytes": "10813"
},
{
"name": "JavaScript",
"bytes": "729"
},
{
"name": "Ruby",
"bytes": "514032"
},
{
"name": "Shell",
"bytes": "813"
}
],
"symlink_target": ""
} |
# Plain data holder for a cell-broadcast (CB) configuration: an activation
# mode, channel and language configuration strings, and a flag selecting all
# languages. Decompiled from CellBroadcastConfigInfo.java.
.class public Lcom/android/internal/telephony/CellBroadcastConfigInfo;
.super Ljava/lang/Object;
.source "CellBroadcastConfigInfo.java"


# instance fields
# Channel configuration string; defaults to null.
.field public channelConfigInfo:Ljava/lang/String;

# When true, toString() reports "all" instead of languageConfigInfo.
.field public isAllLanguageOn:Z

# Language configuration string; defaults to null.
.field public languageConfigInfo:Ljava/lang/String;

# Mode value; defaults to 1 before the constructor overwrites it.
.field public mode:I


# direct methods
# Package-private constructor: writes the field defaults (mode = 1, strings
# null, flag false) and then overwrites all four fields with the arguments.
.method constructor <init>(ILjava/lang/String;Ljava/lang/String;Z)V
    .locals 2
    .param p1, "mode"    # I
    .param p2, "channels"    # Ljava/lang/String;
    .param p3, "languages"    # Ljava/lang/String;
    .param p4, "allOn"    # Z

    .prologue
    const/4 v1, 0x0

    .line 16
    invoke-direct {p0}, Ljava/lang/Object;-><init>()V

    # Field defaults, as emitted from the Java field initializers.
    .line 8
    const/4 v0, 0x1

    iput v0, p0, Lcom/android/internal/telephony/CellBroadcastConfigInfo;->mode:I

    .line 10
    iput-object v1, p0, Lcom/android/internal/telephony/CellBroadcastConfigInfo;->channelConfigInfo:Ljava/lang/String;

    .line 12
    iput-object v1, p0, Lcom/android/internal/telephony/CellBroadcastConfigInfo;->languageConfigInfo:Ljava/lang/String;

    .line 14
    const/4 v0, 0x0

    iput-boolean v0, p0, Lcom/android/internal/telephony/CellBroadcastConfigInfo;->isAllLanguageOn:Z

    # Store the constructor arguments.
    .line 17
    iput p1, p0, Lcom/android/internal/telephony/CellBroadcastConfigInfo;->mode:I

    .line 18
    iput-object p2, p0, Lcom/android/internal/telephony/CellBroadcastConfigInfo;->channelConfigInfo:Ljava/lang/String;

    .line 19
    iput-object p3, p0, Lcom/android/internal/telephony/CellBroadcastConfigInfo;->languageConfigInfo:Ljava/lang/String;

    .line 20
    iput-boolean p4, p0, Lcom/android/internal/telephony/CellBroadcastConfigInfo;->isAllLanguageOn:Z

    .line 21
    return-void
.end method


# virtual methods
# Builds "CellBroadcastConfigInfo: mode = <mode>, channel = <channels>,
# language = <languages|all>"; "all" is used when isAllLanguageOn is set.
.method public toString()Ljava/lang/String;
    .locals 2

    .prologue
    .line 24
    new-instance v0, Ljava/lang/StringBuilder;

    invoke-direct {v0}, Ljava/lang/StringBuilder;-><init>()V

    .line 25
    .local v0, "ret":Ljava/lang/StringBuilder;
    const-string v1, "CellBroadcastConfigInfo: mode = "

    invoke-virtual {v0, v1}, Ljava/lang/StringBuilder;->append(Ljava/lang/String;)Ljava/lang/StringBuilder;

    .line 26
    iget v1, p0, Lcom/android/internal/telephony/CellBroadcastConfigInfo;->mode:I

    invoke-virtual {v0, v1}, Ljava/lang/StringBuilder;->append(I)Ljava/lang/StringBuilder;

    .line 27
    const-string v1, ", channel = "

    invoke-virtual {v0, v1}, Ljava/lang/StringBuilder;->append(Ljava/lang/String;)Ljava/lang/StringBuilder;

    .line 28
    iget-object v1, p0, Lcom/android/internal/telephony/CellBroadcastConfigInfo;->channelConfigInfo:Ljava/lang/String;

    invoke-virtual {v0, v1}, Ljava/lang/StringBuilder;->append(Ljava/lang/String;)Ljava/lang/StringBuilder;

    .line 29
    const-string v1, ", language = "

    invoke-virtual {v0, v1}, Ljava/lang/StringBuilder;->append(Ljava/lang/String;)Ljava/lang/StringBuilder;

    .line 30
    # Branch: append the explicit language list, or "all" when the flag is set.
    iget-boolean v1, p0, Lcom/android/internal/telephony/CellBroadcastConfigInfo;->isAllLanguageOn:Z

    if-nez v1, :cond_0

    .line 31
    iget-object v1, p0, Lcom/android/internal/telephony/CellBroadcastConfigInfo;->languageConfigInfo:Ljava/lang/String;

    invoke-virtual {v0, v1}, Ljava/lang/StringBuilder;->append(Ljava/lang/String;)Ljava/lang/StringBuilder;

    .line 35
    :goto_0
    invoke-virtual {v0}, Ljava/lang/StringBuilder;->toString()Ljava/lang/String;

    move-result-object v1

    return-object v1

    .line 33
    :cond_0
    const-string v1, "all"

    invoke-virtual {v0, v1}, Ljava/lang/StringBuilder;->append(Ljava/lang/String;)Ljava/lang/StringBuilder;

    goto :goto_0
.end method
| {
"content_hash": "4cd73adfc1c72969e5f58ed508d49377",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 119,
"avg_line_length": 29.338709677419356,
"alnum_prop": 0.7190764156129742,
"repo_name": "GaHoKwan/tos_device_meizu_mx4",
"id": "1e95ce110c1d63b42e91f0fc8e61082b9465e56b",
"size": "3638",
"binary": false,
"copies": "2",
"ref": "refs/heads/TPS-YUNOS",
"path": "patch/smali/pack/telephony-common.jar/smali/com/android/internal/telephony/CellBroadcastConfigInfo.smali",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2407"
},
{
"name": "Groff",
"bytes": "8687"
},
{
"name": "Makefile",
"bytes": "31774"
},
{
"name": "Shell",
"bytes": "6226"
},
{
"name": "Smali",
"bytes": "350951922"
}
],
"symlink_target": ""
} |
// Cached pieces of the currently executing code block, refreshed whenever
// control moves between frames (see code_cache_refresh).
typedef struct {
  // The raw bytecode.
  blob_t bytecode;
  // The pool of constant values used by the bytecode.
  value_t value_pool;
} code_cache_t;
// Updates the code cache according to the given frame. This must be called
// each time control moves from one frame to another, since each frame may be
// executing a different code block.
static void code_cache_refresh(code_cache_t *cache, frame_t *frame) {
  value_t block = frame_get_code_block(frame);
  cache->bytecode = get_blob_data(get_code_block_bytecode(block));
  cache->value_pool = get_code_block_value_pool(block);
}
// Records the current state of the given frame in the given escape state
// object such that restoring from the state will bring the frame back to the
// state it is now, modulo the given pc-offset which will have been added to
// the frame's pc. Pointers are stored as offsets from the stack piece bottom
// so they survive the stack piece moving.
static void capture_escape_state(value_t self, frame_t *frame, size_t pc_offset) {
  value_t *stack_start = frame_get_stack_piece_bottom(frame);
  escape_state_init(self,
      frame->stack_pointer - stack_start,
      frame->frame_pointer - stack_start,
      frame->limit_pointer - stack_start,
      frame->flags,
      frame->pc + pc_offset);
}
// Restores the previous state of the interpreter from the given derived
// object's escape state. If the destination lives on a different stack piece,
// that piece is made the top one and opened first; the stored offsets are
// then rebased onto the (possibly new) piece's bottom.
static void restore_escape_state(frame_t *frame, value_t stack,
    value_t destination) {
  value_t piece = get_derived_object_host(destination);
  if (!is_same_value(piece, frame->stack_piece)) {
    set_stack_top_piece(stack, piece);
    open_stack_piece(piece, frame);
  }
  value_t *bottom = frame_get_stack_piece_bottom(frame);
  frame->stack_pointer = bottom
      + get_integer_value(get_escape_state_stack_pointer(destination));
  frame->frame_pointer = bottom
      + get_integer_value(get_escape_state_frame_pointer(destination));
  frame->limit_pointer = bottom
      + get_integer_value(get_escape_state_limit_pointer(destination));
  frame->flags = get_escape_state_flags(destination);
  frame->pc = (size_t) get_integer_value(get_escape_state_pc(destination));
}
// Returns the short value at the given offset from the current pc.
static short_t read_short(code_cache_t *cache, frame_t *frame, size_t offset) {
  return blob_short_at(cache->bytecode, frame->pc + offset);
}
// Returns the value at the given offset from the current pc: the short read
// from the bytecode is an index into the code block's value pool.
static value_t read_value(code_cache_t *cache, frame_t *frame, size_t offset) {
  size_t index = read_short(cache, frame, offset);
  return get_array_at(cache->value_pool, index);
}
// Returns the code that implements the given method object by compiling its
// syntax tree in the context of the method's module fragment. The
// TRY_FINALLY/FINALLY macros ensure the assembler is disposed on both the
// success and the failure path.
static value_t compile_method(runtime_t *runtime, value_t method) {
  value_t method_ast = get_method_syntax(method);
  value_t fragment = get_method_module_fragment(method);
  assembler_t assm;
  TRY(assembler_init(&assm, runtime, fragment, scope_get_bottom()));
  TRY_FINALLY {
    E_TRY_DEF(code, compile_method_body(&assm, method_ast));
    E_RETURN(code);
  } FINALLY {
    assembler_dispose(&assm);
  } YRT
}
// Gets the code from a method object, compiling the method if necessary.
// The compiled code is cached on the method through a freeze-cheat pointer,
// so compilation happens at most once per method.
static value_t ensure_method_code(runtime_t *runtime, value_t method) {
  value_t code_ptr = get_method_code_ptr(method);
  value_t code = get_freeze_cheat_value(code_ptr);
  if (is_nothing(code)) {
    // First use: compile lazily and cache the result.
    TRY_SET(code, compile_method(runtime, method));
    // TODO: this is not an initialization, it needs to be changed to a freeze
    // cheat.
    set_freeze_cheat_value(code_ptr, code);
  }
  return code;
}
// Reports a lookup error as if it were a signal. It's not one that can be
// caught though, it's mainly a trick to get the stack trace when lookup fails.
static value_t signal_lookup_error(runtime_t *runtime, value_t stack, frame_t *frame) {
  // Advance past the invoke instruction so the recorded position is the one
  // following the failed lookup. NOTE(review): inferred from the constant's
  // name — confirm against the dispatch code.
  frame->pc += kInvokeOperationSize;
  return new_uncaught_signal_condition(true);
}
// Validates that the stack looks correct after execution completes normally.
//
// On a clean exit no barriers may remain registered on the stack; a leftover
// barrier means some section was entered but never disposed.
static void validate_stack_on_normal_exit(frame_t *frame) {
  value_t stack = get_stack_piece_stack(frame->stack_piece);
  CHECK_TRUE("leftover barriers", is_nothing(get_stack_top_barrier(stack)));
}
// Checks whether to fire the next barrier on the way to the given destination.
// If there is a barrier to fire, fire it. Returns false iff a barrier was
// fired, true if we've arrived at the destination.
//
// Firing an ensure-section barrier pushes a frame that runs the section's code
// block, so the caller must loop back into the interpreter rather than
// continue; any other barrier genus is handled synchronously via
// on_derived_object_exit. The barrier is unhooked from the stack's barrier
// chain before firing so it cannot fire twice.
static bool maybe_fire_next_barrier(code_cache_t *cache, frame_t *frame,
    runtime_t *runtime, value_t stack, value_t destination) {
  CHECK_DOMAIN(vdDerivedObject, destination);
  value_t next_barrier = get_stack_top_barrier(stack);
  if (is_same_value(next_barrier, destination)) {
    // We've arrived.
    return true;
  }
  // Grab the next barrier's handler.
  value_t payload = get_barrier_state_payload(next_barrier);
  value_t previous = get_barrier_state_previous(next_barrier);
  // Unhook the barrier from the barrier stack.
  set_stack_top_barrier(stack, previous);
  // Fire the exit action for the handler object.
  if (in_genus(dgEnsureSection, next_barrier)) {
    // Pop any previous state off the stack. If we've executed any
    // code shards before the first will be the result from the shard
    // the second will be the shard itself.
    frame_pop_value(frame);
    frame_pop_value(frame);
    // Push the shard onto the stack as the subject since we may need it
    // to refract access to outer variables.
    frame_push_value(frame, next_barrier);
    value_t argmap = ROOT(runtime, array_of_zero);
    value_t code_block = payload;
    // NOTE(review): the result of push_stack_frame is not checked here, unlike
    // the other call sites that rewind on failure -- confirm this cannot fail
    // at this point.
    push_stack_frame(runtime, stack, frame,
        (size_t) get_code_block_high_water_mark(code_block), argmap);
    frame_set_code_block(frame, code_block);
    code_cache_refresh(cache, frame);
  } else {
    on_derived_object_exit(next_barrier);
  }
  return false;
}
// Recovers the call tags of the invocation that produced the given callee
// frame by decoding the caller's invoke instruction.
static always_inline value_t get_caller_call_tags(frame_t *callee) {
  // Walk one step up the frame chain to reach whoever invoked the callee.
  frame_iter_t walker = frame_iter_from_frame(callee);
  bool stepped = frame_iter_advance(&walker);
  CHECK_TRUE("error advancing to get caller tags", stepped);
  frame_t *caller = frame_iter_get_current(&walker);
  value_t caller_code = frame_get_code_block(caller);
  // The caller is parked immediately after the invoke op that started the
  // callee, so back up one whole invoke instruction to locate it.
  size_t return_pc = caller->pc;
  CHECK_TRUE("caller not after invoke", return_pc >= kInvokeOperationSize);
  size_t invoke_pc = return_pc - kInvokeOperationSize;
  value_t bytecode = get_code_block_bytecode(caller_code);
  blob_t data = get_blob_data(bytecode);
  // The invoke op's first operand indexes the call tags in the caller's
  // value pool.
  size_t tags_index = blob_short_at(data, invoke_pc + 1);
  value_t pool = get_code_block_value_pool(caller_code);
  value_t result = get_array_at(pool, tags_index);
  CHECK_FAMILY(ofCallTags, result);
  return result;
}
// Implements the reify-arguments op: packages the current frame's arguments
// into a fresh reified-arguments object and pushes it on the stack.
static always_inline value_t do_reify_arguments(runtime_t *runtime, frame_t *frame,
    code_cache_t *cache) {
  value_t argmap = frame_get_argument_map(frame);
  value_t params = read_value(cache, frame, 1);
  value_t tags = get_caller_call_tags(frame);
  size_t argc = (size_t) get_array_length(argmap);
  // Allocate everything before touching the frame so the op stays idempotent
  // if allocation fails.
  TRY_DEF(arg_values, new_heap_array(runtime, argc));
  TRY_DEF(reified, new_heap_reified_arguments(runtime, params, arg_values,
      argmap, tags));
  for (size_t arg_index = 0; arg_index < argc; arg_index++) {
    // We have to get the raw arguments because extra arguments aren't
    // accessible through frame_get_argument because it uses the param
    // index and extra args don't have a param index.
    value_t detached = frame_detach_value(frame_get_raw_argument(frame, arg_index));
    set_array_at(arg_values, arg_index, detached);
  }
  frame_push_value(frame, reified);
  frame->pc += kReifyArgumentsOperationSize;
  return success();
}
// Counter that increments for each opcode executed when interpreter topic
// logging is enabled. Can be helpful for debugging but is kind of a lame hack.
// NOTE(review): shared mutable static -- presumably the interpreter is
// single-threaded; confirm before running tasks concurrently.
static uint64_t opcode_counter = 0;
// Counter that is used to schedule validation interrupts in expensive checks
// mode.
static uint64_t interrupt_counter = 0;
// Interval between forced validations. Must be a power of 2 so the mask test
// in MAYBE_INTERRUPT works.
#define kForceValidateInterval 2048
// Expands to a block that checks whether it's time to force validation.
// Returns a force-validate condition (via E_RETURN) every
// kForceValidateInterval opcodes; the caller is expected to validate and
// resume.
#define MAYBE_INTERRUPT() do { \
  if ((++interrupt_counter & (kForceValidateInterval - 1)) == 0) { \
    uint64_t serial = interrupt_counter / kForceValidateInterval; \
    E_RETURN(new_force_validate_condition((uint32_t) serial)); \
  } \
} while (false)
// Runs the given task within the given ambience until a condition is
// encountered or evaluation completes. This function also bails on and leaves
// it to the surrounding code to report error messages.
//
// This is the interpreter's main dispatch loop: decode the opcode at the
// current pc, execute it, repeat. The open frame is mutated in place; the
// FINALLY clause closes it on every exit path. Conditions (heap exhaustion,
// uncaught signals, forced validation) are returned to the caller, which may
// recover (gc, validate) and re-enter. Ops that allocate generally do all
// allocation before mutating the frame, and ops that push frames advance the
// pc optimistically and rewind it if the push fails, so re-entry after a
// condition retries the same op cleanly.
static value_t run_task_pushing_signals(value_t ambience, value_t task) {
  CHECK_FAMILY(ofAmbience, ambience);
  CHECK_FAMILY(ofTask, task);
  value_t process = get_task_process(task);
  value_t stack = get_task_stack(task);
  runtime_t *runtime = get_ambience_runtime(ambience);
  frame_t frame = open_stack(stack);
  code_cache_t cache;
  code_cache_refresh(&cache, &frame);
  TRY_FINALLY {
    while (true) {
      opcode_t opcode = (opcode_t) read_short(&cache, &frame, 0);
      TOPIC_INFO(Interpreter, "Opcode: %s (%i)", get_opcode_name(opcode),
          ++opcode_counter);
      IF_EXPENSIVE_CHECKS_ENABLED(MAYBE_INTERRUPT());
      switch (opcode) {
        case ocPush: {
          // Push a constant from the value pool.
          value_t value = read_value(&cache, &frame, 1);
          frame_push_value(&frame, value);
          frame.pc += kPushOperationSize;
          break;
        }
        case ocPop: {
          // Discard the top N values.
          size_t count = read_short(&cache, &frame, 1);
          for (size_t i = 0; i < count; i++)
            frame_pop_value(&frame);
          frame.pc += kPopOperationSize;
          break;
        }
        case ocCheckStackHeight: {
          // Debug op: assert the stack height matches what the compiler
          // expected at this point.
          size_t expected = read_short(&cache, &frame, 1);
          size_t height = frame.stack_pointer - frame.frame_pointer;
          CHECK_EQ("stack height", expected, height);
          frame.pc += kCheckStackHeightOperationSize;
          break;
        }
        case ocNewArray: {
          // Pop N values and pack them into a new array, last-popped first.
          size_t length = read_short(&cache, &frame, 1);
          E_TRY_DEF(array, new_heap_array(runtime, length));
          for (size_t i = 0; i < length; i++) {
            value_t element = frame_pop_value(&frame);
            set_array_at(array, length - i - 1, element);
          }
          frame_push_value(&frame, array);
          frame.pc += kNewArrayOperationSize;
          break;
        }
        case ocInvoke: {
          // Look up the method in the method space.
          value_t tags = read_value(&cache, &frame, 1);
          CHECK_FAMILY(ofCallTags, tags);
          value_t fragment = read_value(&cache, &frame, 2);
          CHECK_FAMILY_OPT(ofModuleFragment, fragment);
          value_t next_guards = read_value(&cache, &frame, 3);
          CHECK_FAMILY_OPT(ofArray, next_guards);
          value_t arg_map;
          sigmap_input_layout_t layout = sigmap_input_layout_new(ambience, tags,
              next_guards);
          value_t method = lookup_method_full_from_frame(&layout, &frame, &arg_map);
          if (in_condition_cause(ccLookupError, method))
            E_RETURN(signal_lookup_error(runtime, stack, &frame));
          // The lookup may have failed with a different condition. Check for that.
          E_TRY(method);
          E_TRY_DEF(code_block, ensure_method_code(runtime, method));
          // Optimistically advance the pc to the operation we'll return to
          // after this invocation, since the pc will be captured by pushing
          // the new frame. If pushing fails we rewind.
          frame.pc += kInvokeOperationSize;
          // Push a new activation.
          value_t pushed = push_stack_frame(runtime, stack, &frame,
              (size_t) get_code_block_high_water_mark(code_block), arg_map);
          if (is_condition(pushed)) {
            // Pushing failed, usually because we ran out of memory. Rewind so
            // we're ready to try again.
            frame.pc -= kInvokeOperationSize;
            E_RETURN(pushed);
          }
          frame_set_code_block(&frame, code_block);
          code_cache_refresh(&cache, &frame);
          break;
        }
        case ocSignalContinue: case ocSignalEscape: {
          // Look up the method in the method space.
          value_t tags = read_value(&cache, &frame, 1);
          CHECK_FAMILY(ofCallTags, tags);
          // NOTE(review): both signal variants advance by the escape op size;
          // presumably the continue op has the same layout -- confirm.
          frame.pc += kSignalEscapeOperationSize;
          value_t arg_map = whatever();
          value_t handler = whatever();
          sigmap_input_layout_t layout = sigmap_input_layout_new(ambience, tags,
              nothing());
          value_t method = lookup_signal_handler_method_from_frame(&layout,
              &frame, &handler, &arg_map);
          bool is_escape = (opcode == ocSignalEscape);
          if (in_condition_cause(ccLookupError, method)) {
            if (is_escape) {
              // There was no handler for this so we have to escape out of the
              // interpreter altogether. Push the signal frame onto the stack to
              // record the state of it for the enclosing code.
              E_TRY(push_stack_frame(runtime, stack, &frame, 1, nothing()));
              // The stack tracing code expects all frames to have a valid code block
              // object. The rest makes less of a difference.
              frame_set_code_block(&frame, ROOT(runtime, empty_code_block));
              E_RETURN(new_uncaught_signal_condition(is_escape));
            } else {
              // There was no handler but this is not an escape so we skip over
              // the post-handler goto to the default block.
              CHECK_EQ("signal not followed by goto", ocGoto,
                  read_short(&cache, &frame, 0));
              frame.pc += kGotoOperationSize;
            }
          } else {
            // We found a method. Invoke it.
            E_TRY(method);
            E_TRY_DEF(code_block, ensure_method_code(runtime, method));
            E_TRY(push_stack_frame(runtime, stack, &frame,
                (size_t) get_code_block_high_water_mark(code_block), arg_map));
            frame_set_code_block(&frame, code_block);
            // The handler replaces the null subject slot in the new frame.
            CHECK_TRUE("subject not null", is_null(frame_get_argument(&frame, 0)));
            frame_set_argument(&frame, 0, handler);
            code_cache_refresh(&cache, &frame);
          }
          break;
        }
        case ocGoto: {
          // Unconditional relative jump.
          size_t delta = read_short(&cache, &frame, 1);
          frame.pc += delta;
          break;
        }
        case ocDelegateToLambda:
        case ocDelegateToBlock: {
          // This op only appears in the lambda and block delegator methods.
          // They should never be executed because the delegation happens during
          // method lookup. If we hit here something's likely wrong with the
          // lookup process.
          UNREACHABLE("delegate to lambda");
          return new_condition(ccWat);
        }
        case ocBuiltin: {
          // Call a native builtin that cannot signal.
          value_t wrapper = read_value(&cache, &frame, 1);
          builtin_implementation_t impl = (builtin_implementation_t) get_void_p_value(wrapper);
          builtin_arguments_t args;
          builtin_arguments_init(&args, runtime, &frame, process);
          E_TRY_DEF(result, impl(&args));
          frame_push_value(&frame, result);
          frame.pc += kBuiltinOperationSize;
          break;
        }
        case ocBuiltinMaybeEscape: {
          // Call a native builtin that may leave through a signal handler.
          value_t wrapper = read_value(&cache, &frame, 1);
          builtin_implementation_t impl = (builtin_implementation_t) get_void_p_value(wrapper);
          builtin_arguments_t args;
          builtin_arguments_init(&args, runtime, &frame, process);
          value_t result = impl(&args);
          if (in_condition_cause(ccUncaughtSignal, result)) {
            // The builtin failed. Find the appropriate signal handler and call
            // it. The invocation record is at the top of the stack.
            value_t tags = frame_pop_value(&frame);
            CHECK_FAMILY(ofCallTags, tags);
            value_t arg_map = whatever();
            value_t handler = whatever();
            sigmap_input_layout_t layout = sigmap_input_layout_new(ambience, tags,
                nothing());
            value_t method = lookup_signal_handler_method_from_frame(&layout,
                &frame, &handler, &arg_map);
            if (in_condition_cause(ccLookupError, method)) {
              // Push the record back onto the stack so it's available to back
              // tracing.
              frame_push_value(&frame, tags);
              frame.pc += kBuiltinMaybeEscapeOperationSize;
              // There was no handler for this so we have to escape out of the
              // interpreter altogether. Push the signal frame onto the stack to
              // record the state of it for the enclosing code.
              E_TRY(push_stack_frame(runtime, stack, &frame, 1, nothing()));
              // The stack tracing code expects all frames to have a valid code block
              // object. The rest makes less of a difference.
              frame_set_code_block(&frame, ROOT(runtime, empty_code_block));
              E_RETURN(new_uncaught_signal_condition(true));
            }
            // Either found a signal or encountered a different condition.
            E_TRY(method);
            // Skip forward to the point we want the signal to return to, the
            // leave-or-fire-barrier op that will do the leaving.
            size_t dest_offset = read_short(&cache, &frame, 2);
            E_TRY_DEF(code_block, ensure_method_code(runtime, method));
            frame.pc += dest_offset;
            // Run the handler.
            value_t pushed = push_stack_frame(runtime, stack, &frame,
                (size_t) get_code_block_high_water_mark(code_block), arg_map);
            if (is_condition(pushed)) {
              // Rewind so the op retries cleanly after e.g. a gc.
              frame.pc -= dest_offset;
              E_RETURN(pushed);
            }
            frame_set_code_block(&frame, code_block);
            CHECK_TRUE("subject not null", is_null(frame_get_argument(&frame, 0)));
            frame_set_argument(&frame, 0, handler);
            code_cache_refresh(&cache, &frame);
          } else {
            // The builtin didn't cause a condition so we can just keep going.
            E_TRY(result);
            frame_push_value(&frame, result);
            frame.pc += kBuiltinMaybeEscapeOperationSize;
          }
          break;
        }
        case ocReturn: {
          // Pop this activation and hand the result to the caller's frame.
          value_t result = frame_pop_value(&frame);
          frame_pop_within_stack_piece(&frame);
          code_cache_refresh(&cache, &frame);
          frame_push_value(&frame, result);
          break;
        }
        case ocStackBottom: {
          // Execution has unwound all the way to the bottom: we're done.
          value_t result = frame_pop_value(&frame);
          validate_stack_on_normal_exit(&frame);
          E_RETURN(result);
        }
        case ocStackPieceBottom: {
          // Reached the bottom of one stack piece; continue in the previous
          // piece, carrying the result across.
          value_t top_piece = frame.stack_piece;
          value_t result = frame_pop_value(&frame);
          value_t next_piece = get_stack_piece_previous(top_piece);
          set_stack_top_piece(stack, next_piece);
          frame = open_stack(stack);
          code_cache_refresh(&cache, &frame);
          frame_push_value(&frame, result);
          break;
        }
        case ocSlap: {
          // Keep the top value but squash the N entries below it.
          value_t value = frame_pop_value(&frame);
          size_t argc = read_short(&cache, &frame, 1);
          for (size_t i = 0; i < argc; i++)
            frame_pop_value(&frame);
          frame_push_value(&frame, value);
          frame.pc += kSlapOperationSize;
          break;
        }
        case ocNewReference: {
          // Create the reference first so that if it fails we haven't clobbered
          // the stack yet.
          E_TRY_DEF(ref, new_heap_reference(runtime, nothing()));
          value_t value = frame_pop_value(&frame);
          set_reference_value(ref, value);
          frame_push_value(&frame, ref);
          frame.pc += kNewReferenceOperationSize;
          break;
        }
        case ocSetReference: {
          // Store the value below the reference into the reference, leaving
          // the value itself on the stack.
          value_t ref = frame_pop_value(&frame);
          CHECK_FAMILY(ofReference, ref);
          value_t value = frame_peek_value(&frame, 0);
          set_reference_value(ref, value);
          frame.pc += kSetReferenceOperationSize;
          break;
        }
        case ocGetReference: {
          // Replace the reference on the stack with its current value.
          value_t ref = frame_pop_value(&frame);
          CHECK_FAMILY(ofReference, ref);
          value_t value = get_reference_value(ref);
          frame_push_value(&frame, value);
          frame.pc += kGetReferenceOperationSize;
          break;
        }
        case ocLoadLocal: {
          size_t index = read_short(&cache, &frame, 1);
          value_t value = frame_get_local(&frame, index);
          frame_push_value(&frame, value);
          frame.pc += kLoadLocalOperationSize;
          break;
        }
        case ocLoadGlobal: {
          // Resolve a path through the module fragment's namespace.
          value_t path = read_value(&cache, &frame, 1);
          CHECK_FAMILY(ofPath, path);
          value_t fragment = read_value(&cache, &frame, 2);
          CHECK_FAMILY_OPT(ofModuleFragment, fragment);
          E_TRY_DEF(value, module_fragment_lookup_path_full(runtime, fragment, path));
          frame_push_value(&frame, value);
          frame.pc += kLoadGlobalOperationSize;
          break;
        }
        case ocLoadArgument: {
          size_t param_index = read_short(&cache, &frame, 1);
          value_t value = frame_get_argument(&frame, param_index);
          frame_push_value(&frame, value);
          frame.pc += kLoadArgumentOperationSize;
          break;
        }
        case ocReifyArguments: {
          E_TRY(do_reify_arguments(runtime, &frame, &cache));
          break;
        }
        case ocLoadRawArgument: {
          size_t eval_index = read_short(&cache, &frame, 1);
          value_t value = frame_get_raw_argument(&frame, eval_index);
          frame_push_value(&frame, value);
          frame.pc += kLoadRawArgumentOperationSize;
          break;
        }
        case ocLoadRefractedArgument: {
          // Load an argument from an enclosing (refracted) frame reached
          // through the subject, block_depth levels out.
          size_t param_index = read_short(&cache, &frame, 1);
          size_t block_depth = read_short(&cache, &frame, 2);
          value_t subject = frame_get_argument(&frame, 0);
          frame_t home = frame_empty();
          get_refractor_refracted_frame(subject, block_depth, &home);
          value_t value = frame_get_argument(&home, param_index);
          frame_push_value(&frame, value);
          frame.pc += kLoadRefractedArgumentOperationSize;
          break;
        }
        case ocLoadRefractedLocal: {
          // Load a local from an enclosing (refracted) frame.
          size_t index = read_short(&cache, &frame, 1);
          size_t block_depth = read_short(&cache, &frame, 2);
          value_t subject = frame_get_argument(&frame, 0);
          frame_t home = frame_empty();
          get_refractor_refracted_frame(subject, block_depth, &home);
          value_t value = frame_get_local(&home, index);
          frame_push_value(&frame, value);
          frame.pc += kLoadRefractedLocalOperationSize;
          break;
        }
        case ocLoadLambdaCapture: {
          size_t index = read_short(&cache, &frame, 1);
          value_t subject = frame_get_argument(&frame, 0);
          CHECK_FAMILY(ofLambda, subject);
          value_t value = get_lambda_capture(subject, index);
          frame_push_value(&frame, value);
          frame.pc += kLoadLambdaCaptureOperationSize;
          break;
        }
        case ocLoadRefractedCapture: {
          // Load a lambda capture through an enclosing (refracted) frame.
          size_t index = read_short(&cache, &frame, 1);
          size_t block_depth = read_short(&cache, &frame, 2);
          value_t subject = frame_get_argument(&frame, 0);
          frame_t home = frame_empty();
          get_refractor_refracted_frame(subject, block_depth, &home);
          value_t lambda = frame_get_argument(&home, 0);
          CHECK_FAMILY(ofLambda, lambda);
          value_t value = get_lambda_capture(lambda, index);
          frame_push_value(&frame, value);
          // NOTE(review): this advances by the refracted-local op size rather
          // than a refracted-capture size; presumably the two ops share the
          // same encoding width -- confirm against the opcode definitions.
          frame.pc += kLoadRefractedLocalOperationSize;
          break;
        }
        case ocLambda: {
          value_t space = read_value(&cache, &frame, 1);
          CHECK_FAMILY(ofMethodspace, space);
          size_t capture_count = read_short(&cache, &frame, 2);
          value_t captures;
          E_TRY_DEF(lambda, new_heap_lambda(runtime, space, nothing()));
          if (capture_count == 0) {
            captures = ROOT(runtime, empty_array);
            frame.pc += kLambdaOperationSize;
          } else {
            E_TRY_SET(captures, new_heap_array(runtime, capture_count));
            // The pc gets incremented here because it is after we've done all
            // the allocation but before anything has been popped off the stack.
            // This way all the above is idempotent, and the below is guaranteed
            // to succeed.
            frame.pc += kLambdaOperationSize;
            for (size_t i = 0; i < capture_count; i++)
              set_array_at(captures, i, frame_pop_value(&frame));
          }
          set_lambda_captures(lambda, captures);
          frame_push_value(&frame, lambda);
          break;
        }
        case ocCreateBlock: {
          value_t space = read_value(&cache, &frame, 1);
          CHECK_FAMILY(ofMethodspace, space);
          // Create the block object.
          E_TRY_DEF(block, new_heap_block(runtime, nothing()));
          // Create the stack section that describes the block.
          value_t section = frame_alloc_derived_object(&frame, get_genus_descriptor(dgBlockSection));
          set_barrier_state_payload(section, block);
          refraction_point_init(section, &frame);
          set_block_section_methodspace(section, space);
          set_block_section(block, section);
          value_validate(block);
          value_validate(section);
          // Push the block object.
          frame_push_value(&frame, block);
          frame.pc += kCreateBlockOperationSize;
          break;
        }
        case ocCreateEnsurer: {
          // Allocate an ensure-section barrier holding the code to run on
          // the way out.
          value_t code_block = read_value(&cache, &frame, 1);
          value_t section = frame_alloc_derived_object(&frame,
              get_genus_descriptor(dgEnsureSection));
          set_barrier_state_payload(section, code_block);
          refraction_point_init(section, &frame);
          value_validate(section);
          frame_push_value(&frame, section);
          frame.pc += kCreateEnsurerOperationSize;
          break;
        }
        case ocCallEnsurer: {
          // Run an ensure block inline on normal (non-escaping) exit.
          value_t value = frame_pop_value(&frame);
          value_t shard = frame_pop_value(&frame);
          frame_push_value(&frame, value);
          frame_push_value(&frame, shard);
          CHECK_GENUS(dgEnsureSection, shard);
          value_t code_block = get_barrier_state_payload(shard);
          CHECK_FAMILY(ofCodeBlock, code_block);
          // Unregister the barrier before calling it, otherwise if we leave
          // by escaping we'll end up calling it over again.
          barrier_state_unregister(shard, stack);
          frame.pc += kCallEnsurerOperationSize;
          value_t argmap = ROOT(runtime, array_of_zero);
          value_t pushed = push_stack_frame(runtime, stack, &frame,
              (size_t) get_code_block_high_water_mark(code_block), argmap);
          if (is_condition(pushed)) {
            frame.pc -= kCallEnsurerOperationSize;
            E_RETURN(pushed);
          }
          frame_set_code_block(&frame, code_block);
          code_cache_refresh(&cache, &frame);
          break;
        }
        case ocDisposeEnsurer: {
          // Discard the result of the ensure block. If an ensure blocks needs
          // to return a useful value it can do it via an escape.
          frame_pop_value(&frame);
          value_t shard = frame_pop_value(&frame);
          CHECK_GENUS(dgEnsureSection, shard);
          value_t value = frame_pop_value(&frame);
          frame_destroy_derived_object(&frame, get_genus_descriptor(dgEnsureSection));
          frame_push_value(&frame, value);
          frame.pc += kDisposeEnsurerOperationSize;
          break;
        }
        case ocInstallSignalHandler: {
          value_t space = read_value(&cache, &frame, 1);
          CHECK_FAMILY(ofMethodspace, space);
          size_t dest_offset = read_short(&cache, &frame, 2);
          // Allocate the derived object that's going to hold the signal handler
          // state.
          value_t section = frame_alloc_derived_object(&frame,
              get_genus_descriptor(dgSignalHandlerSection));
          // Initialize the handler.
          set_barrier_state_payload(section, space);
          refraction_point_init(section, &frame);
          // Bring the frame state to the point we'll want to escape to (modulo
          // the destination offset).
          frame_push_value(&frame, section);
          frame.pc += kInstallSignalHandlerOperationSize;
          // Finally capture the escape state.
          capture_escape_state(section, &frame, dest_offset);
          value_validate(section);
          break;
        }
        case ocUninstallSignalHandler: {
          // The result has been left at the top of the stack.
          value_t value = frame_pop_value(&frame);
          value_t section = frame_pop_value(&frame);
          CHECK_GENUS(dgSignalHandlerSection, section);
          barrier_state_unregister(section, stack);
          frame_destroy_derived_object(&frame, get_genus_descriptor(dgSignalHandlerSection));
          frame_push_value(&frame, value);
          frame.pc += kUninstallSignalHandlerOperationSize;
          break;
        }
        case ocCreateEscape: {
          size_t dest_offset = read_short(&cache, &frame, 1);
          // Create an initially empty escape object.
          E_TRY_DEF(escape, new_heap_escape(runtime, nothing()));
          // Allocate the escape section on the stack, hooking the barrier into
          // the barrier chain.
          value_t section = frame_alloc_derived_object(&frame, get_genus_descriptor(dgEscapeSection));
          // Point the state and object to each other.
          set_barrier_state_payload(section, escape);
          set_escape_section(escape, section);
          // Get execution ready for the next operation.
          frame_push_value(&frame, escape);
          frame.pc += kCreateEscapeOperationSize;
          // This is the execution state the escape will escape to (modulo the
          // destination offset) so this is what we want to capture.
          capture_escape_state(section, &frame, dest_offset);
          break;
        }
        case ocLeaveOrFireBarrier: {
          size_t argc = read_short(&cache, &frame, 1);
          // At this point the handler has been set as the subject of the call
          // to the handler method. Above the arguments are also two scratch
          // stack entries.
          value_t handler = frame_peek_value(&frame, argc + 2);
          CHECK_GENUS(dgSignalHandlerSection, handler);
          if (maybe_fire_next_barrier(&cache, &frame, runtime, stack, handler)) {
            // Pop the scratch entries off.
            frame_pop_value(&frame);
            frame_pop_value(&frame);
            // Pop the value off.
            value_t value = frame_pop_value(&frame);
            // Escape to the handler's home.
            restore_escape_state(&frame, stack, handler);
            code_cache_refresh(&cache, &frame);
            // Push the value back on, now in the handler's home frame.
            frame_push_value(&frame, value);
          } else {
            // If a barrier was fired we'll want to let the interpreter loop
            // around again so just break without touching .pc.
          }
          break;
        }
        case ocFireEscapeOrBarrier: {
          value_t escape = frame_get_argument(&frame, 0);
          CHECK_FAMILY(ofEscape, escape);
          value_t section = get_escape_section(escape);
          // Fire the next barrier or, if there are no more barriers, apply the
          // escape.
          if (maybe_fire_next_barrier(&cache, &frame, runtime, stack, section)) {
            value_t value = frame_get_argument(&frame, kImplicitArgumentCount);
            restore_escape_state(&frame, stack, section);
            code_cache_refresh(&cache, &frame);
            frame_push_value(&frame, value);
          } else {
            // If a barrier was fired we'll want to let the interpreter loop
            // around again so just break without touching .pc.
          }
          break;
        }
        case ocDisposeEscape: {
          value_t value = frame_pop_value(&frame);
          value_t escape = frame_pop_value(&frame);
          CHECK_FAMILY(ofEscape, escape);
          value_t section = get_escape_section(escape);
          value_validate(section);
          barrier_state_unregister(section, stack);
          on_escape_section_exit(section);
          frame_destroy_derived_object(&frame, get_genus_descriptor(dgEscapeSection));
          frame_push_value(&frame, value);
          frame.pc += kDisposeEscapeOperationSize;
          break;
        }
        case ocDisposeBlock: {
          value_t value = frame_pop_value(&frame);
          value_t block = frame_pop_value(&frame);
          CHECK_FAMILY(ofBlock, block);
          value_t section = get_block_section(block);
          barrier_state_unregister(section, stack);
          on_block_section_exit(section);
          frame_destroy_derived_object(&frame, get_genus_descriptor(dgBlockSection));
          frame_push_value(&frame, value);
          frame.pc += kDisposeBlockOperationSize;
          break;
        }
        case ocCreateCallData: {
          // The stack holds argc (tag, value) pairs; collect the tags,
          // canonicalize them into call tags, then pop the values, reusing
          // the raw tags array for the values.
          size_t argc = read_short(&cache, &frame, 1);
          E_TRY_DEF(raw_tags, new_heap_array(runtime, argc));
          for (size_t i = 0; i < argc; i++) {
            value_t tag = frame_peek_value(&frame, 2 * (argc - i) - 1);
            set_array_at(raw_tags, i, tag);
          }
          E_TRY_DEF(entries, build_call_tags_entries(runtime, raw_tags));
          E_TRY_DEF(call_tags, new_heap_call_tags(runtime, afFreeze, entries));
          value_t values = raw_tags;
          for (size_t i = 0; i < argc; i++) {
            value_t value = frame_pop_value(&frame);
            frame_pop_value(&frame);
            set_array_at(values, i, value);
          }
          E_TRY_DEF(call_data, new_heap_call_data(runtime, call_tags, values));
          frame_push_value(&frame, call_data);
          frame.pc += kCreateCallDataOperationSize;
          break;
        }
        case ocModuleFragmentPrivateInvokeCallData:
        case ocModuleFragmentPrivateInvokeReifiedArguments: {
          // Perform the method lookup.
          value_t phrivate = frame_get_argument(&frame, 0);
          CHECK_FAMILY(ofModuleFragmentPrivate, phrivate);
          value_t values = whatever();
          sigmap_input_layout_t layout;
          if (opcode == ocModuleFragmentPrivateInvokeCallData) {
            value_t call_data = frame_get_argument(&frame, 3);
            CHECK_FAMILY(ofCallData, call_data);
            values = get_call_data_values(call_data);
            layout = sigmap_input_layout_new(ambience, get_call_data_tags(call_data),
                nothing());
          } else {
            value_t reified = frame_get_argument(&frame, 3);
            CHECK_FAMILY(ofReifiedArguments, reified);
            values = get_reified_arguments_values(reified);
            layout = sigmap_input_layout_new(ambience,
                get_reified_arguments_tags(reified), nothing());
          }
          value_t arg_map = whatever();
          value_t method = lookup_method_full_from_value_array(&layout, values,
              &arg_map);
          if (in_condition_cause(ccLookupError, method))
            E_RETURN(signal_lookup_error(runtime, stack, &frame));
          E_TRY(method);
          E_TRY_DEF(code_block, ensure_method_code(runtime, method));
          frame.pc += kModuleFragmentPrivateInvokeCallDataOperationSize;
          // Method lookup succeeded. Build the frame that holds the arguments.
          // The argument frame needs room for all the arguments as well as
          // the return value.
          size_t argc = (size_t) get_array_length(values);
          value_t pushed = push_stack_frame(runtime, stack, &frame, argc + 1, nothing());
          if (is_condition(pushed)) {
            frame.pc -= kModuleFragmentPrivateInvokeCallDataOperationSize;
            E_RETURN(pushed);
          }
          frame_set_code_block(&frame, ROOT(runtime, return_code_block));
          for (size_t i = 0; i < argc; i++)
            frame_push_value(&frame, get_array_at(values, argc - i - 1));
          // Then build the method's frame.
          pushed = push_stack_frame(runtime, stack, &frame,
              (size_t) get_code_block_high_water_mark(code_block), arg_map);
          // This should be handled gracefully.
          CHECK_FALSE("call literal invocation failed", is_condition(pushed));
          frame_set_code_block(&frame, code_block);
          code_cache_refresh(&cache, &frame);
          break;
        }
        case ocModuleFragmentPrivateLeaveReifiedArguments: {
          // Perform the method lookup.
          value_t phrivate = frame_get_argument(&frame, 0);
          CHECK_FAMILY(ofModuleFragmentPrivate, phrivate);
          value_t values = whatever();
          sigmap_input_layout_t layout;
          value_t reified = frame_get_argument(&frame, 3);
          CHECK_FAMILY(ofReifiedArguments, reified);
          values = get_reified_arguments_values(reified);
          layout = sigmap_input_layout_new(ambience,
              get_reified_arguments_tags(reified), nothing());
          value_t arg_map = whatever();
          value_t handler = whatever();
          value_t method = lookup_signal_handler_method_from_value_array(&layout,
              values, &frame, &handler, &arg_map);
          if (in_condition_cause(ccLookupError, method))
            E_RETURN(signal_lookup_error(runtime, stack, &frame));
          E_TRY(method);
          E_TRY_DEF(code_block, ensure_method_code(runtime, method));
          frame.pc += kModuleFragmentPrivateLeaveReifiedArgumentsOperationSize;
          // Method lookup succeeded. Build the frame that holds the arguments.
          // The argument frame needs room for all the arguments as well as
          // the return value.
          size_t argc = (size_t) get_array_length(values);
          value_t pushed = push_stack_frame(runtime, stack, &frame, argc + 1, nothing());
          if (is_condition(pushed)) {
            frame.pc -= kModuleFragmentPrivateLeaveReifiedArgumentsOperationSize;
            E_RETURN(pushed);
          }
          frame_set_code_block(&frame, ROOT(runtime, return_code_block));
          for (size_t i = 0; i < argc; i++)
            frame_push_value(&frame, get_array_at(values, argc - i - 1));
          // Then build the method's frame.
          pushed = push_stack_frame(runtime, stack, &frame,
              (size_t) get_code_block_high_water_mark(code_block), arg_map);
          // This should be handled gracefully.
          CHECK_FALSE("call literal invocation failed", is_condition(pushed));
          frame_set_code_block(&frame, code_block);
          CHECK_TRUE("subject not null", is_null(frame_get_argument(&frame, 0)));
          frame_set_argument(&frame, 0, handler);
          code_cache_refresh(&cache, &frame);
          break;
        }
        default:
          ERROR("Unexpected opcode %i", opcode);
          UNREACHABLE("unexpected opcode");
          break;
      }
    }
  } FINALLY {
    close_frame(&frame);
  } YRT
}
// Print a trace of the task's stack.
//
// Captures a backtrace from the task's current top frame and logs it. The
// frame is closed on every exit path by the FINALLY clause.
static value_t print_task_stack_trace(value_t ambience, value_t task) {
  runtime_t *runtime = get_ambience_runtime(ambience);
  frame_t frame = open_stack(get_task_stack(task));
  TRY_FINALLY {
    E_TRY_DEF(trace, capture_backtrace(runtime, &frame, 0));
    INFO_DETERMINISTIC("%9v", trace);
    E_RETURN(success());
  } FINALLY {
    close_frame(&frame);
  } YRT
}
// Runs the given task's stack until it hits a condition or completes
// successfully, dumping a stack trace when an uncaught signal aborts it.
static value_t run_task_until_condition(value_t ambience, value_t task) {
  CHECK_FAMILY(ofAmbience, ambience);
  CHECK_FAMILY(ofTask, task);
  value_t outcome = run_task_pushing_signals(ambience, task);
  if (in_condition_cause(ccUncaughtSignal, outcome)) {
    // An uncaught signal terminated execution; log where it happened.
    TRY(print_task_stack_trace(ambience, task));
  }
  return outcome;
}
// Runs the given stack until it hits a signal or completes successfully. If the
// heap becomes exhausted this function will try garbage collecting and
// continuing.
static value_t run_task_until_signal(safe_value_t s_ambience, safe_value_t s_task) {
  CHECK_FAMILY(ofAmbience, deref(s_ambience));
  CHECK_FAMILY(ofTask, deref(s_task));
  while (true) {
    // Deref the safe handles afresh on every attempt since a collection may
    // have moved the underlying objects.
    value_t ambience = deref(s_ambience);
    value_t task = deref(s_task);
    value_t result = run_task_pushing_signals(ambience, task);
    if (in_condition_cause(ccHeapExhausted, result)) {
      // Out of memory: collect and retry.
      runtime_t *runtime = get_ambience_runtime(ambience);
      runtime_garbage_collect(runtime);
      continue;
    }
    if (in_condition_cause(ccForceValidate, result)) {
      // Scheduled validation interrupt: validate and resume.
      runtime_t *runtime = get_ambience_runtime(ambience);
      runtime_validate(runtime, result);
      continue;
    }
    return result;
  }
}
// Returns the human-readable name for the given opcode, or NULL if the value
// is not a known opcode. The cases are generated from the ENUM_OPCODES
// X-macro so the mapping stays in sync with the opcode list.
const char *get_opcode_name(opcode_t opcode) {
  switch (opcode) {
#define __EMIT_CASE__(Name, ARGC) \
    case oc##Name: \
      return #Name;
  ENUM_OPCODES(__EMIT_CASE__)
#undef __EMIT_CASE__
    default:
      return NULL;
  }
}
// Runs the given code block on a freshly created process until it completes
// or produces a condition, handling forced-validation interrupts in place.
value_t run_code_block_until_condition(value_t ambience, value_t code) {
  CHECK_FAMILY(ofAmbience, ambience);
  CHECK_FAMILY(ofCodeBlock, code);
  // Create the stack to run the code on.
  runtime_t *runtime = get_ambience_runtime(ambience);
  TRY_DEF(process, new_heap_process(runtime));
  TRY_DEF(task, get_process_root_task(process));
  TRY_DEF(stack, get_task_stack(task));
  // Push an activation onto the empty stack to get execution going.
  size_t frame_size = (size_t) get_code_block_high_water_mark(code);
  frame_t frame = open_stack(stack);
  TRY(push_stack_frame(runtime, stack, &frame, frame_size, ROOT(runtime, empty_array)));
  frame_set_code_block(&frame, code);
  close_frame(&frame);
  // Run the stack, re-entering after each forced validation.
  while (true) {
    value_t result = run_task_until_condition(ambience, task);
    if (!in_condition_cause(ccForceValidate, result))
      return result;
    runtime_t *validating_runtime = get_ambience_runtime(ambience);
    runtime_validate(validating_runtime, result);
  }
}
// Sets up the given stack so that running it will execute the job's code with
// the job's data as its single stack argument.
//
// Two frames are pushed: a bottom frame running the return stub that holds
// the data value, and above it the frame for the job's code. The push order
// matters; the code frame returns into the stub which then unwinds itself.
static value_t prepare_run_job(runtime_t *runtime, value_t stack, job_t *job) {
  frame_t frame = open_stack(stack);
  // Set up the frame containing the argument. The code frame returns to
  // this and then this returns by itself so at the end, if the job is
  // successful, we're back to an empty stack.
  TRY(push_stack_frame(runtime, stack, &frame, 2,
      ROOT(runtime, empty_array)));
  frame_set_code_block(&frame, ROOT(runtime, return_code_block));
  frame_push_value(&frame, job->data);
  // Set up the frame for running the code.
  size_t frame_size = (size_t) get_code_block_high_water_mark(job->code);
  TRY(push_stack_frame(runtime, stack, &frame, frame_size,
      ROOT(runtime, empty_array)));
  frame_set_code_block(&frame, job->code);
  close_frame(&frame);
  return success();
}
// Pops frames off the given stack until only the bottom frame remains.
static void clear_stack_to_bottom(value_t stack) {
  frame_t frame = open_stack(stack);
  for (;;) {
    if (frame_has_flag(&frame, ffStackBottom))
      break;
    frame_pop_within_stack_piece(&frame);
  }
  close_frame(&frame);
}
// Returns true iff the given stack has been cleared all the way down to its
// bottom frame.
static bool stack_is_clear(value_t stack) {
  frame_t frame = open_stack(stack);
  bool is_bottom = frame_has_flag(&frame, ffStackBottom);
  close_frame(&frame);
  return is_bottom;
}
// Overwrites every slot from the current frame pointer up to the top of the
// stack piece with null so the stack doesn't keep otherwise-dead values
// alive. A cheap hack; a better solution surely exists but eludes me right
// now.
static void zap_stack(value_t stack) {
  frame_t frame = open_stack(stack);
  value_t *limit = frame_get_stack_piece_top(&frame);
  value_t *slot = frame.frame_pointer;
  while (slot != limit) {
    *slot = null();
    ++slot;
  }
  close_frame(&frame);
}
// Runs an individual job on the process' root task. Expects the task's stack
// to be clear on entry; on exit the stack is cleared again (if the job ended
// in an uncaught signal) and zapped so it keeps nothing alive. Returns the
// job's result or the condition that stopped it.
static value_t run_process_job(job_t *job, safe_value_pool_t *pool,
    safe_value_t s_ambience, safe_value_t s_process) {
  safe_value_t s_task = protect(pool, get_process_root_task(deref(s_process)));
  CHECK_TRUE("stack not clear", stack_is_clear(get_task_stack(deref(s_task))));
  runtime_t *runtime = get_ambience_runtime(deref(s_ambience));
  TRY(prepare_run_job(runtime, get_task_stack(deref(s_task)), job));
  value_t result = run_task_until_signal(s_ambience, s_task);
  if (in_condition_cause(ccUncaughtSignal, result)) {
    // The job resulted in an uncaught signal so print the stack trace.
    print_task_stack_trace(deref(s_ambience), deref(s_task));
    // The uncaught signal may have left any amount of stuff on the stack so
    // we clear it.
    clear_stack_to_bottom(get_task_stack(deref(s_task)));
  }
  // Null out the dead region of the stack regardless of how the job ended.
  zap_stack(get_task_stack(deref(s_task)));
  return result;
}
// Grabs the next work job from the given process and executes it on the
// process' main task. Returns the job's result, or a ccProcessIdle condition
// when the process has no more work (no ready jobs, no undelivered and no
// deliverable undertakings).
static value_t run_next_process_job(safe_value_t s_ambience, safe_value_t s_process) {
  process_airlock_t *airlock = get_process_airlock(deref(s_process));
  // First, if there are delivered undertakings ready to be finished we finish
  // those nonblocking.
  TRY(finish_process_delivered_undertakings(deref(s_process), false, NULL));
  runtime_t *runtime = get_ambience_runtime(deref(s_ambience));
  while (true) {
    job_t job;
    struct_zero_fill(job);
    // Try to get the next job that's ready to be run.
    if (!take_process_ready_job(deref(s_process), &job)) {
      // There was no job to run. That doesn't mean we're done, it might just
      // mean that we have to wait for some undertakings to be delivered before
      // we can go on.
      int64_t undelivered = atomic_int64_get(&airlock->undelivered_undertakings);
      if (undelivered > 0) {
        // There is at least one open undertaking that hasn't been delivered
        // yet. Wait (blocking) for that to happen. There is a race condition
        // here where some other undertaking has been delivered in the meantime
        // but that doesn't matter, we'll just run that one instead. In any
        // case there will be, either now or later, an undertaking to finish.
        // And then we'll loop around again.
        TRY(finish_process_delivered_undertakings(deref(s_process), true, NULL));
        continue;
      } else {
        // There are no open undertakings so unless we can finish some now there
        // is simply no more work left.
        //
        // TODO: there is a subtle race condition here if it takes a very long
        //   time between the undelivered undertakings counter being
        //   decremented and the undertaking being delivered, where the count
        //   will be zero but there will be an undertaking on the way that
        //   isn't ready at the point where we call this.
        size_t finish_count = 0;
        TRY(finish_process_delivered_undertakings(deref(s_process), false,
            &finish_count));
        if (finish_count == 0) {
          // There were no more undertakings so we're done it appears.
          return new_condition(ccProcessIdle);
        } else {
          // There were more undertakings so loop around and see if a job has
          // become ready now.
          continue;
        }
      }
    }
    // We have a job; run it with a fresh safe-value pool that is disposed
    // whether or not the job succeeds.
    CREATE_SAFE_VALUE_POOL(runtime, 5, pool);
    TRY_FINALLY {
      E_RETURN(run_process_job(&job, pool, s_ambience, s_process));
    } FINALLY {
      DISPOSE_SAFE_VALUE_POOL(pool);
    } YRT
  }
}
// Keeps running jobs on the given process until it becomes idle, returning
// the value produced by the last completed job (or nothing() if no job ran),
// or the first non-idle condition encountered.
static value_t run_process_until_idle(safe_value_t s_ambience, safe_value_t s_process) {
  value_t last_result = nothing();
  for (;;) {
    value_t step = run_next_process_job(s_ambience, s_process);
    if (!is_condition(step)) {
      // A job completed normally; remember its value and keep going.
      last_result = step;
      continue;
    }
    // Idle means we're done; any other condition is propagated to the caller.
    return in_condition_cause(ccProcessIdle, step) ? last_result : step;
  }
}
// After running a piece of code there may be hanging post mortems that are
// ready to run. This runs those: a garbage collection is forced first
// (presumably what schedules the ready post mortems -- TODO confirm) and then
// the process is drained until it goes idle again.
static value_t run_process_finalizers(safe_value_t s_ambience, safe_value_t s_process) {
  runtime_t *runtime = get_ambience_runtime(deref(s_ambience));
  TRY(runtime_garbage_collect(runtime));
  return run_process_until_idle(s_ambience, s_process);
  // TODO: a post mortem may cause other post mortems to be scheduled and those
  // should also be run. The trick is to know when there definitely are no
  // more which the gc can tell us but that's for later.
}
// Runs the given code block to completion in a fresh process, then runs any
// pending finalizers, returning the code's result. The safe-value pool is
// disposed on all exit paths via TRY_FINALLY.
value_t run_code_block(safe_value_t s_ambience, safe_value_t s_code) {
  runtime_t *runtime = get_ambience_runtime(deref(s_ambience));
  CREATE_SAFE_VALUE_POOL(runtime, 5, pool);
  TRY_FINALLY {
    // Build a process to run the code within.
    E_S_TRY_DEF(s_process, protect(pool, new_heap_process(runtime)));
    job_t job = job_new(runtime, deref(s_code), null(), nothing());
    E_TRY(offer_process_job(runtime, deref(s_process), job));
    // Drain the process, then give finalizers a chance to run before
    // reporting the result.
    E_TRY_DEF(result, run_process_until_idle(s_ambience, s_process));
    E_TRY(run_process_finalizers(s_ambience, s_process));
    E_RETURN(result);
  } FINALLY {
    DISPOSE_SAFE_VALUE_POOL(pool);
  } YRT
}
| {
"content_hash": "47a7437e889dc62fd15453237d330501",
"timestamp": "",
"source": "github",
"line_count": 1118,
"max_line_length": 102,
"avg_line_length": 44.749552772808585,
"alnum_prop": 0.6198081151309215,
"repo_name": "tundra/neutrino",
"id": "94c95a69d3c5ca9230e68325f416775b19a1a102",
"size": "50458",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/c/interp.c",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "929089"
},
{
"name": "C++",
"bytes": "384606"
},
{
"name": "Groff",
"bytes": "109315"
},
{
"name": "Nemerle",
"bytes": "38670"
},
{
"name": "Objective-C",
"bytes": "27585"
},
{
"name": "Python",
"bytes": "156047"
},
{
"name": "Shell",
"bytes": "1441"
}
],
"symlink_target": ""
} |
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.Graphics;
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Text;
namespace MonoVarmint.Widgets
{
//-----------------------------------------------------------------------------------------------
// EmbeddedContentManager- A version of content manager that tries to load content from
// embedded resources first.
//
// The resources can be located anywhere in the assembly. The search algorithm is case insensitive
// and the first matching resource will be used, so be careful of name collisions.
//-----------------------------------------------------------------------------------------------
class EmbeddedContentManager : ContentManager
{
Assembly _localAssembly;
public EmbeddedContentManager(GraphicsDevice graphicsDevice) : base(new EmbeddedContentServiceManager(graphicsDevice))
{
_localAssembly = Assembly.GetExecutingAssembly();
}
protected override Stream OpenStream(string assetName)
{
// Look in the embedded resources first
var searchTerm = "." + assetName.ToLower() + ".xnb";
foreach(var resourceName in _localAssembly.GetManifestResourceNames())
{
if(resourceName.ToLower().EndsWith(searchTerm))
{
return _localAssembly.GetManifestResourceStream(resourceName);
}
}
return base.OpenStream(assetName);
}
}
//-----------------------------------------------------------------------------------------------
// EmbeddedContentServiceManager - Needed by EmbeddedContentManager
//-----------------------------------------------------------------------------------------------
class EmbeddedContentServiceManager : IServiceProvider, IGraphicsDeviceService
{
public GraphicsDevice GraphicsDevice { get; private set; }
public event EventHandler<EventArgs> DeviceCreated;
public event EventHandler<EventArgs> DeviceDisposing;
public event EventHandler<EventArgs> DeviceReset;
public event EventHandler<EventArgs> DeviceResetting;
public EmbeddedContentServiceManager(GraphicsDevice graphicsDevice)
{
GraphicsDevice = graphicsDevice;
}
public object GetService(Type serviceType)
{
if (serviceType.Name != "IGraphicsDeviceService")
{
throw new ApplicationException("Don't know how to prived a " + serviceType.Name);
}
return this;
}
}
}
| {
"content_hash": "8c9a658fc76c0c16198705590734ed46",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 126,
"avg_line_length": 39.43478260869565,
"alnum_prop": 0.5608232267548695,
"repo_name": "nebosite/MonoVarmint",
"id": "7b6e0b99d3072d5f7a52f961622a96bdfb0696e7",
"size": "2723",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Widgets/EmbeddedContentManager.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "361243"
}
],
"symlink_target": ""
} |
date: 30 August 2011 08:00
categories: news
author: Adam Stacoviak
summary: We were featured on the Ruby5 podcast (thanks!).
---
# Featured on the Ruby5 podcast
We were featured on the Ruby5 podcast (thanks!).
## Ruby5: Episode #201 - August 26th, 2011
<a href="http://ruby5.envylabs.com/episodes/204-episode-201-august-26th-2011/stories/1805-the-sass-way">
<img src="/attachments/ruby5.jpeg" class="left" alt="CSS, Sass, Compass" width="140" />
</a>
"Thank the Asset Pipeline for bringing the magic of Sass to the broader Rails community... now everyone needs to learn how to use it correctly. Enter the Sass Way, a great website with tutorials and articles about using Sass."
[Ruby5: The Sass Way - Episode #201](http://ruby5.envylabs.com/episodes/204-episode-201-august-26th-2011/stories/1805-the-sass-way)
| {
"content_hash": "5d37b18d7b0e75330526d1050529cc05",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 226,
"avg_line_length": 43.05263157894737,
"alnum_prop": 0.7469437652811736,
"repo_name": "amwelles/thesassway.com",
"id": "5dc9d5e3732f63556547beede97c959dece48c3d",
"size": "822",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "source/news/featured-on-ruby5.html.markdown",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
package net.fortuna.ical4j.model.component;
import net.fortuna.ical4j.model.PropertyList;
/**
* $Id$ [05-Apr-2004]
*
* Defines an iCalendar standard timezone observance component.
*
* <pre>
*
* standardc = "BEGIN" ":" "STANDARD" CRLF
*
* tzprop
*
* "END" ":" "STANDARD" CRLF
*
* tzprop = 3*(
*
* ; the following are each REQUIRED,
* ; but MUST NOT occur more than once
*
* dtstart / tzoffsetto / tzoffsetfrom /
*
* ; the following are optional,
* ; and MAY occur more than once
*
* comment / rdate / rrule / tzname / x-prop
*
* )
* </pre>
*
* @author Ben Fortuna
*/
public class Standard extends Observance {

    // Serialization version identifier for this component.
    private static final long serialVersionUID = -4750910013406451159L;

    /**
     * Default constructor. Creates an empty STANDARD observance component.
     */
    public Standard() {
        super(STANDARD);
    }

    /**
     * Constructor. Creates a STANDARD observance component initialised with
     * the given properties.
     * @param properties a list of properties
     */
    public Standard(final PropertyList properties) {
        super(STANDARD, properties);
    }
}
| {
"content_hash": "b17f92927c6452624028f6c4f8f97007",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 79,
"avg_line_length": 22.75925925925926,
"alnum_prop": 0.5427176566314077,
"repo_name": "benfortuna/ical4j",
"id": "7966723a1a548263eae9d1b6827dd156f2d6b27f",
"size": "2796",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/net/fortuna/ical4j/model/component/Standard.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Groovy",
"bytes": "299952"
},
{
"name": "HTML",
"bytes": "17137"
},
{
"name": "Java",
"bytes": "1898970"
},
{
"name": "Shell",
"bytes": "704"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Top-level screen layout: a DrawerLayout hosting the main content column
     (toolbar above a fragment container) plus a slide-in navigation view. -->
<android.support.v4.widget.DrawerLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    android:id="@+id/drawer"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:fitsSystemWindows="true">

    <!-- Main content: toolbar stacked above the fragment container. -->
    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:orientation="vertical">

        <android.support.v7.widget.Toolbar
            android:id="@+id/toolbar"
            android:layout_width="match_parent"
            android:layout_height="wrap_content"
            android:background="@color/primary"
            app:theme="@style/ThemeOverlay.AppCompat.Dark.ActionBar" />

        <!-- Fragments are swapped into this container at runtime. -->
        <FrameLayout
            android:id="@+id/container"
            android:layout_width="match_parent"
            android:layout_height="match_parent"
            android:orientation="vertical"
            />

    </LinearLayout>

    <!-- Navigation drawer anchored to the start edge. -->
    <android.support.design.widget.NavigationView
        android:id="@+id/navigation"
        android:layout_width="wrap_content"
        android:layout_height="match_parent"
        android:layout_gravity="start"
        app:headerLayout="@layout/drawer_header"
        app:menu="@menu/drawer"/>

</android.support.v4.widget.DrawerLayout>
"content_hash": "8207faac86e8996e3e41159bf5484cc6",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 71,
"avg_line_length": 39.08571428571429,
"alnum_prop": 0.64546783625731,
"repo_name": "shts/NogiFeed",
"id": "22ea088a7add81eb281c799ece450ad532eaa3ee",
"size": "1368",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/res/layout/activity_top.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2722"
},
{
"name": "Java",
"bytes": "236491"
}
],
"symlink_target": ""
} |
package com.twitter.sdk.android.tweetcomposer;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.net.Uri;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(RobolectricTestRunner.class)
public class TweetComposerIntentBuilderTest {

    @Test
    public void testBuilder_constructor() {
        final TweetComposer.Builder builder = new TweetComposer.Builder(mock(Context.class));
        assertNotNull(builder);
    }

    @Test
    public void testBuilder_constructorNullContext() {
        try {
            new TweetComposer.Builder(null);
            fail();
        } catch (IllegalArgumentException e) {
            assertEquals("Context must not be null.", e.getMessage());
        }
    }

    @Test
    public void testBuilder_text() {
        final Context context = createIntentContext(true);
        final String text = "test";
        final TweetComposer.Builder builder = new TweetComposer.Builder(context).text(text);
        final Intent intent = builder.createTwitterIntent();
        assertEquals(text, intent.getStringExtra(Intent.EXTRA_TEXT));
    }

    @Test
    public void testBuilder_textNull() {
        try {
            new TweetComposer.Builder(mock(Context.class)).text(null);
            fail();
        } catch (IllegalArgumentException ignored) {
            assertEquals("text must not be null.", ignored.getMessage());
        }
    }

    @Test
    public void testBuilder_textAlreadySet() {
        final String text = "test";
        try {
            new TweetComposer.Builder(mock(Context.class)).text(text).text(text);
            fail();
        } catch (IllegalStateException ignored) {
            assertEquals("text already set.", ignored.getMessage());
        }
    }

    @Test
    public void testBuilder_textAndUrl() throws MalformedURLException {
        final Context context = createIntentContext(true);
        final String text = "test";
        final URL url = new URL("http://www.twitter.com");
        final String result = text + " " + url.toString();

        final TweetComposer.Builder builder = new TweetComposer.Builder(context)
                .text(text)
                .url(url);
        final Intent intent = builder.createTwitterIntent();
        assertEquals(result, intent.getStringExtra(Intent.EXTRA_TEXT));
    }

    @Test
    public void testBuilder_url() throws MalformedURLException {
        final Context context = createIntentContext(true);
        final URL url = new URL("http://www.twitter.com");
        final TweetComposer.Builder builder = new TweetComposer.Builder(context).url(url);
        final Intent intent = builder.createTwitterIntent();
        assertEquals(url.toString(), intent.getStringExtra(Intent.EXTRA_TEXT));
    }

    @Test
    public void testBuilder_urlNull() {
        try {
            new TweetComposer.Builder(mock(Context.class)).url(null);
            fail();
        } catch (IllegalArgumentException ignored) {
            assertEquals("url must not be null.", ignored.getMessage());
        }
    }

    @Test
    public void testBuilder_urlAlreadySet() throws MalformedURLException {
        final URL url = new URL("http://www.twitter.com");
        try {
            new TweetComposer.Builder(mock(Context.class)).url(url).url(url);
            fail();
        } catch (IllegalStateException ignored) {
            assertEquals("url already set.", ignored.getMessage());
        }
    }

    @Test
    public void testBuilder_image() {
        final Context context = createIntentContext(true);
        final Uri uri = Uri.parse("http://www.twitter.com");
        final TweetComposer.Builder builder = new TweetComposer.Builder(context).image(uri);
        final Intent intent = builder.createTwitterIntent();
        assertEquals(uri, intent.getParcelableExtra(Intent.EXTRA_STREAM));
    }

    @Test
    public void testBuilder_imageNull() {
        try {
            new TweetComposer.Builder(mock(Context.class)).image(null);
            fail();
        } catch (IllegalArgumentException ignored) {
            assertEquals("imageUri must not be null.", ignored.getMessage());
        }
    }

    @Test
    public void testBuilder_imageAlreadySet() {
        final Uri uri = Uri.parse("http://www.twitter.com");
        try {
            new TweetComposer.Builder(mock(Context.class)).image(uri).image(uri);
            fail();
        } catch (IllegalStateException ignored) {
            assertEquals("imageUri already set.", ignored.getMessage());
        }
    }

    @Test
    public void testBuilder_createIntentTwitterInstalled() {
        final Context context = createIntentContext(true);
        final TweetComposer.Builder builder = new TweetComposer.Builder(context);
        final Intent intentTwitter = builder.createTwitterIntent();
        final Intent intent = builder.createIntent();
        assertNotNull(intent);
        assertNotNull(intentTwitter);
        assertIntentEquals(intentTwitter, intent);
    }

    @Test
    public void testBuilder_createIntentTwitterNotInstalled() {
        final Context context = createIntentContext(false);
        final TweetComposer.Builder builder = new TweetComposer.Builder(context);
        final Intent intent = builder.createIntent();
        final Intent intentTwitter = builder.createTwitterIntent();
        final Intent intentWeb = builder.createWebIntent();
        assertNotNull(intent);
        assertNull(intentTwitter);
        assertIntentEquals(intentWeb, intent);
    }

    @Test
    public void testBuilder_show() {
        final Context context = createIntentContext(true);
        final TweetComposer.Builder builder = new TweetComposer.Builder(context);
        builder.show();
        verify(context).startActivity(any(Intent.class));
    }

    // Builds a mock Context whose PackageManager resolves share intents either to the
    // official Twitter app (twitterInstalled == true) or to an unrelated package.
    private Context createIntentContext(boolean twitterInstalled) {
        final List<ResolveInfo> resolveInfoList = new ArrayList<>();
        final ResolveInfo info = new ResolveInfo();
        info.activityInfo = new ActivityInfo();

        if (twitterInstalled) {
            info.activityInfo.packageName = "com.twitter.android";
            info.activityInfo.name = "Twitter";
        } else {
            info.activityInfo.packageName = "not.twitter.android";
            info.activityInfo.name = "NotTwitter";
        }
        resolveInfoList.add(info);

        final Context context = mock(Context.class);
        final PackageManager manager = mock(PackageManager.class);
        when(context.getPackageManager()).thenReturn(manager);
        when(manager.queryIntentActivities(any(Intent.class), anyInt()))
                .thenReturn(resolveInfoList);

        return context;
    }

    // Asserts the intents carry the same type, action, text and stream extras.
    private void assertIntentEquals(Intent intent, Intent otherIntent) {
        assertEquals(intent.getType(), otherIntent.getType());
        assertEquals(intent.getAction(), otherIntent.getAction());
        assertEquals(intent.getStringExtra(Intent.EXTRA_TEXT),
                otherIntent.getStringExtra(Intent.EXTRA_TEXT));
        // Compare EXTRA_STREAM as a Parcelable (it holds the image Uri, not a String),
        // keyed by the constant itself so the comparison actually checks the extra.
        assertEquals(intent.getParcelableExtra(Intent.EXTRA_STREAM),
                otherIntent.getParcelableExtra(Intent.EXTRA_STREAM));
    }
}
| {
"content_hash": "ef011cef5330b8437672b7d074f318ed",
"timestamp": "",
"source": "github",
"line_count": 220,
"max_line_length": 93,
"avg_line_length": 35.67727272727273,
"alnum_prop": 0.6628869919734998,
"repo_name": "twitter/twitter-kit-android",
"id": "bcd9346980d8a8d0dad21f51207db54b67d6f595",
"size": "8453",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tweet-composer/src/test/java/com/twitter/sdk/android/tweetcomposer/TweetComposerIntentBuilderTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1518196"
}
],
"symlink_target": ""
} |
# Data migration: delete the "common-technology-services-cts" policy group and
# register a redirect from its old URL to the Technology Code of Practice page.
slug = "common-technology-services-cts"
redirect_path = "/government/publications/technology-code-of-practice/technology-code-of-practice"

group = PolicyGroup.find_by(slug:)

# Nothing to do if the group has already been removed.
exit unless group

# Capture the content id before deleting so the redirect can be published.
content_id = group.content_id
group.delete

# Publish the redirect for the English locale via the publishing API.
PublishingApiRedirectWorker.new.perform(content_id, redirect_path, "en")
puts "#{slug} -> #{redirect_path}"
| {
"content_hash": "c14093d9d97ad84169e0b4740226b82c",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 98,
"avg_line_length": 28.916666666666668,
"alnum_prop": 0.7723342939481268,
"repo_name": "alphagov/whitehall",
"id": "85ca3cf756c1e16622994514a952f211f6353379",
"size": "347",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "db/data_migration/20171102132014_remove_and_redirect_cts_group.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "3039"
},
{
"name": "Gherkin",
"bytes": "82444"
},
{
"name": "HTML",
"bytes": "695467"
},
{
"name": "JavaScript",
"bytes": "250602"
},
{
"name": "Procfile",
"bytes": "117"
},
{
"name": "Ruby",
"bytes": "5239261"
},
{
"name": "SCSS",
"bytes": "180354"
},
{
"name": "Shell",
"bytes": "3870"
}
],
"symlink_target": ""
} |
from setuptools import setup

# Distribution metadata for the djangocms-tonicdev package, kept in one place
# so it can be inspected independently of the setup() call.
PACKAGE_INFO = dict(
    name='djangocms-tonicdev',
    version='0.1.7',
    description='DjangoCMS Tonic Notebook',
    author='Aleksandr Zykov',
    author_email='[email protected]',
    url='https://github.com/TigerND/djangocms-tonicdev',
    packages=[
        'djangocms_tonicdev',
        'djangocms_tonicdev.migrations',
    ],
    data_files=[],
    install_requires=[
        'django-cms>=3.2.5',
    ],
    classifiers=[
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
    ],
    include_package_data=True,
    zip_safe=False,
)

setup(**PACKAGE_INFO)
| {
"content_hash": "048a316c9e7ff6779334881656869160",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 56,
"avg_line_length": 25.85185185185185,
"alnum_prop": 0.6045845272206304,
"repo_name": "TigerND/djangocms-tonicdev",
"id": "307dd1ea8a3638f3ddf6cedc11e8799b78f5d2fc",
"size": "721",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "709"
},
{
"name": "JavaScript",
"bytes": "588"
},
{
"name": "Python",
"bytes": "9715"
}
],
"symlink_target": ""
} |
require 'shellwords'
require 'cliqr/command/shell_command'
module Cliqr
  # Utility methods
  #
  # @api private
  class Util
    # Ensure that a variable is a instance object not a class type
    #
    # @return [Object]
    def self.ensure_instance(obj)
      return obj.new if obj.class == Class
      obj
    end

    # Build a help action for a parent config
    #
    # @return [Cliqr::CLI::Action] New action config
    def self.build_help_action(config)
      Cliqr::Config::Action.new.tap do |action_config|
        action_config.name = 'help'
        action_config.description = \
          "The help action for command \"#{config.command}\" which provides details " \
          'and usage information on how to use the command.'
        action_config.handler = Util.help_action_handler(config)
        action_config.help = :disable if config.help?
        action_config.finalize
      end
    end

    # Build a help option for a parent config
    #
    # @return [Cliqr::CLI::Option] New option config
    def self.build_help_option(config)
      Cliqr::Config::Option.new.tap do |option_config|
        option_config.name = 'help'
        option_config.short = 'h'
        option_config.description = "Get helpful information for action \"#{config.command}\" " \
                                    'along with its usage information.'
        option_config.type = Cliqr::Config::BOOLEAN_ARGUMENT_TYPE
        option_config.operator = Cliqr::Command::ArgumentOperator::DEFAULT_ARGUMENT_OPERATOR
        option_config.finalize
      end
    end

    # Build a version action for a parent config
    #
    # @return [Cliqr::CLI::Action] New action config
    def self.build_version_action(config)
      Cliqr::Config::Action.new.tap do |action_config|
        action_config.name = 'version'
        action_config.description = "Get version information for command \"#{config.command}\"."
        action_config.arguments = Cliqr::Config::DISABLE_CONFIG
        action_config.handler = proc do
          puts config.version
        end
        action_config.finalize
      end
    end

    # Build a version option for a parent config
    #
    # @return [Cliqr::CLI::Option] New option config
    def self.build_version_option(config)
      Cliqr::Config::Option.new.tap do |option_config|
        option_config.name = 'version'
        option_config.short = 'v'
        option_config.description = "Get version information for command \"#{config.command}\"."
        option_config.type = Cliqr::Config::BOOLEAN_ARGUMENT_TYPE
        option_config.operator = Cliqr::Command::ArgumentOperator::DEFAULT_ARGUMENT_OPERATOR
        option_config.finalize
      end
    end

    # Action handler for help action
    #
    # Prints usage for the parent command, or for the single action named in
    # the arguments. More than one argument is an error.
    #
    # @return [Proc]
    def self.help_action_handler(config)
      proc do
        if arguments.length > 1
          raise Cliqr::Error::IllegalArgumentError,
                "too many arguments for \"#{command}\" command"
        end
        action_config = arguments.empty? ? config : config.action(arguments.first)
        puts Cliqr::Usage::UsageBuilder.new(environment).build(action_config)
      end
    end

    # Build a shell action for a parent config
    #
    # @return [Cliqr::CLI::Action] New action config
    def self.build_shell_action(config, shell_config)
      Cliqr::Config::Action.new.tap do |action_config|
        action_config.name = if shell_config.name?
                               shell_config.name
                             else
                               'shell'
                             end
        action_config.handler = Cliqr::Command::ShellCommand.new(shell_config)
        action_config.arguments = Cliqr::Config::DISABLE_CONFIG

        # allow description to be overridden
        description = shell_config.description
        description = "Execute a shell in the context of \"#{config.command}\" command." \
          unless shell_config.description?
        action_config.description = description

        action_config.options = shell_config.options
        action_config.events = shell_config.events
        action_config.instance_eval do
          def skip_validation?
            true
          end
        end
        action_config.finalize
      end
    end

    # Build shell config for a parent config
    #
    # @return [Cliqr::CLI::Shell] New action config
    def self.build_shell_config(config)
      Cliqr::Config::Shell.new.tap do |shell_config|
        shell_config.enabled = config.actions?
        shell_config.prompt = Command::ShellPromptBuilder.new(config)
        shell_config.banner = Command::ShellBannerBuilder.new
        shell_config.finalize
      end
    end

    # Sanitize raw command line arguments
    #
    # Accepts either an array of arguments or a single shell-style string
    # (split with Shellwords); anything else yields an empty list. The base
    # command name is stripped when a config is given.
    #
    # @return [Array<String>]
    def self.sanitize_args(args, config = nil)
      sanitized = []
      if args.is_a?(Array)
        sanitized = args
      elsif args.is_a?(String)
        sanitized = Shellwords.shellwords(args)
      end
      remove_base_command(sanitized, config)
    end

    # Remove base command from a sanitized list of arguments
    #
    # @return [Array<String>]
    def self.remove_base_command(sanitized, config)
      if !config.nil? && sanitized[0] == config.root.name.to_s
        sanitized.drop(1)
      else
        sanitized
      end
    end

    # Get handler that forwards command to the help action
    #
    # @return [Proc]
    def self.forward_to_help_handler
      proc do
        unless arguments.empty?
          raise Cliqr::Error::IllegalArgumentError,
                'no arguments allowed for default help action'
        end
        forward "#{command} help"
      end
    end

    # Remove newlines from the end of a string
    #
    # Returns the string unchanged when it has no trailing newlines; strings
    # consisting only of newlines become empty.
    #
    # @return [String]
    def self.trim_newlines(str)
      count = 0
      count += 1 while count < str.length && str[str.length - 1 - count] == "\n"
      count.zero? ? str : str[0...-count]
    end
  end
end
| {
"content_hash": "e5d500330b416293b0600c9ecacd30b8",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 97,
"avg_line_length": 32.064864864864866,
"alnum_prop": 0.6164868509777478,
"repo_name": "anshulverma/cliqr",
"id": "5edbbc3772c81f7294c26a1efbcf951aa7ef72b7",
"size": "5962",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/cliqr/util.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1311"
},
{
"name": "Ruby",
"bytes": "243826"
}
],
"symlink_target": ""
} |
import { all, call, put, takeLatest } from 'redux-saga/effects';
import * as types from './actionTypes';
import spotify from '../client/spotify';
import youTube from '../client/youTube';
import videoSelection from '../client/videoSelection';
import blacklist from '../client/blacklist';
// Retrieves a Spotify API access token and dispatches the outcome to the store.
export function* fetchSpotifyAccessToken(action) {
  try {
    const accessToken = yield call(spotify.fetchSpotifyAccessToken);
    yield put({ type: types.FETCH_SPOTIFY_TOKEN_SUCCEEDED, payload: accessToken });
  } catch (error) {
    yield put({ type: types.FETCH_SPOTIFY_TOKEN_FAILED, message: error.message });
  }
}
// Resolves the initial artist and then a list of similar artists via Spotify.
// `token` and `initialArtistInfo` are deliberately declared with `var`
// (function-scoped) so the catch block can inspect which of them is still
// unset and dispatch the failure action for the step that broke.
export function* determineSimilarArtists(action) {
  try {
    var token = action.payload[1];
    if (!token) { // Attempt to recover if `fetchSpotifyAccessToken` saga failed
      token = yield call(spotify.fetchSpotifyAccessToken);
      action.payload[1] = token;
      yield put({ type: types.FETCH_SPOTIFY_TOKEN_SUCCEEDED, payload: token });
    }
    var initialArtistInfo = yield call(spotify.fetchInitialArtist, ...action.payload);
    yield put({ type: types.FETCH_INITIAL_ARTIST_SUCCEEDED, payload: initialArtistInfo });

    // Replace the first payload slot with the resolved artist before the
    // similar-artists lookup.
    action.payload[0] = initialArtistInfo;
    const similarArtists = yield call(spotify.determineSimilarArtists, ...action.payload);
    yield put({ type: types.DETERMINE_SIMILAR_ARTISTS_SUCCEEDED, payload: similarArtists });
  } catch (e) {
    // Report the failure for whichever step had not completed yet.
    if (!token) {
      yield put({ type: types.FETCH_SPOTIFY_TOKEN_FAILED, message: e.message });
    } else if (!initialArtistInfo) {
      yield put({ type: types.FETCH_INITIAL_ARTIST_FAILED, message: e.message });
    } else {
      yield put({ type: types.DETERMINE_SIMILAR_ARTISTS_FAILED, message: e.message });
    }
  }
}
// Fetches candidate videos from YouTube and then selects one, forwarding the
// `played` list from the payload to the selection client (presumably to avoid
// repeats -- confirm against videoSelection.selectVideo).
export function* fetchVideos(action) {
  try {
    const played = action.payload[1];
    var videos = yield call(youTube.fetchVideos, ...action.payload); // `var` to share scope with catch block
    yield put({ type: types.FETCH_VIDEOS_SUCCEEDED, payload: videos });
    const selectedVideo = yield call(videoSelection.selectVideo, videos, played);
    yield put({ type: types.SELECT_VIDEO_SUCCEEDED, payload: selectedVideo });
  } catch (e) {
    // If `videos` was never assigned the fetch itself failed; otherwise the
    // selection step did.
    if (!videos) {
      yield put({ type: types.FETCH_VIDEOS_FAILED, message: e.message });
    } else {
      yield put({ type: types.SELECT_VIDEO_FAILED, message: e.message });
    }
  }
}
// Adds an entry to the blacklist and reports success or failure to the store.
export function* addToBlacklist(action) {
  try {
    const savedDoc = yield call(blacklist.addToBlacklist, ...action.payload);
    yield put({ type: types.ADD_TO_BLACKLIST_SUCCEEDED, payload: savedDoc });
  } catch (error) {
    yield put({ type: types.ADD_TO_BLACKLIST_FAILED, message: error.message });
  }
}
// Root saga: registers one takeLatest watcher per request action type.
function* sagas() {
  const watchers = [
    takeLatest(types.FETCH_SPOTIFY_TOKEN_REQUESTED, fetchSpotifyAccessToken),
    takeLatest(types.DETERMINE_SIMILAR_ARTISTS_REQUESTED, determineSimilarArtists),
    takeLatest(types.FETCH_VIDEOS_REQUESTED, fetchVideos),
    takeLatest(types.ADD_TO_BLACKLIST_REQUESTED, addToBlacklist)
  ];
  yield all(watchers);
}

export default sagas;
| {
"content_hash": "94a5ce9f00f68036a4e8bcaa0bf9fdb7",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 109,
"avg_line_length": 35.975903614457835,
"alnum_prop": 0.6962491627595445,
"repo_name": "VitaC123/youTubeMixTape",
"id": "7b576ce099b8200a29b29b3cd29d2be50f9ed8e0",
"size": "2986",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "frontend/src/redux/sagas.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4041"
},
{
"name": "HTML",
"bytes": "4016"
},
{
"name": "JavaScript",
"bytes": "26133"
}
],
"symlink_target": ""
} |
/* Compiled stylesheet for angular-carousel (see source map reference at
   the bottom). Brace placement follows the SCSS compiler's output style. */

input[type=range] {
  width: 300px; }

/* Carousel track: slides are absolutely positioned inside it, so it clips
   overflow and only allows vertical touch panning through to the page. */
ul[rn-carousel] {
  overflow: hidden;
  padding: 0;
  white-space: nowrap;
  position: relative;
  perspective: 1000px;
  -ms-touch-action: pan-y;
  touch-action: pan-y; }
  /* Individual slide: stretched to fill the track, stacked at left/right 0. */
  ul[rn-carousel] > li {
    color: black;
    backface-visibility: hidden;
    overflow: visible;
    vertical-align: top;
    position: absolute;
    left: 0;
    right: 0;
    white-space: normal;
    padding: 0;
    margin: 0;
    list-style-type: none;
    width: 100%;
    height: 100%;
    display: inline-block; }

/* prevent flickering when moving buffer */
ul[rn-carousel-buffered] > li {
  display: none; }

ul[rn-carousel-transition="hexagon"] {
  overflow: visible; }

/* indicators */
div.rn-carousel-indicator span {
  cursor: pointer;
  color: #666; }
  div.rn-carousel-indicator span.active {
    color: white; }

/* prev/next controls */
.rn-carousel-control {
  transition: opacity 0.2s ease-out;
  font-size: 2rem;
  position: absolute;
  top: 40%;
  opacity: 0.75;
  cursor: pointer; }
  .rn-carousel-control:hover {
    opacity: 1; }
  .rn-carousel-control.rn-carousel-control-prev {
    left: 0.5em; }
    .rn-carousel-control.rn-carousel-control-prev:before {
      content: "<"; }
  .rn-carousel-control.rn-carousel-control-next {
    right: 0.5em; }
    .rn-carousel-control.rn-carousel-control-next:before {
      content: ">"; }

/*# sourceMappingURL=angular-carousel.css.map */
| {
"content_hash": "f18c758e28ddfd348ec53aaecdf38f0a",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 58,
"avg_line_length": 23.24590163934426,
"alnum_prop": 0.6452750352609309,
"repo_name": "hyeyoungkang/angular-carousel-loop",
"id": "29586e2a008698dfb972caafbf7fe1f1c0fd9347",
"size": "1418",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dist/angular-carousel.css",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "105317"
},
{
"name": "HTML",
"bytes": "29417"
},
{
"name": "JavaScript",
"bytes": "134243"
},
{
"name": "Ruby",
"bytes": "636"
}
],
"symlink_target": ""
} |
#pragma once
#include <libdariadb/meas.h>
#include <libdariadb/utils/async/locker.h>
#include <common/net_cmn_exports.h>
#include <common/net_common.h>
#include <mutex>
#include <tuple>
#include <utility>
#include <boost/pool/object_pool.hpp>
namespace dariadb {
namespace net {
#pragma pack(push, 1)
// One framed network message: a 16-bit length prefix followed by up to
// MAX_MESSAGE_SIZE payload bytes. Declared inside #pragma pack(1), so the
// in-memory layout matches the on-wire framing byte for byte.
struct NetData {
  typedef uint16_t MessageSize;
  static const size_t MAX_MESSAGE_SIZE = std::numeric_limits<MessageSize>::max();

  MessageSize size;               // number of valid bytes in `data`
  uint8_t data[MAX_MESSAGE_SIZE]; // payload buffer; only `size` bytes are meaningful

  CM_EXPORT NetData();
  // Presumably seeds the payload with the packet-kind marker `k` --
  // confirm against the out-of-line definition.
  CM_EXPORT NetData(const DATA_KINDS &k);
  CM_EXPORT ~NetData();

  // Returns a (length, pointer) view of this message suitable for a raw
  // send; exact framing is defined out of line.
  CM_EXPORT std::tuple<MessageSize, uint8_t *> as_buffer();
};
// Minimal header shared by all packets: the first byte carries the
// DATA_KINDS discriminator.
struct Query_header {
  uint8_t kind;
};

// Hello handshake header; `host_size` is presumably the length of a
// host-name string that follows in the payload -- verify against sender.
struct QueryHello_header {
  uint8_t kind;
  uint32_t version;
  uint32_t host_size;
};

// Positive acknowledgement for query `id`.
struct QueryOk_header {
  uint8_t kind;
  QueryNumber id;
};

// Failure reply for query `id` with a protocol error code.
struct QueryError_header {
  uint8_t kind;
  QueryNumber id;
  uint16_t error_code;
};

// Server-side reply in the hello handshake, echoing a query number.
struct QueryHelloFromServer_header {
  uint8_t kind;
  QueryNumber id;
};
// Header for an append (write) request; `count` measurements are
// serialized into the buffer after this header.
struct QueryAppend_header {
  uint8_t kind;
  QueryNumber id;
  uint32_t count;

  /**
   * Serialize measurements from m_array into the buffer behind *hdr.
   *
   * hdr        - target header to fill
   * m_array    - array with measurements
   * size       - length of m_array
   * pos        - position in m_array where processing must start
   * space_left - out: space left in the buffer after processing
   * return     - count of processed measurements
   */
  CM_EXPORT static uint32_t make_query(QueryAppend_header *hdr, const Meas *m_array,
                                       size_t size, size_t pos, size_t *space_left);

  // Deserialize the measurements following this header back into a MeasArray.
  CM_EXPORT MeasArray read_measarray() const;
};
// Interval read request: `ids_count` ids (listed after the header),
// restricted to [from, to] and filtered by `flag`.
struct QueryInterval_header {
  uint8_t kind;
  QueryNumber id;
  Time from;
  Time to;
  Flag flag;
  uint16_t ids_count;
};

// Time-point read request: value of each listed id at time `tp`.
struct QueryTimePoint_header {
  uint8_t kind;
  QueryNumber id;
  Time tp;
  Flag flag;
  uint16_t ids_count;
};

// Current-value request for the listed ids, filtered by `flag`.
struct QueryCurrentValue_header {
  uint8_t kind;
  QueryNumber id;
  Flag flag;
  uint16_t ids_count;
};

// Subscription request: push updates for the listed ids matching `flag`.
// (Identifier kept as-is -- note the historical missing 'y' in "Quer".)
struct QuerSubscribe_header {
  uint8_t kind;
  QueryNumber id;
  Flag flag;
  uint16_t ids_count;
};

// Compaction request covering `pageCount` pages within [from, to].
struct QuerCompact_header {
  uint8_t kind;
  QueryNumber id;
  size_t pageCount;
  Time from;
  Time to;
};
#pragma pack(pop)
// using NetData_Pool = boost::object_pool<NetData>;
// Thread-safe wrapper around a boost::object_pool of NetData buffers.
// All allocation/deallocation serializes on `_locker`, so one pool can be
// shared by concurrent readers and writers of a connection.
struct NetData_Pool {
  utils::async::Locker _locker;
  typedef boost::object_pool<NetData> Pool;
  Pool _pool;

  // Return `nd` to the pool (defined out of line).
  CM_EXPORT void free(Pool::element_type *nd);
  // Allocate a default-constructed NetData from the pool (defined out of line).
  CM_EXPORT Pool::element_type *construct();

  // Allocate a NetData constructed from `a`.
  // std::lock_guard releases the lock even if construction throws (the
  // manual lock()/unlock() pair leaked the lock on exception), and
  // std::forward preserves the argument's value category instead of
  // always passing an lvalue.
  template <class T> Pool::element_type *construct(T &&a) {
    std::lock_guard<utils::async::Locker> lg(_locker);
    return _pool.construct(std::forward<T>(a));
  }
};
// Pointer type handed out by NetData_Pool.
using NetData_ptr = NetData_Pool::Pool::element_type *;
// Size in bytes of the length prefix that precedes every message on the wire.
const size_t MARKER_SIZE = sizeof(NetData::MessageSize);
}
}
| {
"content_hash": "12df2c043acdfe7b00f51d1594fc03fb",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 84,
"avg_line_length": 20.68503937007874,
"alnum_prop": 0.6870955462504759,
"repo_name": "dariadb/dariadb",
"id": "e5226ecbc7bd6de50ffcc173dd317d2f1aa6c3d0",
"size": "2627",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "network/common/net_data.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "521639"
},
{
"name": "CMake",
"bytes": "17976"
},
{
"name": "PowerShell",
"bytes": "143"
},
{
"name": "Shell",
"bytes": "2755"
}
],
"symlink_target": ""
} |
This backend offers read and write access to the Open Sound Control protocol,
spoken primarily by visual interface tools and hardware such as TouchOSC.
#### Global configuration
| Option | Example value | Default value | Description |
|---------------|-----------------------|-----------------------|-----------------------|
| `detect` | `on` | `off` | Output the path of all incoming OSC packets to allow for easier configuration. Any path filters configured using the `root` instance configuration options still apply. |
#### Instance configuration
| Option | Example value | Default value | Description |
|---------------|-----------------------|-----------------------|-----------------------|
| `root` | `/my/osc/path` | none | An OSC path prefix to be prepended to all channels |
| `bind` | `:: 8000` | none | The host and port to listen on |
| `destination`	| `10.11.12.13 8001`	| none			| Remote address to send OSC data to. Setting this enables the instance for output. The special value `learn` causes the MIDIMonster to always reply to the address the last incoming packet came from. A different remote port for responses can be forced with the syntax `learn@<port>` |
Note that specifying an instance root speeds up matching, as packets not matching
it are ignored early in processing.
Channels that are to be output or require a value range different from the default ranges (see below)
require special configuration, as their types and limits have to be set.
This is done by specifying *patterns* in the instance configuration using an assignment of the syntax
```
/local/osc/path = <format> <min> <max> <min> <max> ...
```
The pattern will be matched only against the local part (that is, the path excluding any configured instance root).
Patterns may contain the following expressions (conforming to the [OSC pattern matching specification](http://opensoundcontrol.org/spec-1_0)):
* `?` matches any single legal character
* `*` matches zero or more legal characters
* A comma-separated list of strings inside curly braces `{}` matches any of the strings
* A string of characters within square brackets `[]` matches any character in the string
* Two characters with a `-` between them specify a range of characters
* An exclamation mark immediately after the opening `[` negates the meaning of the expression (ie. it matches characters not in the range)
* Any other legal character matches only itself
**format** may be any sequence of valid OSC type characters. See below for a table of supported
OSC types.
For each component of the format, the minimum and maximum values must be given, separated by spaces.
Components may be accessed in the mapping section as detailed in the next section.
An example configuration for transmission of an OSC message with 2 floating point components with
a range between 0.0 and 2.0 (for example, an X-Y control), would look as follows:
```
/1/xy1 = ff 0.0 2.0 0.0 2.0
```
To configure a range of faders, an expression similar to the following line could be used
```
/1/fader* = f 0.0 1.0
```
When matching channels against the patterns to use, the first matching pattern (in the order in which they have been configured) will be used
as configuration for that channel.
#### Channel specification
A channel may be any valid OSC path, to which the instance root will be prepended if
set. Multi-value controls (such as X-Y pads) are supported by appending `:n` to the path,
where `n` is the parameter index, with the first (and default) one being `0`.
Example mapping:
```
osc1./1/xy1:0 > osc2./1/fader1
```
Note that any channel that is to be output will need to be set up in the instance
configuration.
#### Supported types & value ranges
OSC allows controls to have individual value ranges and supports different parameter types.
The following types are currently supported by the MIDIMonster:
* **i**: 32-bit signed integer
* **f**: 32-bit IEEE floating point
* **h**: 64-bit signed integer
* **d**: 64-bit double precision floating point
For each type, there is a default value range which will be assumed if the channel is not otherwise
configured using the instance configuration. Values out of a channels range will be clipped.
The default ranges are:
* **i**: `0` to `255`
* **f**: `0.0` to `1.0`
* **h**: `0` to `1024`
* **d**: `0.0` to `1.0`
#### Known bugs / problems
The OSC path match currently works on the unit of characters. This may lead to some unexpected results
when matching expressions of the form `*<expr>`.
Ping requests are not yet answered. There may be some problems using broadcast output and input.
| {
"content_hash": "231336c5de9075b0d7103c2aba81c104",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 330,
"avg_line_length": 45.75247524752475,
"alnum_prop": 0.7165115775806102,
"repo_name": "cbdevnet/midimonster",
"id": "61b33245781c128234d5cb02c58dcd8cf4e834c6",
"size": "4644",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "backends/osc.md",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "491131"
},
{
"name": "C++",
"bytes": "7633"
},
{
"name": "Groovy",
"bytes": "2437"
},
{
"name": "Lua",
"bytes": "10625"
},
{
"name": "Makefile",
"bytes": "9852"
},
{
"name": "Python",
"bytes": "234"
},
{
"name": "Roff",
"bytes": "1276"
},
{
"name": "Shell",
"bytes": "20528"
}
],
"symlink_target": ""
} |
import rcomp from 'rcomp'
import SvgFile from 'components/svgfile'
import { darkSlate } from 'style'
import { join } from 'path'
import { readFileSync } from 'fs'
// Component factory plus element helpers; `svgfile` wraps the SvgFile component.
const comp = rcomp()
const { a, svgfile } = comp.els({ svgfile: SvgFile })
// Static inline-style objects.
const styles = {
  // Anchor pinned to the viewport's top-left corner, raised above siblings.
  logo: {
    position: 'absolute',
    height: '17px',
    left: '17px',
    top: '17px',
    zIndex: 1
  },
  // Passed to SvgFile: scale the svg to the anchor height and recolor
  // every shape with the shared dark slate.
  svgfile: {
    svg: { height: '100%' },
    'svg *': { fill: darkSlate }
  }
}
// Read the logo markup once at module load instead of on every render --
// readFileSync is blocking disk I/O and the asset never changes.
const logoSvg = readFileSync(join(__dirname, 'logo.svg'), 'utf8')

// Render: a home link wrapping the inline SVG logo.
comp.render((props) => (
  a({ href: '/', style: styles.logo },
    svgfile({
      src: logoSvg,
      style: styles.svgfile
    }))
))
export default comp
| {
"content_hash": "4cbe01f8c823321f7977eb63e3e0efc1",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 61,
"avg_line_length": 20.28125,
"alnum_prop": 0.5855161787365177,
"repo_name": "adrhino/calf",
"id": "0ccd20fd5cc7300f8e06c2b0789d478810df2391",
"size": "649",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/components/layout/logo.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "55699"
}
],
"symlink_target": ""
} |
Subsets and Splits