text
stringlengths 2
1.04M
| meta
dict |
---|---|
using System;
using System.Text;
namespace AsterNET.FastAGI.Command
{
/// <summary>
/// Returns the value of the given channel variable and understands complex
/// variable names and builtin variables, unlike the GetVariableCommand.<br/>
/// You can also use this command to use custom Asterisk functions. Syntax is "func(args)".<br/>
/// Returns 0 if the variable is not set or channel does not exist. Returns 1 if
/// the variable is set and returns the variable in parentheses.<br/>
/// Example return code: 200 result=1 (testvariable)
/// Available since Asterisk 1.2
/// </summary>
public class GetFullVariableCommand : AGICommand
{
    // Name of the channel variable (or function expression) to read.
    private string variableName;
    // Optional channel to read the variable from; null means the current channel.
    private string channelName;

    /// <summary>
    /// Creates a new GetFullVariableCommand for the current channel.
    /// </summary>
    /// <param name="variable">the name of the variable to retrieve.</param>
    public GetFullVariableCommand(string variable)
    {
        variableName = variable;
    }

    /// <summary>
    /// Creates a new GetFullVariableCommand for the given channel.
    /// </summary>
    /// <param name="variable">the name of the variable to retrieve.</param>
    /// <param name="channel">the name of the channel.</param>
    public GetFullVariableCommand(string variable, string channel)
    {
        variableName = variable;
        channelName = channel;
    }

    /// <summary>
    /// Gets or sets the name of the variable to retrieve.
    /// </summary>
    public string Variable
    {
        get { return variableName; }
        set { variableName = value; }
    }

    /// <summary>
    /// Gets or sets the name of the channel.
    /// </summary>
    public string Channel
    {
        get { return channelName; }
        set { channelName = value; }
    }

    /// <summary>
    /// Builds the AGI command line: "GET FULL VARIABLE &lt;variable&gt; [&lt;channel&gt;]".
    /// </summary>
    public override string BuildCommand()
    {
        string command = "GET FULL VARIABLE " + EscapeAndQuote(variableName);
        if (channelName != null)
        {
            command = command + " " + EscapeAndQuote(channelName);
        }
        return command;
    }
}
} | {
"content_hash": "d10cd9a69aad48af4b8c3a290aedf612",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 97,
"avg_line_length": 27.10958904109589,
"alnum_prop": 0.6488125315816069,
"repo_name": "AsterNET/AsterNET",
"id": "73f25bc6aa5fdbf9c08c576d6cbacd52ad846622",
"size": "1979",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Asterisk.2013/Asterisk.NET/FastAGI/Command/GetFullVariableCommand.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "699876"
}
],
"symlink_target": ""
} |
/*
* 2012-3 Red Hat Inc. and/or its affiliates and other contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.overlord.rtgov.activity.server;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.overlord.commons.services.ServiceRegistryUtil;
import org.overlord.rtgov.common.util.RTGovProperties;
/**
* This class represents a CDI factory for obtaining an activity store implementation.
*
*/
public final class ActivityStoreFactory {
private static final Logger LOG=Logger.getLogger(ActivityStoreFactory.class.getName());
/**
 * Property defining the activity store implementation class.
 */
public static final String ACTIVITY_STORE_CLASS="ActivityStore.class";
// Cached singleton, resolved lazily by getActivityStore() and reset by clear().
// NOTE(review): access is not synchronized; two threads calling
// getActivityStore() concurrently could both run the resolution loop -
// confirm initialization is single-threaded.
private static ActivityStore _instance;
/**
 * Private constructor.
 */
private ActivityStoreFactory() {
}
/**
 * This method resets the factory.
 */
public static void clear() {
_instance = null;
}
/**
 * This method returns an instance of the ActivityStore interface.
 *
 * @return The activity store
 */
public static ActivityStore getActivityStore() {
if (_instance == null) {
// Ask the service registry for all registered ActivityStore implementations.
java.util.Set<ActivityStore> services=ServiceRegistryUtil.getServices(ActivityStore.class);
// The configured class name (property "ActivityStore.class") selects which
// implementation wins. If the property is unset, clsName is null, no service
// matches (equals(null) is false) and _instance stays null.
String clsName=(String)RTGovProperties.getProperties().get(ACTIVITY_STORE_CLASS);
for (ActivityStore as : services) {
if (LOG.isLoggable(Level.FINEST)) {
LOG.finest("Checking activity store impl="+as);
}
if (as.getClass().getName().equals(clsName)) {
// Only overwrite if instance not set
if (_instance == null) {
_instance = as;
if (LOG.isLoggable(Level.FINEST)) {
LOG.finest("Found activity store impl="+as);
}
}
// Stop scanning once the configured implementation has been seen.
break;
}
}
}
if (LOG.isLoggable(Level.FINER)) {
LOG.finer("Activity store instance="+_instance);
}
// May be null when no registered service matched the configured class name.
return (_instance);
}
}
| {
"content_hash": "36b28cc511d3a2d9755c76695e72ca09",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 103,
"avg_line_length": 32.976470588235294,
"alnum_prop": 0.5982875490545844,
"repo_name": "djcoleman/rtgov",
"id": "3d3839cc9ca19f82486757f8575bfb60e899d0e1",
"size": "2803",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "modules/activity-management/activity/src/main/java/org/overlord/rtgov/activity/server/ActivityStoreFactory.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "560"
},
{
"name": "CSS",
"bytes": "21457"
},
{
"name": "HTML",
"bytes": "75224"
},
{
"name": "Java",
"bytes": "2777787"
},
{
"name": "JavaScript",
"bytes": "31419"
},
{
"name": "Shell",
"bytes": "2307"
},
{
"name": "XSLT",
"bytes": "13053"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using System.Xml.Linq;
namespace SonosSharp.Eventing
{
/// <summary>
/// Minimal HTTP server base class that parses UPnP GENA event notifications
/// and raises <see cref="VariableChanged"/> for each changed state variable.
/// Subclasses supply the actual transport via StartInternalAsync/StopInternalAsync.
/// </summary>
public abstract class BasicHttpServer
{
    private bool _isRunning;

    /// <summary>Whether the server is currently running.</summary>
    public bool IsRunning { get { return _isRunning; } }

    /// <summary>The URL that devices should deliver event notifications to.</summary>
    public abstract string CallbackUrl { get; }

    protected abstract Task StartInternalAsync();
    protected abstract Task StopInternalAsync();

    /// <summary>Raised once per state variable found in a received event body.</summary>
    public event EventHandler<HttpVariableChangedEventArgs> VariableChanged;

    /// <summary>Starts the server if it is not already running.</summary>
    public async Task StartAsync()
    {
        if (!_isRunning)
        {
            _isRunning = true;
            await StartInternalAsync();
        }
    }

    /// <summary>Stops the server if it is running.</summary>
    public async Task StopAsync()
    {
        if (_isRunning)
        {
            await StopInternalAsync();
            _isRunning = false;
        }
    }

    /// <summary>Raises <see cref="VariableChanged"/> for a single variable.</summary>
    protected void OnVariableChanged(string variableName, string variableValue)
    {
        // Copy to a local so an unsubscribe between the null check and the
        // invocation cannot cause a NullReferenceException.
        var handler = VariableChanged;
        if (handler != null)
        {
            handler(this, new HttpVariableChangedEventArgs(variableName, variableValue));
        }
    }

    /// <summary>
    /// Parses a raw HTTP request; when it carries the UPnP event headers,
    /// raises <see cref="VariableChanged"/> for every property in the XML body.
    /// </summary>
    /// <param name="fullRequest">the complete request text (request line, headers, body).</param>
    /// <returns>always true (kept for interface compatibility).</returns>
    protected bool ProcessHttpRequest(string fullRequest)
    {
        bool success = true;
        using (StringReader reader = new StringReader(fullRequest))
        {
            var headers = GetHeaders(reader);
            if (ShouldHandleRequest(headers))
            {
                string contents = reader.ReadToEnd();
                var propertySetElement = XElement.Parse(contents);
                var variables =
                    propertySetElement.Elements(Constants.UpnpEventNamespace + "property").Elements().ToList();
                foreach (var variable in variables)
                {
                    OnVariableChanged(variable.Name.LocalName, variable.Value);
                }
            }
        }
        return success;
    }

    /// <summary>
    /// A request is handled only when it carries the UPnP event headers:
    /// NT: upnp:event and NTS: upnp:propchange, plus SID and SEQ.
    /// </summary>
    private bool ShouldHandleRequest(Dictionary<string, string> headers)
    {
        bool shouldHandle = true;
        shouldHandle &= headers.ContainsKey("NT") &&
                        string.Equals(headers["NT"], "upnp:event", StringComparison.OrdinalIgnoreCase);
        shouldHandle &= headers.ContainsKey("NTS") &&
                        string.Equals(headers["NTS"], "upnp:propchange", StringComparison.OrdinalIgnoreCase);
        shouldHandle &= headers.ContainsKey("SID");
        shouldHandle &= headers.ContainsKey("SEQ");
        return shouldHandle;
    }

    /// <summary>
    /// Reads "Name: value" header lines from <paramref name="reader"/> until a
    /// blank line (end of headers) or a malformed line is reached.
    /// </summary>
    private Dictionary<string, string> GetHeaders(StringReader reader)
    {
        // HTTP header names are case-insensitive (RFC 7230), so use a
        // case-insensitive key comparer.
        var results = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        string currentLine = reader.ReadLine();
        while (!String.IsNullOrWhiteSpace(currentLine))
        {
            // Skip the request line ("GET ..." / "NOTIFY ...").
            if (currentLine.StartsWith("GET") || currentLine.StartsWith("NOTIFY"))
            {
                currentLine = reader.ReadLine();
                continue;
            }
            int indexOfColon = currentLine.IndexOf(':');
            if (indexOfColon < 0)
            {
                // Not a header line; stop parsing headers.
                break;
            }
            string key = currentLine.Substring(0, indexOfColon).Trim();
            // Take everything after the colon and trim surrounding whitespace.
            // The previous Substring(indexOfColon + 2) assumed a ": " separator,
            // which mangled "Key:value" and threw on a bare "Key:" line.
            string value = currentLine.Substring(indexOfColon + 1).Trim();
            // Last occurrence wins; Dictionary.Add would throw on a repeated header.
            results[key] = value;
            currentLine = reader.ReadLine();
        }
        return results;
    }
}
/// <summary>
/// Event data describing a single state-variable change reported over HTTP.
/// </summary>
public class HttpVariableChangedEventArgs : EventArgs
{
    /// <summary>The name of the variable that changed.</summary>
    public string VariableName { get; set; }

    /// <summary>The new value of the variable.</summary>
    public string VariableValue { get; set; }

    /// <summary>Creates event data for one variable change.</summary>
    /// <param name="variableName">the name of the changed variable.</param>
    /// <param name="variableValue">its new value.</param>
    public HttpVariableChangedEventArgs(string variableName, string variableValue)
    {
        VariableName = variableName;
        VariableValue = variableValue;
    }
}
} | {
"content_hash": "abaf23b6d230210fae32708fdad90da1",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 115,
"avg_line_length": 31.30075187969925,
"alnum_prop": 0.5445592121066538,
"repo_name": "samneirinck/SonosSharp",
"id": "fb9cd8e0e5f4a1c04d1b5450ae719f64e61b52e2",
"size": "4165",
"binary": false,
"copies": "1",
"ref": "refs/heads/master-old",
"path": "src/SonosSharp/Eventing/BasicHttpServer.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "10382"
}
],
"symlink_target": ""
} |
/* Background colors for order rows, keyed by status. Class names are
   Portuguese status labels (reserva = reservation, faturado = invoiced,
   cancelado = canceled, desistencia = withdrawal). */

/* Reservation: light cyan. */
div.reservaStyle {
background: #E0FFFF;
}
/* Withdrawal of a reservation: pink. */
div.desistenciaStyle {
background: #FFDDEE;
}
/* Container: light yellow. */
div.containerStyle {
background: #FFFFB0;
}
/* Withdrawal of a container: light orange. */
div.desistenciaDoContainerStyle {
background: #FFDDC6;
}
/* Invoiced: light green. */
div.faturadoStyle {
background: #BBFFCC;
}
/* Canceled: gray. */
div.canceladoStyle {
background: #DDDDDD;
}
| {
"content_hash": "e3d0a79a50cc4f250843427a5114bbd7",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 33,
"avg_line_length": 13.26086956521739,
"alnum_prop": 0.6885245901639344,
"repo_name": "tinacg/tinacg.github.io",
"id": "2e58e2be94c140fad724c5466a47befb46943219",
"size": "305",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "faia/aguardandoB/css/pedidocolor.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "115803"
},
{
"name": "HTML",
"bytes": "702493"
},
{
"name": "JavaScript",
"bytes": "2917504"
},
{
"name": "Python",
"bytes": "10912"
}
],
"symlink_target": ""
} |
local word_count = require('word-count').word_count
-- Busted specification for the word-count exercise.
-- word_count(text) must return a table mapping each (lowercased) word in
-- the input string to its number of occurrences.
describe('word-count', function()
-- Basic counting.
it('counts one word', function()
local result = word_count('word')
local expected = { word = 1 }
assert.are.same(expected, result)
end)
it('counts one of each', function()
local result = word_count('one of each')
local expected = { one = 1, of = 1, each = 1 }
assert.are.same(expected, result)
end)
it('counts multiple occurrences', function()
local result = word_count('one fish two fish red fish blue fish')
local expected = { one = 1, fish = 4, two = 1, red = 1, blue = 1 }
assert.are.same(expected, result)
end)
-- Commas and newlines must act as word separators.
it('handles cramped lists', function()
local result = word_count('one,two,three')
local expected = { one = 1, two = 1, three = 1}
assert.are.same(expected, result)
end)
it('handles expanded lists', function()
local result = word_count('one,\ntwo,\nthree')
local expected = { one = 1, two = 1, three = 1}
assert.are.same(expected, result)
end)
-- Punctuation is stripped, but digits count as words.
it('ignores punctuation', function()
local result = word_count('car : carpet as java : javascript!!&@$%^&')
local expected = { car = 1, carpet = 1, as = 1, java = 1, javascript = 1 }
assert.are.same(expected, result)
end)
it('includes numbers', function()
local result = word_count('testing, 1, 2 testing')
local expected = { testing = 2, ['1'] = 1, ['2'] = 1 }
assert.are.same(expected, result)
end)
-- Counting is case-insensitive; keys are lowercased.
it('normalizes case', function()
local result = word_count('go Go GO Stop stop')
local expected = { go = 3, stop = 2 }
assert.are.same(expected, result)
end)
-- Apostrophes inside a word are kept; surrounding quotes are not.
it('counts with apostrophes', function()
local result = word_count("First: don't laugh. Then: don't cry.")
local expected = { first = 1, ["don't"] = 2, laugh = 1, ['then'] = 1, cry = 1}
assert.are.same(expected, result)
end)
it('counts with quotation', function()
local result = word_count("Joe can't tell between 'large' and large.")
local expected = { joe = 1, ["can't"] = 1, tell = 1, between = 1, large = 2, ["and"] = 1}
assert.are.same(expected, result)
end)
-- Prefixes of longer words are distinct words, not partial matches.
it('counts with substrings from the beginning', function()
local result = word_count("Joe can't tell between app, apple and a.")
local expected = { joe = 1, ["can't"] = 1, tell = 1, between = 1, app = 1, apple = 1, ["and"] = 1, a = 1}
assert.are.same(expected, result)
end)
-- Runs of separators must not produce empty "words".
it('does not count multiple spaces as a word', function()
local result = word_count(' multiple whitespaces')
local expected = {multiple = 1, whitespaces = 1}
assert.are.same(expected, result)
end)
it('alternating word separators not detected as a word', function()
local result = word_count(",\n,one,\n ,two \n 'three'")
local expected = {one = 1, two = 1, three = 1}
assert.are.same(expected, result)
end)
end)
| {
"content_hash": "2987bb76ea8d307ee0e7f1b93e6af055",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 109,
"avg_line_length": 35.51851851851852,
"alnum_prop": 0.6239137990962809,
"repo_name": "exercism/xlua",
"id": "77094296733dd5b9018394f4abd37c1536e450bf",
"size": "2918",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "exercises/practice/word-count/word-count_spec.lua",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Lua",
"bytes": "227522"
},
{
"name": "Shell",
"bytes": "640"
}
],
"symlink_target": ""
} |
require 'spec_helper'
# Spec for Gitlab::HashedStorage::Migrator, which moves legacy-storage
# projects to hashed storage by scheduling Sidekiq jobs.
describe Gitlab::HashedStorage::Migrator do
# bulk_schedule enqueues a single worker covering the given ID range.
describe '#bulk_schedule' do
it 'schedules job to StorageMigratorWorker' do
Sidekiq::Testing.fake! do
expect { subject.bulk_schedule(start: 1, finish: 5) }.to change(HashedStorage::MigratorWorker.jobs, :size).by(1)
end
end
end
# bulk_migrate enqueues one migration job per project in the ID range.
describe '#bulk_migrate' do
let(:projects) { create_list(:project, 2, :legacy_storage) }
let(:ids) { projects.map(&:id) }
it 'enqueue jobs to ProjectMigrateHashedStorageWorker' do
Sidekiq::Testing.fake! do
expect { subject.bulk_migrate(start: ids.min, finish: ids.max) }.to change(ProjectMigrateHashedStorageWorker.jobs, :size).by(2)
end
end
# A failing project must not abort the whole batch.
it 'rescues and log exceptions' do
allow_any_instance_of(Project).to receive(:migrate_to_hashed_storage!).and_raise(StandardError)
expect { subject.bulk_migrate(start: ids.min, finish: ids.max) }.not_to raise_error
end
it 'delegates each project in specified range to #migrate' do
projects.each do |project|
expect(subject).to receive(:migrate).with(project)
end
subject.bulk_migrate(start: ids.min, finish: ids.max)
end
# Repositories must be writable again once migration completes.
it 'has migrated projects set as writable' do
perform_enqueued_jobs do
subject.bulk_migrate(start: ids.min, finish: ids.max)
end
projects.each do |project|
expect(project.reload.repository_read_only?).to be_falsey
end
end
end
# migrate handles a single project.
describe '#migrate' do
let(:project) { create(:project, :legacy_storage, :empty_repo) }
it 'enqueues project migration job' do
Sidekiq::Testing.fake! do
expect { subject.migrate(project) }.to change(ProjectMigrateHashedStorageWorker.jobs, :size).by(1)
end
end
it 'rescues and log exceptions' do
allow(project).to receive(:migrate_to_hashed_storage!).and_raise(StandardError)
expect { subject.migrate(project) }.not_to raise_error
end
it 'migrates project storage' do
perform_enqueued_jobs do
subject.migrate(project)
end
expect(project.reload.hashed_storage?(:attachments)).to be_truthy
end
it 'has migrated project set as writable' do
perform_enqueued_jobs do
subject.migrate(project)
end
expect(project.reload.repository_read_only?).to be_falsey
end
# Projects already on hashed storage are a no-op.
context 'when project is already on hashed storage' do
let(:project) { create(:project, :empty_repo) }
it 'doesnt enqueue any migration job' do
Sidekiq::Testing.fake! do
expect { subject.migrate(project) }.not_to change(ProjectMigrateHashedStorageWorker.jobs, :size)
end
end
it 'returns false' do
expect(subject.migrate(project)).to be_falsey
end
end
end
end
| {
"content_hash": "71805abfc5fff3da8ffc4129c53108b7",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 135,
"avg_line_length": 30.571428571428573,
"alnum_prop": 0.6678648454349388,
"repo_name": "axilleas/gitlabhq",
"id": "3942f168ceb1cc831094ffd5c03632995ae9dc97",
"size": "2782",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/lib/gitlab/hashed_storage/migrator_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "683690"
},
{
"name": "Clojure",
"bytes": "79"
},
{
"name": "Dockerfile",
"bytes": "1907"
},
{
"name": "HTML",
"bytes": "1340167"
},
{
"name": "JavaScript",
"bytes": "4309733"
},
{
"name": "Ruby",
"bytes": "19732082"
},
{
"name": "Shell",
"bytes": "44575"
},
{
"name": "Vue",
"bytes": "1040466"
}
],
"symlink_target": ""
} |
.. _ras_gefs:
Ras regulation by GEFs
======================
Identities of Ras-specific GEFs
-------------------------------
There are several guanosine exchange factors (GEFs) that are specific to Ras
subfamily proteins.
[PMID9585556]_: Several genes have been isolated from different organisms
encoding proteins that have a GEF activity specific for Ras (for which we
use the general name RasGEFs throughout this paper): SOS1 and SOS2
([PMID9585556_1]_ [PMID9585556_2]_ [PMID9585556_3]_ [PMID9585556_4]_) ;
Cdc25Mm, also called RasGrf ([PMID9585556_5]_ [PMID9585556_6]_
[PMID9585556_7]_); and mRas-GRF2 ([PMID9585556_8]_).
RasGEFs contain a specific domain responsible for activating Ras proteins.
[PMID9585556]_: The RasGEFs are proteins of considerable length, 120 - 160
kDa, and contain several regions which are generally accepted to represent
structural domains (12). A region of 200 - 300 amino acids, the RasGEF
domain, is shared by all GEFs which act on members of the Ras subfamily,
and their activity is specific toward either Ras, Ral, or Rap. The fact
that truncated versions of various lengths, containing this RasGEF domain,
have been shown to be active RasGEFs in vivo and in vitro (4 , 13 - 16)
confirms that this region indeed represents the Ras-specific guanine
nucleotide exchange domain.
::
def rasgef_monomers():
# Declare a list of RasGEFs along with their site structure.
# The names in the list below are HGNC standard names.
# (note: Cdc25Mm = RASGRF1)
ras_gef_names = ['SOS1', 'SOS2', 'RASGRF1', 'RASGRF2']
for ras_gef_name in ras_gef_names:
Monomer(ras_gef_name, ['rasgef'])
Mechanism of GEFs
-----------------
Some key features of the mechanism:
1. Ras binds GEFs in the absence of nucleotides
2. GEF binding causes Ras to release GTP/GDP.
3. Rebinding of nucleotides causes Ras to release the GEF.
4. Reloading of Ras with GTP vs. GDP is not determined by the GEF, but rather
by the relative cellular concentrations of the nucleotides.
[PMID9690470]_: Biochemical studies of Ras exchange factors have shown that
the complex of Ras with these proteins is stable in the absence of
nucleotides and is dissociated by the rebinding of either GDP or GTP
([PMID9585556]_ [PMID9690470_17]_ [PMID9690470_18]_ [PMID9690470_21]_
[PMID9690470_22]_). The principal role for the exchange factor is to
facilitate nucleotide release, and it does not seem to control
significantly the preferential rebinding of GTP over GDP ([PMID9585556]_,
[PMID9690470_22]_, [PMID7548002]_). Cellular concentrations of GTP are
10-fold higher than GDP, which results in the loading of GTP onto Ras.
The fact that GTP/GDP can displace GEFs, while GEFs can also displace GTP/GDP,
leads to a paradox that is resolved by the fact that Ras undergoes a
conformational change that retains the necessary "state". The structural basis
of this conformational change is described as follows:
[PMID9690470]_: As a nucleotide-exchange factor, Sos functions under two
apparently conflicting imperatives. The interaction between Sos and Ras
must be strong enough to dislodge the tightly bound nucleotide, but the Ras
– Sos complex must also be poised for subsequent displacement by incoming
nucleotides. The structure of the Ras – Sos complex shows that Ras and Sos
meet these demands by forming a tight complex that is anchored at one end
of the nucleotide- binding site, where phosphate and magnesium are normally
bound. The interface between Sos and Ras is mainly hydrophilic, suggesting
a ready unzippering through water-mediated displacements of the
coordinating side chains. The main interacting elements of Sos avoid direct
occlusion of the nucleotide-binding site, except the region where the
terminal phosphate groups and the magnesium ion are bound. This feature
allows incoming nucleotides to reverse the process by competing for the
groups that ligate the phosphate and metal ion.
This conformational state change has been analyzed kinetically and identified
as the process of the nucleotide being first loosely and then tightly bound,
(see also :ref:`ras_gtpase`):
[PMID9690470]_: Kinetic analysis of nucleotide association shows that the
reaction proceeds by the formation of a ternary complex of a loosely bound
nucleotide and Ras – Cdc25Mm followed by conversion to a form in which the
nucleotide is tightly bound to Ras [PMID9585556]_. In light of the
structure of the Ras–Sos complex, the first step can be interpreted as the
interaction of the base and the ribose of the nucleotide with the part of
the Ras binding site that is not occluded by Sos. The second step would
involve a conformational change in the Switch 2 segment and release of
Switch 1, resulting in the restructuring of a competent binding site for
phosphate and magnesium, and the subsequent dissociation of Sos.
The kinetic analysis described in [PMID9585556]_ resulted in the following reaction scheme for the interactions between Ras, GTP/GDP, and GEFs:
.. image:: /images/9585556_rasgef_cycle.png
:width: 600px
Note that the upper equilibria for Ras-nucleotide binding, K1a and K1b, were
implemented in the section :ref:`ras_gtpase`, along with corresponding rates.
Here we implement only the equilibria involving GEFs: K2, K3, K4a and K4b.
::
def ras_gef_exchange_cycle(ras, rasgef, gxp,
k2_list, k3_list, k4a_list, k4b_list):
# Alias for Ras bound to GXP
rasgxp = ras(gef=None, gtp=99) % gxp(p=99)
# Binding of RasGEF to nucleotide-free Ras (K2)
bind(ras(gtp=None, s1s2='closed'), 'gef', rasgef(), 'rasgef', k2_list)
# Binding of RasGEF to RasGXP (K3)
bind(rasgxp(s1s2='open'), 'gef', rasgef(), 'rasgef', k3_list)
# Binding of GXP to Ras/RasGEF complex
bind(ras(s1s2='closed', gef=1) % rasgef(rasgef=1), 'gtp',
gxp(), 'p', k4a_list)
# Isomerization of Ras-RasGEF-GXP from loose to tight
equilibrate(rasgxp(gef=1, s1s2='closed') % rasgef(rasgef=1),
rasgxp(gef=1, s1s2='open') % rasgef(rasgef=1), k4b_list)
Rates of GEF activation
-----------------------
::
# Binding of RasGEF to nucleotide-free Ras
kf2 = 0.33e6 # M^-1 s^-1
kr2 = 1e-3 # s^-1
# Binding of RasGEF to RasGXP
KD3 = 0.6e-3 # M
kf3 = 3.4e4 # M^-1 s^-1 (lower limit)
kr3 = KD3 * kf3 # s^-1
# Binding of GXP to Ras/RasGEF complex
KD4a = 8.6e-6 # M
kf4a = 1e7 # M^-1 s^-1
kr4a = KD4a * kf4a # s^-1
# = kf1a, i.e., on rate is insensitive to presence of GEF
::
# Isomerization of Ras-RasGEF-GXP from loose to tight
kf4b = 20.4 # s^-1
kr4b = 3.9 # s^-1
The following study used purified HRAS and mouse RASGRF1:
[PMID9690470]_: The mechanism of nucleotide release by the catalytic domain
of murine Cdc25 (Cdc25Mm) has been investigated recently using
fluorescently labelled nucleotides [PMID9585556]_. The affinity of Cdc25Mm
for nucleotide-free Ras (Kd = 4.6 nM) is found to be several orders of
magnitude higher than that for nucleotide-bound Ras, and the maximal
acceleration by Cdc25Mm of the rate of dissociation of nucleotide is more
than 10^5.
[PMID9585556]_: The best fit of our data resulted in similar quantum yields
and a value of 4.6 nM for KD2 (NOTE: Kd between nucleotide-free H-Ras and
RasGRF1). A variation in the value for KD2 of approximately 2-fold resulted
in fits of comparable quality.
The activity of GEF (RASGRF1 in this case) does not depend on whether Ras
(HRAS) is loaded with GTP or GDP.
[PMID9585556]_: However, since the intrinsic dissociation rate of Ras for
GTP (1 × 10-5 s-1) is 2-fold lower than that for GDP (2 × 10-5 s-1), the
stimulatory action of Cdc25Mm285 is practically independent of the nature
of the bound nucleotide.
[PMID9585556]_: Although we did not reach complete saturation at 600 μM
Ras‚nucleotide, the data could be fitted to obtain a maximal rate of
3′mdGDP release from Ras of 3.9 s-1 and an apparent Km value of 386 μM.
Since the intrinsic dissociation rate of 3′mdGDP is 2 × 10-5 s-1 (Table 1),
the acceleration of GDP dissociation from Ras by this GEF is approximately
2 × 10^5-fold. An apparent Km of approximately 300 μM was obtained
for the triphosphate-bound form of Ras, confirming that there is no
pronounced specificity toward the nature of the Ras-bound nucleotide (data
not shown).
.. warning:: GEF binding to GTP bound Ras?
Can GEFs bind to Ras and cause ejection of nucleotide before the GTP/GDP
conversion is complete? Moreover, if GEF binds to Ras-GTP, can the
hydrolysis to GDP proceed while GEF is bound?
Instantiate the RasGEF cycle for HRAS and RASGRF1::
def rasgef_exchange_hras_rasgrf1(model):
HRAS = model.monomers['HRAS']
RASGRF1 = model.monomers['RASGRF1']
GTP = model.monomers['GTP']
GDP = model.monomers['GDP']
ras_gef_exchange_cycle(HRAS, RASGRF1, GTP, GDP)
[PMID9585556]_: Therefore, we tested the nucleotide specificity of the
interaction of Cdc25Mm285 (CdcMm285 is the fragment of CdcMm/RasGRF1 containing
the RasGEF domain) with Ras. Figure 1 shows the release of Ras-bound 3′mdGDP or
3′mdGTP (4 μM), in the presence of an excess of unlabeled nucleotide and in the
presence or absence of 1 μM Cdc25Mm285. The Cdc25Mm285-stimulated dissociation
rate of Ras-3′mdGDP is approximately twice that of Ras-3′mdGTP, with values of
0.0098 and 0.0046 s-1, respectively. However, since the intrinsic dissociation
rate of Ras for GTP (1 × 10-5 s-1) is 2-fold lower than that for GDP (2 × 10-5
s-1), the stimulatory action of Cdc25Mm285 is practically independent of the
nature of the bound nucleotide. The difference in stimulated dissociation rates
is somewhat smaller than the results of Jacquet et al. (16) but is similar to
the results with the yeast proteins CDC25 and RAS2 obtained by Haney and Broach
(28).
[PMID9690470]_: The overall shape of the catalytic domain of Sos is that of an
oblong bowl (Fig. 2), with Ras bound at the centre of the bowl. The regions of
Ras that interact most closely with Sos include the phosphate-binding P-loop
(residues 10 – 17) and surrounding segments (including strand 1 and helix 1),
the Switch 1 region (defined here as residues 25–40) and the Switch 2 region
(defined here as residues 57 – 75). Additional interactions are seen with helix
3 (residues 95–105; Fig. 3a, b). The interface between Ras and Sos is primarily
hydrophilic and very extensive, with 3,600 A^2 of surface area buried in the
complex.
[PMID9690470]_: The most obvious effect of Sos binding to Ras is the opening of
the nucleotide binding site as a result of the displacement of Switch 1 of Ras
by the insertion of the helical hairpin formed by aH and aI of Sos (Fig. 5)
Switch 1 and Switch 2 are the only regions of Ras in which structural changes
are directly induced by Sos.
The change in the Switch 1 region of Ras when bound to Sos is drastic...Switch
1 is completely removed from the nucleotide-binding site.
One important aspect of the insertion of the helical hairpin of Sos into the
Switch 1 region is that it does not result in a significant occlusion of the
guanine and ribose binding sites (Fig. 5d). Instead, this structural distortion
breaks the network of direct and water-mediated interactions between Switch 1
and the nucleotide. For example, in the nucleotide-bound forms of Ras, Phe 28
interacts with the guanine base through a perpendicular aromatic – aromatic
interaction (Fig. 5a). Mutation of Phe28 to leucine results in a significant
increase in the intrinsic rate of dissociation of nucleotide from Ras18. In the
Sos complex, the Calpha of Phe 28 moves 9.6 A and the side chain no longer
interacts with the nucleotide-binding site (Fig. 5b).
The Switch 2 region of Ras makes important interactions with GTP and not with
GDP (19,46). Nevertheless, structural changes that are induced in Switch 2 by
Sos result in the exclusion of both GDP and GTP, because they affect magnesium
binding as well as the conformation of Lys 16 in the P-loop, a crucial
phosphate ligand.
Specificity of RASGRF1 for Ras isoforms
---------------------------------------
[PMID9585556]_: Three mammalian isoforms of Ras, H-, K-, and N-Ras, have been
identified which are highly conserved in their primary sequence.
The significance of having more than one isoform is not understood at present,
although the isoforms may have different functions in different tissues, since
certain types of tumors have a preference for a particular activated Ras gene,
such as K-Ras for lung, colon and pancreas cancers and N-Ras for myeloid
leukemias (25). To see whether Cdc25Mm285 acts differently on the three
isoforms, we tested the GEF activity of Cdc25Mm285 on these proteins. As
summarized in Table 1, Cdc25Mm285 is active on all isoforms, being somewhat
more active on N-Ras, in accordance with the results of Leonardsen et al. (26).
| {
"content_hash": "19660e592790f9aa70f71dfca0ac811d",
"timestamp": "",
"source": "github",
"line_count": 267,
"max_line_length": 143,
"avg_line_length": 49.70786516853933,
"alnum_prop": 0.7293550331525015,
"repo_name": "sorgerlab/rasmodel",
"id": "dc9103a696999cd0a62c780c2681812c7318c0c4",
"size": "13321",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "doc/components/ras/ras_gefs.rst",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "999"
},
{
"name": "Matlab",
"bytes": "256118"
},
{
"name": "Python",
"bytes": "261286"
}
],
"symlink_target": ""
} |
class Nis::Struct
  # Parsed explorer "view model" entry pairing a transaction with its block.
  #
  # @attr [Array <Nis::Struct::Transaction>] txes
  # @attr [String] block
  # @attr [String] hash
  # @see https://nemproject.github.io/#explorerViewModelTransaction
  class ExplorerViewModelTransaction
    include Nis::Util::Assignable

    attr_accessor :txes, :block, :hash

    # Builds an instance from a raw attribute hash, converting the nested
    # transaction list and block data into their struct counterparts.
    # Note: the given attrs hash is modified in place.
    def self.build(attrs)
      built_txes = attrs[:txes].map { |raw_tx| Transaction.build(raw_tx) }
      attrs[:txes] = built_txes
      attrs[:block] = Block.build(attrs[:block])
      new(attrs)
    end
  end
end
| {
"content_hash": "7720ca59e2a1ce5bbe51dea71c110089",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 68,
"avg_line_length": 29.6875,
"alnum_prop": 0.6673684210526316,
"repo_name": "44uk/nis-ruby",
"id": "40ac0b8642e8f1362ea5ba108b6664cfda2450a5",
"size": "475",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/nis/struct/explorer_view_model_transaction.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "211728"
}
],
"symlink_target": ""
} |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
'use strict';
/**
* @fileoverview Model is a parsed representation of the
* TraceEvents obtained from base/trace_event in which the begin-end
* tokens are converted into a hierarchy of processes, threads,
* subrows, and slices.
*
* The building block of the model is a slice. A slice is roughly
* equivalent to function call executing on a specific thread. As a
* result, slices may have one or more subslices.
*
* A thread contains one or more subrows of slices. Row 0 corresponds to
* the "root" slices, e.g. the topmost slices. Row 1 contains slices that
* are nested 1 deep in the stack, and so on. We use these subrows to draw
* nesting tasks.
*
*/
// Duplicate 'use strict' removed: the directive is already declared at the
// top of the file.
base.exportTo('tracing', function() {
  // Monotonically increasing counter backing GUID.allocate(); starts at 1.
  var nextGUID = 1;

  /**
   * Allocator for integer IDs that are unique within this page session.
   */
  var GUID = {
    /**
     * @return {number} The next unused GUID.
     */
    allocate: function() {
      return nextGUID++;
    }
  };

  return {
    GUID: GUID
  };
});
| {
"content_hash": "1af04f23efcf40ef2a6bdbee8d310c4b",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 74,
"avg_line_length": 27.783783783783782,
"alnum_prop": 0.6945525291828794,
"repo_name": "CyanogenMod/android_external_chromium-trace",
"id": "0506ba79e7a96ba6617d5bacc72ec745bf15439f",
"size": "1028",
"binary": false,
"copies": "1",
"ref": "refs/heads/cm-10.2",
"path": "trace-viewer/src/guid.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "12906"
},
{
"name": "JavaScript",
"bytes": "487461"
},
{
"name": "Perl",
"bytes": "1568"
},
{
"name": "Python",
"bytes": "1211544"
},
{
"name": "Shell",
"bytes": "447"
}
],
"symlink_target": ""
} |
#ifndef _RBDL_EIGENMATH_H
#define _RBDL_EIGENMATH_H
/** 3-D double vector derived from Eigen::Vector3d.
 *
 * Adds the constructor/assignment plumbing Eigen requires when
 * inheriting from a matrix type (see Eigen's "Inheriting from Matrix"
 * guide), an element-wise constructor, and a matching set().
 */
class Vector3_t : public Eigen::Vector3d
{
  public:
    typedef Eigen::Vector3d Base;

    // Construct from any compatible Eigen expression, so expression
    // results convert implicitly into a Vector3_t.
    template<typename OtherDerived>
    Vector3_t(const Eigen::MatrixBase<OtherDerived>& other)
      : Eigen::Vector3d(other)
    {}

    // Assign from any compatible Eigen expression.
    template<typename OtherDerived>
    Vector3_t& operator=(const Eigen::MatrixBase<OtherDerived>& other)
    {
      this->Base::operator=(other);
      return *this;
    }

    // Default constructor (follows Eigen::Vector3d semantics).
    EIGEN_STRONG_INLINE Vector3_t()
    {}

    // Element-wise constructor: (v0, v1, v2).
    EIGEN_STRONG_INLINE Vector3_t(
      const double& v0, const double& v1, const double& v2
    )
    {
      Base::_check_template_params(); // Eigen-internal template sanity check.
      EIGEN_STATIC_ASSERT_VECTOR_SPECIFIC_SIZE(Matrix, 3)
      (*this) << v0, v1, v2;
    }

    // Overwrite all three elements in place; mirrors the element-wise
    // constructor above.
    void set(const double& v0, const double& v1, const double& v2)
    {
      Base::_check_template_params(); // Eigen-internal template sanity check.
      EIGEN_STATIC_ASSERT_VECTOR_SPECIFIC_SIZE(Matrix, 3)
      (*this) << v0, v1, v2;
    }
};
class Matrix3_t : public Eigen::Matrix3d
{
public:
typedef Eigen::Matrix3d Base;
template<typename OtherDerived>
Matrix3_t(const Eigen::MatrixBase<OtherDerived>& other)
: Eigen::Matrix3d(other)
{}
template<typename OtherDerived>
Matrix3_t& operator=(const Eigen::MatrixBase<OtherDerived>& other)
{
this->Base::operator=(other);
return *this;
}
EIGEN_STRONG_INLINE Matrix3_t()
{}
EIGEN_STRONG_INLINE Matrix3_t(
const double& m00, const double& m01, const double& m02,
const double& m10, const double& m11, const double& m12,
const double& m20, const double& m21, const double& m22
)
{
Base::_check_template_params();
EIGEN_STATIC_ASSERT_MATRIX_SPECIFIC_SIZE(Matrix, 3, 3)
(*this)
<< m00, m01, m02,
m10, m11, m12,
m20, m21, m22
;
}
};
/** 6-D spatial vector (double) derived from Eigen::Matrix<double, 6, 1>.
 *
 * Used for spatial-algebra quantities in RBDL. Adds the Eigen
 * subclassing boilerplate, an element-wise constructor, and a
 * matching set().
 */
class SpatialVector_t : public Eigen::Matrix<double, 6, 1>
{
  public:
    typedef Eigen::Matrix<double, 6, 1> Base;

    // Construct from any compatible Eigen expression.
    template<typename OtherDerived>
    SpatialVector_t(const Eigen::MatrixBase<OtherDerived>& other)
      : Eigen::Matrix<double, 6, 1>(other)
    {}

    // Assign from any compatible Eigen expression.
    template<typename OtherDerived>
    SpatialVector_t& operator=(const Eigen::MatrixBase<OtherDerived>& other)
    {
      this->Base::operator=(other);
      return *this;
    }

    // Default constructor (follows Eigen semantics).
    EIGEN_STRONG_INLINE SpatialVector_t()
    {}

    // Element-wise constructor: (v0, ..., v5).
    EIGEN_STRONG_INLINE SpatialVector_t(
      const double& v0, const double& v1, const double& v2,
      const double& v3, const double& v4, const double& v5
    )
    {
      Base::_check_template_params(); // Eigen-internal template sanity check.
      EIGEN_STATIC_ASSERT_VECTOR_SPECIFIC_SIZE(Matrix, 6)
      (*this) << v0, v1, v2, v3, v4, v5;
    }

    // Overwrite all six elements in place; mirrors the element-wise
    // constructor above.
    void set(
      const double& v0, const double& v1, const double& v2,
      const double& v3, const double& v4, const double& v5
    )
    {
      Base::_check_template_params(); // Eigen-internal template sanity check.
      EIGEN_STATIC_ASSERT_VECTOR_SPECIFIC_SIZE(Matrix, 6)
      (*this) << v0, v1, v2, v3, v4, v5;
    }
};
/** 6x6 spatial matrix (double) derived from Eigen::Matrix<double, 6, 6>.
 *
 * Adds the Eigen subclassing boilerplate, an element-wise constructor
 * (row-major argument order), and a matching set(). The `Scalar` type
 * used in the signatures below is the typedef inherited from the Eigen
 * base class, i.e. double for this instantiation.
 */
class SpatialMatrix_t : public Eigen::Matrix<double, 6, 6>
{
  public:
    typedef Eigen::Matrix<double, 6, 6> Base;

    // Construct from any compatible Eigen expression.
    template<typename OtherDerived>
    SpatialMatrix_t(const Eigen::MatrixBase<OtherDerived>& other)
      : Eigen::Matrix<double, 6, 6>(other)
    {}

    // Assign from any compatible Eigen expression.
    template<typename OtherDerived>
    SpatialMatrix_t& operator=(const Eigen::MatrixBase<OtherDerived>& other)
    {
      this->Base::operator=(other);
      return *this;
    }

    // Default constructor (follows Eigen semantics).
    EIGEN_STRONG_INLINE SpatialMatrix_t()
    {}

    // Element-wise constructor, arguments in row-major order
    // (m<row><column>).
    EIGEN_STRONG_INLINE SpatialMatrix_t(
      const Scalar& m00, const Scalar& m01, const Scalar& m02, const Scalar& m03, const Scalar& m04, const Scalar& m05,
      const Scalar& m10, const Scalar& m11, const Scalar& m12, const Scalar& m13, const Scalar& m14, const Scalar& m15,
      const Scalar& m20, const Scalar& m21, const Scalar& m22, const Scalar& m23, const Scalar& m24, const Scalar& m25,
      const Scalar& m30, const Scalar& m31, const Scalar& m32, const Scalar& m33, const Scalar& m34, const Scalar& m35,
      const Scalar& m40, const Scalar& m41, const Scalar& m42, const Scalar& m43, const Scalar& m44, const Scalar& m45,
      const Scalar& m50, const Scalar& m51, const Scalar& m52, const Scalar& m53, const Scalar& m54, const Scalar& m55
    )
    {
      Base::_check_template_params(); // Eigen-internal template sanity check.
      EIGEN_STATIC_ASSERT_MATRIX_SPECIFIC_SIZE(Matrix, 6, 6)
      (*this)
        << m00, m01, m02, m03, m04, m05
        , m10, m11, m12, m13, m14, m15
        , m20, m21, m22, m23, m24, m25
        , m30, m31, m32, m33, m34, m35
        , m40, m41, m42, m43, m44, m45
        , m50, m51, m52, m53, m54, m55
      ;
    }

    // Overwrite all 36 entries in place (row-major argument order);
    // mirrors the element-wise constructor above.
    void set(
      const Scalar& m00, const Scalar& m01, const Scalar& m02, const Scalar& m03, const Scalar& m04, const Scalar& m05,
      const Scalar& m10, const Scalar& m11, const Scalar& m12, const Scalar& m13, const Scalar& m14, const Scalar& m15,
      const Scalar& m20, const Scalar& m21, const Scalar& m22, const Scalar& m23, const Scalar& m24, const Scalar& m25,
      const Scalar& m30, const Scalar& m31, const Scalar& m32, const Scalar& m33, const Scalar& m34, const Scalar& m35,
      const Scalar& m40, const Scalar& m41, const Scalar& m42, const Scalar& m43, const Scalar& m44, const Scalar& m45,
      const Scalar& m50, const Scalar& m51, const Scalar& m52, const Scalar& m53, const Scalar& m54, const Scalar& m55
    )
    {
      Base::_check_template_params(); // Eigen-internal template sanity check.
      EIGEN_STATIC_ASSERT_MATRIX_SPECIFIC_SIZE(Matrix, 6, 6)
      (*this)
        << m00, m01, m02, m03, m04, m05
        , m10, m11, m12, m13, m14, m15
        , m20, m21, m22, m23, m24, m25
        , m30, m31, m32, m33, m34, m35
        , m40, m41, m42, m43, m44, m45
        , m50, m51, m52, m53, m54, m55
      ;
    }
};
#endif /* _RBDL_EIGENMATH_H */
| {
"content_hash": "52e47c3cee64beccb9aabd8823195a58",
"timestamp": "",
"source": "github",
"line_count": 189,
"max_line_length": 117,
"avg_line_length": 28.70899470899471,
"alnum_prop": 0.6577589384445264,
"repo_name": "BADBDY23/nimbro-op-ros",
"id": "d5bd5b38132f78b38527090a8cb607df6f347077",
"size": "5616",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/nimbro_robotcontrol/contrib/rbdl/source/include/rbdl/rbdl_eigenmath.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "9720"
},
{
"name": "C",
"bytes": "2435555"
},
{
"name": "C++",
"bytes": "2862787"
},
{
"name": "CSS",
"bytes": "1647"
},
{
"name": "Lua",
"bytes": "3969"
},
{
"name": "M",
"bytes": "3800"
},
{
"name": "Makefile",
"bytes": "3426"
},
{
"name": "Matlab",
"bytes": "13239"
},
{
"name": "Objective-C",
"bytes": "5187"
},
{
"name": "Python",
"bytes": "66379"
},
{
"name": "Shell",
"bytes": "11137"
},
{
"name": "TeX",
"bytes": "62048"
}
],
"symlink_target": ""
} |
package com.facebook.buck.artifact_cache;
import com.facebook.buck.artifact_cache.config.ArtifactCacheBuckConfig;
import com.facebook.buck.artifact_cache.thrift.BuckCacheFetchResponse;
import com.facebook.buck.artifact_cache.thrift.BuckCacheResponse;
import com.facebook.buck.artifact_cache.thrift.BuckCacheStoreResponse;
import com.facebook.buck.core.cell.CellPathResolver;
import com.facebook.buck.core.cell.TestCellPathResolver;
import com.facebook.buck.core.model.BuildId;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.parser.buildtargetparser.ParsingUnconfiguredBuildTargetFactory;
import com.facebook.buck.core.parser.buildtargetparser.UnconfiguredBuildTargetFactory;
import com.facebook.buck.core.rulekey.RuleKey;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.BuckEventBusForTests;
import com.facebook.buck.io.file.BorrowablePath;
import com.facebook.buck.io.file.LazyPath;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.impl.FakeProjectFilesystem;
import com.facebook.buck.slb.ThriftUtil;
import com.facebook.buck.support.bgtasks.BackgroundTaskManager;
import com.facebook.buck.support.bgtasks.TaskManagerScope;
import com.facebook.buck.support.bgtasks.TestBackgroundTaskManager;
import com.facebook.buck.testutil.TemporaryPaths;
import com.facebook.buck.testutil.integration.HttpdForTests;
import com.google.common.base.Charsets;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.KeyPair;
import java.security.PrivateKey;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import javax.net.ssl.X509KeyManager;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import okhttp3.tls.HandshakeCertificates;
import okhttp3.tls.HeldCertificate;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
/**
 * Integration tests verifying that Buck's HTTP and Thrift-over-HTTP artifact caches present the
 * configured client TLS certificate to an HTTPS cache server, for both fetch and store requests.
 * Each test spins up a local HTTPS server whose handler records the peer certificate chain and
 * then asserts that exactly the expected client certificate was sent.
 */
public class ArtifactCachesIntegrationTest {

  /**
   * Certs generated with: openssl genrsa -out ca.key 2048 openssl req -new -x509 -days 3650 -key
   * ca.key -out ca.crt openssl genrsa -out client.key 2048 openssl req -new -key client.key -out
   * client.csr openssl x509 -req -days 3650 -in client.csr -CA ca.crt -CAkey ca.key -set_serial 01
   * -out client.crt openssl pkcs8 -inform PEM -outform PEM -in client.key -out client.key.pkcs8
   * -nocrypt -topk8 openssl genrsa -out server.key 2048 openssl req -new -key server.key -out
   * server.csr openssl x509 -req -days 3650 -in server.csr -CA ca.crt -CAkey ca.key -set_serial 02
   * -out server.crt openssl pkcs8 -inform PEM -outform PEM -in server.key -out server.key.pkcs8
   * -nocrypt -topk8
   */
  private static final String SAMPLE_CLIENT_CERT =
      "-----BEGIN CERTIFICATE-----\n"
          + "MIIDEjCCAfoCAQEwDQYJKoZIhvcNAQEFBQAwTTELMAkGA1UEBhMCVVMxEzARBgNV\n"
          + "BAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxFzAVBgNVBAMMDmNhLmV4\n"
          + "YW1wbGUuY29tMB4XDTE4MTAwMzE4Mjk1N1oXDTI4MDkzMDE4Mjk1N1owUTELMAkG\n"
          + "A1UEBhMCVVMxEzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUx\n"
          + "GzAZBgNVBAMMEmNsaWVudC5leGFtcGxlLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD\n"
          + "ggEPADCCAQoCggEBANyyXUD+FSzICbv2JF7Z0Xnx9HVO1sZFjJlmTDknt/nyRw1y\n"
          + "sOZMO1LE7Wit24k6amqAZhjceehmPZIdQbtLxpBzwmtYum6qymLaC34Xx2LYEG4P\n"
          + "RJHY9AtMrb8hd4X4ZQD+bhAH59u+kTnVO+0vlOnyT3xPKVQQ+FEoErfmBDpKSiaj\n"
          + "v/ireTC/VrAcB24qhONKJuWK8xxu7vuJ6uNFU83IXrqgjS0iXkDWTXAI/dxZLexZ\n"
          + "Gm6nu66VCZlUbAP2Q1L0SPf2ZGKORo8VBl6MI+cT09k31CFjU4xaW9vosQQ9dInb\n"
          + "/5FK9K65OrbRIO3QdZclMs1Y5uDpLiFSvDfrZmMCAwEAATANBgkqhkiG9w0BAQUF\n"
          + "AAOCAQEAY56F8WLWwoCDCYF1YD4LcMRq8cuCMTTNk9v8gD7VSORuC+7bsyvTpMLx\n"
          + "qZbNNCJsv+L5GbRDnH4T98OEtrOn6pAgW8y7jKZNISPo6Tqohvn2Bi40OiCBNynr\n"
          + "0bki3HpLxDqgkOjbNCO35vHLs7ZtY1EijBnphlW57e4rXMAe63qkblWSXfxKo33+\n"
          + "0l4OiL/O0gPRrdeJEAU0k/GPgMdHd3QierkKg9LhZEIuU3bTMassPiWDwVGUXDov\n"
          + "ZEWyZ0qCcFV2nj23zPr/16FFvxuZEcGd9fBDLbJHbkBH1nlikFnchjvGCzH5gEcA\n"
          + "hpHC7IcvLgJPOTV0HbyaHmCxhBU8IQ==\n"
          + "-----END CERTIFICATE-----";

  // PKCS#8 private key matching SAMPLE_CLIENT_CERT.
  private static final String SAMPLE_CLIENT_KEY =
      "-----BEGIN PRIVATE KEY-----\n"
          + "MIIEugIBADANBgkqhkiG9w0BAQEFAASCBKQwggSgAgEAAoIBAQDcsl1A/hUsyAm7\n"
          + "9iRe2dF58fR1TtbGRYyZZkw5J7f58kcNcrDmTDtSxO1orduJOmpqgGYY3HnoZj2S\n"
          + "HUG7S8aQc8JrWLpuqspi2gt+F8di2BBuD0SR2PQLTK2/IXeF+GUA/m4QB+fbvpE5\n"
          + "1TvtL5Tp8k98TylUEPhRKBK35gQ6Skomo7/4q3kwv1awHAduKoTjSiblivMcbu77\n"
          + "ierjRVPNyF66oI0tIl5A1k1wCP3cWS3sWRpup7uulQmZVGwD9kNS9Ej39mRijkaP\n"
          + "FQZejCPnE9PZN9QhY1OMWlvb6LEEPXSJ2/+RSvSuuTq20SDt0HWXJTLNWObg6S4h\n"
          + "Urw362ZjAgMBAAECggEAJjDjfFS7u1Uegh1VK+jLnCunnwk2l3b/nqgaNqXN633m\n"
          + "l8gqHqBAf9E+OCgl3nhyY922TUUR/4p5yygu8MdrJCI0GblwAaiifzq2VGqvAUbc\n"
          + "iP8xYX/Gs5HgWzviYBec+vAMgc+satVucjCZPzFFtrpM0Pkt8LNDFWA25QXz41YX\n"
          + "cpqUCR4tCGz5K3hI2XTeQehNrbCjzq01AT2+jY05JSDaU2lLc7b+tCcI+M6rWjF1\n"
          + "V2XufeVvYm/sG6eLasWIxpDKHFZCOvB1m6h1t+59d7e4y4rokjg2fspgTAjYNObb\n"
          + "YhDzxWhNUVQcbeo8OTwYZnZaQprOqY6uEo4C6E75qQKBgQD1NXB8roZoExYYkTeb\n"
          + "nK+1UD23opJTcbVGiqhP9qp7HP5o+e9Gk3TSZplZnIpaPqSib+tR3TU5lKEMefWS\n"
          + "p3Ou8K8Qgd9nlqK/gBuoT5ZmgaWr6HaUP5Pt4FOjrZ3g1M2kR7r9yqqg/moVvpiP\n"
          + "SBFzB+o6eCOjfjErTFXEXDeXvwKBgQDmaMTGvSY29QeVju8on9kgjHeZC1geYmav\n"
          + "23n6MrLwS0F5O/jVzZAll/hCljeZML/0aI733MZiDPd5qsn6jx6oyZ6GyWAtxoJw\n"
          + "JO+hRZq3dG/h74rp8aN/yfA2VPTfXCzJtsGBH9sF18eTnyfW2HXb+D7QT9tTlbQR\n"
          + "s8gL9qg6XQJ/XOjitltqkgSpWqWrbErySMEeoXX3+6YaCaCAJcxQzFUwEJajExrM\n"
          + "KOy3Lj0iLw+NUf8WKu6mPCsU2qVbZzYLnz2TF64d+CIbiHQCBsQhOLXnEDwEsidk\n"
          + "5b0Z8+rU51u6j4SeVYt1G4tKpvKQ27ly4yMcnQrodgpalw1VchF+/wKBgAa3CvUX\n"
          + "0iNL5Nqw/btbXUKblWi6cekAySla5iUqkRh7uP7Fhq0Efqz5ztxx8FDgoNeIrJIA\n"
          + "ty9oXVYIajaJMUWOCra267ypymdmTC2RD79E/3XAO3Yx+qfgxMVwmGpiD1QZpW4T\n"
          + "9Zgn/8MHomuah2TPyVTc3vGCrWrOqIfgumppAoGAB4zQWSn4+le24DHwgYwCChkT\n"
          + "s7wao8DiTMBRGYmfeAgMx/U2U3m/gN9q/+7WT+X4AKseXoWKuDegQq4CHlgkDyh2\n"
          + "B7sBo4ZpLm3kOlUlznssMBUEG2i/iyZGPwHBKcUskemLL5M2wIHH1O/CYJ3jDMcK\n"
          + "9kGk/IHTW2kCBLs+mVA=\n"
          + "-----END PRIVATE KEY-----";

  // Certificate presented by the in-process HTTPS test server.
  private static final String SAMPLE_SERVER_CERT =
      "-----BEGIN CERTIFICATE-----\n"
          + "MIIDEjCCAfoCAQIwDQYJKoZIhvcNAQEFBQAwTTELMAkGA1UEBhMCVVMxEzARBgNV\n"
          + "BAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxFzAVBgNVBAMMDmNhLmV4\n"
          + "YW1wbGUuY29tMB4XDTE4MTAwMzE4MzAwOVoXDTI4MDkzMDE4MzAwOVowUTELMAkG\n"
          + "A1UEBhMCVVMxEzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUx\n"
          + "GzAZBgNVBAMMEnNlcnZlci5leGFtcGxlLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD\n"
          + "ggEPADCCAQoCggEBAMRkUtz2qYmvJ2xWbVNHbGOoEinDqxXY749kEVf9swGDCtbd\n"
          + "qNmuB3P81CoEWm3O1ZnIBfgD6hNzC9YC0KJ16x7B7YbR37rw/so0AVIdiDU2Ftjz\n"
          + "N76Ih+GXKV1ZXE8Noq5W9BfccMxhwEr5+eI1v11V91x9LgMLzbUzbnr0SS7+/VOb\n"
          + "0C2tG2QSvyY33NMAAGyFRc9EPIB22blaIaylTqsp11akDd8im1+x/lpvJAt7qgxX\n"
          + "8bgtlI+z7d7bmvdO6bUlFWYXxgl2SFNdbFHyz7TvbbQrxEfTMZiFYgsmrp+0mGG7\n"
          + "2putPVyvE2x9sMUCXsnsZT6gHGKaNGURFWiJwtECAwEAATANBgkqhkiG9w0BAQUF\n"
          + "AAOCAQEAOlzAIo2c36+VUtZNbrOs767daO0WY4a+5tV+z9wU5dNa09MO74yN0cYl\n"
          + "O+4Kf9646GvVFfP0d3YLSivWJ8BC2j6m/plugnyjorO5eGdTeWaZk8foRpnK/yys\n"
          + "lCU7OT8NxmUp+ch+Oer6RyOG18HP7eRV5ejC+PoaCFlAq+rrdA9dZm4sQCRgWVd9\n"
          + "xWXJVSTmF2X7U6bT4r52P7ETLpqiG7ZHkZvZo8KbPi8U5V0CAqsV9J9QDOfJvstl\n"
          + "oN2PC/nv6B6b7ZNGj3wMWhoYBDT7gWgKeM0PlXERyjgMX3Ckn4j67u4trV1/TLUz\n"
          + "MUcHV2A4aEJMpR+W74/BRmKAPMwUCw==\n"
          + "-----END CERTIFICATE-----";

  // PKCS#8 private key matching SAMPLE_SERVER_CERT.
  private static final String SAMPLE_SERVER_KEY =
      "-----BEGIN PRIVATE KEY-----\n"
          + "MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDEZFLc9qmJryds\n"
          + "Vm1TR2xjqBIpw6sV2O+PZBFX/bMBgwrW3ajZrgdz/NQqBFptztWZyAX4A+oTcwvW\n"
          + "AtCidesewe2G0d+68P7KNAFSHYg1NhbY8ze+iIfhlyldWVxPDaKuVvQX3HDMYcBK\n"
          + "+fniNb9dVfdcfS4DC821M2569Eku/v1Tm9AtrRtkEr8mN9zTAABshUXPRDyAdtm5\n"
          + "WiGspU6rKddWpA3fIptfsf5abyQLe6oMV/G4LZSPs+3e25r3Tum1JRVmF8YJdkhT\n"
          + "XWxR8s+07220K8RH0zGYhWILJq6ftJhhu9qbrT1crxNsfbDFAl7J7GU+oBximjRl\n"
          + "ERVoicLRAgMBAAECggEAK5PPRzR8Xg69gq6Sx53bkSlkDlxahBiE355mss2agMVh\n"
          + "DFhW9SZGhRgew8v/fMoeX2cg2+2SbQpkH/Kz9LiRmVuSpw2+xS5geuGbQWtII/aC\n"
          + "j1U4k1CcRhRSm2IOt4PhCypEM184sEEod/qL1gPzGHTQ1Hb6VLazyHdHFoVKD+Ek\n"
          + "aATfYPYM8NEyPebkJVxjWGHv+eZXwyrF2mGiOoBOLlUWl+VkIHkTB7qe+nKsrtyw\n"
          + "JhJKt2Z2+390EL4Rxg0uqp5thvd4scKAzB58m42zd0m09X89Lw6722PmYlMCf9qX\n"
          + "5dNLxVkTQyiwn55JHN0Zlo+pyrDEijmRG+wDXqJgIQKBgQD+sT8Hs7vFaM3l7gt0\n"
          + "fsZtTOOMU4/ksEtPlQNQaTeoiSmMr6xGak4Xvr6LD/SxB1dPvaT0QSTJEQkXUx4U\n"
          + "G8zuNtJut4+dO3XV/88l+MDYSsI5KbwH5bYWwPXnsCTNf46IBMArbpoCJeVYV+W3\n"
          + "SdHDcG6QhvSsGXmzvEIWOyeOBQKBgQDFZnNTunRZEIhKAffFK0SnJL/kTKkVERW6\n"
          + "SWMqMoTW2ZckpSMnyfFbyz4LX0rl6MLOzGuk4ttVjCT7yvjhBiDUc2yY2wSFc5DK\n"
          + "gyuJqoVkcAklxGvQ1Yc07eMIB64Ipjz0J2kDaxjsn/TLYGGZlq8RO67nuUeH5Jrv\n"
          + "C++BrutvXQKBgQD8XFHw5sVSSJNjlafiCU/Bs2Lwg0fbuFcXBrae8XKF20rBLLwN\n"
          + "lX3Fh2mzzt6MnpKD34xXvUietfOFGgV+tUEsdEO0EswJZoZOwcbWgBFM/15NV64J\n"
          + "QTJYf1/o7x64RADNg6+KGXAeWsBR9d4W690dwwS6zg4XjLKLRilRb9G0pQKBgAtC\n"
          + "Tq2l4uD5mmxuNE2grCfEZtWEsdgrw0t+yBMuEnmWq5JBgQHR+Nw9eWp4ovL+Fa5p\n"
          + "5nHfJpd4iNt7tjpPeSvk8Xq+c0GRV97VIHSXr0gNQ9hNncCpjS6tqtdYaMrBgJSE\n"
          + "cu7o+uD0Nqgq9SYnfBDFkLJS1QuhNF0SFzUUXwVZAoGAfhGMXxl1uDkFIoSOySEV\n"
          + "d7DPQJLFT9crsqBbpA7nDuNB/3BXUqCjz+MaVHpNQEykfke9rAuswNjRl75cTcnE\n"
          + "m2NkREF3dEi1CllNZxb0LRFVpAmJwBFFcevpqvQnDSokZ7/5tUsaUMrR+mbq5vaV\n"
          + "9lVGHblMKOFQ22R+E4yI7G8=\n"
          + "-----END PRIVATE KEY-----";

  // Self-signed CA that signed both the client and server certificates.
  private static final String SAMPLE_CA_CERT =
      "-----BEGIN CERTIFICATE-----\n"
          + "MIIDFjCCAf4CCQCFJYJEzO/NoTANBgkqhkiG9w0BAQsFADBNMQswCQYDVQQGEwJV\n"
          + "UzETMBEGA1UECAwKV2FzaGluZ3RvbjEQMA4GA1UEBwwHU2VhdHRsZTEXMBUGA1UE\n"
          + "AwwOY2EuZXhhbXBsZS5jb20wHhcNMTgxMDAzMTgyOTM3WhcNMjgwOTMwMTgyOTM3\n"
          + "WjBNMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3RvbjEQMA4GA1UEBwwH\n"
          + "U2VhdHRsZTEXMBUGA1UEAwwOY2EuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEB\n"
          + "AQUAA4IBDwAwggEKAoIBAQDO+b9sMlJPpLbS0YHRz5jNUikshqelUriLLGyPKIcH\n"
          + "4Gd2HXp1kIpkN9U+h30EKAO2kluSkzU6q1EPOg54VIdZwOI+89F3fGvTuACGBBRW\n"
          + "KXDnLLfBj/XKE4JRAHxej8rrJzbYNv3iy1jcSRdMPdXw+RMBt4fbS7JuEeY1c1iM\n"
          + "hFbCZAqBgOWRQV+JqZJ5hv0x2Y8hmTQN8O8PRl5VwpKT+aNrpunyQVYdyzfMtHrm\n"
          + "c4W3MU2W9Oa0mTLzVAa9rVrpOxpSmEfXcMY8RIIN1mOK0yuJVPW8K0TcKy/HfO14\n"
          + "4OBsbzrlvtVNLSMeiCIvsROXJCBd90ZtE3DHwZYRvltTAgMBAAEwDQYJKoZIhvcN\n"
          + "AQELBQADggEBADEhwOYsugkZ0IhTD9CGPnCqOY97iLaVXy7/P7xnOtzlXJiK/AMj\n"
          + "tapwh9mG1vgpOv9orCyTm6GZqBSObhoymyFGoxWgku3nQyiqJDQFAbB2+N46H5GC\n"
          + "r4Cu+MnPGljJtNClVn+Q9CRKuaOSiRYygGc84bUbQAeMuPnRswK2IInAahzfpWWI\n"
          + "xYFwXb3611NqPQAIwnFdgpmsm4Ko82xh5sWhchRy5BwIlGUXxrFAOUOMonvIUzSW\n"
          + "hitWCW5AMwHKOeTs0/4BmJE/6rmR84ozZ0z3X/5+LAYLeI+GbUZBr/kUB3euXaj4\n"
          + "rGN2EuvbdWav5As8evyWUnB5QGxTTeptYCg=\n"
          + "-----END CERTIFICATE-----";

  // Fixed rule key / output path / build id shared by all tests; the
  // concrete values are arbitrary.
  private static final RuleKey ruleKey = new RuleKey("00000000000000000000000000000000");
  private static final Path outputPath = Paths.get("output/file");
  private static final BuildId BUILD_ID = new BuildId("test");

  @Rule public TemporaryPaths tempDir = new TemporaryPaths();

  private BackgroundTaskManager bgTaskManager;
  private TaskManagerScope managerScope;
  // On-disk copies of the PEM constants above, written in setUp().
  private Path clientCertPath;
  private Path clientKeyPath;
  private Path serverCertPath;
  private Path serverKeyPath;
  private Path caCertPath;
  private ClientCertificateHandler clientCertificateHandler;

  /**
   * Writes the sample PEM material into temp files and builds the client certificate handler that
   * the caches under test will use.
   */
  @Before
  public void setUp() throws IOException {
    bgTaskManager = new TestBackgroundTaskManager();
    managerScope = bgTaskManager.getNewScope(BUILD_ID);

    clientCertPath = tempDir.newFile("client.crt");
    clientKeyPath = tempDir.newFile("client.key");
    serverCertPath = tempDir.newFile("server.crt");
    serverKeyPath = tempDir.newFile("server.key");
    caCertPath = tempDir.newFile("ca.crt");

    Files.write(clientCertPath, SAMPLE_CLIENT_CERT.getBytes(Charsets.UTF_8));
    Files.write(clientKeyPath, SAMPLE_CLIENT_KEY.getBytes(Charsets.UTF_8));
    Files.write(serverCertPath, SAMPLE_SERVER_CERT.getBytes(Charsets.UTF_8));
    Files.write(serverKeyPath, SAMPLE_SERVER_KEY.getBytes(Charsets.UTF_8));
    Files.write(caCertPath, SAMPLE_CA_CERT.getBytes(Charsets.UTF_8));
    clientCertificateHandler =
        createClientCertificateHandler(clientKeyPath, clientCertPath, caCertPath);
  }

  /** Closes the task-manager scope and waits (up to 1s) for background tasks to drain. */
  @After
  public void tearDown() throws InterruptedException {
    managerScope.close();
    bgTaskManager.shutdown(1, TimeUnit.SECONDS);
  }

  /** A fetch through the plain HTTP cache over TLS must present the configured client cert. */
  @Test
  public void testUsesClientTlsCertsForHttpsFetch() throws Exception {
    NotFoundHandler handler = new NotFoundHandler(false, false);
    X509KeyManager keyManager = clientCertificateHandler.getHandshakeCertificates().keyManager();
    // The certificate we expect the server to observe on the connection.
    X509Certificate clientCert =
        keyManager.getCertificateChain(keyManager.getClientAliases("RSA", null)[0])[0];
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    BuckEventBus buckEventBus = BuckEventBusForTests.newInstance();

    try (HttpdForTests server = new HttpdForTests(caCertPath, serverCertPath, serverKeyPath)) {
      server.addHandler(handler);
      server.start();

      ArtifactCacheBuckConfig cacheConfig =
          ArtifactCacheBuckConfigTest.createFromText(
              "[cache]",
              "mode = http",
              "http_url = " + server.getRootUri(),
              "http_client_tls_key = " + clientKeyPath.toString(),
              "http_client_tls_cert = " + clientCertPath.toString());

      CacheResult result;
      try (ArtifactCache artifactCache =
          newArtifactCache(buckEventBus, projectFilesystem, cacheConfig)
              .remoteOnlyInstance(false, false)) {
        result =
            artifactCache
                .fetchAsync(
                    BuildTargetFactory.newInstance("//:foo"),
                    ruleKey,
                    LazyPath.ofInstance(outputPath))
                .get();
      }

      // The handler always answers fetches with "not found", so a MISS
      // proves the TLS round trip itself succeeded.
      Assert.assertEquals(result.cacheError().orElse(""), CacheResultType.MISS, result.getType());
      Assert.assertEquals(1, handler.peerCertificates.size());
      Assert.assertEquals(1, handler.peerCertificates.get(0).length);
      Assert.assertEquals(clientCert, handler.peerCertificates.get(0)[0]);
    }
  }

  /** A fetch through the Thrift-over-HTTP cache must present the configured client cert. */
  @Test
  public void testUsesClientTlsCertsForThriftFetch() throws Exception {
    NotFoundHandler handler = new NotFoundHandler(true, false);
    X509KeyManager keyManager = clientCertificateHandler.getHandshakeCertificates().keyManager();
    X509Certificate clientCert =
        keyManager.getCertificateChain(keyManager.getClientAliases("RSA", null)[0])[0];
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    BuckEventBus buckEventBus = BuckEventBusForTests.newInstance();

    try (HttpdForTests server = new HttpdForTests(caCertPath, serverCertPath, serverKeyPath)) {
      server.addHandler(handler);
      server.start();

      ArtifactCacheBuckConfig cacheConfig =
          ArtifactCacheBuckConfigTest.createFromText(
              "[cache]",
              "mode = thrift_over_http",
              "http_url = " + server.getRootUri(),
              "hybrid_thrift_endpoint = /hybrid_thrift",
              "http_client_tls_key = " + clientKeyPath.toString(),
              "http_client_tls_cert = " + clientCertPath.toString());

      CacheResult result;
      try (ArtifactCache artifactCache =
          newArtifactCache(buckEventBus, projectFilesystem, cacheConfig)
              .remoteOnlyInstance(false, false)) {
        result =
            artifactCache
                .fetchAsync(
                    BuildTargetFactory.newInstance("//:foo"),
                    ruleKey,
                    LazyPath.ofInstance(outputPath))
                .get();
      }

      Assert.assertEquals(CacheResultType.MISS, result.getType());
      Assert.assertEquals(1, handler.peerCertificates.size());
      Assert.assertEquals(1, handler.peerCertificates.get(0).length);
      Assert.assertEquals(clientCert, handler.peerCertificates.get(0)[0]);
    }
  }

  /** A store through the plain HTTP cache over TLS must present the configured client cert. */
  @Test
  public void testUsesClientTlsCertsForHttpsStore() throws Exception {
    NotFoundHandler handler = new NotFoundHandler(false, true);
    X509KeyManager keyManager = clientCertificateHandler.getHandshakeCertificates().keyManager();
    X509Certificate clientCert =
        keyManager.getCertificateChain(keyManager.getClientAliases("RSA", null)[0])[0];
    String data = "data";
    BuckEventBus buckEventBus = BuckEventBusForTests.newInstance();
    FakeProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    // The artifact content that will be uploaded by the store call.
    projectFilesystem.writeContentsToPath(data, outputPath);

    try (HttpdForTests server = new HttpdForTests(caCertPath, serverCertPath, serverKeyPath)) {
      server.addHandler(handler);
      server.start();

      ArtifactCacheBuckConfig cacheConfig =
          ArtifactCacheBuckConfigTest.createFromText(
              "[cache]",
              "mode = http",
              "http_url = " + server.getRootUri(),
              "http_client_tls_key = " + clientKeyPath.toString(),
              "http_client_tls_cert = " + clientCertPath.toString());

      try (ArtifactCache artifactCache =
          newArtifactCache(buckEventBus, projectFilesystem, cacheConfig)
              .remoteOnlyInstance(false, false)) {
        artifactCache
            .store(
                ArtifactInfo.builder().addRuleKeys(ruleKey).build(),
                BorrowablePath.borrowablePath(outputPath))
            .get();
      }

      Assert.assertEquals(1, handler.peerCertificates.size());
      Assert.assertEquals(1, handler.peerCertificates.get(0).length);
      Assert.assertEquals(clientCert, handler.peerCertificates.get(0)[0]);
    }
  }

  /** A store through the Thrift-over-HTTP cache must present the configured client cert. */
  @Test
  public void testUsesClientTlsCertsForThriftStore() throws Exception {
    NotFoundHandler handler = new NotFoundHandler(true, true);
    X509KeyManager keyManager = clientCertificateHandler.getHandshakeCertificates().keyManager();
    X509Certificate clientCert =
        keyManager.getCertificateChain(keyManager.getClientAliases("RSA", null)[0])[0];
    String data = "data";
    BuckEventBus buckEventBus = BuckEventBusForTests.newInstance();
    FakeProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    projectFilesystem.writeContentsToPath(data, outputPath);

    try (HttpdForTests server = new HttpdForTests(caCertPath, serverCertPath, serverKeyPath)) {
      server.addHandler(handler);
      server.start();

      ArtifactCacheBuckConfig cacheConfig =
          ArtifactCacheBuckConfigTest.createFromText(
              "[cache]",
              "mode = thrift_over_http",
              "http_url = " + server.getRootUri(),
              "hybrid_thrift_endpoint = /hybrid_thrift",
              "http_client_tls_key = " + clientKeyPath.toString(),
              "http_client_tls_cert = " + clientCertPath.toString());

      try (ArtifactCache artifactCache =
          newArtifactCache(buckEventBus, projectFilesystem, cacheConfig)
              .remoteOnlyInstance(false, false)) {
        artifactCache
            .store(
                ArtifactInfo.builder().addRuleKeys(ruleKey).build(),
                BorrowablePath.borrowablePath(outputPath))
            .get();
      }

      Assert.assertEquals(1, handler.peerCertificates.size());
      Assert.assertEquals(1, handler.peerCertificates.get(0).length);
      Assert.assertEquals(clientCert, handler.peerCertificates.get(0)[0]);
    }
  }

  /**
   * Builds an ArtifactCaches instance wired with direct executors and the test's client
   * certificate handler.
   */
  private ArtifactCaches newArtifactCache(
      BuckEventBus buckEventBus,
      ProjectFilesystem projectFilesystem,
      ArtifactCacheBuckConfig cacheConfig) {
    CellPathResolver cellPathResolver = TestCellPathResolver.get(projectFilesystem);
    UnconfiguredBuildTargetFactory unconfiguredBuildTargetFactory =
        new ParsingUnconfiguredBuildTargetFactory();
    return new ArtifactCaches(
        cacheConfig,
        buckEventBus,
        target -> unconfiguredBuildTargetFactory.create(cellPathResolver, target),
        projectFilesystem,
        Optional.empty(),
        MoreExecutors.newDirectExecutorService(),
        MoreExecutors.newDirectExecutorService(),
        MoreExecutors.newDirectExecutorService(),
        MoreExecutors.newDirectExecutorService(),
        managerScope,
        "test://",
        "myhostname",
        Optional.of(clientCertificateHandler));
  }

  /**
   * Create a ClientCertificateHandler that accepts all hostnames (so that we don't have to setup
   * hostnames for tests), and that accepts certs signed by the CA
   */
  private ClientCertificateHandler createClientCertificateHandler(
      Path clientKeyPath, Path clientCertPath, Path caCertPath) throws IOException {
    X509Certificate certificate =
        ClientCertificateHandler.parseCertificate(Optional.of(clientCertPath), true).get();
    X509Certificate caCertificate =
        ClientCertificateHandler.parseCertificate(Optional.of(caCertPath), true).get();
    PrivateKey privateKey =
        ClientCertificateHandler.parsePrivateKey(Optional.of(clientKeyPath), certificate, true)
            .get();
    HeldCertificate cert =
        new HeldCertificate(new KeyPair(certificate.getPublicKey(), privateKey), certificate);

    HandshakeCertificates.Builder hsBuilder = new HandshakeCertificates.Builder();
    hsBuilder.addPlatformTrustedCertificates();
    hsBuilder.addTrustedCertificate(caCertificate);
    hsBuilder.heldCertificate(cert);
    // The (s, sslSession) -> true verifier disables hostname checking for the test.
    return new ClientCertificateHandler(hsBuilder.build(), Optional.of((s, sslSession) -> true));
  }

  /**
   * Jetty handler that records the client certificate chain of every request and then answers:
   * fetches with a miss, stores with success — either as plain HTTP status codes or as a
   * Thrift-encoded BuckCacheResponse, depending on the constructor flags.
   */
  class NotFoundHandler extends AbstractHandler {

    private final boolean isThrift;
    private final boolean isStore;
    // Certificate chains observed on each handled request, in order.
    List<X509Certificate[]> peerCertificates = new ArrayList<>();

    public NotFoundHandler(boolean isThrift, boolean isStore) {
      this.isThrift = isThrift;
      this.isStore = isStore;
    }

    @Override
    public void handle(
        String s,
        Request request,
        HttpServletRequest httpServletRequest,
        HttpServletResponse httpServletResponse)
        throws IOException {

      // Capture the TLS client certificate chain exposed by the servlet container.
      X509Certificate[] certs =
          (X509Certificate[])
              httpServletRequest.getAttribute("javax.servlet.request.X509Certificate");
      peerCertificates.add(certs);

      if (isThrift) {
        httpServletResponse.setStatus(HttpServletResponse.SC_OK);
        BuckCacheResponse response = new BuckCacheResponse();
        response.setWasSuccessful(true);

        if (isStore) {
          BuckCacheStoreResponse storeResponse = new BuckCacheStoreResponse();
          response.setStoreResponse(storeResponse);
        } else {
          BuckCacheFetchResponse fetchResponse = new BuckCacheFetchResponse();
          fetchResponse.setArtifactExists(false);
          response.setFetchResponse(fetchResponse);
        }

        byte[] serialized = ThriftUtil.serialize(ThriftArtifactCache.PROTOCOL, response);

        httpServletResponse.setContentType("application/octet-stream");
        // Hybrid-thrift framing: 4-byte big-endian payload length, then the payload.
        httpServletResponse
            .getOutputStream()
            .write(ByteBuffer.allocate(4).putInt(serialized.length).array());
        httpServletResponse.getOutputStream().write(serialized);
        httpServletResponse.getOutputStream().close();
      } else {
        if (isStore) {
          httpServletResponse.setStatus(HttpServletResponse.SC_OK);
        } else {
          httpServletResponse.setStatus(HttpServletResponse.SC_NOT_FOUND);
        }
      }
      request.setHandled(true);
    }
  }
}
| {
"content_hash": "07edf19043e6c4d9a9011285aec3b078",
"timestamp": "",
"source": "github",
"line_count": 506,
"max_line_length": 99,
"avg_line_length": 48.430830039525695,
"alnum_prop": 0.7305149759242634,
"repo_name": "rmaz/buck",
"id": "beee9ed221810adc4bd79b712fe6dbf8efe9e735",
"size": "25111",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/com/facebook/buck/artifact_cache/ArtifactCachesIntegrationTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1585"
},
{
"name": "Batchfile",
"bytes": "3875"
},
{
"name": "C",
"bytes": "281295"
},
{
"name": "C#",
"bytes": "237"
},
{
"name": "C++",
"bytes": "18966"
},
{
"name": "CSS",
"bytes": "56106"
},
{
"name": "D",
"bytes": "1017"
},
{
"name": "Dockerfile",
"bytes": "2081"
},
{
"name": "Go",
"bytes": "10020"
},
{
"name": "Groovy",
"bytes": "3362"
},
{
"name": "HTML",
"bytes": "11252"
},
{
"name": "Haskell",
"bytes": "1008"
},
{
"name": "IDL",
"bytes": "480"
},
{
"name": "Java",
"bytes": "29307150"
},
{
"name": "JavaScript",
"bytes": "938678"
},
{
"name": "Kotlin",
"bytes": "25755"
},
{
"name": "Lex",
"bytes": "12772"
},
{
"name": "MATLAB",
"bytes": "47"
},
{
"name": "Makefile",
"bytes": "1916"
},
{
"name": "OCaml",
"bytes": "4935"
},
{
"name": "Objective-C",
"bytes": "176972"
},
{
"name": "Objective-C++",
"bytes": "34"
},
{
"name": "PowerShell",
"bytes": "2244"
},
{
"name": "Prolog",
"bytes": "2087"
},
{
"name": "Python",
"bytes": "2075938"
},
{
"name": "Roff",
"bytes": "1207"
},
{
"name": "Rust",
"bytes": "5716"
},
{
"name": "Scala",
"bytes": "5082"
},
{
"name": "Shell",
"bytes": "77999"
},
{
"name": "Smalltalk",
"bytes": "194"
},
{
"name": "Swift",
"bytes": "11393"
},
{
"name": "Thrift",
"bytes": "48632"
},
{
"name": "Yacc",
"bytes": "323"
}
],
"symlink_target": ""
} |
package org.batfish.bddreachability;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Predicates.alwaysTrue;
import static org.batfish.bddreachability.BidirectionalReachabilityReturnPassInstrumentation.instrumentReturnPassEdges;
import static org.batfish.bddreachability.SessionInstrumentation.sessionInstrumentation;
import static org.batfish.bddreachability.transition.Transitions.IDENTITY;
import static org.batfish.bddreachability.transition.Transitions.ZERO;
import static org.batfish.bddreachability.transition.Transitions.addLastHopConstraint;
import static org.batfish.bddreachability.transition.Transitions.addNoLastHopConstraint;
import static org.batfish.bddreachability.transition.Transitions.addOriginatingFromDeviceConstraint;
import static org.batfish.bddreachability.transition.Transitions.addOutgoingOriginalFlowFiltersConstraint;
import static org.batfish.bddreachability.transition.Transitions.addSourceInterfaceConstraint;
import static org.batfish.bddreachability.transition.Transitions.branch;
import static org.batfish.bddreachability.transition.Transitions.compose;
import static org.batfish.bddreachability.transition.Transitions.constraint;
import static org.batfish.bddreachability.transition.Transitions.eraseAndSet;
import static org.batfish.bddreachability.transition.Transitions.removeNodeSpecificConstraints;
import static org.batfish.common.util.CollectionUtil.toImmutableMap;
import static org.batfish.datamodel.FlowDisposition.LOOP;
import static org.batfish.datamodel.acl.AclLineMatchExprs.matchDst;
import static org.batfish.datamodel.transformation.TransformationUtil.visitTransformationSteps;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Suppliers;
import com.google.common.collect.BoundType;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Streams;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.ParametersAreNonnullByDefault;
import net.sf.javabdd.BDD;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.batfish.bddreachability.IpsRoutedOutInterfacesFactory.IpsRoutedOutInterfaces;
import org.batfish.bddreachability.transition.TransformationToTransition;
import org.batfish.bddreachability.transition.Transition;
import org.batfish.common.BatfishException;
import org.batfish.common.bdd.BDDInteger;
import org.batfish.common.bdd.BDDPacket;
import org.batfish.common.bdd.BDDSourceManager;
import org.batfish.common.bdd.IpAccessListToBdd;
import org.batfish.common.bdd.IpAccessListToBddImpl;
import org.batfish.common.bdd.IpSpaceToBDD;
import org.batfish.datamodel.Configuration;
import org.batfish.datamodel.FlowDisposition;
import org.batfish.datamodel.ForwardingAnalysis;
import org.batfish.datamodel.Interface;
import org.batfish.datamodel.InterfaceForwardingBehavior;
import org.batfish.datamodel.IpAccessList;
import org.batfish.datamodel.IpSpace;
import org.batfish.datamodel.UniverseIpSpace;
import org.batfish.datamodel.VrfForwardingBehavior;
import org.batfish.datamodel.acl.AclLineMatchExpr;
import org.batfish.datamodel.packet_policy.ActionVisitor;
import org.batfish.datamodel.packet_policy.ApplyFilter;
import org.batfish.datamodel.packet_policy.ApplyTransformation;
import org.batfish.datamodel.packet_policy.Drop;
import org.batfish.datamodel.packet_policy.FibLookup;
import org.batfish.datamodel.packet_policy.FibLookupOverrideLookupIp;
import org.batfish.datamodel.packet_policy.If;
import org.batfish.datamodel.packet_policy.Return;
import org.batfish.datamodel.packet_policy.StatementVisitor;
import org.batfish.datamodel.packet_policy.VrfExprNameExtractor;
import org.batfish.datamodel.transformation.ApplyAll;
import org.batfish.datamodel.transformation.ApplyAny;
import org.batfish.datamodel.transformation.AssignIpAddressFromPool;
import org.batfish.datamodel.transformation.AssignPortFromPool;
import org.batfish.datamodel.transformation.IpField;
import org.batfish.datamodel.transformation.Noop;
import org.batfish.datamodel.transformation.PortField;
import org.batfish.datamodel.transformation.ShiftIpAddressIntoSubnet;
import org.batfish.datamodel.transformation.TransformationStepVisitor;
import org.batfish.specifier.IpSpaceAssignment;
import org.batfish.specifier.LocationVisitor;
import org.batfish.symbolic.IngressLocation;
import org.batfish.symbolic.state.Accept;
import org.batfish.symbolic.state.DeliveredToSubnet;
import org.batfish.symbolic.state.DropAclIn;
import org.batfish.symbolic.state.DropAclOut;
import org.batfish.symbolic.state.DropNoRoute;
import org.batfish.symbolic.state.DropNullRoute;
import org.batfish.symbolic.state.ExitsNetwork;
import org.batfish.symbolic.state.InsufficientInfo;
import org.batfish.symbolic.state.InterfaceAccept;
import org.batfish.symbolic.state.NeighborUnreachable;
import org.batfish.symbolic.state.NodeAccept;
import org.batfish.symbolic.state.NodeDropAclIn;
import org.batfish.symbolic.state.NodeDropAclOut;
import org.batfish.symbolic.state.NodeDropNoRoute;
import org.batfish.symbolic.state.NodeDropNullRoute;
import org.batfish.symbolic.state.NodeInterfaceDeliveredToSubnet;
import org.batfish.symbolic.state.NodeInterfaceExitsNetwork;
import org.batfish.symbolic.state.NodeInterfaceInsufficientInfo;
import org.batfish.symbolic.state.NodeInterfaceNeighborUnreachable;
import org.batfish.symbolic.state.OriginateInterface;
import org.batfish.symbolic.state.OriginateInterfaceLink;
import org.batfish.symbolic.state.OriginateVrf;
import org.batfish.symbolic.state.PacketPolicyAction;
import org.batfish.symbolic.state.PacketPolicyStatement;
import org.batfish.symbolic.state.PostInInterface;
import org.batfish.symbolic.state.PostInVrf;
import org.batfish.symbolic.state.PreInInterface;
import org.batfish.symbolic.state.PreOutEdge;
import org.batfish.symbolic.state.PreOutEdgePostNat;
import org.batfish.symbolic.state.PreOutInterfaceDeliveredToSubnet;
import org.batfish.symbolic.state.PreOutInterfaceExitsNetwork;
import org.batfish.symbolic.state.PreOutInterfaceInsufficientInfo;
import org.batfish.symbolic.state.PreOutInterfaceNeighborUnreachable;
import org.batfish.symbolic.state.PreOutVrf;
import org.batfish.symbolic.state.Query;
import org.batfish.symbolic.state.SetupSessionDeliveredToSubnet;
import org.batfish.symbolic.state.SetupSessionExitsNetwork;
import org.batfish.symbolic.state.StateExpr;
import org.batfish.symbolic.state.VrfAccept;
/**
 * Constructs the reachability graph for {@link BDDReachabilityAnalysis}. The public API is very
* simple: it provides two methods for constructing {@link BDDReachabilityAnalysis}, depending on
* whether or not you have a destination Ip constraint.
*
* <p>The core of the implementation is the {@code generateEdges()} method and its many helpers,
* which generate the {@link StateExpr nodes} and {@link Edge edges} of the reachability graph. Each
* node represents a step of the routing process within some network device or between devices. The
* edges represent the flow of traffic between these steps. Each edge is labeled with a {@link BDD}
* that represents the set of packets that can traverse that edge. If the edge represents a source
* NAT, the edge will be labeled with the NAT rules (match conditions and set of pool IPs).
*
* <p>To support {@link org.batfish.datamodel.acl.MatchSrcInterface} and {@link
* org.batfish.datamodel.acl.OriginatingFromDevice} {@link
* org.batfish.datamodel.acl.AclLineMatchExpr ACL expressions}, we maintain the invariant that
* whenever a packet is inside a node, it has a valid source (according to the BDDSourceManager of
* that node). For forward edges this is established by constraining to a single source. For
* backward edges it's established using {@link BDDSourceManager#isValidValue}. When we exit the
* node (e.g. forward into another node or a disposition state, or backward into another node or an
* origination state), we erase the constraint on source by existential quantification.
*/
@ParametersAreNonnullByDefault
public final class BDDReachabilityAnalysisFactory {
  private static final Logger LOGGER = LogManager.getLogger(BDDReachabilityAnalysisFactory.class);
  // node name --> acl name --> set of packets denied by the acl.
  private final Map<String, Map<String, Supplier<BDD>>> _aclDenyBDDs;
  // node name --> acl name --> set of packets permitted by the acl.
  private final Map<String, Map<String, Supplier<BDD>>> _aclPermitBDDs;
  /*
   * node -> vrf -> edge -> set of packets that vrf will forward out that edge successfully,
   * including that the neighbor will respond to ARP.
   */
  private final Map<String, Map<String, Map<org.batfish.datamodel.Edge, BDD>>> _arpTrueEdgeBDDs;
  /*
   * Symbolic variables corresponding to the different packet header fields. We use these to
   * generate new BDD constraints on those fields. Each constraint can be understood as the set
   * of packet headers for which the constraint is satisfied.
   */
  private final BDDPacket _bddPacket;
  // generates the FIB-driven forwarding edges of the reachability graph
  @VisibleForTesting final @Nonnull BDDFibGenerator _bddFibGenerator;
  // node name --> manager of that node's symbolic source-interface variable
  private final Map<String, BDDSourceManager> _bddSourceManagers;
  // node name --> cached ACL-to-BDD converter; populated lazily by ipAccessListToBddForNode
  private final Map<String, IpAccessListToBdd> _aclToBdds = new HashMap<>();
  // node name --> manager of that node's outgoing-original-flow-filter constraint
  private final Map<String, BDDOutgoingOriginalFlowFilterManager>
      _bddOutgoingOriginalFlowFilterManagers;
  // Needed when initializing sessions.
  private final @Nullable LastHopOutgoingInterfaceManager _lastHopMgr;
  // node --> iface --> bdd nats
  private final Map<String, Map<String, Transition>> _bddIncomingTransformations;
  private final Map<String, Map<String, Transition>> _bddOutgoingTransformations;
  // the network's configurations, keyed by hostname
  private final Map<String, Configuration> _configs;
  // only use this for IpSpaces that have no references
  private final IpSpaceToBDD _dstIpSpaceToBDD;
  private final IpSpaceToBDD _srcIpSpaceToBDD;
  // when true, ACL filters are not enforced anywhere in the generated graph
  private final boolean _ignoreFilters;
  /*
   * node --> vrf --> interface --> set of packets that get routed out the interface but do not
   * reach the neighbor, or exits network, or delivered to subnet
   * This includes neighbor unreachable, exits network, and delivered to subnet
   */
  private final Map<String, Map<String, Map<String, BDD>>> _neighborUnreachableBDDs;
  private final Map<String, Map<String, Map<String, BDD>>> _deliveredToSubnetBDDs;
  private final Map<String, Map<String, Map<String, BDD>>> _exitsNetworkBDDs;
  private final Map<String, Map<String, Map<String, BDD>>> _insufficientInfoBDDs;
  // the constant-true BDD
  private final BDD _one;
  // the single BDD bit allocated to track the required-transit-nodes constraint
  private final BDD _requiredTransitNodeBDD;
  // node --> vrf --> set of packets null-routed by the vrf
  private final @Nonnull Map<String, Map<String, BDD>> _nullRoutedBDDs;
  // node --> vrf --> set of packets routable by the vrf
  private final Map<String, Map<String, BDD>> _routableBDDs;
  // conjunction of the BDD vars encoding source and dest IPs/Ports. Used for existential
  // quantification in source and destination NAT.
  private final BDD _dstIpVars;
  private final BDD _sourceIpVars;
  private final BDD _dstPortVars;
  private final BDD _sourcePortVars;
  // all L3 topology edges, derived from the keys of the ARP-true-edge maps in the constructor
  private final Set<org.batfish.datamodel.Edge> _topologyEdges;
  // ranges of IPs in all transformations in the network, per IP address field.
  private final Map<IpField, BDD> _transformationIpRanges;
  // ranges of ports in all transformations in the network, per port field.
  private final Map<PortField, BDD> _transformationPortRanges;
  /** node --> vrf --> interface --> set of packets accepted by the interface */
  private final Map<String, Map<String, Map<String, BDD>>> _ifaceAcceptBDDs;
  /** node --> vrf --> set of packets accepted by the vrf */
  private final Map<String, Map<String, BDD>> _vrfAcceptBDDs;
  // node --> vrf --> nextVrf --> set of packets vrf delegates to nextVrf
  private final Map<String, Map<String, Map<String, BDD>>> _nextVrfBDDs;
  // node --> interface --> vrf
  private final Map<String, Map<String, String>> _interfacesToVrfsMap;
  // the constant-false BDD
  private final BDD _zero;
  // supplies per-VRF routed-out-interface IP sets, used when converting packet policies to BDDs
  private final IpsRoutedOutInterfacesFactory _ipsRoutesOutInterfacesFactory;
  /**
   * Creates a factory for building {@link BDDReachabilityAnalysis} instances over a network
   * snapshot.
   *
   * @param packet the symbolic packet-header variables shared by the whole analysis
   * @param configs the network configurations, keyed by hostname
   * @param forwardingAnalysis precomputed per-node/VRF forwarding behavior (ARP results, routable
   *     and null-routed IP spaces, etc.)
   * @param ipsRoutedOutInterfacesFactory supplies per-VRF routed-out-interface IP sets used when
   *     converting packet policies to BDDs
   * @param ignoreFilters when true, ACL filters are not enforced in the generated graph
   * @param initializeSessions when true, also track last-hop outgoing interfaces to support
   *     session-based reachability
   */
  public BDDReachabilityAnalysisFactory(
      BDDPacket packet,
      Map<String, Configuration> configs,
      ForwardingAnalysis forwardingAnalysis,
      IpsRoutedOutInterfacesFactory ipsRoutedOutInterfacesFactory,
      boolean ignoreFilters,
      boolean initializeSessions) {
    _bddPacket = packet;
    _one = packet.getFactory().one();
    _zero = packet.getFactory().zero();
    _ignoreFilters = ignoreFilters;
    _ipsRoutesOutInterfacesFactory = ipsRoutedOutInterfacesFactory;
    Map<String, Map<String, VrfForwardingBehavior>> vrfForwardingBehavior =
        forwardingAnalysis.getVrfForwardingBehavior();
    // derive the set of L3 topology edges from the ARP-true-edge keys
    _topologyEdges =
        vrfForwardingBehavior.values().stream()
            .flatMap(m -> m.values().stream())
            .flatMap(vfb -> vfb.getArpTrueEdge().keySet().stream())
            .collect(ImmutableSet.toImmutableSet());
    // last-hop tracking is only needed when sessions are initialized
    _lastHopMgr =
        initializeSessions
            ? new LastHopOutgoingInterfaceManager(packet, configs, _topologyEdges)
            : null;
    _requiredTransitNodeBDD = _bddPacket.allocateBDDBit("requiredTransitNodes");
    _bddSourceManagers = BDDSourceManager.forNetwork(_bddPacket, configs, initializeSessions);
    _configs = configs;
    _dstIpSpaceToBDD = _bddPacket.getDstIpSpaceToBDD();
    _srcIpSpaceToBDD = _bddPacket.getSrcIpSpaceToBDD();
    _aclPermitBDDs = computeAclBDDs(this::ipAccessListToBddForNode, configs);
    _aclDenyBDDs = computeAclDenyBDDs(_aclPermitBDDs);
    if (_ignoreFilters) {
      // If ignoring filters, make all BDDOutgoingOriginalFlowFilterManagers trivial; they should
      // never enforce any constraints.
      BDDOutgoingOriginalFlowFilterManager empty =
          BDDOutgoingOriginalFlowFilterManager.empty(_bddPacket);
      _bddOutgoingOriginalFlowFilterManagers =
          toImmutableMap(configs.keySet(), Function.identity(), k -> empty);
    } else {
      _bddOutgoingOriginalFlowFilterManagers =
          BDDOutgoingOriginalFlowFilterManager.forNetwork(
              _bddPacket,
              configs,
              (hostname, aclName) -> _aclPermitBDDs.get(hostname).get(aclName).get());
    }
    _bddIncomingTransformations = computeBDDIncomingTransformations();
    _bddOutgoingTransformations = computeBDDOutgoingTransformations();
    // convert each forwarding-disposition IP space to a destination-IP BDD
    _arpTrueEdgeBDDs = computeArpTrueEdgeBDDs(vrfForwardingBehavior, _dstIpSpaceToBDD);
    _neighborUnreachableBDDs =
        computeIfaceForwardingBehaviorBDDs(
            vrfForwardingBehavior,
            InterfaceForwardingBehavior::getNeighborUnreachable,
            _dstIpSpaceToBDD);
    _deliveredToSubnetBDDs =
        computeIfaceForwardingBehaviorBDDs(
            vrfForwardingBehavior,
            InterfaceForwardingBehavior::getDeliveredToSubnet,
            _dstIpSpaceToBDD);
    _exitsNetworkBDDs =
        computeIfaceForwardingBehaviorBDDs(
            vrfForwardingBehavior, InterfaceForwardingBehavior::getExitsNetwork, _dstIpSpaceToBDD);
    _insufficientInfoBDDs =
        computeIfaceForwardingBehaviorBDDs(
            vrfForwardingBehavior,
            InterfaceForwardingBehavior::getInsufficientInfo,
            _dstIpSpaceToBDD);
    _nullRoutedBDDs =
        computeVrfForwardingBehaviorBDDs(
            vrfForwardingBehavior, VrfForwardingBehavior::getNullRoutedIps, _dstIpSpaceToBDD);
    _routableBDDs =
        computeVrfForwardingBehaviorBDDs(
            vrfForwardingBehavior, VrfForwardingBehavior::getRoutableIps, _dstIpSpaceToBDD);
    _ifaceAcceptBDDs =
        computeIfaceForwardingBehaviorBDDs(
            vrfForwardingBehavior, InterfaceForwardingBehavior::getAcceptedIps, _dstIpSpaceToBDD);
    _vrfAcceptBDDs = computeVrfAcceptBDDs(); // must do this after populating _ifaceAcceptBDDs
    _nextVrfBDDs = computeNextVrfBDDs(vrfForwardingBehavior, _dstIpSpaceToBDD);
    _interfacesToVrfsMap = computeInterfacesToVrfsMap(configs);
    _dstIpVars = _bddPacket.getDstIp().getVars();
    _sourceIpVars = _bddPacket.getSrcIp().getVars();
    _dstPortVars = _bddPacket.getDstPort().getVars();
    _sourcePortVars = _bddPacket.getSrcPort().getVars();
    RangeComputer rangeComputer = computeTransformationRanges();
    _transformationPortRanges = rangeComputer.getPortRanges();
    _transformationIpRanges = rangeComputer.getIpRanges();
    _bddFibGenerator =
        new BDDFibGenerator(
            _arpTrueEdgeBDDs,
            _neighborUnreachableBDDs,
            _deliveredToSubnetBDDs,
            _exitsNetworkBDDs,
            _insufficientInfoBDDs,
            _ifaceAcceptBDDs,
            _vrfAcceptBDDs,
            _routableBDDs,
            _nextVrfBDDs,
            _nullRoutedBDDs,
            this::flowsLeavingInterface);
  }
/**
* Computes VRF accept BDDs based on interface accept BDDs. Each VRF's accept BDD is the union of
* its interfaces' accept BDDs.
*/
private Map<String, Map<String, BDD>> computeVrfAcceptBDDs() {
return toImmutableMap(
_ifaceAcceptBDDs,
Entry::getKey, // node name
nodeEntry ->
toImmutableMap(
nodeEntry.getValue(),
Entry::getKey, // vrf name
vrfEntry ->
_bddPacket
.getFactory()
.orAll(vrfEntry.getValue().values()))); // vrf's accept BDD
}
/**
* Lazily compute the ACL BDDs, since we may only need some of them (depending on ignoreFilters,
* forbidden transit nodes, etc). When ignoreFilters is enabled, we still need the ACLs used in
* NATs. This is simpler than trying to precompute which ACLs we actually need.
*/
private static Map<String, Map<String, Supplier<BDD>>> computeAclBDDs(
Function<Configuration, IpAccessListToBdd> aclToBdds, Map<String, Configuration> configs) {
return toImmutableMap(
configs,
Entry::getKey,
nodeEntry -> {
Configuration config = nodeEntry.getValue();
IpAccessListToBdd aclToBdd = aclToBdds.apply(config);
return toImmutableMap(
config.getIpAccessLists(),
Entry::getKey,
aclEntry -> Suppliers.memoize(() -> aclToBdd.toBdd(aclEntry.getValue())));
});
}
  /** Returns node name --> ACL name --> memoized supplier of that ACL's permit-set BDD. */
  Map<String, Map<String, Supplier<BDD>>> getAclPermitBdds() {
    return _aclPermitBDDs;
  }
  /**
   * Return an {@link IpAccessListToBdd} for the input {@link Configuration}. Converters are cached
   * per hostname in {@code _aclToBdds}, so repeated calls for the same node reuse one instance.
   */
  IpAccessListToBdd ipAccessListToBddForNode(Configuration config) {
    return _aclToBdds.computeIfAbsent(
        config.getHostname(),
        hostname ->
            new IpAccessListToBddImpl(
                _bddPacket,
                _bddSourceManagers.get(hostname),
                config.getIpAccessLists(),
                config.getIpSpaces()));
  }
private static Map<String, Map<String, Supplier<BDD>>> computeAclDenyBDDs(
Map<String, Map<String, Supplier<BDD>>> aclBDDs) {
return toImmutableMap(
aclBDDs,
Entry::getKey,
nodeEntry ->
toImmutableMap(
nodeEntry.getValue(),
Entry::getKey,
aclEntry -> Suppliers.memoize(() -> aclEntry.getValue().get().not())));
}
private Map<String, Map<String, Transition>> computeBDDIncomingTransformations() {
long start = System.currentTimeMillis();
Map<String, Map<String, Transition>> result =
toImmutableMap(
_configs,
Entry::getKey, /* node */
nodeEntry -> {
Configuration node = nodeEntry.getValue();
TransformationToTransition toTransition =
new TransformationToTransition(_bddPacket, ipAccessListToBddForNode(node));
return node.activeL3Interfaces()
.filter(Interface::canReceiveIpTraffic)
.collect(
ImmutableMap.toImmutableMap(
Interface::getName,
iface -> toTransition.toTransition(iface.getIncomingTransformation())));
});
long t = System.currentTimeMillis() - start;
LOGGER.info("computeBDDIncomingTransformations: {}ms", t);
return result;
}
private Map<String, Map<String, Transition>> computeBDDOutgoingTransformations() {
long start = System.currentTimeMillis();
Map<String, Map<String, Transition>> result =
toImmutableMap(
_configs,
Entry::getKey, /* node */
nodeEntry -> {
Configuration node = nodeEntry.getValue();
TransformationToTransition toTransition =
new TransformationToTransition(_bddPacket, ipAccessListToBddForNode(node));
return node.activeL3Interfaces()
.filter(Interface::canSendIpTraffic)
.collect(
ImmutableMap.toImmutableMap(
Interface::getName,
iface -> toTransition.toTransition(iface.getOutgoingTransformation())));
});
long t = System.currentTimeMillis() - start;
LOGGER.info("computeBDDOutgoingTransformations: {}ms", t);
return result;
}
private static @Nonnull Map<String, Map<String, BDD>> computeVrfForwardingBehaviorBDDs(
Map<String, Map<String, VrfForwardingBehavior>> vrfForwardingBehavior,
Function<VrfForwardingBehavior, IpSpace> ipSpaceGetter,
IpSpaceToBDD toBDD) {
return toImmutableMap(
vrfForwardingBehavior,
Entry::getKey, // node
nodeEntry ->
toImmutableMap(
nodeEntry.getValue(),
Entry::getKey, // vrf
vrfEntry -> toBDD.visit(ipSpaceGetter.apply(vrfEntry.getValue()))));
}
  /** Returns node --> vrf --> interface --> BDD of flows accepted by that interface. */
  Map<String, Map<String, Map<String, BDD>>> getIfaceAcceptBDDs() {
    return _ifaceAcceptBDDs;
  }
  /** Returns the single BDD bit allocated to track the required-transit-nodes constraint. */
  BDD getRequiredTransitNodeBDD() {
    return _requiredTransitNodeBDD;
  }
private static Map<String, Map<String, Map<org.batfish.datamodel.Edge, BDD>>>
computeArpTrueEdgeBDDs(
Map<String, Map<String, VrfForwardingBehavior>> vrfForwardingBehavior,
IpSpaceToBDD ipSpaceToBDD) {
return toImmutableMap(
vrfForwardingBehavior,
Entry::getKey, // node
nodeEntry ->
toImmutableMap(
nodeEntry.getValue(),
Entry::getKey, // vrf
vrfEntry ->
toImmutableMap(
vrfEntry.getValue().getArpTrueEdge(),
Entry::getKey,
edgeEntry -> ipSpaceToBDD.visit(edgeEntry.getValue()))));
}
private static Map<String, Map<String, Map<String, BDD>>> computeIfaceForwardingBehaviorBDDs(
Map<String, Map<String, VrfForwardingBehavior>> vrfForwardingBehavior,
Function<InterfaceForwardingBehavior, IpSpace> dispositionIpSpaceGetter,
IpSpaceToBDD ipSpaceToBDD) {
return toImmutableMap(
vrfForwardingBehavior,
Entry::getKey,
nodeEntry ->
toImmutableMap(
nodeEntry.getValue(),
Entry::getKey,
vrfEntry ->
toImmutableMap(
vrfEntry.getValue().getInterfaceForwardingBehavior(),
Entry::getKey,
ifaceEntry ->
ipSpaceToBDD.visit(
dispositionIpSpaceGetter.apply(ifaceEntry.getValue())))));
}
private Stream<Edge> generateRootEdges(Map<StateExpr, BDD> rootBdds) {
return Streams.concat(
generateRootEdges_OriginateInterfaceLink_PreInInterface(rootBdds),
generateRootEdges_OriginateVrf_PostInVrf(rootBdds),
generateRootEdges_OriginateInterface_PostInVrf(rootBdds));
}
private static Stream<Edge> generateQueryEdges(Set<FlowDisposition> actions) {
return actions.stream()
.map(
action -> {
switch (action) {
case ACCEPTED:
return new Edge(Accept.INSTANCE, Query.INSTANCE);
case DENIED_IN:
return new Edge(DropAclIn.INSTANCE, Query.INSTANCE);
case DENIED_OUT:
return new Edge(DropAclOut.INSTANCE, Query.INSTANCE);
case LOOP:
throw new BatfishException("FlowDisposition LOOP is unsupported");
case NEIGHBOR_UNREACHABLE:
return new Edge(NeighborUnreachable.INSTANCE, Query.INSTANCE);
case DELIVERED_TO_SUBNET:
return new Edge(DeliveredToSubnet.INSTANCE, Query.INSTANCE);
case EXITS_NETWORK:
return new Edge(ExitsNetwork.INSTANCE, Query.INSTANCE);
case INSUFFICIENT_INFO:
return new Edge(InsufficientInfo.INSTANCE, Query.INSTANCE);
case NO_ROUTE:
return new Edge(DropNoRoute.INSTANCE, Query.INSTANCE);
case NULL_ROUTED:
return new Edge(DropNullRoute.INSTANCE, Query.INSTANCE);
default:
throw new BatfishException("Unknown FlowDisposition " + action);
}
});
}
private Stream<Edge> generateRootEdges_OriginateInterfaceLink_PreInInterface(
Map<StateExpr, BDD> rootBdds) {
return rootBdds.entrySet().stream()
.filter(entry -> entry.getKey() instanceof OriginateInterfaceLink)
.map(
entry -> {
OriginateInterfaceLink originateInterfaceLink =
(OriginateInterfaceLink) entry.getKey();
String hostname = originateInterfaceLink.getHostname();
String iface = originateInterfaceLink.getInterface();
PreInInterface preInInterface = new PreInInterface(hostname, iface);
BDD rootBdd = entry.getValue();
return new Edge(
originateInterfaceLink,
preInInterface,
compose(
constraint(rootBdd),
addNoLastHopConstraint(_lastHopMgr, hostname, iface),
addSourceInterfaceConstraint(_bddSourceManagers.get(hostname), iface)));
});
}
@VisibleForTesting
Stream<Edge> generateRootEdges_OriginateVrf_PostInVrf(Map<StateExpr, BDD> rootBdds) {
return rootBdds.entrySet().stream()
.filter(entry -> entry.getKey() instanceof OriginateVrf)
.map(
entry -> {
OriginateVrf originateVrf = (OriginateVrf) entry.getKey();
String hostname = originateVrf.getHostname();
String vrf = originateVrf.getVrf();
PostInVrf postInVrf = new PostInVrf(hostname, vrf);
BDD rootBdd = entry.getValue();
// Keep this edge's transition in sync with transition of session-matching edge,
// defined in SessionScopeFibLookupSessionEdges#visitOriginatingSessionScope.
return new Edge(
originateVrf,
postInVrf,
compose(
addOriginatingFromDeviceConstraint(_bddSourceManagers.get(hostname)),
addOutgoingOriginalFlowFiltersConstraint(
_bddOutgoingOriginalFlowFilterManagers.get(hostname)),
constraint(rootBdd)));
});
}
@VisibleForTesting
Stream<Edge> generateRootEdges_OriginateInterface_PostInVrf(Map<StateExpr, BDD> rootBdds) {
return rootBdds.entrySet().stream()
.filter(e -> e.getKey() instanceof OriginateInterface)
.map(
e -> {
OriginateInterface state = (OriginateInterface) e.getKey();
String vrf = _interfacesToVrfsMap.get(state.getHostname()).get(state.getInterface());
PostInVrf postInVrf = new PostInVrf(state.getHostname(), vrf);
String hostname = state.getHostname();
return new Edge(
state,
postInVrf,
compose(
addOriginatingFromDeviceConstraint(_bddSourceManagers.get(hostname)),
addOutgoingOriginalFlowFiltersConstraint(
_bddOutgoingOriginalFlowFilterManagers.get(hostname)),
constraint(e.getValue())));
});
}
  /**
   * Generate edges from per-node / per-interface terminal states into each global disposition
   * state. Depends on the set of final nodes.
   */
  private Stream<Edge> generateDispositionEdges(Set<String> finalNodes) {
    return Streams.concat(
        generateRules_NodeAccept_Accept(finalNodes),
        generateRules_NodeDropAclIn_DropAclIn(finalNodes),
        generateRules_NodeDropAclOut_DropAclOut(finalNodes),
        generateRules_NodeDropNoRoute_DropNoRoute(finalNodes),
        generateRules_NodeDropNullRoute_DropNullRoute(finalNodes),
        generateRules_NodeInterfaceDeliveredToSubnet_DeliveredToSubnet(finalNodes),
        generateRules_NodeInterfaceExitsNetwork_ExitsNetwork(finalNodes),
        generateRules_NodeInterfaceInsufficientInfo_InsufficientInfo(finalNodes),
        generateRules_NodeInterfaceNeighborUnreachable_NeighborUnreachable(finalNodes));
  }
  /*
   * These edges do not depend on the query. Compute them separately so that we can later cache them
   * across queries if we want to.
   */
  private Stream<Edge> generateEdges() {
    return Streams.concat(
        // ingress filtering and packet policies
        generateRules_PreInInterface_NodeDropAclIn(),
        generateRules_PreInInterface_PostInInterface(),
        generateRules_PreInInterface_PacketPolicy(),
        generateRules_PostInInterface_NodeDropAclIn(),
        generateRules_PostInInterface_PostInVrf(),
        // egress filtering and NAT along topology edges
        generateRules_PreOutEdge_NodeDropAclOut(),
        generateRules_PreOutEdge_PreOutEdgePostNat(),
        generateRules_PreOutEdgePostNat_NodeDropAclOut(),
        generateRules_PreOutEdgePostNat_PreInInterface(),
        // ARP-failure dispositions and session setup
        generateRules_PreOutInterfaceDisposition_SetupSessionDisposition(),
        generateRules_SetupSessionDisposition_NodeInterfaceDisposition(),
        generateRules_PreOutInterfaceDisposition_NodeDropAclOut(),
        generateRules_VrfAccept_NodeAccept(),
        // FIB-driven forwarding edges
        generateFibRules());
  }
  /**
   * Delegates to the {@link BDDFibGenerator} built in the constructor, wiring in this analysis's
   * state constructors for the FIB-driven forwarding edges.
   */
  private @Nonnull Stream<Edge> generateFibRules() {
    return _bddFibGenerator.generateForwardingEdges(
        alwaysTrue(),
        PostInVrf::new,
        PreOutEdge::new,
        PreOutVrf::new,
        PreOutInterfaceDeliveredToSubnet::new,
        PreOutInterfaceExitsNetwork::new,
        PreOutInterfaceInsufficientInfo::new,
        PreOutInterfaceNeighborUnreachable::new);
  }
private Stream<Edge> generateRules_NodeAccept_Accept(Set<String> finalNodes) {
return finalNodes.stream().map(node -> new Edge(new NodeAccept(node), Accept.INSTANCE));
}
  /** For each final node, connects {@link NodeDropAclIn} to the global {@link DropAclIn} state. */
  private static Stream<Edge> generateRules_NodeDropAclIn_DropAclIn(Set<String> finalNodes) {
    return finalNodes.stream().map(node -> new Edge(new NodeDropAclIn(node), DropAclIn.INSTANCE));
  }
  /** For each final node, connects {@link NodeDropAclOut} to the global {@link DropAclOut} state. */
  private static Stream<Edge> generateRules_NodeDropAclOut_DropAclOut(Set<String> finalNodes) {
    return finalNodes.stream().map(node -> new Edge(new NodeDropAclOut(node), DropAclOut.INSTANCE));
  }
private Stream<Edge> generateRules_NodeDropNoRoute_DropNoRoute(Set<String> finalNodes) {
return finalNodes.stream()
/* In differential context, nodes can be added or removed. This can lead to a finalNode
* that doesn't exist in _configs.
*/
.filter(_configs::containsKey)
.map(
node ->
new Edge(
new NodeDropNoRoute(node),
DropNoRoute.INSTANCE,
removeNodeSpecificConstraints(
node,
_lastHopMgr,
_bddOutgoingOriginalFlowFilterManagers.get(node),
_bddSourceManagers.get(node))));
}
  /** For each final node, connects {@link NodeDropNullRoute} to the global {@link DropNullRoute}. */
  private static Stream<Edge> generateRules_NodeDropNullRoute_DropNullRoute(
      Set<String> finalNodes) {
    return finalNodes.stream()
        .map(node -> new Edge(new NodeDropNullRoute(node), DropNullRoute.INSTANCE));
  }
private Stream<Edge> generateRules_NodeInterfaceDisposition_Disposition(
BiFunction<String, String, StateExpr> nodeInterfaceDispositionConstructor,
StateExpr dispositionNode,
Set<String> finalNodes) {
return finalNodes.stream()
.map(_configs::get)
.filter(Objects::nonNull) // remove finalNodes that don't exist on this network
.flatMap(Configuration::activeL3Interfaces)
.filter(Interface::canSendIpTraffic)
.map(
iface -> {
String node = iface.getOwner().getHostname();
String ifaceName = iface.getName();
return new Edge(
nodeInterfaceDispositionConstructor.apply(node, ifaceName), dispositionNode);
});
}
  /** Connects per-node-interface NeighborUnreachable states to the global disposition state. */
  private Stream<Edge> generateRules_NodeInterfaceNeighborUnreachable_NeighborUnreachable(
      Set<String> finalNodes) {
    return generateRules_NodeInterfaceDisposition_Disposition(
        NodeInterfaceNeighborUnreachable::new, NeighborUnreachable.INSTANCE, finalNodes);
  }
  /** Connects per-node-interface DeliveredToSubnet states to the global disposition state. */
  private Stream<Edge> generateRules_NodeInterfaceDeliveredToSubnet_DeliveredToSubnet(
      Set<String> finalNodes) {
    return generateRules_NodeInterfaceDisposition_Disposition(
        NodeInterfaceDeliveredToSubnet::new, DeliveredToSubnet.INSTANCE, finalNodes);
  }
  /** Connects per-node-interface ExitsNetwork states to the global disposition state. */
  private Stream<Edge> generateRules_NodeInterfaceExitsNetwork_ExitsNetwork(
      Set<String> finalNodes) {
    return generateRules_NodeInterfaceDisposition_Disposition(
        NodeInterfaceExitsNetwork::new, ExitsNetwork.INSTANCE, finalNodes);
  }
  /** Connects per-node-interface InsufficientInfo states to the global disposition state. */
  private Stream<Edge> generateRules_NodeInterfaceInsufficientInfo_InsufficientInfo(
      Set<String> finalNodes) {
    return generateRules_NodeInterfaceDisposition_Disposition(
        NodeInterfaceInsufficientInfo::new, InsufficientInfo.INSTANCE, finalNodes);
  }
private Stream<Edge> generateRules_PostInInterface_NodeDropAclIn() {
return getAllL3Interfaces()
.filter(Interface::canReceiveIpTraffic)
.filter(iface -> iface.getPostTransformationIncomingFilter() != null)
.map(
i -> {
IpAccessList acl = i.getPostTransformationIncomingFilter();
String node = i.getOwner().getHostname();
String iface = i.getName();
BDD aclDenyBDD = ignorableAclDenyBDD(node, acl);
return new Edge(
new PostInInterface(node, iface),
new NodeDropAclIn(node),
compose(
constraint(aclDenyBDD),
removeNodeSpecificConstraints(
node,
_lastHopMgr,
_bddOutgoingOriginalFlowFilterManagers.get(node),
_bddSourceManagers.get(node))));
});
}
private Stream<Edge> generateRules_PostInInterface_PostInVrf() {
return getAllL3Interfaces()
.filter(Interface::canReceiveIpTraffic)
.map(
iface -> {
IpAccessList acl = iface.getPostTransformationIncomingFilter();
String nodeName = iface.getOwner().getHostname();
String vrfName = iface.getVrfName();
String ifaceName = iface.getName();
PostInInterface preState = new PostInInterface(nodeName, ifaceName);
PostInVrf postState = new PostInVrf(nodeName, vrfName);
BDD inAclBDD = ignorableAclPermitBDD(nodeName, acl);
return new Edge(preState, postState, constraint(inAclBDD));
});
}
@VisibleForTesting
Stream<Edge> generateRules_PreInInterface_NodeDropAclIn() {
return getAllL3Interfaces()
.filter(Interface::canReceiveIpTraffic)
.filter(iface -> iface.getPacketPolicyName() == null && iface.getIncomingFilter() != null)
.map(
i -> {
String node = i.getOwner().getHostname();
String iface = i.getName();
return new Edge(
new PreInInterface(node, iface),
new NodeDropAclIn(node),
compose(
constraint(ignorableAclDenyBDD(node, i.getIncomingFilter())),
removeNodeSpecificConstraints(
node,
_lastHopMgr,
_bddOutgoingOriginalFlowFilterManagers.get(node),
_bddSourceManagers.get(node))));
});
}
  /**
   * For each L3 interface with a packet policy, generates (1) the edge from {@link PreInInterface}
   * into the policy's start statement and (2) — once per (VRF, policy) pair — the policy's internal
   * edges plus the edges stitching its action states back into the main graph.
   */
  @VisibleForTesting
  Stream<Edge> generateRules_PreInInterface_PacketPolicy() {
    return _configs.values().stream()
        .flatMap(
            config -> {
              String nodeName = config.getHostname();
              // lazily-built cache: VRF name -> IPs routed out its interfaces
              Map<String, IpsRoutedOutInterfaces> ipsRoutedOutInterfaces = new HashMap<>();
              IpAccessListToBdd ipAccessListToBdd = ipAccessListToBddForNode(config);
              Transition addOutgoingOriginalFlowFiltersConstraint =
                  addOutgoingOriginalFlowFiltersConstraint(
                      _bddOutgoingOriginalFlowFilterManagers.get(nodeName));
              // tracks (vrf, policy) pairs whose internal edges were already generated;
              // Multimap.put returns false when the pair is already present
              Multimap<String, String> convertedPolicies = HashMultimap.create();
              return config
                  .activeL3Interfaces()
                  .filter(Interface::canReceiveIpTraffic)
                  .filter(iface -> iface.getPacketPolicyName() != null)
                  .flatMap(
                      iface -> {
                        String vrfName = iface.getVrfName();
                        String ifaceName = iface.getName();
                        String policyName = iface.getPacketPolicyName();
                        // edge entering the policy's first statement (statement index 0)
                        Edge enterPolicyEdge =
                            new Edge(
                                new PreInInterface(nodeName, ifaceName),
                                new PacketPolicyStatement(nodeName, vrfName, policyName, 0),
                                addOutgoingOriginalFlowFiltersConstraint);
                        if (!convertedPolicies.put(vrfName, policyName)) {
                          // the policy edges have been generated already
                          // only need to generate this edge into the policy.
                          return Stream.of(enterPolicyEdge);
                        }
                        long t = System.currentTimeMillis();
                        PacketPolicyToBdd.BddPacketPolicy bddPacketPolicy =
                            PacketPolicyToBdd.evaluate(
                                nodeName,
                                vrfName,
                                config.getPacketPolicies().get(policyName),
                                ipAccessListToBdd,
                                ipsRoutedOutInterfaces.computeIfAbsent(
                                    vrfName,
                                    (key) ->
                                        _ipsRoutesOutInterfacesFactory.getIpsRoutedOutInterfaces(
                                            nodeName, vrfName)));
                        t = System.currentTimeMillis() - t;
                        LOGGER.debug(
                            "converted packet policy on {}/{} to BDD: {}ms", nodeName, vrfName, t);
                        PacketPolicyActionToEdges actionToEdges =
                            new PacketPolicyActionToEdges(nodeName, policyName, vrfName);
                        return Streams.concat(
                            // enter PacketPolicy start state
                            Stream.of(enterPolicyEdge),
                            // all the internal packet policy edges
                            bddPacketPolicy.getEdges().stream(),
                            // stitch action states back into the main graph
                            bddPacketPolicy.getActions().stream()
                                .map(PacketPolicyAction::getAction)
                                .flatMap(actionToEdges::visit));
                      });
            });
  }
  /** BDD of flows denied by {@code acl} on {@code node}; the empty set if {@code acl} is null. */
  private @Nonnull BDD aclDenyBDD(String node, @Nullable IpAccessList acl) {
    return acl == null ? _zero : _aclDenyBDDs.get(node).get(acl.getName()).get();
  }
  /** BDD of flows permitted by {@code acl} on {@code node}; all flows if {@code acl} is null. */
  private @Nonnull BDD aclPermitBDD(String node, @Nullable IpAccessList acl) {
    return acl == null ? _one : _aclPermitBDDs.get(node).get(acl.getName()).get();
  }
  /** Like {@link #aclDenyBDD}, but the empty set when filters are ignored. */
  private @Nonnull BDD ignorableAclDenyBDD(String node, @Nullable IpAccessList acl) {
    return _ignoreFilters ? _zero : aclDenyBDD(node, acl);
  }
  /** Like {@link #aclPermitBDD}, but all flows when filters are ignored. */
  private @Nonnull BDD ignorableAclPermitBDD(String node, @Nullable IpAccessList acl) {
    return _ignoreFilters ? _one : aclPermitBDD(node, acl);
  }
  /**
   * Generates edges from {@link PreInInterface} to {@link PostInInterface} for every L3 interface
   * that can receive IP traffic and has no packet policy: permit by the incoming filter, record
   * the outgoing-original-flow-filter bookkeeping for this node, then apply the interface's
   * incoming transformation, in that order.
   */
  @VisibleForTesting
  Stream<Edge> generateRules_PreInInterface_PostInInterface() {
    return getAllL3Interfaces()
        .filter(Interface::canReceiveIpTraffic)
        // Policy-based routing edges handled elsewhere
        .filter(iface -> iface.getPacketPolicyName() == null)
        .map(
            iface -> {
              IpAccessList acl = iface.getIncomingFilter();
              String nodeName = iface.getOwner().getHostname();
              String ifaceName = iface.getName();
              PreInInterface preState = new PreInInterface(nodeName, ifaceName);
              PostInInterface postState = new PostInInterface(nodeName, ifaceName);
              // one (permit-everything) when there is no incoming filter or filters are ignored
              BDD inAclBDD = ignorableAclPermitBDD(nodeName, acl);
              Transition transition =
                  compose(
                      constraint(inAclBDD),
                      addOutgoingOriginalFlowFiltersConstraint(
                          _bddOutgoingOriginalFlowFilterManagers.get(nodeName)),
                      _bddIncomingTransformations.get(nodeName).get(ifaceName));
              return new Edge(preState, postState, transition);
            });
  }
/**
* Restricts to flows that leave this device via the given interface. Important for the
* out-of-order flow computation when outgoing original flow filter is present on any interface on
* the node.
*/
private BDD flowsLeavingInterface(String node, String iface) {
return _bddOutgoingOriginalFlowFilterManagers.get(node).outgoingInterfaceBDD(iface);
}
  /**
   * Generates edges from {@link PreOutEdge} to {@link NodeDropAclOut} for flows denied by the
   * pre-transformation outgoing filter of the sending interface. Produces nothing when filters
   * are ignored. Node-specific constraints are erased since the flow terminates at this node.
   */
  private Stream<Edge> generateRules_PreOutEdge_NodeDropAclOut() {
    if (_ignoreFilters) {
      return Stream.of();
    }
    return _topologyEdges.stream()
        .flatMap(
            edge -> {
              String node1 = edge.getNode1();
              String iface1 = edge.getInt1();
              String node2 = edge.getNode2();
              String iface2 = edge.getInt2();
              Interface i1 = _configs.get(node1).getAllInterfaces().get(iface1);
              assert i1.canSendIpTraffic();
              IpAccessList preNatAcl = i1.getPreTransformationOutgoingFilter();
              BDD denyPreNat = ignorableAclDenyBDD(node1, preNatAcl);
              if (denyPreNat.isZero()) {
                // no flow can be denied here; skip the edge entirely
                return Stream.of();
              }
              return Stream.of(
                  new Edge(
                      new PreOutEdge(node1, iface1, node2, iface2),
                      new NodeDropAclOut(node1),
                      compose(
                          constraint(denyPreNat),
                          removeNodeSpecificConstraints(
                              node1,
                              _lastHopMgr,
                              _bddOutgoingOriginalFlowFilterManagers.get(node1),
                              _bddSourceManagers.get(node1)))));
            });
  }
  /**
   * Generates edges from {@link PreOutEdge} to {@link PreOutEdgePostNat}: flows permitted by the
   * pre-transformation outgoing filter have the interface's outgoing transformation applied.
   */
  private Stream<Edge> generateRules_PreOutEdge_PreOutEdgePostNat() {
    return _topologyEdges.stream()
        .flatMap(
            edge -> {
              String node1 = edge.getNode1();
              String iface1 = edge.getInt1();
              String node2 = edge.getNode2();
              String iface2 = edge.getInt2();
              Interface i1 = _configs.get(node1).getAllInterfaces().get(iface1);
              assert i1.canSendIpTraffic();
              IpAccessList preNatAcl = i1.getPreTransformationOutgoingFilter();
              BDD aclPermit = ignorableAclPermitBDD(node1, preNatAcl);
              if (aclPermit.isZero()) {
                // filter denies everything; no flow can traverse this edge
                return Stream.of();
              }
              PreOutEdge preState = new PreOutEdge(node1, iface1, node2, iface2);
              PreOutEdgePostNat postState = new PreOutEdgePostNat(node1, iface1, node2, iface2);
              return Stream.of(
                  new Edge(
                      preState,
                      postState,
                      compose(
                          constraint(aclPermit),
                          _bddOutgoingTransformations.get(node1).get(iface1))));
            });
  }
  /**
   * Generates edges from {@link PreOutEdgePostNat} to {@link NodeDropAclOut} for flows denied by
   * either the post-transformation outgoing filter or the outgoing original-flow filter of the
   * sending interface. Produces nothing when filters are ignored.
   */
  @VisibleForTesting
  Stream<Edge> generateRules_PreOutEdgePostNat_NodeDropAclOut() {
    if (_ignoreFilters) {
      return Stream.of();
    }
    return _topologyEdges.stream()
        .flatMap(
            edge -> {
              String node1 = edge.getNode1();
              String iface1 = edge.getInt1();
              String node2 = edge.getNode2();
              String iface2 = edge.getInt2();
              Interface i1 = _configs.get(node1).getAllInterfaces().get(iface1);
              assert i1.canSendIpTraffic();
              IpAccessList acl = i1.getOutgoingFilter();
              BDD aclDenyBDD = ignorableAclDenyBDD(node1, acl);
              BDDOutgoingOriginalFlowFilterManager originalFlowFilterMgr =
                  _bddOutgoingOriginalFlowFilterManagers.get(node1);
              BDD originalFlowAclDenyBdd =
                  originalFlowFilterMgr.deniedByOriginalFlowEgressFilter(iface1);
              // dropped if denied by either the post-NAT egress ACL or the original-flow ACL
              BDD denyBdd = aclDenyBDD.or(originalFlowAclDenyBdd);
              return denyBdd.isZero()
                  ? Stream.of()
                  : Stream.of(
                      new Edge(
                          new PreOutEdgePostNat(node1, iface1, node2, iface2),
                          new NodeDropAclOut(node1),
                          compose(
                              constraint(denyBdd),
                              removeNodeSpecificConstraints(
                                  node1,
                                  _lastHopMgr,
                                  _bddOutgoingOriginalFlowFilterManagers.get(node1),
                                  _bddSourceManagers.get(node1)))));
            });
  }
  /**
   * Generates the node-to-node hop edges: {@link PreOutEdgePostNat} on the sender to {@link
   * PreInInterface} on the receiver. Flows must be permitted by both the sender's outgoing filter
   * and its outgoing original-flow filter; the sender's node-specific constraints are erased and
   * the receiver's source-interface and last-hop constraints are installed.
   */
  @VisibleForTesting
  Stream<Edge> generateRules_PreOutEdgePostNat_PreInInterface() {
    return _topologyEdges.stream()
        .map(
            edge -> {
              String node1 = edge.getNode1();
              String iface1 = edge.getInt1();
              String node2 = edge.getNode2();
              String iface2 = edge.getInt2();
              Interface i1 = _configs.get(node1).getAllInterfaces().get(iface1);
              assert i1.canSendIpTraffic();
              BDD aclPermitBDD = ignorableAclPermitBDD(node1, i1.getOutgoingFilter());
              BDDOutgoingOriginalFlowFilterManager originalFlowFilterMgr =
                  _bddOutgoingOriginalFlowFilterManagers.get(node1);
              BDD originalFlowAclPermitBdd =
                  originalFlowFilterMgr.permittedByOriginalFlowEgressFilter(iface1);
              return new Edge(
                  new PreOutEdgePostNat(node1, iface1, node2, iface2),
                  new PreInInterface(node2, iface2),
                  compose(
                      constraint(aclPermitBDD.and(originalFlowAclPermitBdd)),
                      removeNodeSpecificConstraints(
                          node1,
                          _lastHopMgr,
                          _bddOutgoingOriginalFlowFilterManagers.get(node1),
                          _bddSourceManagers.get(node1)),
                      addSourceInterfaceConstraint(_bddSourceManagers.get(node2), iface2),
                      addLastHopConstraint(_lastHopMgr, node1, iface1, node2, iface2)));
            });
  }
  /**
   * Generates edges from each ARP-failure disposition state ({@code PreOutInterface*}) to {@link
   * NodeDropAclOut}, for flows denied by any of the three egress filters (original-flow,
   * pre-transformation, or post-transformation). Only interfaces that actually have at least one
   * of those filters produce edges. Produces nothing when filters are ignored.
   */
  @VisibleForTesting
  Stream<Edge> generateRules_PreOutInterfaceDisposition_NodeDropAclOut() {
    if (_ignoreFilters) {
      return Stream.of();
    }
    return _configs.entrySet().stream()
        .flatMap(
            nodeEntry -> {
              String node = nodeEntry.getKey();
              BDDOutgoingOriginalFlowFilterManager originalFlowFilterMgr =
                  _bddOutgoingOriginalFlowFilterManagers.get(node);
              return nodeEntry.getValue().getVrfs().entrySet().stream()
                  .flatMap(
                      vrfEntry -> {
                        StateExpr postState = new NodeDropAclOut(node);
                        return nodeEntry
                            .getValue()
                            .activeL3Interfaces()
                            .filter(Interface::canSendIpTraffic)
                            // interfaces with no egress filters cannot drop anything
                            .filter(
                                iface ->
                                    iface.getOutgoingOriginalFlowFilter() != null
                                        || iface.getPreTransformationOutgoingFilter() != null
                                        || iface.getOutgoingFilter() != null)
                            .flatMap(
                                iface -> {
                                  String ifaceName = iface.getName();
                                  BDD denyOriginalFlowBdd =
                                      originalFlowFilterMgr.deniedByOriginalFlowEgressFilter(
                                          ifaceName);
                                  BDD denyPreAclBDD =
                                      ignorableAclDenyBDD(
                                          node, iface.getPreTransformationOutgoingFilter());
                                  BDD denyPostAclBDD =
                                      ignorableAclDenyBDD(node, iface.getOutgoingFilter());
                                  Transition transformation =
                                      _bddOutgoingTransformations.get(node).get(ifaceName);
                                  // DENIED_OUT can be due to any of:
                                  // - denied by the outgoingOriginalFlowFilter
                                  // - denied by the pre-Transformation ACL
                                  // - transformed and denied by the post-Transformation ACL
                                  Transition deniedFlows =
                                      branch(
                                          // branch on whether denied before transformation
                                          denyOriginalFlowBdd.or(denyPreAclBDD),
                                          // deny all flows denied by pre-trans ACLs
                                          IDENTITY,
                                          // for flows permitted by first two ACLs, transform and
                                          // then apply the post-trans ACL. deny any that are denied
                                          // by the post-trans ACL.
                                          compose(transformation, constraint(denyPostAclBDD)));
                                  // Clear any node-specific constraints before exiting the node.
                                  Transition transition =
                                      compose(
                                          deniedFlows,
                                          removeNodeSpecificConstraints(
                                              node,
                                              _lastHopMgr,
                                              _bddOutgoingOriginalFlowFilterManagers.get(node),
                                              _bddSourceManagers.get(node)));
                                  // same drop transition from each of the four disposition states
                                  return Stream.of(
                                          new PreOutInterfaceDeliveredToSubnet(node, ifaceName),
                                          new PreOutInterfaceExitsNetwork(node, ifaceName),
                                          new PreOutInterfaceInsufficientInfo(node, ifaceName),
                                          new PreOutInterfaceNeighborUnreachable(node, ifaceName))
                                      .map(preState -> new Edge(preState, postState, transition));
                                });
                      });
            });
  }
@Nonnull
private Stream<Edge> generateRules_VrfAccept_NodeAccept() {
return _ifaceAcceptBDDs.entrySet().stream()
.flatMap(
nodeEntry -> {
String hostname = nodeEntry.getKey();
Transition removeNodeSpecificConstraints =
removeNodeSpecificConstraints(
hostname,
_lastHopMgr,
_bddOutgoingOriginalFlowFilterManagers.get(hostname),
_bddSourceManagers.get(hostname));
StateExpr nodeAccept = new NodeAccept(hostname);
return nodeEntry.getValue().keySet().stream() // vrf names
.map(
vrf ->
new Edge(
new VrfAccept(hostname, vrf),
nodeAccept,
removeNodeSpecificConstraints));
});
}
  /**
   * Generates edges out of the ARP-failure disposition states: DeliveredToSubnet and ExitsNetwork
   * go to their session-setup states, while InsufficientInfo and NeighborUnreachable (for which
   * no session is set up) go directly to their NodeInterface disposition states.
   */
  @VisibleForTesting
  Stream<Edge> generateRules_PreOutInterfaceDisposition_SetupSessionDisposition() {
    return getAllL3Interfaces()
        .filter(Interface::canSendIpTraffic)
        .flatMap(
            iface -> {
              String node = iface.getOwner().getHostname();
              String ifaceName = iface.getName();
              BDD permitBeforeNatBDD =
                  ignorableAclPermitBDD(node, iface.getPreTransformationOutgoingFilter());
              BDD permitAfterNatBDD = ignorableAclPermitBDD(node, iface.getOutgoingFilter());
              Transition outgoingTransformation =
                  _bddOutgoingTransformations.get(node).get(ifaceName);
              if (permitBeforeNatBDD.isZero() || permitAfterNatBDD.isZero()) {
                // one of the filters denies everything; no flow can exit this interface
                return Stream.of();
              }
              BDDOutgoingOriginalFlowFilterManager originalFlowFilterMgr =
                  _bddOutgoingOriginalFlowFilterManagers.get(node);
              BDD permitOriginalFlowBdd =
                  originalFlowFilterMgr.permittedByOriginalFlowEgressFilter(ifaceName);
              /* 1. pre-transformation filter
               * 2. outgoingOriginalFlowFilter (also constrains to interface)
               * can be applied in any order, so do it before transformation for
               * less forward work.
               * 3. outgoing transformation
               * 4. post-transformation filter
               */
              Transition sessionTransition =
                  compose(
                      constraint(permitBeforeNatBDD.and(permitOriginalFlowBdd)),
                      outgoingTransformation,
                      constraint(permitAfterNatBDD));
              if (sessionTransition == ZERO) {
                // composed transition admits no flows
                return Stream.of();
              }
              /* For failure dispositions we don't setup sessions. So do 1-4 above, then:
               * 5. erase node-specific constraints
               */
              Transition nonSessionTransition =
                  compose(
                      sessionTransition,
                      removeNodeSpecificConstraints(
                          node, _lastHopMgr, originalFlowFilterMgr, _bddSourceManagers.get(node)));
              return Stream.of(
                  new Edge(
                      new PreOutInterfaceDeliveredToSubnet(node, ifaceName),
                      new SetupSessionDeliveredToSubnet(node, ifaceName),
                      sessionTransition),
                  new Edge(
                      new PreOutInterfaceExitsNetwork(node, ifaceName),
                      new SetupSessionExitsNetwork(node, ifaceName),
                      sessionTransition),
                  new Edge(
                      new PreOutInterfaceInsufficientInfo(node, ifaceName),
                      // skip setup session for II
                      new NodeInterfaceInsufficientInfo(node, ifaceName),
                      nonSessionTransition),
                  new Edge(
                      new PreOutInterfaceNeighborUnreachable(node, ifaceName),
                      // skip setup session for NU
                      new NodeInterfaceNeighborUnreachable(node, ifaceName),
                      nonSessionTransition));
            });
  }
  /**
   * Generates edges from the session-setup states to the corresponding NodeInterface disposition
   * states, erasing node-specific constraints since the flow terminates at this node.
   */
  @VisibleForTesting
  Stream<Edge> generateRules_SetupSessionDisposition_NodeInterfaceDisposition() {
    return _configs.values().stream()
        .flatMap(
            c -> {
              String node = c.getHostname();
              Transition transition =
                  removeNodeSpecificConstraints(
                      node,
                      _lastHopMgr,
                      _bddOutgoingOriginalFlowFilterManagers.get(node),
                      _bddSourceManagers.get(node));
              /* We only setup sessions for successful ARP failure dispositions (i.e.
               * DeliveredToSubnet and ExitsNetwork).
               * see generateRules_PreOutInterfaceDisposition_SetupSessionDisposition
               */
              return c.activeInterfaces()
                  .filter(Interface::canSendIpTraffic)
                  .flatMap(
                      iface -> {
                        String ifaceName = iface.getName();
                        return Stream.of(
                            new Edge(
                                new SetupSessionDeliveredToSubnet(node, ifaceName),
                                new NodeInterfaceDeliveredToSubnet(node, ifaceName),
                                transition),
                            new Edge(
                                new SetupSessionExitsNetwork(node, ifaceName),
                                new NodeInterfaceExitsNetwork(node, ifaceName),
                                transition));
                      });
            });
  }
  /** Reachability analysis with default parameters and VRF (not interface) origination roots. */
  public BDDReachabilityAnalysis bddReachabilityAnalysis(IpSpaceAssignment srcIpSpaceAssignment) {
    return bddReachabilityAnalysis(srcIpSpaceAssignment, false);
  }
  /**
   * Reachability analysis with default parameters: unconstrained destination, no transit-node
   * restrictions, all nodes final, and only the {@link FlowDisposition#ACCEPTED} disposition.
   *
   * @param useInterfaceRoots whether to root the analysis at {@link OriginateInterface} rather
   *     than {@link OriginateVrf} states
   */
  public BDDReachabilityAnalysis bddReachabilityAnalysis(
      IpSpaceAssignment srcIpSpaceAssignment, boolean useInterfaceRoots) {
    return bddReachabilityAnalysis(
        srcIpSpaceAssignment,
        matchDst(UniverseIpSpace.INSTANCE),
        ImmutableSet.of(),
        ImmutableSet.of(),
        _configs.keySet(),
        ImmutableSet.of(FlowDisposition.ACCEPTED),
        useInterfaceRoots);
  }
public BDDLoopDetectionAnalysis bddLoopDetectionAnalysis(IpSpaceAssignment srcIpSpaceAssignment) {
Map<StateExpr, BDD> ingressLocationStates = rootConstraints(srcIpSpaceAssignment, _one, false);
Stream<Edge> edges = Stream.concat(generateEdges(), generateRootEdges(ingressLocationStates));
return new BDDLoopDetectionAnalysis(_bddPacket, edges, ingressLocationStates.keySet());
}
/**
* Given a set of parameters finds a {@link Map} of {@link IngressLocation}s to {@link BDD}s while
* including the results for {@link FlowDisposition#LOOP} if required
*
* @param srcIpSpaceAssignment An assignment of active source locations to the corresponding
* source {@link IpSpace}.
* @param initialHeaderSpace The initial headerspace (i.e. before any packet transformations).
* @param forbiddenTransitNodes A set of hostnames that must not be transited.
* @param requiredTransitNodes A set of hostnames of which one must be transited.
* @param finalNodes Find flows that stop at one of these nodes.
* @param actions Find flows for which at least one trace has one of these actions.
* @return {@link Map} of {@link IngressLocation}s to {@link BDD}s
*/
public Map<IngressLocation, BDD> getAllBDDs(
IpSpaceAssignment srcIpSpaceAssignment,
AclLineMatchExpr initialHeaderSpace,
Set<String> forbiddenTransitNodes,
Set<String> requiredTransitNodes,
Set<String> finalNodes,
Set<FlowDisposition> actions) {
checkArgument(!actions.isEmpty(), "No actions");
Set<FlowDisposition> nonLoopActions = new HashSet<>(actions);
boolean loopIncluded = nonLoopActions.remove(LOOP);
if (nonLoopActions.isEmpty()) {
// since actions is not empty, loopIncluded must be true. Thus just detect loops
return bddLoopDetectionAnalysis(srcIpSpaceAssignment).detectLoops();
} else if (!loopIncluded) {
// only reachability, no loop detection
return bddReachabilityAnalysis(
srcIpSpaceAssignment,
initialHeaderSpace,
forbiddenTransitNodes,
requiredTransitNodes,
finalNodes,
nonLoopActions)
.getIngressLocationReachableBDDs();
} else {
// both reachability and loop detection
return bddReachabilityAndLoopDetectionAnalysis(
srcIpSpaceAssignment,
initialHeaderSpace,
forbiddenTransitNodes,
requiredTransitNodes,
finalNodes,
nonLoopActions)
.getIngressLocationBdds();
}
}
  /**
   * Builds a combined reachability and loop-detection analysis. The core graph edges and root
   * edges are generated once and shared by both analyses; only the reachability analysis gets the
   * disposition/query edges and the transit-node instrumentation.
   */
  private BDDReachabilityAndLoopDetectionAnalysis bddReachabilityAndLoopDetectionAnalysis(
      IpSpaceAssignment srcIpSpaceAssignment,
      AclLineMatchExpr initialHeaderSpace,
      Set<String> forbiddenTransitNodes,
      Set<String> requiredTransitNodes,
      Set<String> finalNodes,
      Set<FlowDisposition> actions) {
    BDD initialHeaderSpaceBdd = computeInitialHeaderSpaceBdd(initialHeaderSpace);
    BDD finalHeaderSpaceBdd = computeFinalHeaderSpaceBdd(initialHeaderSpaceBdd);
    Map<StateExpr, BDD> roots = rootConstraints(srcIpSpaceAssignment, initialHeaderSpaceBdd, false);
    // materialize the shared edges, since they feed two separate analyses
    List<Edge> sharedEdges =
        Stream.concat(generateEdges(), generateRootEdges(roots)).collect(Collectors.toList());
    Stream<Edge> reachabilityEdges =
        Streams.concat(
            sharedEdges.stream(),
            generateDispositionEdges(finalNodes),
            generateQueryEdges(actions));
    reachabilityEdges = instrumentForbiddenTransitNodes(forbiddenTransitNodes, reachabilityEdges);
    reachabilityEdges = instrumentRequiredTransitNodes(requiredTransitNodes, reachabilityEdges);
    BDDLoopDetectionAnalysis loopDetectionAnalysis =
        new BDDLoopDetectionAnalysis(_bddPacket, sharedEdges.stream(), roots.keySet());
    BDDReachabilityAnalysis reachabilityAnalysis =
        new BDDReachabilityAnalysis(
            _bddPacket, roots.keySet(), reachabilityEdges, finalHeaderSpaceBdd);
    return new BDDReachabilityAndLoopDetectionAnalysis(reachabilityAnalysis, loopDetectionAnalysis);
  }
  /**
   * Create a {@link BDDReachabilityAnalysis} with the specified parameters.
   *
   * <p>Delegates to the full overload with {@code useInterfaceRoots = false}, i.e. analysis is
   * rooted at {@link OriginateVrf} states.
   *
   * @param srcIpSpaceAssignment An assignment of active source locations to the corresponding
   *     source {@link IpSpace}.
   * @param initialHeaderSpace The initial headerspace (i.e. before any packet transformations).
   * @param forbiddenTransitNodes A set of hostnames that must not be transited.
   * @param requiredTransitNodes A set of hostnames of which one must be transited.
   * @param finalNodes Find flows that stop at one of these nodes.
   * @param actions Find flows for which at least one trace has one of these actions.
   */
  @VisibleForTesting
  public BDDReachabilityAnalysis bddReachabilityAnalysis(
      IpSpaceAssignment srcIpSpaceAssignment,
      AclLineMatchExpr initialHeaderSpace,
      Set<String> forbiddenTransitNodes,
      Set<String> requiredTransitNodes,
      Set<String> finalNodes,
      Set<FlowDisposition> actions) {
    return bddReachabilityAnalysis(
        srcIpSpaceAssignment,
        initialHeaderSpace,
        forbiddenTransitNodes,
        requiredTransitNodes,
        finalNodes,
        actions,
        false);
  }
  /**
   * Create a {@link BDDReachabilityAnalysis} with the specified parameters.
   *
   * @param srcIpSpaceAssignment An assignment of active source locations to the corresponding
   *     source {@link IpSpace}.
   * @param initialHeaderSpace The initial headerspace (i.e. before any packet transformations).
   * @param forbiddenTransitNodes A set of hostnames that must not be transited.
   * @param requiredTransitNodes A set of hostnames of which one must be transited.
   * @param finalNodes Find flows that stop at one of these nodes.
   * @param actions Find flows for which at least one trace has one of these actions.
   * @param useInterfaceRoots Whether to perform analysis with {@link OriginateInterface} roots
   *     rather than {@link OriginateVrf}.
   */
  @VisibleForTesting
  public BDDReachabilityAnalysis bddReachabilityAnalysis(
      IpSpaceAssignment srcIpSpaceAssignment,
      AclLineMatchExpr initialHeaderSpace,
      Set<String> forbiddenTransitNodes,
      Set<String> requiredTransitNodes,
      Set<String> finalNodes,
      Set<FlowDisposition> actions,
      boolean useInterfaceRoots) {
    checkArgument(!finalNodes.isEmpty(), "final nodes cannot be empty");
    BDD initialHeaderSpaceBdd = computeInitialHeaderSpaceBdd(initialHeaderSpace);
    // widen to account for any transformations the network may apply
    BDD finalHeaderSpaceBdd = computeFinalHeaderSpaceBdd(initialHeaderSpaceBdd);
    Map<StateExpr, BDD> roots =
        rootConstraints(srcIpSpaceAssignment, initialHeaderSpaceBdd, useInterfaceRoots);
    Stream<Edge> edgeStream =
        Streams.concat(
            generateEdges(),
            generateRootEdges(roots),
            generateDispositionEdges(finalNodes),
            generateQueryEdges(actions));
    // instrumentation must wrap the complete edge stream
    edgeStream = instrumentForbiddenTransitNodes(forbiddenTransitNodes, edgeStream);
    edgeStream = instrumentRequiredTransitNodes(requiredTransitNodes, edgeStream);
    return new BDDReachabilityAnalysis(_bddPacket, roots.keySet(), edgeStream, finalHeaderSpaceBdd);
  }
private BDD computeInitialHeaderSpaceBdd(AclLineMatchExpr initialHeaderSpace) {
IpAccessListToBdd ipAccessListToBdd =
new IpAccessListToBddImpl(
_bddPacket, BDDSourceManager.empty(_bddPacket), ImmutableMap.of(), ImmutableMap.of());
return ipAccessListToBdd.toBdd(initialHeaderSpace);
}
/**
* Compute the space of possible final headers, under the assumption that any NAT rule may be
* applied.
*/
public BDD computeFinalHeaderSpaceBdd(BDD initialHeaderSpaceBdd) {
BDD finalHeaderSpace = initialHeaderSpaceBdd;
if (finalHeaderSpace.testsVars(_dstIpVars)) {
// there's a constraint on dst Ip, so include nat pool Ips
BDD dstTransformationRange = _transformationIpRanges.get(IpField.DESTINATION);
if (dstTransformationRange != null) {
// dst IP is either the initial one, or one of that NAT pool IPs.
BDD noDstIp = finalHeaderSpace.exist(_dstIpVars);
finalHeaderSpace = finalHeaderSpace.or(noDstIp.and(dstTransformationRange));
}
}
if (finalHeaderSpace.testsVars(_sourceIpVars)) {
// there's a constraint on source Ip, so include nat pool Ips
BDD srcNatPoolIps = _transformationIpRanges.getOrDefault(IpField.SOURCE, _zero);
if (!srcNatPoolIps.isZero()) {
/*
* In this case, since source IPs usually don't play a huge role in routing, we could just
* existentially quantify away the constraint. There's a performance trade-off: tighter
* constraints prune more paths, but are more expensive to operate on.
*/
BDD noSrcIp = finalHeaderSpace.exist(_sourceIpVars);
finalHeaderSpace = finalHeaderSpace.or(noSrcIp.and(srcNatPoolIps));
}
}
if (finalHeaderSpace.testsVars(_dstPortVars)) {
BDD dstTransformationRange = _transformationPortRanges.get(PortField.DESTINATION);
if (dstTransformationRange != null) {
BDD noDstPort = finalHeaderSpace.exist(_dstPortVars);
finalHeaderSpace = finalHeaderSpace.or(noDstPort.and(dstTransformationRange));
}
}
if (finalHeaderSpace.testsVars(_sourcePortVars)) {
BDD srcNatPool = _transformationPortRanges.getOrDefault(PortField.SOURCE, _zero);
if (!srcNatPool.isZero()) {
BDD noSrcPort = finalHeaderSpace.exist(_sourcePortVars);
finalHeaderSpace = finalHeaderSpace.or(noSrcPort.and(srcNatPool));
}
}
return finalHeaderSpace;
}
  /**
   * Create a {@link BDDReachabilityAnalysis} with the given {@link BDDFirewallSessionTraceInfo
   * sessions}.
   *
   * @param returnPassOrigBdds Sets of packets at different locations to originate packets from in
   *     the return pass graph.
   * @param returnPassSuccessBdds Sets of packets at different locations in the return pass graph
   *     that are to be considered successful (and not failure). See {@link
   *     BidirectionalReachabilityReturnPassInstrumentation}.
   * @param initializedSessions Sessions (keyed by hostname) set up by the forward pass, used to
   *     instrument the return-pass graph.
   * @param forbiddenTransitNodes A set of hostnames that must not be transited.
   * @param requiredTransitNodes A set of hostnames of which one must be transited.
   * @param dispositions The (failure) dispositions queried in the return pass.
   */
  BDDReachabilityAnalysis bddReachabilityAnalysis(
      Map<StateExpr, BDD> returnPassOrigBdds,
      Map<StateExpr, BDD> returnPassSuccessBdds,
      Map<String, List<BDDFirewallSessionTraceInfo>> initializedSessions,
      Set<String> forbiddenTransitNodes,
      Set<String> requiredTransitNodes,
      Set<FlowDisposition> dispositions) {
    /* We will use the return pass reachability graph a bit differently than usual: to find flows
     * that successfully return to the origination point of the forward flow, we'll look at states
     * like NodeInterfaceDeliveredToSubnet, rather than adding edges all the way to Query. Also,
     * to find return flows that fail, we add the usual query edges for failure dispositions, and
     * then search backward from Query to the origination points, which are the termination points
     * of the forward pass, and then in the forward reachability graph we'll propagate failing flows
     * from the termination points back to the forward pass origination points.
     *
     * This backward search would work for successful flows as well, but requires an additional
     * graph traversal (two backward searches instead of one forward search). The advantage is that
     * it would work in cases where the forward and return flow don't match (if that's possible).
     */
    Stream<Edge> returnPassEdges =
        Streams.concat(
            instrumentReturnPassEdges(
                returnPassSuccessBdds,
                sessionInstrumentation(
                    _bddPacket,
                    _configs,
                    _bddSourceManagers,
                    _lastHopMgr,
                    _bddOutgoingOriginalFlowFilterManagers,
                    _aclPermitBDDs,
                    Stream.concat(generateEdges(), generateDispositionEdges(_configs.keySet())),
                    initializedSessions,
                    _bddFibGenerator)),
            generateRootEdges(returnPassOrigBdds),
            generateQueryEdges(dispositions));
    returnPassEdges = instrumentForbiddenTransitNodes(forbiddenTransitNodes, returnPassEdges);
    returnPassEdges = instrumentRequiredTransitNodes(requiredTransitNodes, returnPassEdges);
    // no final headerspace constraint (_one) for the return pass
    return new BDDReachabilityAnalysis(
        _bddPacket, returnPassOrigBdds.keySet(), returnPassEdges, _one);
  }
/**
* Computes the set of values into which the various parts of a packet may be transformed.
*
* <p>For example, {@code rangeComputer.getIpRanges().get(DESTINATION)} will return all
* post-transformation destination IPs, if destination NAT is present in the network.
*
* <p>In networks without NAT, the returned maps will be empty.
*/
  class RangeComputer implements TransformationStepVisitor<Void>, StatementVisitor<Void> {
    // accumulated post-transformation value ranges, per rewritten field
    private final Map<IpField, BDD> _ipRanges;
    private final Map<PortField, BDD> _portRanges;

    public RangeComputer() {
      _ipRanges = new HashMap<>();
      _portRanges = new HashMap<>();
    }

    /** Accumulated post-transformation IP value ranges, keyed by rewritten field. */
    public Map<IpField, BDD> getIpRanges() {
      return ImmutableMap.copyOf(_ipRanges);
    }

    /** Accumulated post-transformation port value ranges, keyed by rewritten field. */
    public Map<PortField, BDD> getPortRanges() {
      return ImmutableMap.copyOf(_portRanges);
    }

    // selects the BDD converter for the IP variables of the given field
    private IpSpaceToBDD getIpSpaceToBDD(IpField ipField) {
      switch (ipField) {
        case DESTINATION:
          return _dstIpSpaceToBDD;
        case SOURCE:
          return _srcIpSpaceToBDD;
        default:
          throw new IllegalArgumentException("Unknown IpField " + ipField);
      }
    }

    // selects the BDD integer variable for the given port field
    private BDDInteger getPortVar(PortField portField) {
      switch (portField) {
        case DESTINATION:
          return _bddPacket.getDstPort();
        case SOURCE:
          return _bddPacket.getSrcPort();
        default:
          throw new IllegalArgumentException("Unknown PortField " + portField);
      }
    }

    @Override
    public Void visitAssignIpAddressFromPool(AssignIpAddressFromPool assignIpAddressFromPool) {
      IpField ipField = assignIpAddressFromPool.getIpField();
      BDDInteger var = getIpSpaceToBDD(ipField).getBDDInteger();
      // union of all pool ranges; each range is closed on both ends
      BDD bdd =
          assignIpAddressFromPool.getIpRanges().asRanges().stream()
              .map(
                  range -> {
                    assert range.lowerBoundType() == BoundType.CLOSED
                        && range.upperBoundType() == BoundType.CLOSED;
                    return var.range(
                        range.lowerEndpoint().asLong(), range.upperEndpoint().asLong());
                  })
              .reduce(var.getFactory().zero(), BDD::or);
      _ipRanges.merge(ipField, bdd, BDD::or);
      return null;
    }

    @Override
    public Void visitNoop(Noop noop) {
      // Noop does not transform anything
      return null;
    }

    @Override
    public Void visitShiftIpAddressIntoSubnet(ShiftIpAddressIntoSubnet shiftIpAddressIntoSubnet) {
      IpField ipField = shiftIpAddressIntoSubnet.getIpField();
      // result can be anywhere in the target subnet
      BDD bdd = getIpSpaceToBDD(ipField).toBDD(shiftIpAddressIntoSubnet.getSubnet());
      _ipRanges.merge(ipField, bdd, BDD::or);
      return null;
    }

    @Override
    public Void visitAssignPortFromPool(AssignPortFromPool assignPortFromPool) {
      PortField portField = assignPortFromPool.getPortField();
      BDDInteger var = getPortVar(portField);
      BDD bdd = var.range(assignPortFromPool.getPoolStart(), assignPortFromPool.getPoolEnd());
      _portRanges.merge(portField, bdd, BDD::or);
      return null;
    }

    @Override
    public Void visitApplyAll(ApplyAll applyAll) {
      // every step may be applied, so visit them all
      applyAll.getSteps().forEach(step -> step.accept(this));
      return null;
    }

    @Override
    public Void visitApplyAny(ApplyAny applyAny) {
      // any step may be applied, so visit them all
      applyAny.getSteps().forEach(step -> step.accept(this));
      return null;
    }

    @Override
    public Void visitApplyFilter(ApplyFilter applyFilter) {
      // Filter does not contain transformation
      return null;
    }

    @Override
    public Void visitApplyTransformation(ApplyTransformation transformation) {
      visitTransformationSteps(transformation.getTransformation(), this);
      return null;
    }

    @Override
    public Void visitIf(If ifStmt) {
      // ifStmt.getMatchCondition() does not contain transformation
      ifStmt.getTrueStatements().forEach(this::visit);
      return null;
    }

    @Override
    public Void visitReturn(Return returnStmt) {
      // Return does not contain transformation
      return null;
    }
  }
private RangeComputer computeTransformationRanges() {
RangeComputer rangeComputer = new RangeComputer();
_configs.values().forEach(c -> computeTransformationRanges(c, rangeComputer));
return rangeComputer;
}
/** Compute the ranges for a single {@link Configuration}. */
private void computeTransformationRanges(Configuration c, RangeComputer rangeComputer) {
// Transformations can live in interfaces.
c.activeL3Interfaces()
// Transformations in interfaces can only be reached on transiting traffic
.filter(i -> i.canSendIpTraffic() || i.canReceiveIpTraffic())
.forEach(
iface -> {
visitTransformationSteps(iface.getIncomingTransformation(), rangeComputer);
visitTransformationSteps(iface.getOutgoingTransformation(), rangeComputer);
});
// Transformations can live in packet policies. Packet policies can be large and reused across
// interfaces, so only visit each once.
c.activeL3Interfaces()
// Packet policies in interfaces can only be reached on transiting traffic
.filter(i -> i.canSendIpTraffic() || i.canReceiveIpTraffic())
.map(i -> Optional.ofNullable(i.getPacketPolicyName()).map(c.getPacketPolicies()::get))
.filter(Optional::isPresent)
.map(Optional::get)
.distinct()
.forEach(policy -> policy.getStatements().forEach(rangeComputer::visit));
}
  /**
   * Maps each origination {@link StateExpr} to the BDD of packets that may originate there: the
   * union of the assigned source-IP spaces, intersected with the global initial headerspace.
   * Entries whose constraint is unsatisfiable are dropped.
   *
   * @throws IllegalArgumentException if no source location is compatible with the headerspace.
   */
  Map<StateExpr, BDD> rootConstraints(
      IpSpaceAssignment srcIpSpaceAssignment,
      BDD initialHeaderSpaceBdd,
      boolean useInterfaceRoots) {
    LocationVisitor<Optional<StateExpr>> locationToStateExpr =
        new LocationToOriginationStateExpr(_configs, useInterfaceRoots);
    // convert Locations to StateExprs, and merge srcIp constraints
    Map<StateExpr, BDD> rootConstraints = new HashMap<>();
    for (IpSpaceAssignment.Entry entry : srcIpSpaceAssignment.getEntries()) {
      BDD srcIpSpaceBDD = _srcIpSpaceToBDD.visit(entry.getIpSpace());
      entry.getLocations().stream()
          .map(locationToStateExpr::visit)
          // locations with no origination state (e.g. inactive) are skipped
          .filter(Optional::isPresent)
          .map(Optional::get)
          .forEach(root -> rootConstraints.merge(root, srcIpSpaceBDD, BDD::or));
    }
    // add the global initial HeaderSpace and remove unsat entries
    Map<StateExpr, BDD> finalRootConstraints =
        rootConstraints.entrySet().stream()
            .map(
                entry ->
                    Maps.immutableEntry(
                        entry.getKey(), entry.getValue().and(initialHeaderSpaceBdd)))
            .filter(entry -> !entry.getValue().isZero())
            .collect(ImmutableMap.toImmutableMap(Entry::getKey, Entry::getValue));
    // make sure there is at least one possible source
    checkArgument(
        !finalRootConstraints.isEmpty(),
        "No sources are compatible with the headerspace constraint");
    return finalRootConstraints;
  }
  /** Creates mapping of hostname -> interface name -> vrf name for active L3 interfaces */
  private static Map<String, Map<String, String>> computeInterfacesToVrfsMap(
      Map<String, Configuration> configs) {
    return toImmutableMap(
        configs,
        Entry::getKey,
        nodeEntry ->
            nodeEntry
                .getValue()
                .activeL3Interfaces()
                .collect(ImmutableMap.toImmutableMap(Interface::getName, Interface::getVrfName)));
  }
  /**
   * Computes, per node and VRF, the BDD of destination IPs delegated to each next VRF (i.e. IPs
   * for which the VRF forwards lookups to another VRF).
   */
  private Map<String, Map<String, Map<String, BDD>>> computeNextVrfBDDs(
      Map<String, Map<String, VrfForwardingBehavior>> vrfForwardingBehavior,
      IpSpaceToBDD ipSpaceToBDD) {
    return toImmutableMap(
        vrfForwardingBehavior,
        Entry::getKey /* node */,
        nextVrfIpsByNodeVrfEntry ->
            toImmutableMap(
                nextVrfIpsByNodeVrfEntry.getValue() /* nextVrfIpsByVrf */,
                Entry::getKey /* vrf */,
                nextVrfIpsByVrfEntry ->
                    toImmutableMap(
                        nextVrfIpsByVrfEntry.getValue().getNextVrfIps() /* nextVrfIpsByNextVrf */,
                        Entry::getKey,
                        nextVrfIpsByNextVrfEntry ->
                            ipSpaceToBDD.visit(nextVrfIpsByNextVrfEntry.getValue()))));
  }
  /**
   * Adapt an edge to set the bit indicating that one of the nodes required to be transited has now
   * been transited.
   *
   * <p>Going forward, we erase the previous value of the bit as we enter the edge, then set it to 1
   * as we exit. Going backward, we just erase the bit, since the requirement has been satisfied.
   */
  // NOTE(review): @VisibleForTesting on a private member does not widen visibility -- presumably
  // left over from a refactor; confirm whether tests still reference it.
  @VisibleForTesting
  private Edge adaptEdgeSetTransitedBit(Edge edge) {
    return new Edge(
        edge.getPreState(),
        edge.getPostState(),
        compose(
            edge.getTransition(), eraseAndSet(_requiredTransitNodeBDD, _requiredTransitNodeBDD)));
  }
/**
* Adapt an edge, applying an additional constraint after traversing the edge (in the forward
* direction).
*/
private static Edge andThen(Edge edge, BDD constraint) {
return new Edge(
edge.getPreState(),
edge.getPostState(),
compose(edge.getTransition(), constraint(constraint)));
}
/**
* Instrumentation to forbid certain nodes from being transited. Simply removes the edges from the
* graph at which one of those nodes would become transited.
*/
private Stream<Edge> instrumentForbiddenTransitNodes(
Set<String> forbiddenTransitNodes, Stream<Edge> edgeStream) {
if (forbiddenTransitNodes.isEmpty()) {
return edgeStream;
}
// remove any edges at which a forbidden node becomes transited.
return edgeStream.filter(
edge ->
!(edge.getPreState() instanceof PreOutEdgePostNat
&& edge.getPostState() instanceof PreInInterface
&& forbiddenTransitNodes.contains(
((PreOutEdgePostNat) edge.getPreState()).getSrcNode())));
}
  /**
   * Instrumentation to require that one of a set of nodes is transited. We use a single bit of
   * state in the BDDs to track this. The bit is initialized to 0 at the origination points, and
   * constrained to be 1 at the Query state. When one of the specified nodes becomes transited (i.e.
   * the flow leaves that node and enters another) we set the bit to 1. All other edges in the graph
   * propagate the current value of that bit unchanged.
   */
  private Stream<Edge> instrumentRequiredTransitNodes(
      Set<String> requiredTransitNodes, Stream<Edge> edgeStream) {
    if (requiredTransitNodes.isEmpty()) {
      return edgeStream;
    }
    BDD transited = _requiredTransitNodeBDD;
    BDD notTransited = _requiredTransitNodeBDD.not();
    return edgeStream.map(
        edge -> {
          if (edge.getPreState() instanceof PreOutEdgePostNat
              && edge.getPostState() instanceof PreInInterface) {
            // node-to-node hop: the source node becomes transited here
            String hostname = ((PreOutEdgePostNat) edge.getPreState()).getSrcNode();
            return requiredTransitNodes.contains(hostname) ? adaptEdgeSetTransitedBit(edge) : edge;
          } else if (edge.getPreState() instanceof OriginateVrf
              || edge.getPreState() instanceof OriginateInterface
              || edge.getPreState() instanceof OriginateInterfaceLink) {
            // initialize the bit to 0 at origination points
            return andThen(edge, notTransited);
          } else if (edge.getPostState() instanceof Query) {
            // require the bit to be 1 at the query state
            return andThen(edge, transited);
          } else {
            return edge;
          }
        });
  }
  /** Returns the per-hostname {@link BDDOutgoingOriginalFlowFilterManager}s. */
  public Map<String, BDDOutgoingOriginalFlowFilterManager>
      getBddOutgoingOriginalFlowFilterManagers() {
    return _bddOutgoingOriginalFlowFilterManagers;
  }
public Map<String, BDDSourceManager> getBDDSourceManagers() {
return _bddSourceManagers;
}
public @Nullable LastHopOutgoingInterfaceManager getLastHopManager() {
return _lastHopMgr;
}
/**
* Returns a stream of all active L3 interfaces in the network.
*
* <p>Note that this may need further filtering depending on application.
*/
private @Nonnull Stream<Interface> getAllL3Interfaces() {
return _configs.values().stream()
.flatMap(Configuration::activeInterfaces)
.filter(Interface::isActiveL3);
}
  /**
   * Visitor that converts a terminal packet-policy action into the reachability-graph edges
   * leaving the corresponding {@link PacketPolicyAction} state.
   */
  private class PacketPolicyActionToEdges implements ActionVisitor<Stream<Edge>> {
    // Hostname of the node whose packet policy is being translated.
    private final String _nodeName;
    // Name of the packet policy being translated.
    private final String _policyName;
    // VRF in which the policy is applied to incoming packets.
    private final String _ingressVrfName;
    // Resolves a VrfExpr to a concrete VRF name, relative to the ingress VRF.
    private final VrfExprNameExtractor _vrfExprNameExtractor;
    public PacketPolicyActionToEdges(String nodeName, String policyName, String ingressVrfName) {
      _nodeName = nodeName;
      _policyName = policyName;
      _ingressVrfName = ingressVrfName;
      _vrfExprNameExtractor = new VrfExprNameExtractor(ingressVrfName);
    }
    /** Drop action: a single edge into the node's ACL-in drop state. */
    @Override
    public Stream<Edge> visitDrop(Drop drop) {
      return Stream.of(
          new Edge(
              new PacketPolicyAction(_nodeName, _ingressVrfName, _policyName, drop),
              new NodeDropAclIn(_nodeName),
              // Leaving node scope, so erase node-specific constraint state
              // (source, last hop, original-flow filter).
              removeNodeSpecificConstraints(
                  _nodeName,
                  _lastHopMgr,
                  _bddOutgoingOriginalFlowFilterManagers.get(_nodeName),
                  _bddSourceManagers.get(_nodeName))));
    }
    /**
     * FibLookup action: branches to InterfaceAccept (flows accepted on an interface of the
     * ingress VRF), NodeDropNoRoute (neither accepted nor routable in the lookup VRF), or
     * PreOutVrf (routable in the lookup VRF and not accepted).
     */
    @Override
    public Stream<Edge> visitFibLookup(FibLookup fibLookup) {
      // from FibLookup we branch to InterfaceAccept, NoRoute,
      // PreOutVrf
      StateExpr actionState =
          new PacketPolicyAction(_nodeName, _ingressVrfName, _policyName, fibLookup);
      // One edge per interface of the ingress VRF, guarded by that interface's accept BDD.
      Stream<Edge> interfaceAcceptEdges =
          _ifaceAcceptBDDs.get(_nodeName).get(_ingressVrfName).entrySet().stream()
              .map(
                  ifaceAcceptBddEntry ->
                      new Edge(
                          actionState,
                          new InterfaceAccept(_nodeName, ifaceAcceptBddEntry.getKey()),
                          ifaceAcceptBddEntry.getValue()));
      BDD acceptedBdd = _vrfAcceptBDDs.get(_nodeName).get(_ingressVrfName);
      // The FIB lookup may target a different VRF than the ingress VRF.
      String lookupVrf = _vrfExprNameExtractor.visit(fibLookup.getVrfExpr());
      BDD routableBDD = _routableBDDs.get(_nodeName).get(lookupVrf);
      // Not accepted and not routable: dropped with no route.
      Edge noRouteEdge =
          new Edge(actionState, new NodeDropNoRoute(_nodeName), acceptedBdd.nor(routableBDD));
      // Routable but not accepted locally: continue forwarding out of the lookup VRF.
      Edge preOutVrfEdge =
          new Edge(actionState, new PreOutVrf(_nodeName, lookupVrf), routableBDD.diff(acceptedBdd));
      return Stream.concat(interfaceAcceptEdges, Stream.of(noRouteEdge, preOutVrfEdge));
    }
    /** FibLookupOverrideLookupIp action: not modeled; such flows produce no edges. */
    @Override
    public Stream<Edge> visitFibLookupOverrideLookupIp(FibLookupOverrideLookupIp fibLookup) {
      // Currently unsupported
      return Stream.of();
    }
  }
}
| {
"content_hash": "10120ba780f55bce3600a8ecc6c68fce",
"timestamp": "",
"source": "github",
"line_count": 1950,
"max_line_length": 119,
"avg_line_length": 44.30153846153846,
"alnum_prop": 0.6476362457748761,
"repo_name": "arifogel/batfish",
"id": "4b138608a0cb120e4fbfee02c6d305167b364bf8",
"size": "86388",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "projects/batfish/src/main/java/org/batfish/bddreachability/BDDReachabilityAnalysisFactory.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1999103"
},
{
"name": "HCL",
"bytes": "49266"
},
{
"name": "Java",
"bytes": "25862751"
},
{
"name": "Python",
"bytes": "11181"
},
{
"name": "Shell",
"bytes": "6502"
},
{
"name": "Starlark",
"bytes": "214539"
}
],
"symlink_target": ""
} |
// Notification view used by the tests: its slider accessory is pinned to the
// left edge.
@implementation TestNotificationView

// NOTE(review): presumably queried by the presenter to decide which side the
// slider accessory appears on — confirm against the presenter's API.
- (UIRectEdge)edgeForSliderAccessory
{
    return UIRectEdgeLeft;
}

@end
| {
"content_hash": "2ad019db74a9158b267ee8ba31656b36",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 36,
"avg_line_length": 14,
"alnum_prop": 0.7946428571428571,
"repo_name": "fousa/IIShortNotificationPresenter",
"id": "55e6bbcc9552e06963b6e2497a531c34613f8b09",
"size": "309",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "IIShortNotificationPresenter/TestNotificationView.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "85610"
},
{
"name": "Ruby",
"bytes": "737"
}
],
"symlink_target": ""
} |
package vals
// Iterator wraps the Iterate method. Implementing it makes a value usable
// with the package-level Iterate and CanIterate functions.
type Iterator interface {
	// Iterate calls the passed function with each value within the receiver.
	// The iteration is aborted if the function returns false.
	Iterate(func(v any) bool)
}
// cannotIterate is the error returned by Iterate when the value's kind does
// not support iteration.
type cannotIterate struct{ kind string }

// Error implements the error interface.
func (err cannotIterate) Error() string { return "cannot iterate " + err.kind }
// CanIterate reports whether the value can be iterated. When CanIterate(v) is
// true, Iterate(v, f) is guaranteed not to return an error.
func CanIterate(v any) bool {
	switch v.(type) {
	case Iterator, string, List:
		return true
	default:
		return false
	}
}
// Iterate calls f on each element of v in order, stopping early when f
// returns false. Strings are iterated rune by rune (each rune passed as a
// string), List values element by element, and Iterator values via their own
// Iterate method; for these types the returned error is always nil. For any
// other type nothing is called and a non-nil error is returned.
func Iterate(v any, f func(any) bool) error {
	switch v := v.(type) {
	case string:
		for _, r := range v {
			if !f(string(r)) {
				return nil
			}
		}
	case List:
		for it := v.Iterator(); it.HasElem(); it.Next() {
			if !f(it.Elem()) {
				return nil
			}
		}
	case Iterator:
		v.Iterate(f)
	default:
		return cannotIterate{Kind(v)}
	}
	return nil
}
// Collect gathers all elements of an iterable value into a slice, in
// iteration order. When Len reports a known size, the slice is preallocated
// to that capacity. The returned error is non-nil exactly when the value is
// not iterable.
func Collect(it any) ([]any, error) {
	var vs []any
	// Use "n" rather than shadowing the builtin "len".
	if n := Len(it); n >= 0 {
		vs = make([]any, 0, n)
	}
	err := Iterate(it, func(v any) bool {
		vs = append(vs, v)
		return true
	})
	return vs, err
}
| {
"content_hash": "4c798f44e12040d243760c8bde4e22c0",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 79,
"avg_line_length": 25.841269841269842,
"alnum_prop": 0.6799754299754299,
"repo_name": "elves/elvish",
"id": "ee5ac9f83e03bd593857e5b1252d7bfc0e057eff",
"size": "1628",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pkg/eval/vals/iterate.go",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "12245"
},
{
"name": "Dockerfile",
"bytes": "365"
},
{
"name": "Elvish",
"bytes": "154044"
},
{
"name": "Go",
"bytes": "1704527"
},
{
"name": "HTML",
"bytes": "21886"
},
{
"name": "JavaScript",
"bytes": "5695"
},
{
"name": "Makefile",
"bytes": "3187"
},
{
"name": "Python",
"bytes": "3045"
},
{
"name": "Shell",
"bytes": "6505"
}
],
"symlink_target": ""
} |
import mockups
from datetime import datetime
from django.contrib.auth.models import User, UNUSABLE_PASSWORD
from mockups import Mockup, Factory
from mockups import generators
class UserFactory(Factory):
    """Field generators used to build ``auth.User`` instances.

    Chosen so generated users pass Django's own validation and never grant
    admin access or a usable login.
    """
    # UUIDs contain only characters accepted by Django's auth forms and fit
    # the 30-char username limit.
    username = generators.UUIDGenerator(max_length=30)
    # Single lorem-ipsum words for human-readable names.
    first_name = generators.LoremWordGenerator(1)
    last_name = generators.LoremWordGenerator(1)
    # Unusable password marker: generated users cannot log in by default.
    password = generators.StaticGenerator(UNUSABLE_PASSWORD)
    is_active = generators.StaticGenerator(True)
    # don't generate admin users
    is_staff = generators.StaticGenerator(False)
    is_superuser = generators.StaticGenerator(False)
    # Both timestamps are in the past; ordering is fixed up later in
    # UserMockup.post_process_instance.
    date_joined = generators.DateTimeGenerator(max_date=datetime.now())
    last_login = generators.DateTimeGenerator(max_date=datetime.now())
class UserMockup(Mockup):
    '''
    :class:`UserMockup` is automatically used by default to create new
    ``User`` instances. It uses the following values to assure that you can
    use the generated instances without any modification:

    * ``username`` only contains chars that are allowed by django's auth forms.
    * ``email`` is unique.
    * ``first_name`` and ``last_name`` are single, random words of the lorem
      ipsum text.
    * ``is_staff`` and ``is_superuser`` are always ``False``.
    * ``is_active`` is always ``True``.
    * ``date_joined`` and ``last_login`` are always in the past and it is
      assured that ``date_joined`` will be lower than ``last_login``.
    '''
    # don't follow permissions and groups
    follow_m2m = False
    factory = UserFactory

    def __init__(self, *args, **kwargs):
        '''
        By default the password is set to an unusable value, this makes it
        impossible to login with the generated users. If you want to use for
        example ``mockups.create_one('auth.User')`` in your unittests to have
        a user instance which you can use to login with the testing client you
        can provide a ``username`` and a ``password`` argument. Then you can do
        something like::

            mockups.create_one('auth.User', username='foo', password='bar')
            self.client.login(username='foo', password='bar')
        '''
        self.username = kwargs.pop('username', None)
        self.password = kwargs.pop('password', None)
        super(UserMockup, self).__init__(*args, **kwargs)
        if self.username:
            self.update_fieldname_generator(
                username=generators.StaticGenerator(self.username)
            )

    def unique_email(self, model, instance):
        # Constraint callback: reject the candidate instance if another user
        # already has its email. Use .exists() so this issues a cheap EXISTS
        # query instead of evaluating (and fetching) the whole queryset.
        if User.objects.filter(email=instance.email).exists():
            raise mockups.InvalidConstraint(('email',))

    def prepare_class(self):
        # Register the email-uniqueness constraint before generation starts.
        self.add_constraint(self.unique_email)

    def post_process_instance(self, instance):
        # make sure user's last login was not before he joined
        if instance.last_login < instance.date_joined:
            instance.last_login = instance.date_joined
        # Only set a real (hashed) password when one was explicitly requested.
        if self.password:
            instance.set_password(self.password)
        return instance
# Make UserMockup the mockup used for auth.User. fail_silently presumably
# suppresses the error raised when a mockup is already registered for the
# model — confirm against mockups.register.
mockups.register(User, UserMockup, fail_silently=True)
| {
"content_hash": "69275feeae0c368909d2d645413af9f2",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 79,
"avg_line_length": 40.103896103896105,
"alnum_prop": 0.6716321243523317,
"repo_name": "sorl/django-mockups",
"id": "3dc64993e062fc1ffa705163bbc407aa838989d8",
"size": "3112",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mockups/contrib/auth.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "88743"
}
],
"symlink_target": ""
} |
// Returns the JUCE version as a human-readable string, e.g. "JUCE v4.1.2",
// assembled at compile time from the version macros.
String SystemStats::getJUCEVersion()
{
    // Some basic tests, to keep an eye on things and make sure these types work ok
    // on all platforms. Let me know if any of these assertions fail on your system!
    static_jassert (sizeof (pointer_sized_int) == sizeof (void*));
    static_jassert (sizeof (int8) == 1);
    static_jassert (sizeof (uint8) == 1);
    static_jassert (sizeof (int16) == 2);
    static_jassert (sizeof (uint16) == 2);
    static_jassert (sizeof (int32) == 4);
    static_jassert (sizeof (uint32) == 4);
    static_jassert (sizeof (int64) == 8);
    static_jassert (sizeof (uint64) == 8);

    return "JUCE v" JUCE_STRINGIFY(JUCE_MAJOR_VERSION)
                "." JUCE_STRINGIFY(JUCE_MINOR_VERSION)
                "." JUCE_STRINGIFY(JUCE_BUILDNUMBER);
}
// Version printing is always disabled on Android unless explicitly configured.
#if JUCE_ANDROID && ! defined (JUCE_DISABLE_JUCE_VERSION_PRINTING)
 #define JUCE_DISABLE_JUCE_VERSION_PRINTING 1
#endif

#if JUCE_DEBUG && ! JUCE_DISABLE_JUCE_VERSION_PRINTING
 // In debug builds, a static instance of this struct logs the JUCE version
 // once at startup (via its constructor running during static initialization).
 struct JuceVersionPrinter
 {
     JuceVersionPrinter()
     {
         DBG (SystemStats::getJUCEVersion());
     }
 };

 static JuceVersionPrinter juceVersionPrinter;
#endif
//==============================================================================
// Holds the CPU count and SIMD capability flags. All fields default to
// zero/false; the platform-specific initialise() fills them in.
struct CPUInformation
{
    CPUInformation() noexcept
        : numCpus (0), hasMMX (false), hasSSE (false),
          hasSSE2 (false), hasSSE3 (false), has3DNow (false),
          hasSSSE3 (false), hasAVX (false)
    {
        initialise();
    }

    // Implemented separately per platform.
    void initialise() noexcept;

    int numCpus;
    bool hasMMX, hasSSE, hasSSE2, hasSSE3, has3DNow, hasSSSE3, hasAVX;
};
// Function-local static: CPU detection runs once, on first use.
static const CPUInformation& getCPUInformation() noexcept
{
    static CPUInformation info;
    return info;
}

// Public SystemStats accessors simply forward to the cached CPUInformation.
int SystemStats::getNumCpus() noexcept { return getCPUInformation().numCpus; }
bool SystemStats::hasMMX() noexcept { return getCPUInformation().hasMMX; }
bool SystemStats::has3DNow() noexcept { return getCPUInformation().has3DNow; }
bool SystemStats::hasSSE() noexcept { return getCPUInformation().hasSSE; }
bool SystemStats::hasSSE2() noexcept { return getCPUInformation().hasSSE2; }
bool SystemStats::hasSSE3() noexcept { return getCPUInformation().hasSSE3; }
bool SystemStats::hasSSSE3() noexcept { return getCPUInformation().hasSSSE3; }
bool SystemStats::hasAVX() noexcept { return getCPUInformation().hasAVX; }
//==============================================================================
// Returns a string describing the current call stack, one frame per line.
// Not implemented on Android/MinGW; uses the dbghelp APIs on Windows and
// backtrace()/backtrace_symbols() elsewhere.
String SystemStats::getStackBacktrace()
{
    String result;

   #if JUCE_ANDROID || JUCE_MINGW
    jassertfalse; // sorry, not implemented yet!
   #elif JUCE_WINDOWS
    // Resolve each captured return address to "module: symbol + offset".
    HANDLE process = GetCurrentProcess();
    SymInitialize (process, nullptr, TRUE);

    void* stack[128];
    int frames = (int) CaptureStackBackTrace (0, numElementsInArray (stack), stack, nullptr);

    // Allocate the SYMBOL_INFO with extra space after it for the symbol name.
    HeapBlock<SYMBOL_INFO> symbol;
    symbol.calloc (sizeof (SYMBOL_INFO) + 256, 1);
    symbol->MaxNameLen = 255;
    symbol->SizeOfStruct = sizeof (SYMBOL_INFO);

    for (int i = 0; i < frames; ++i)
    {
        DWORD64 displacement = 0;

        if (SymFromAddr (process, (DWORD64) stack[i], &displacement, symbol))
        {
            result << i << ": ";

            IMAGEHLP_MODULE64 moduleInfo;
            zerostruct (moduleInfo);
            moduleInfo.SizeOfStruct = sizeof (moduleInfo);

            if (::SymGetModuleInfo64 (process, symbol->ModBase, &moduleInfo))
                result << moduleInfo.ModuleName << ": ";

            result << symbol->Name << " + 0x" << String::toHexString ((int64) displacement) << newLine;
        }
    }
   #else
    // POSIX: execinfo's backtrace() fills the frame array, and
    // backtrace_symbols() formats each frame as a string.
    void* stack[128];
    int frames = backtrace (stack, numElementsInArray (stack));
    char** frameStrings = backtrace_symbols (stack, frames);

    for (int i = 0; i < frames; ++i)
        result << frameStrings[i] << newLine;

    // backtrace_symbols allocates the array with malloc; the caller frees it.
    ::free (frameStrings);
   #endif

    return result;
}
//==============================================================================
// The single user-supplied crash callback, set by setApplicationCrashHandler().
static SystemStats::CrashHandlerFunction globalCrashHandler = nullptr;

#if JUCE_WINDOWS
// Installed as the unhandled-exception filter: invokes the user callback and
// lets the default handler terminate the process.
static LONG WINAPI handleCrash (LPEXCEPTION_POINTERS)
{
    globalCrashHandler();
    return EXCEPTION_EXECUTE_HANDLER;
}
#else
// POSIX signal handler: invokes the user callback, then force-kills the
// process so the faulting code cannot continue running.
static void handleCrash (int)
{
    globalCrashHandler();
    kill (getpid(), SIGKILL);
}

// Declared here, implemented in the platform-specific code — presumably wraps
// siginterrupt(); confirm against the native implementation.
int juce_siginterrupt (int sig, int flag);
#endif
// Registers a callback to be invoked when the application crashes: installed
// as the unhandled-exception filter on Windows, or as the handler for the
// common fatal signals on POSIX systems.
void SystemStats::setApplicationCrashHandler (CrashHandlerFunction handler)
{
    jassert (handler != nullptr); // This must be a valid function.
    globalCrashHandler = handler;

   #if JUCE_WINDOWS
    SetUnhandledExceptionFilter (handleCrash);
   #else
    const int signals[] = { SIGFPE, SIGILL, SIGSEGV, SIGBUS, SIGABRT, SIGSYS };

    for (int i = 0; i < numElementsInArray (signals); ++i)
    {
        ::signal (signals[i], handleCrash);
        juce_siginterrupt (signals[i], 1);
    }
   #endif
}
| {
"content_hash": "4f89e1694207f04b5510fb125ddc2c2a",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 103,
"avg_line_length": 30.35,
"alnum_prop": 0.6182042833607908,
"repo_name": "nepholi/ScoringTable",
"id": "fa3419dbc0a2436dc88e6d7e2af5f5547b1162c9",
"size": "6235",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "third_party/JUCE/modules/juce_core/system/juce_SystemStats.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "7930"
},
{
"name": "C++",
"bytes": "32865"
}
],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>MysoreScript: main.cc File Reference</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td id="projectalign" style="padding-left: 0.5em;">
<div id="projectname">MysoreScript
</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.12 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<script type="text/javascript" src="menudata.js"></script>
<script type="text/javascript" src="menu.js"></script>
<script type="text/javascript">
$(function() {
initMenu('',true,false,'search.php','Search');
$(document).ready(function() { init_search(); });
});
</script>
<div id="main-nav"></div>
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
</div><!-- top -->
<div class="header">
<div class="summary">
<a href="#func-members">Functions</a> </div>
<div class="headertitle">
<div class="title">main.cc File Reference</div> </div>
</div><!--header-->
<div class="contents">
<div class="textblock"><code>#include <iostream></code><br />
<code>#include <ctype.h></code><br />
<code>#include <fcntl.h></code><br />
<code>#include <stdlib.h></code><br />
<code>#include <sys/resource.h></code><br />
<code>#include <time.h></code><br />
<code>#include <unistd.h></code><br />
<code>#include <gc.h></code><br />
<code>#include <readline.h></code><br />
<code>#include "<a class="el" href="parser_8hh_source.html">parser.hh</a>"</code><br />
<code>#include "<a class="el" href="interpreter_8hh_source.html">interpreter.hh</a>"</code><br />
</div><div class="textblock"><div class="dynheader">
Include dependency graph for main.cc:</div>
<div class="dyncontent">
<div class="center"><img src="main_8cc__incl.png" border="0" usemap="#main_8cc" alt=""/></div>
<map name="main_8cc" id="main_8cc">
<area shape="rect" id="node11" href="parser_8hh.html" title="parser.hh" alt="" coords="1030,80,1108,107"/>
<area shape="rect" id="node30" href="interpreter_8hh.html" title="interpreter.hh" alt="" coords="772,252,873,279"/>
<area shape="rect" id="node12" href="grammar_8hh.html" title="grammar.hh" alt="" coords="1828,155,1921,181"/>
<area shape="rect" id="node14" href="ast_8hh.html" title="ast.hh" alt="" coords="1255,155,1313,181"/>
<area shape="rect" id="node27" href="runtime_8hh.html" title="runtime.hh" alt="" coords="721,349,807,376"/>
</map>
</div>
</div>
<p><a href="main_8cc_source.html">Go to the source code of this file.</a></p>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="func-members"></a>
Functions</h2></td></tr>
<tr class="memitem:af67fc157cc4c8f667fa54fce9aa5b81c"><td class="memItemLeft" align="right" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="main_8cc.html#af67fc157cc4c8f667fa54fce9aa5b81c">usage</a> (const char *cmd)</td></tr>
<tr class="memdesc:af67fc157cc4c8f667fa54fce9aa5b81c"><td class="mdescLeft"> </td><td class="mdescRight">Print the usage message. <a href="#af67fc157cc4c8f667fa54fce9aa5b81c">More...</a><br /></td></tr>
<tr class="separator:af67fc157cc4c8f667fa54fce9aa5b81c"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a3c04138a5bfe5d72780bb7e82a18e627"><td class="memItemLeft" align="right" valign="top">int </td><td class="memItemRight" valign="bottom"><a class="el" href="main_8cc.html#a3c04138a5bfe5d72780bb7e82a18e627">main</a> (int argc, char **argv)</td></tr>
<tr class="separator:a3c04138a5bfe5d72780bb7e82a18e627"><td class="memSeparator" colspan="2"> </td></tr>
</table>
<h2 class="groupheader">Function Documentation</h2>
<a id="a3c04138a5bfe5d72780bb7e82a18e627"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a3c04138a5bfe5d72780bb7e82a18e627">§ </a></span>main()</h2>
<div class="memitem">
<div class="memproto">
<table class="memname">
<tr>
<td class="memname">int main </td>
<td>(</td>
<td class="paramtype">int </td>
<td class="paramname"><em>argc</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">char ** </td>
<td class="paramname"><em>argv</em> </td>
</tr>
<tr>
<td></td>
<td>)</td>
<td></td><td></td>
</tr>
</table>
</div><div class="memdoc">
<p>Definition at line <a class="el" href="main_8cc_source.html#l00068">68</a> of file <a class="el" href="main_8cc_source.html">main.cc</a>.</p>
<div class="dynheader">
Here is the call graph for this function:</div>
<div class="dyncontent">
<div class="center"><img src="main_8cc_a3c04138a5bfe5d72780bb7e82a18e627_cgraph.png" border="0" usemap="#main_8cc_a3c04138a5bfe5d72780bb7e82a18e627_cgraph" alt=""/></div>
<map name="main_8cc_a3c04138a5bfe5d72780bb7e82a18e627_cgraph" id="main_8cc_a3c04138a5bfe5d72780bb7e82a18e627_cgraph">
<area shape="rect" id="node2" href="main_8cc.html#af67fc157cc4c8f667fa54fce9aa5b81c" title="Print the usage message. " alt="" coords="104,5,163,32"/>
</map>
</div>
</div>
</div>
<a id="af67fc157cc4c8f667fa54fce9aa5b81c"></a>
<h2 class="memtitle"><span class="permalink"><a href="#af67fc157cc4c8f667fa54fce9aa5b81c">§ </a></span>usage()</h2>
<div class="memitem">
<div class="memproto">
<table class="memname">
<tr>
<td class="memname">void usage </td>
<td>(</td>
<td class="paramtype">const char * </td>
<td class="paramname"><em>cmd</em></td><td>)</td>
<td></td>
</tr>
</table>
</div><div class="memdoc">
<p>Print the usage message. </p>
<p>Definition at line <a class="el" href="main_8cc_source.html#l00057">57</a> of file <a class="el" href="main_8cc_source.html">main.cc</a>.</p>
<div class="dynheader">
Here is the caller graph for this function:</div>
<div class="dyncontent">
<div class="center"><img src="main_8cc_af67fc157cc4c8f667fa54fce9aa5b81c_icgraph.png" border="0" usemap="#main_8cc_af67fc157cc4c8f667fa54fce9aa5b81c_icgraph" alt=""/></div>
<map name="main_8cc_af67fc157cc4c8f667fa54fce9aa5b81c_icgraph" id="main_8cc_af67fc157cc4c8f667fa54fce9aa5b81c_icgraph">
<area shape="rect" id="node2" href="main_8cc.html#a3c04138a5bfe5d72780bb7e82a18e627" title="main" alt="" coords="112,5,163,32"/>
</map>
</div>
</div>
</div>
</div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated on Thu Jun 1 2017 18:33:55 for MysoreScript by  <a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/>
</a> 1.8.12
</small></address>
</body>
</html>
| {
"content_hash": "3840e62d4c2787c0ffd835666a7d987e",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 271,
"avg_line_length": 45.82022471910113,
"alnum_prop": 0.6705492888670918,
"repo_name": "CompilerTeaching/CompilerTeaching.github.io",
"id": "18bbd9c982bd62c8a451459b6f17354e07465f5b",
"size": "8156",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mysorescript/doxygen/main_8cc.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "118682"
},
{
"name": "HTML",
"bytes": "4461393"
},
{
"name": "JavaScript",
"bytes": "149964"
},
{
"name": "Ruby",
"bytes": "50"
},
{
"name": "TeX",
"bytes": "265"
}
],
"symlink_target": ""
} |
Contributing to Girder
======================
There are many ways to contribute to Girder, with varying levels of effort. Do try to
look through the documentation first if something is unclear, and let us know how we can
do better.
- Ask a question on the `Girder users email list <http://public.kitware.com/mailman/listinfo/girder-users>`_
- Ask a question in the `Gitter Forum <https://gitter.im/girder/girder>`_
- Submit a feature request or bug report, or add to the discussion on the `Girder issue tracker <https://github.com/girder/girder/issues>`_
- Submit a `Pull Request <https://github.com/girder/girder/pulls>`_ to improve Girder or its documentation
We encourage a range of contributions, from patches that include passing tests and
documentation, all the way down to half-baked ideas that launch discussions.
The PR Process, CircleCI, and Related Gotchas
---------------------------------------------
How to submit a PR
^^^^^^^^^^^^^^^^^^
If you are new to Girder development and you don't have push access to the Girder
repository, here are the steps:
1. `Fork and clone <https://help.github.com/articles/fork-a-repo/>`_ the repository.
2. Create a branch.
3. `Push <https://help.github.com/articles/pushing-to-a-remote/>`_ the branch to your GitHub fork.
4. Create a `Pull Request <https://github.com/girder/girder/pulls>`_.
This corresponds to the ``Fork & Pull Model`` mentioned in the
`GitHub flow <https://guides.github.com/introduction/flow/index.html>`_ guides.
If you have push access to the Girder repository, you can simply push your branch
into the main repository and create a `Pull Request <https://github.com/girder/girder/pulls>`_. This
corresponds to the ``Shared Repository Model`` and makes it easier for other developers to check out
your topic branch without having to `configure a remote <https://help.github.com/articles/configuring-a-remote-for-a-fork/>`_.
It will also simplify the workflow when you are *co-developing* a branch.
When submitting a PR, make sure to add a ``Cc: @girder/developers`` comment to notify Girder
developers of your awesome contributions. Based on the
comments posted by the reviewers, you may have to revisit your patches.
Automatic testing of pull requests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When you submit a PR to the Girder repo, CircleCI will run the build and test suite on the
head of the branch. If you add new commits onto the branch, those will also automatically
be run through the CI process. The status of the CI process (passing, failing, or in progress) will
be displayed directly in the PR page in GitHub.
The CircleCI build will run according to the `circle.yml file <https://github.com/girder/girder/blob/master/circle.yml>`_,
which is useful as an example for how to set up your own environment for testing.
The tests that run in CircleCI are harnessed with CTest, which submits the results of its
automated testing to `Girder's CDash dashboard <http://my.cdash.org/index.php?project=girder>`_
where the test and coverage results can be easily visualized and explored.
Confusing failing test message "AttributeError: 'module' object has no attribute 'x_test'"
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This is also a gotcha for your local testing environment. If a new dependency is
introduced during development, but is not in the test environment, usually because the
dependency is not included in a ``requirements.txt`` or ``requirements-dev.txt`` file, or
because those requirements are not installed via ``pip``, a test can fail that attempts to
import that dependency and can print a confusing message in the test logs like
"AttributeError: 'module' object has no attribute 'x_test'".
As an example, the HDFS plugin has a dependency on the Python module ``snakebite``, specified in the
`HDFS plugin requirements.txt file <https://github.com/girder/girder/blob/master/plugins/hdfs_assetstore/requirements.txt>`_.
If this dependency was not included in the requirements file, or if that requirements file
was not included in the `circle.yml file <https://github.com/girder/girder/blob/master/circle.yml>`_
(or that requirements file was not ``pip`` installed in a local test environment), when the test defined in
`the assetstore_test.py file <https://github.com/girder/girder/blob/master/plugins/hdfs_assetstore/plugin_tests/assetstore_test.py#L27-L28>`_
is run, the ``snakebite`` module will not be found, but the exception will be swallowed by
the testing environment and instead the ``assetstore_test`` module will be considered
invalid, resulting in the confusing error message::
AttributeError: 'module' object has no attribute 'assetstore_test'
but you won't be confused now, will you?
How to integrate a PR
^^^^^^^^^^^^^^^^^^^^^
Getting your contributions integrated is relatively straightforward, here is the checklist:
- All tests pass
- Any significant changes are added to the ``CHANGELOG.rst`` with human-readable and understandable
text (i.e. not a commit message). Text should be placed in the "Unreleased" section, and grouped
into the appropriate sub-section of:
- Bug fixes
- Security fixes
- Added features
- Changes
- Deprecations
- Removals
- Consensus is reached. This requires that a reviewer adds an "approved" review via GitHub with no
changes requested, and a reasonable amount of time passed without anyone objecting.
Next, there are two scenarios:
- You do NOT have push access: A Girder core developer will integrate your PR.
- You have push access: Simply click on the "Merge pull request" button.
Then, click on the "Delete branch" button that appears afterward.
| {
"content_hash": "6807546034279759c213ee08bd4f23d3",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 141,
"avg_line_length": 52.90654205607477,
"alnum_prop": 0.7433315668609787,
"repo_name": "adsorensen/girder",
"id": "06b5018035ab263a7a277f2c1c9b0f729049818e",
"size": "5661",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CONTRIBUTING.rst",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "46894"
},
{
"name": "CSS",
"bytes": "53110"
},
{
"name": "HTML",
"bytes": "151777"
},
{
"name": "JavaScript",
"bytes": "1215426"
},
{
"name": "Mako",
"bytes": "8228"
},
{
"name": "Python",
"bytes": "2119660"
},
{
"name": "Roff",
"bytes": "17"
},
{
"name": "Ruby",
"bytes": "10593"
},
{
"name": "Shell",
"bytes": "9063"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
Index Fungorum
#### Published in
null
#### Original name
Uromyces viennot-bourginii J. Anikster & I. Wahl
### Remarks
null | {
"content_hash": "c10f3bf050fff5156135fd8ca4c87c56",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 48,
"avg_line_length": 11.76923076923077,
"alnum_prop": 0.7058823529411765,
"repo_name": "mdoering/backbone",
"id": "41cea1d44e748e8305ab3f7e1a5eb6a323c945e1",
"size": "225",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Basidiomycota/Pucciniomycetes/Pucciniales/Pucciniaceae/Uromyces/Uromyces viennot-bourginii/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
namespace content {
struct PresentationRequest;
class PresentationScreenAvailabilityListener;
using PresentationConnectionCallback =
base::OnceCallback<void(blink::mojom::PresentationConnectionResultPtr)>;
using PresentationConnectionErrorCallback =
base::OnceCallback<void(const blink::mojom::PresentationError&)>;
using DefaultPresentationConnectionCallback = base::RepeatingCallback<void(
blink::mojom::PresentationConnectionResultPtr)>;
// Describes a presentation connection state change. The close reason defaults
// to CONNECTION_ERROR and, together with |message|, is only meaningful for a
// transition to the CLOSED state.
struct PresentationConnectionStateChangeInfo {
  explicit PresentationConnectionStateChangeInfo(
      blink::mojom::PresentationConnectionState state)
      : state(state),
        close_reason(
            blink::mojom::PresentationConnectionCloseReason::CONNECTION_ERROR) {
  }
  ~PresentationConnectionStateChangeInfo() = default;

  blink::mojom::PresentationConnectionState state;

  // |close_reason| and |message| are only used for state change to CLOSED.
  blink::mojom::PresentationConnectionCloseReason close_reason;
  std::string message;
};
using PresentationConnectionStateChangedCallback =
base::RepeatingCallback<void(const PresentationConnectionStateChangeInfo&)>;
using ReceiverConnectionAvailableCallback = base::RepeatingCallback<void(
blink::mojom::PresentationInfoPtr,
mojo::PendingRemote<blink::mojom::PresentationConnection>,
mojo::PendingReceiver<blink::mojom::PresentationConnection>)>;
// Base class for ControllerPresentationServiceDelegate and
// ReceiverPresentationServiceDelegate.
class CONTENT_EXPORT PresentationServiceDelegate {
 public:
  // Observer interface to listen for changes to PresentationServiceDelegate.
  // Observers are not owned by this class; each is registered per frame via
  // AddObserver/RemoveObserver below.
  class CONTENT_EXPORT Observer {
   public:
    // Called when the PresentationServiceDelegate is being destroyed.
    virtual void OnDelegateDestroyed() = 0;

   protected:
    virtual ~Observer() {}
  };

  virtual ~PresentationServiceDelegate() {}

  // Registers an observer associated with frame with |render_process_id|
  // and |render_frame_id| with this class to listen for updates.
  // This class does not own the observer.
  // It is an error to add an observer if there is already an observer for that
  // frame.
  virtual void AddObserver(int render_process_id,
                           int render_frame_id,
                           Observer* observer) = 0;

  // Unregisters the observer associated with the frame with |render_process_id|
  // and |render_frame_id|.
  // The observer will no longer receive updates.
  virtual void RemoveObserver(int render_process_id, int render_frame_id) = 0;

  // Resets the presentation state for the frame given by |render_process_id|
  // and |render_frame_id|.
  // This unregisters all screen availability associated with the given frame,
  // and clears the default presentation URL for the frame.
  virtual void Reset(int render_process_id, int render_frame_id) = 0;
};
// An interface implemented by embedders to handle Presentation API calls
// forwarded from PresentationServiceImpl.
class CONTENT_EXPORT ControllerPresentationServiceDelegate
    : public PresentationServiceDelegate {
 public:
  // Invoked with true on success, false on failure, when sending a message.
  using SendMessageCallback = base::OnceCallback<void(bool)>;
  // Registers |listener| to continuously listen for
  // availability updates for a presentation URL, originated from the frame
  // given by |render_process_id| and |render_frame_id|.
  // This class does not own |listener|.
  // Returns true on success.
  // This call will return false if a listener with the same presentation URL
  // from the same frame is already registered.
  virtual bool AddScreenAvailabilityListener(
      int render_process_id,
      int render_frame_id,
      PresentationScreenAvailabilityListener* listener) = 0;
  // Unregisters |listener| originated from the frame given by
  // |render_process_id| and |render_frame_id| from this class. The listener
  // will no longer receive availability updates.
  virtual void RemoveScreenAvailabilityListener(
      int render_process_id,
      int render_frame_id,
      PresentationScreenAvailabilityListener* listener) = 0;
  // Sets the default presentation URLs represented by |request|. When the
  // default presentation is started on this frame, |callback| will be invoked
  // with the corresponding blink::mojom::PresentationInfo object.
  // If |request.presentation_urls| is empty, the default presentation URLs will
  // be cleared and the previously registered callback (if any) will be removed.
  virtual void SetDefaultPresentationUrls(
      const content::PresentationRequest& request,
      DefaultPresentationConnectionCallback callback) = 0;
  // Starts a new presentation.
  // |request.presentation_urls| contains a list of possible URLs for the
  // presentation. Typically, the embedder will allow the user to select a
  // screen to show one of the URLs.
  // |request|: The request to start a presentation.
  // |success_cb|: Invoked with presentation info, if presentation started
  // successfully.
  // |error_cb|: Invoked with error reason, if presentation did not
  // start.
  virtual void StartPresentation(
      const content::PresentationRequest& request,
      PresentationConnectionCallback success_cb,
      PresentationConnectionErrorCallback error_cb) = 0;
  // Reconnects to an existing presentation. Unlike StartPresentation(), this
  // does not bring up a screen list UI.
  // |request|: The request to reconnect to a presentation.
  // |presentation_id|: The ID of the presentation to reconnect.
  // |success_cb|: Invoked with presentation info, if presentation reconnected
  // successfully.
  // |error_cb|: Invoked with error reason, if reconnection failed.
  virtual void ReconnectPresentation(
      const content::PresentationRequest& request,
      const std::string& presentation_id,
      PresentationConnectionCallback success_cb,
      PresentationConnectionErrorCallback error_cb) = 0;
  // Closes an existing presentation connection.
  // |render_process_id|, |render_frame_id|: ID for originating frame.
  // |presentation_id|: The ID of the presentation to close.
  virtual void CloseConnection(int render_process_id,
                               int render_frame_id,
                               const std::string& presentation_id) = 0;
  // Terminates an existing presentation.
  // |render_process_id|, |render_frame_id|: ID for originating frame.
  // |presentation_id|: The ID of the presentation to terminate.
  virtual void Terminate(int render_process_id,
                         int render_frame_id,
                         const std::string& presentation_id) = 0;
  // Gets a FlingingController for a given presentation ID.
  // |render_process_id|, |render_frame_id|: ID of originating frame.
  // |presentation_id|: The ID of the presentation for which we want a
  // Controller.
  virtual std::unique_ptr<media::FlingingController> GetFlingingController(
      int render_process_id,
      int render_frame_id,
      const std::string& presentation_id) = 0;
  // Continuously listen for state changes for a PresentationConnection in a
  // frame.
  // |render_process_id|, |render_frame_id|: ID of frame.
  // |connection|: PresentationConnection to listen for state changes.
  // |state_changed_cb|: Invoked with the PresentationConnection and its new
  // state whenever there is a state change.
  virtual void ListenForConnectionStateChange(
      int render_process_id,
      int render_frame_id,
      const blink::mojom::PresentationInfo& connection,
      const PresentationConnectionStateChangedCallback& state_changed_cb) = 0;
};
// An interface implemented by embedders to handle
// PresentationService calls from a presentation receiver.
class CONTENT_EXPORT ReceiverPresentationServiceDelegate
    : public PresentationServiceDelegate {
 public:
  // Registers a callback from the embedder when an offscreen presentation has
  // been successfully started.
  // |receiver_available_callback|: Invoked when successfully starting a
  // local presentation.
  virtual void RegisterReceiverConnectionAvailableCallback(
      const content::ReceiverConnectionAvailableCallback&
          receiver_available_callback) = 0;
};
} // namespace content
#endif // CONTENT_PUBLIC_BROWSER_PRESENTATION_SERVICE_DELEGATE_H_
| {
"content_hash": "c815ace4d92eb1df1cf59bfdd8d4b500",
"timestamp": "",
"source": "github",
"line_count": 189,
"max_line_length": 80,
"avg_line_length": 43.61904761904762,
"alnum_prop": 0.7410237748665697,
"repo_name": "scheib/chromium",
"id": "a7b337bb1dd1f729ed9fc6d3c70e87441376886e",
"size": "8908",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "content/public/browser/presentation_service_delegate.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
Pushes JSON data into Loggly.com from the browser. Tries to have readable code, while keeping the code length small.
*Beware* window.screen, window.navigator, window.location, and window.performance.timing are NOT enumerable and values need to be manually copied if you want to pass them into `TinyLoggly.log()`.
You must change the variable LOGGLY_KEY to your own Loggly key for the code to work.
Please don't report bugs, features, or ask me anything. I didn't actually use this code because of data privacy issues.
Keys are modified:
- appends $n to keys for number values to prevent nasty loggly error "Removed parsed fields because of mapping conflict while indexing (i.e originally sent as one type and later sent as new type)"
- Loggly itself limits keys to 64 characters and it also replaces any spaces or dots (within keys) by underscores.
Some values are modified or removed:
- nulls are NOT logged
- Arrays are NOT logged (there is some commented out code that works though).
- Empty objects are NOT logged
- Objects that only contain empty objects or nulls are NOT logged.
- Booleans true and false are changed to 'true' and 'false' strings.
- Number values - see above comment on appending $n to the key
- NaN/Infinity/-Infinity are converted to strings
Loggly limits keys to 100 or so (I think across all JSON input) and just won't parse anything after that. So be careful to limit the number of variables in your JSON.
This code improves the randomisation of sessionId's (compared to the default loggly JavaScript code) by making them properly random in modern browsers.
You may still get a Mapping Conflict if you mix Object/Array/String values for the same key - a fix would be to append other unique identifiers to the key for each of the accepted types (the same as we already do for Number).
Relevant loggly documentation is at:
- https://www.loggly.com/docs/http-endpoint/
- https://www.loggly.com/docs/automated-parsing/
| {
"content_hash": "aff9732e9dc9127f08591d840aad887b",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 197,
"avg_line_length": 58.72727272727273,
"alnum_prop": 0.7781217750257998,
"repo_name": "MorrisJohns/TinyLoggly",
"id": "73908644a1857de3bcc1cb71534312b8ef1426ea",
"size": "1952",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "298"
},
{
"name": "JavaScript",
"bytes": "7262"
}
],
"symlink_target": ""
} |
"""
This module provides an interface to the Elastic MapReduce (EMR)
service from AWS.
"""
from boto.emr.connection import EmrConnection
from boto.emr.step import Step, StreamingStep, JarStep
from boto.emr.bootstrap_action import BootstrapAction
from boto.regioninfo import RegionInfo, get_regions
from boto.regioninfo import connect
def regions():
    """
    Get all available regions for the Amazon Elastic MapReduce service.

    :rtype: list
    :return: A list of :class:`boto.regioninfo.RegionInfo`
    """
    emr_regions = get_regions('elasticmapreduce', connection_cls=EmrConnection)
    return emr_regions
def connect_to_region(region_name, **kw_params):
    """
    Create an :class:`boto.emr.connection.EmrConnection` for the given
    region, by delegating to :func:`boto.regioninfo.connect`.

    :type region_name: str
    :param region_name: The name of the region to connect to.

    :param kw_params: Additional keyword arguments forwarded to
        :func:`boto.regioninfo.connect`.
    """
    return connect('elasticmapreduce', region_name,
                   connection_cls=EmrConnection, **kw_params)
| {
"content_hash": "a0378f24b974b293ff6239860b31341b",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 72,
"avg_line_length": 31.375,
"alnum_prop": 0.7410358565737052,
"repo_name": "xq262144/hue",
"id": "dfa53c7337195e9d7ff07c5124d6130b28aa4cf9",
"size": "1963",
"binary": false,
"copies": "32",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/boto-2.46.1/boto/emr/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3096"
},
{
"name": "Batchfile",
"bytes": "41710"
},
{
"name": "C",
"bytes": "2692409"
},
{
"name": "C++",
"bytes": "199897"
},
{
"name": "CSS",
"bytes": "521820"
},
{
"name": "Emacs Lisp",
"bytes": "11704"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Go",
"bytes": "6671"
},
{
"name": "Groff",
"bytes": "16669"
},
{
"name": "HTML",
"bytes": "24188238"
},
{
"name": "Java",
"bytes": "575404"
},
{
"name": "JavaScript",
"bytes": "4987047"
},
{
"name": "M4",
"bytes": "1377"
},
{
"name": "Makefile",
"bytes": "144341"
},
{
"name": "Mako",
"bytes": "3052598"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "PLSQL",
"bytes": "13774"
},
{
"name": "PLpgSQL",
"bytes": "3646"
},
{
"name": "Perl",
"bytes": "3499"
},
{
"name": "PigLatin",
"bytes": "328"
},
{
"name": "Python",
"bytes": "44291483"
},
{
"name": "Shell",
"bytes": "44147"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "Thrift",
"bytes": "278712"
},
{
"name": "Visual Basic",
"bytes": "2884"
},
{
"name": "XSLT",
"bytes": "518588"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<plugin id="web-more" name="display information from index-more plugin"
version="1.0.0" provider-name="apache.org">
<runtime>
<library name="web-more.jar">
<export name="*" />
</library>
</runtime>
<requires>
<import plugin="webui-extensionpoints" />
</requires>
<extension id="org.apache.nutch.webapp.extension.UIExtensionPoint"
name="Nutch ui extension point"
point="org.apache.nutch.webapp.extension.UIExtensionPoint">
<implementation id="web-more"
class="org.apache.nutch.webapp.extension.UIExtension.VoidImplementation" />
</extension>
</plugin>
| {
"content_hash": "02cec806e2e38dcc5316a1c165efe2ec",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 81,
"avg_line_length": 37.63157894736842,
"alnum_prop": 0.7328671328671329,
"repo_name": "toddlipcon/nutch",
"id": "4ec14b16498ac553af2db0b1c06a185ec1d269cf",
"size": "1430",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "contrib/web2/plugins/web-more/plugin.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "2454126"
},
{
"name": "JavaScript",
"bytes": "16632"
}
],
"symlink_target": ""
} |
#include <wangle/acceptor/ConnectionManager.h>
#include <glog/logging.h>
#include <folly/io/async/EventBase.h>
using folly::HHWheelTimer;
using std::chrono::milliseconds;
namespace wangle {
// Constructs a manager whose wheel timer runs on |eventBase|. Both the drain
// and idle iterators start at conns_.end() (nothing to drain, no idle
// segment yet), and the early-drop threshold defaults to half of |timeout|
// (see dropIdleConnections()).
ConnectionManager::ConnectionManager(folly::EventBase* eventBase,
    milliseconds timeout, Callback* callback)
  : connTimeouts_(new HHWheelTimer(eventBase)),
    callback_(callback),
    eventBase_(eventBase),
    drainIterator_(conns_.end()),
    idleIterator_(conns_.end()),
    idleLoopCallback_(this),
    timeout_(timeout),
    idleConnEarlyDropThreshold_(timeout_ / 2) {
}
// Adopts |connection| into this manager, migrating it away from any previous
// manager first. When |timeout| is true, the default idle timeout is also
// scheduled. Connections added while a shutdown is in progress are told to
// shut down immediately, matching the current shutdown phase.
void
ConnectionManager::addConnection(ManagedConnection* connection,
    bool timeout) {
  CHECK_NOTNULL(connection);
  ConnectionManager* oldMgr = connection->getConnectionManager();
  if (oldMgr != this) {
    if (oldMgr) {
      // 'connection' was being previously managed in a different thread.
      // We must remove it from that manager before adding it to this one.
      oldMgr->removeConnection(connection);
    }
    // put the connection into busy part first. This should not matter at all
    // because the last callback for an idle connection must be onDeactivated(),
    // so the connection must be moved to idle part then.
    conns_.push_front(*connection);
    connection->setConnectionManager(this);
    if (callback_) {
      callback_->onConnectionAdded(*this);
    }
  }
  if (timeout) {
    scheduleTimeout(connection, timeout_);
  }
  if (shutdownState_ >= ShutdownState::CLOSE_WHEN_IDLE) {
    // Shouldn't really happen: new connections are not expected once we have
    // already moved on to closing idle connections.
    connection->closeWhenIdle();
  } else if (shutdownState_ >= ShutdownState::NOTIFY_PENDING_SHUTDOWN) {
    connection->notifyPendingShutdown();
  }
}
// Arms the idle timeout for |connection| on the shared wheel timer.
// A non-positive |timeout| means "no idle timeout", so nothing is scheduled.
void
ConnectionManager::scheduleTimeout(ManagedConnection* const connection,
    std::chrono::milliseconds timeout) {
  if (timeout <= std::chrono::milliseconds(0)) {
    return;
  }
  connTimeouts_->scheduleTimeout(connection, timeout);
}
// Schedules an arbitrary |callback| on the shared wheel timer. NOTE(review):
// unlike the ManagedConnection overload above, this does not skip
// non-positive timeouts -- confirm callers always pass a positive value.
void ConnectionManager::scheduleTimeout(
    folly::HHWheelTimer::Callback* callback,
    std::chrono::milliseconds timeout) {
  connTimeouts_->scheduleTimeout(callback, timeout);
}
// Detaches |connection| from this manager: cancels its pending timeout,
// clears its back-pointer, unlinks it from the list (advancing the drain and
// idle iterators past it when they point at it, so in-progress drains and
// idle sheds stay valid), and fires the removal / empty callbacks.
// A no-op when |connection| is managed elsewhere.
void
ConnectionManager::removeConnection(ManagedConnection* connection) {
  if (connection->getConnectionManager() == this) {
    connection->cancelTimeout();
    connection->setConnectionManager(nullptr);
    // Un-link the connection from our list, being careful to keep the iterator
    // that we're using for idle shedding valid
    auto it = conns_.iterator_to(*connection);
    if (it == drainIterator_) {
      ++drainIterator_;
    }
    if (it == idleIterator_) {
      ++idleIterator_;
    }
    conns_.erase(it);
    if (callback_) {
      callback_->onConnectionRemoved(*this);
      if (getNumConnections() == 0) {
        callback_->onEmpty(*this);
      }
    }
  }
}
// Begins a two-phase graceful shutdown. With a positive |idleGrace|, every
// connection is first notified of the pending shutdown and the grace timer
// is armed; with a zero grace we jump straight to closing idle connections.
// Redundant calls (state already past NONE) are ignored.
void
ConnectionManager::initiateGracefulShutdown(
  std::chrono::milliseconds idleGrace) {
  VLOG(3) << this << " initiateGracefulShutdown with nconns=" << conns_.size();
  if (shutdownState_ != ShutdownState::NONE) {
    VLOG(3) << "Ignoring redundant call to initiateGracefulShutdown";
    return;
  }
  if (idleGrace.count() > 0) {
    shutdownState_ = ShutdownState::NOTIFY_PENDING_SHUTDOWN;
    idleLoopCallback_.scheduleTimeout(idleGrace);
    VLOG(3) << "Scheduling idle grace period of " << idleGrace.count() << "ms";
  } else {
    shutdownState_ = ShutdownState::CLOSE_WHEN_IDLE;
    VLOG(3) << "proceeding directly to closing idle connections";
  }
  drainIterator_ = conns_.begin();
  drainAllConnections();
}
// Walks the connection list from drainIterator_, handling at most 64
// connections per pass; any remainder is processed on the next event-loop
// iteration via runInLoop(&idleLoopCallback_) below. In phase
// NOTIFY_PENDING_SHUTDOWN each connection is only notified; in phase
// CLOSE_WHEN_IDLE each connection is asked to close (immediately when idle,
// otherwise once it becomes idle). When the whole list has been walked, the
// shutdown state advances to the corresponding *_COMPLETE state -- unless
// the grace timer already fired, in which case the CLOSE_WHEN_IDLE pass is
// started over immediately.
void
ConnectionManager::drainAllConnections() {
  DestructorGuard g(this);
  size_t numCleared = 0;
  size_t numKept = 0;
  auto it = drainIterator_;
  CHECK(shutdownState_ == ShutdownState::NOTIFY_PENDING_SHUTDOWN ||
      shutdownState_ == ShutdownState::CLOSE_WHEN_IDLE);
  while (it != conns_.end() && (numKept + numCleared) < 64) {
    ManagedConnection& conn = *it++;
    if (shutdownState_ == ShutdownState::NOTIFY_PENDING_SHUTDOWN) {
      conn.notifyPendingShutdown();
      numKept++;
    } else { // CLOSE_WHEN_IDLE
      // Second time around: close idle sessions. If they aren't idle yet,
      // have them close when they are idle
      if (conn.isBusy()) {
        numKept++;
      } else {
        numCleared++;
      }
      conn.closeWhenIdle();
    }
  }
  if (shutdownState_ == ShutdownState::CLOSE_WHEN_IDLE) {
    VLOG(2) << "Idle connections cleared: " << numCleared <<
      ", busy conns kept: " << numKept;
  } else {
    VLOG(3) << this << " notified n=" << numKept;
  }
  drainIterator_ = it;
  if (it != conns_.end()) {
    eventBase_->runInLoop(&idleLoopCallback_);
  } else {
    if (shutdownState_ == ShutdownState::NOTIFY_PENDING_SHUTDOWN) {
      VLOG(3) << this << " finished notify_pending_shutdown";
      shutdownState_ = ShutdownState::NOTIFY_PENDING_SHUTDOWN_COMPLETE;
      if (!idleLoopCallback_.isScheduled()) {
        // The idle grace timer already fired, start over immediately
        shutdownState_ = ShutdownState::CLOSE_WHEN_IDLE;
        drainIterator_ = conns_.begin();
        eventBase_->runInLoop(&idleLoopCallback_);
      }
    } else {
      shutdownState_ = ShutdownState::CLOSE_WHEN_IDLE_COMPLETE;
    }
  }
}
// Fired when the idle grace period elapses. Only acts once the
// notify-pending-shutdown pass has fully completed; otherwise
// drainAllConnections() itself restarts the close pass when it finishes
// (see the !isScheduled() branch there).
void
ConnectionManager::idleGracefulTimeoutExpired() {
  VLOG(2) << this << " idleGracefulTimeoutExpired";
  if (shutdownState_ != ShutdownState::NOTIFY_PENDING_SHUTDOWN_COMPLETE) {
    VLOG(4) << this << " idleGracefulTimeoutExpired during "
        "NOTIFY_PENDING_SHUTDOWN, ignoring";
    return;
  }
  shutdownState_ = ShutdownState::CLOSE_WHEN_IDLE;
  drainIterator_ = conns_.begin();
  drainAllConnections();
}
// Hard-drops every managed connection immediately, with no grace period:
// cancels all timers and callbacks, detaches each connection, and calls
// dropConnection() on it. Connection state for the first couple of
// connections is dumped to the log to aid debugging.
void
ConnectionManager::dropAllConnections() {
  DestructorGuard g(this);
  shutdownState_ = ShutdownState::CLOSE_WHEN_IDLE_COMPLETE;
  // Iterate through our connection list, and drop each connection.
  VLOG(3) << "connections to drop: " << conns_.size();
  idleLoopCallback_.cancelTimeout();
  unsigned i = 0;
  while (!conns_.empty()) {
    ManagedConnection& conn = conns_.front();
    conns_.pop_front();
    conn.cancelTimeout();
    conn.setConnectionManager(nullptr);
    // For debugging purposes, dump information about the first few
    // connections.
    static const unsigned MAX_CONNS_TO_DUMP = 2;
    if (++i <= MAX_CONNS_TO_DUMP) {
      conn.dumpConnectionState(3);
    }
    conn.dropConnection();
  }
  drainIterator_ = conns_.end();
  idleIterator_ = conns_.end();
  idleLoopCallback_.cancelLoopCallback();
  if (callback_) {
    callback_->onEmpty(*this);
  }
}
// Moves a connection that has become busy to the front (busy) end of the
// list, bumping the idle iterator forward if it pointed at this node so the
// idle segment stays well-formed.
void
ConnectionManager::onActivated(ManagedConnection& conn) {
  auto pos = conns_.iterator_to(conn);
  if (pos == idleIterator_) {
    ++idleIterator_;
  }
  conns_.erase(pos);
  conns_.push_front(conn);
}
// Moves a connection that has become idle to the back (idle) end of the
// list. If the idle iterator was at end() (no idle segment yet), it is
// pulled back to point at the re-inserted node; if the drain iterator
// pointed at this node it is advanced, and pulled back likewise when that
// leaves it at end().
void
ConnectionManager::onDeactivated(ManagedConnection& conn) {
  auto it = conns_.iterator_to(conn);
  bool moveDrainIter = false;
  if (it == drainIterator_) {
    drainIterator_++;
    moveDrainIter = true;
  }
  conns_.erase(it);
  conns_.push_back(conn);
  if (idleIterator_ == conns_.end()) {
    idleIterator_--;
  }
  if (moveDrainIter && drainIterator_ == conns_.end()) {
    drainIterator_--;
  }
}
// Sheds up to |num| idle connections early by firing their timeout callback
// now, starting from idleIterator_ (connections deactivated earliest, i.e.
// idle longest -- see onDeactivated()). Stops at the first connection whose
// idle time is zero or at/below idleConnEarlyDropThreshold_. Returns the
// number actually dropped; a no-op when the early-drop threshold has not
// been lowered below the full timeout_.
size_t
ConnectionManager::dropIdleConnections(size_t num) {
  VLOG(4) << "attempt to drop " << num << " idle connections";
  if (idleConnEarlyDropThreshold_ >= timeout_) {
    return 0;
  }
  size_t count = 0;
  while(count < num) {
    auto it = idleIterator_;
    if (it == conns_.end()) {
      return count; // no more idle session
    }
    auto idleTime = it->getIdleTime();
    if (idleTime == std::chrono::milliseconds(0) ||
        idleTime <= idleConnEarlyDropThreshold_) {
      VLOG(4) << "conn's idletime: " << idleTime.count()
              << ", earlyDropThreshold: " << idleConnEarlyDropThreshold_.count()
              << ", attempt to drop " << count << "/" << num;
      return count; // idleTime cannot be further reduced
    }
    ManagedConnection& conn = *it;
    idleIterator_++;
    conn.timeoutExpired();
    count++;
  }
  return count;
}
} // wangle
| {
"content_hash": "171082e3c60041f8c296453ab47c983e",
"timestamp": "",
"source": "github",
"line_count": 274,
"max_line_length": 80,
"avg_line_length": 29.156934306569344,
"alnum_prop": 0.660282888972337,
"repo_name": "jamperry/wangle",
"id": "5648b879f6825a33a5f4ed1f818aeac7e6398c92",
"size": "8296",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wangle/acceptor/ConnectionManager.cpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "2575"
},
{
"name": "C++",
"bytes": "676896"
},
{
"name": "CMake",
"bytes": "6375"
},
{
"name": "Objective-C",
"bytes": "260"
}
],
"symlink_target": ""
} |
code: true
type: page
title: refreshToken
description: Refresh an authentication token
---
# refreshToken
<SinceBadge version="6.1.0" />
Refreshes a valid, non-expired authentication token.
If this action is successful, then the [jwt](/sdk/js/7/core-classes/kuzzle/properties) property of this class instance is set to the new authentication token.
All further requests emitted by this SDK instance will be on behalf of the authenticated user, until either the authenticated token expires, the [logout](/sdk/js/7/controllers/auth/logout) action is called, or the `jwt` property is manually set to another value.
## Arguments
```js
refreshToken ([options])
```
<br/>
| Arguments | Type | Description |
| --------- | ----------------- | ------------- |
| `options` | <pre>object</pre> | Query options |
### options
Additional query options
| Property | Type<br/>(default) | Description |
| ----------- | ------------------------------- | --------------------------------------------------------------------------------------------------------------------- |
| `expiresIn` | <pre>string</pre> | Expiration time in [ms library](https://www.npmjs.com/package/ms) format. (e.g. `2h`) |
| `queuable` | <pre>boolean</pre><br/>(`true`) | If true, queues the request during downtime, until connected to Kuzzle again |
| [`timeout`](/sdk/7/core-classes/kuzzle/query#timeout) | <pre>number</pre><br/>(`-1`) | Time (in ms) during which a request will still be waited to be resolved. Set it `-1` if you want to wait indefinitely |
### expiresIn
The default value for the `expiresIn` option is defined at server level, in Kuzzle's [configuration file](/core/2/guides/advanced/configuration).
## Resolves
The `refreshToken` action resolves to a token object with the following properties:
| Property | Type | Description |
| ----------- | ----------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| `_id` | <pre>string</pre> | User unique identifier ([kuid](/core/2/guides/main-concepts/authentication#kuzzle-user-identifier-kuid)) |
| `expiresAt` | <pre>number</pre> | Expiration timestamp in Epoch-millis format (UTC) |
| `jwt` | <pre>string</pre> | Authentication token (returned only if the option [cookieAuth](/sdk/js/7/core-classes/kuzzle/constructor) is not enabled in the SDK, otherwise stored in an http cookie) |
| `ttl` | <pre>number</pre> | Time to live of the authentication token, in milliseconds |
## Usage
<<< ./snippets/refreshToken.js
| {
"content_hash": "32ff8d7c60a447e0f16508408bdb6496",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 262,
"avg_line_length": 57,
"alnum_prop": 0.47580157289776165,
"repo_name": "kuzzleio/sdk-javascript",
"id": "a74718431f0d3958aaf34a0afb83cea43819b6b8",
"size": "3310",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/7/controllers/auth/refresh-token/index.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1048"
},
{
"name": "JavaScript",
"bytes": "468625"
},
{
"name": "Shell",
"bytes": "2025"
},
{
"name": "TypeScript",
"bytes": "252195"
}
],
"symlink_target": ""
} |
class Command implements ICommand, IConfigureCommand {
    public name: string;
    // Every mark this command accepts, regardless of position (gutter, left
    // or quick marks all end up here).
    private allowedMarks: Array<string> = new Array<string>();
    // Subset of allowedMarks that were registered via configureLeftMark().
    private leftMarks: Array<string> = new Array<string>();
    // Renderer instances registered per output type via configureRenderer().
    private renderers: Array<{id: Constants.OutputTypes, renderer: IRenderer}> =
        new Array<{id: Constants.OutputTypes, renderer: IRenderer}>();
public configureGutterMark(mark: string, parameters?: Object): IConfigureCommand {
if (InstanceHelper.IsNullOrUndefined(mark) ||
mark === String()) throw new Error('You are required to specify a mark');
this.allowedMarks.push(mark);
return this;
}
public configureLeftMark(mark: string, parameters?: Object): IConfigureCommand {
if (InstanceHelper.IsNullOrUndefined(mark) ||
mark === String()) throw new Error('You are required to specify a mark');
if (/^\s+$/.test(mark))
throw new Error('Whitespaces are not allowed for leftMarks');
this.allowedMarks.push(mark);
this.leftMarks.push(mark);
return this;
}
public configureQuickMark(mark: string, parameters?: Object): IConfigureCommand {
if (InstanceHelper.IsNullOrUndefined(mark) ||
mark === String()) throw new Error('You are required to specify a mark');
if (/^\s+$/.test(mark))
throw new Error('Whitespaces are not allowed for quickMarks');
this.allowedMarks.push(mark);
return this;
}
    public configureRestrictions(children?: string[], parents?: string[]): IConfigureCommand {
        // Currently a no-op: the children/parents restrictions are accepted
        // but neither stored nor enforced anywhere in this class.
        return this;
    }
public allowsMark(mark: string): boolean {
if (InstanceHelper.IsNullOrUndefined(mark) ||
mark === String()) return false;
return _.includes(this.allowedMarks, mark);
}
public allowsLeftMark(mark: string): boolean {
if (InstanceHelper.IsNullOrUndefined(mark) ||
mark === String()) return false;
if (/^\s+$/.test(mark))
throw new Error('Whitespaces are not allowed for leftMarks');
return _.includes(this.leftMarks, mark);
}
public getRenderer(rendererType: Constants.OutputTypes): IRenderer {
let rendererPointer = _(this.renderers).find(x => x.id === rendererType);
if (InstanceHelper.IsNullOrUndefined(rendererPointer))
throw new ReferenceError(`You should configure a renderer for '${this.name}'` +
` before rendering`);
return rendererPointer.renderer;
}
public configureRenderer(rendererType: Constants.OutputTypes, factory: () => IRenderer): IConfigureCommand {
this.renderers.push({
id: rendererType,
renderer: factory()
});
return this;
}
} | {
"content_hash": "8769b1ac4d7ff26245a6c708f5ffed51",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 112,
"avg_line_length": 37.93150684931507,
"alnum_prop": 0.6359696641386782,
"repo_name": "Marvin-Brouwer/XMD",
"id": "1c8bf15a7bf35f31f81ad7e933fc69144462017f",
"size": "3086",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Commands/Command.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2361"
},
{
"name": "HTML",
"bytes": "3244"
},
{
"name": "TypeScript",
"bytes": "27082"
}
],
"symlink_target": ""
} |
import Vue from 'vue'
import VueLazyload from 'vue-lazyload'
// Placeholder image shown while the real image is still loading.
import loading from 'assets/lazy-loading.png'
// Register vue-lazyload globally, using the placeholder as the loading image.
Vue.use(VueLazyload, {loading})
| {
"content_hash": "88017a6815787f6339e203f65d67d87b",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 45,
"avg_line_length": 23.5,
"alnum_prop": 0.7730496453900709,
"repo_name": "JounQin/MIC",
"id": "09284131a5ecfdc5064805d0bf506be9accc1638",
"size": "141",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/plugins/lazy.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "14310"
},
{
"name": "HTML",
"bytes": "1536"
},
{
"name": "JavaScript",
"bytes": "49511"
},
{
"name": "Shell",
"bytes": "111"
},
{
"name": "Vue",
"bytes": "26804"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
Interim Register of Marine and Nonmarine Genera
#### Published in
Senckenb Lethaea 83 (1-2), 30 Dezember: 4.
#### Original name
null
### Remarks
null | {
"content_hash": "287d8d70175ab9496cf4963467d4f3cc",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 47,
"avg_line_length": 13.846153846153847,
"alnum_prop": 0.7111111111111111,
"repo_name": "mdoering/backbone",
"id": "0c0d38b1aeae555e2b47381ba77890b2fc55b032",
"size": "237",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Protozoa/Granuloreticulosea/Foraminiferida/Vaginulinidae/Clarifovea/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
cask 'syphon-virtual-screen' do
  version '1.3'
  sha256 '0cf56d171f3427d623d4b12d55e0342a34cd8d12dd7082c7ed372b5effac8a46'
  # github.com/andreacremaschi/Syphon-virtual-screen was verified as official when first introduced to the cask
  url 'https://github.com/andreacremaschi/Syphon-virtual-screen/releases/download/1.3/Syphon.Virtual.Screen.mpkg.zip'
  appcast 'https://github.com/andreacremaschi/Syphon-virtual-screen/releases.atom',
          checkpoint: '99793e70b315957b663123c844fb442f2fffef84e7ec4731506b6324fc70fcca'
  name 'Syphon Virtual Screen'
  homepage 'https://andreacremaschi.github.io/Syphon-virtual-screen/'
  pkg 'Syphon Virtual Screen.mpkg'
  # Uninstall removes the EWProxyFrameBuffer kext and deletes the kext cache
  # (presumably so the system rebuilds it without the virtual-display driver).
  uninstall kext: 'EWProxyFrameBuffer',
            delete: '/System/Library/Caches/com.apple.kext.caches'
  caveats 'To use different resolutions modify EWProxyFramebuffer.kext/Contents/Info.plist'
end
| {
"content_hash": "6b328e4c63e206c5ae03686338c01a1c",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 117,
"avg_line_length": 48.44444444444444,
"alnum_prop": 0.7912844036697247,
"repo_name": "decrement/homebrew-cask",
"id": "4c1a0a7aa6fea0e8e6c6c8e27b0d39397f7ddb4d",
"size": "872",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "Casks/syphon-virtual-screen.rb",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Ruby",
"bytes": "1751705"
},
{
"name": "Shell",
"bytes": "56109"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8">
    <style>
      /* Bars of the chart are plain divs matched by this selector. */
      .chart div {
        font: 10px sans-serif;
        background-color: steelblue;
        text-align: right;
        padding: 3px;
        margin: 1px;
        color: white;
      }
    </style>
  </head>
  <body>
    <!-- Container element the chart is rendered into. -->
    <div class="chart"></div>
    <!-- d3 library, the Dart entry point, and the Dart bootstrap shim. -->
    <script src="packages/d3/d3.js" charset="utf-8"></script>
    <script type="application/dart" src="main.dart"></script>
    <script src="packages/browser/dart.js"></script>
  </body>
</html>
| {
"content_hash": "8e67f4cb7e7d089169e2ef7c0167f705",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 59,
"avg_line_length": 21.681818181818183,
"alnum_prop": 0.59958071278826,
"repo_name": "rwl/d3.dart",
"id": "cd5d504aec228e1e8416bdcd144fa134a1e15d59",
"size": "477",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example/bar_chart1/index.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dart",
"bytes": "274021"
},
{
"name": "HTML",
"bytes": "8742"
}
],
"symlink_target": ""
} |
/** @module transition */ /** for typedoc */
import { TransitionHookOptions, IEventHook, HookResult } from "./interface";
import { Transition } from "./transition";
import { State } from "../state/stateObject";
/**
 * @hidden
 * Wraps a single registered event hook ([[IEventHook]]) for execution in the
 * context of a [[Transition]] and a [[State]]. (Declaration file only; the
 * implementation lives in the corresponding .ts source.)
 */
export declare class TransitionHook {
    private transition;
    private stateContext;
    private eventHook;
    private options;
    constructor(transition: Transition, stateContext: State, eventHook: IEventHook, options: TransitionHookOptions);
    private isSuperseded;
    /** Invokes the wrapped hook; the promise resolves with the hook's result. */
    invokeHook(): Promise<HookResult>;
    /**
     * This method handles the return value of a Transition Hook.
     *
     * A hook can return false (cancel), a TargetState (redirect),
     * or a promise (which may later resolve to false or a redirect)
     *
     * This also handles "transition superseded" -- when a new transition
     * was started while the hook was still running
     */
    handleHookResult(result: HookResult): Promise<any>;
    toString(): string;
    /**
     * Given an array of TransitionHooks, runs each one synchronously and sequentially.
     *
     * Returns a promise chain composed of any promises returned from each hook.invokeStep() call
     */
    static runSynchronousHooks(hooks: TransitionHook[], swallowExceptions?: boolean): Promise<any>;
}
| {
"content_hash": "171aca4fdc568f14fbae8b5cd0d5b069",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 116,
"avg_line_length": 41.58064516129032,
"alnum_prop": 0.6982156710628394,
"repo_name": "MadhavBitra/jsdelivr",
"id": "f74dbfbbbbde5bc6a6f991a8cbdcbca50e1cf77e",
"size": "1289",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "files/angular.ui-router/1.0.0-beta.3/transition/transitionHook.d.ts",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
package org.springframework.richclient.command.config;
import java.awt.Color;
import javax.swing.Icon;
import javax.swing.JButton;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import junit.framework.TestCase;
import org.springframework.richclient.test.TestIcon;
/**
* Testcase for CommandButtonIconInfo
*
* @author Peter De Bruycker
*/
public class CommandButtonIconInfoTests extends TestCase {
private Icon icon;
private Icon selectedIcon;
private Icon rolloverIcon;
private Icon disabledIcon;
private Icon pressedIcon;
private CommandButtonIconInfo completeInfo;
public void testConstructor() {
CommandButtonIconInfo info = new CommandButtonIconInfo(icon);
assertEquals(icon, info.getIcon());
assertNull(info.getSelectedIcon());
assertNull(info.getRolloverIcon());
assertNull(info.getDisabledIcon());
assertNull(info.getPressedIcon());
}
public void testConstructor2() {
CommandButtonIconInfo info = new CommandButtonIconInfo(icon, selectedIcon);
assertEquals(icon, info.getIcon());
assertEquals(selectedIcon, info.getSelectedIcon());
assertNull(info.getRolloverIcon());
assertNull(info.getDisabledIcon());
assertNull(info.getPressedIcon());
}
public void testConstructor3() {
CommandButtonIconInfo info = new CommandButtonIconInfo(icon, selectedIcon, rolloverIcon);
assertEquals(icon, info.getIcon());
assertEquals(selectedIcon, info.getSelectedIcon());
assertEquals(rolloverIcon, info.getRolloverIcon());
assertNull(info.getDisabledIcon());
assertNull(info.getPressedIcon());
}
public void testConstructor4() {
CommandButtonIconInfo info = new CommandButtonIconInfo(icon, selectedIcon, rolloverIcon, disabledIcon,
pressedIcon);
assertEquals(icon, info.getIcon());
assertEquals(selectedIcon, info.getSelectedIcon());
assertEquals(rolloverIcon, info.getRolloverIcon());
assertEquals(disabledIcon, info.getDisabledIcon());
assertEquals(pressedIcon, info.getPressedIcon());
}
public void testConfigureWithNullButton() {
CommandButtonIconInfo info = new CommandButtonIconInfo(icon);
try {
info.configure(null);
fail("Should throw IllegalArgumentException");
}
catch (IllegalArgumentException e) {
pass();
}
}
public void testConfigureWithJButton() {
JButton button = new JButton("Test");
JButton result = (JButton)completeInfo.configure(button);
assertSame(button, result);
assertEquals(icon, button.getIcon());
assertEquals(selectedIcon, button.getSelectedIcon());
assertEquals(rolloverIcon, button.getRolloverIcon());
assertEquals(disabledIcon, button.getDisabledIcon());
assertEquals(pressedIcon, button.getPressedIcon());
}
public void testConfigureWithJMenuItem() {
JMenuItem button = new JMenuItem("Test");
JMenuItem result = (JMenuItem)completeInfo.configure(button);
assertSame(button, result);
assertEquals(icon, button.getIcon());
assertEquals(selectedIcon, button.getSelectedIcon());
assertEquals(rolloverIcon, button.getRolloverIcon());
assertEquals(disabledIcon, button.getDisabledIcon());
assertEquals(pressedIcon, button.getPressedIcon());
}
public void testConfigureWithJMenu() {
JMenu button = new JMenu("Test");
button.setIcon(icon);
button.setSelectedIcon(selectedIcon);
button.setRolloverIcon(rolloverIcon);
button.setDisabledIcon(disabledIcon);
button.setPressedIcon(pressedIcon);
JMenuItem result = (JMenuItem)completeInfo.configure(button);
assertSame(button, result);
assertEquals(icon, button.getIcon());
assertEquals(selectedIcon, button.getSelectedIcon());
assertEquals(rolloverIcon, button.getRolloverIcon());
assertEquals(disabledIcon, button.getDisabledIcon());
assertEquals(pressedIcon, button.getPressedIcon());
}
private static void pass() {
// test passes
}
protected void setUp() throws Exception {
icon = new TestIcon(Color.BLUE);
selectedIcon = new TestIcon(Color.BLACK);
rolloverIcon = new TestIcon(Color.GREEN);
disabledIcon = new TestIcon(Color.GRAY);
pressedIcon = new TestIcon(Color.WHITE);
completeInfo = new CommandButtonIconInfo(icon, selectedIcon, rolloverIcon, disabledIcon, pressedIcon);
}
} | {
"content_hash": "a78e4b8f9cee826271f63ed9bdb70ba8",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 110,
"avg_line_length": 33.99270072992701,
"alnum_prop": 0.6864934507193472,
"repo_name": "springrichclient/springrcp",
"id": "b811ce4dd49eadafe8ae59630de1f6f635d884ec",
"size": "5280",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "spring-richclient-core/src/test/java/org/springframework/richclient/command/config/CommandButtonIconInfoTests.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Haskell",
"bytes": "1484"
},
{
"name": "Java",
"bytes": "4963844"
},
{
"name": "JavaScript",
"bytes": "22973"
},
{
"name": "Shell",
"bytes": "2550"
}
],
"symlink_target": ""
} |
module.exports = require('./make-webpack-config')({
devtool: 'eval',
env: "development"
});
| {
"content_hash": "5958acede08e1f94631a7dd8c21e9540",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 51,
"avg_line_length": 24,
"alnum_prop": 0.6458333333333334,
"repo_name": "KeweiCodes/snake",
"id": "501ef1e8a3a53bcfbee0854567c5abe2b8434f20",
"size": "96",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "conf/webpack.development.js",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "570"
},
{
"name": "HTML",
"bytes": "919"
},
{
"name": "JavaScript",
"bytes": "14712"
}
],
"symlink_target": ""
} |
import {AmpAudio} from '../amp-audio';
import {adopt} from '../../../../src/runtime';
import {naturalDimensions_} from '../../../../src/layout';
import {createIframePromise} from '../../../../testing/iframe';
import * as sinon from 'sinon';
import '../amp-audio';
adopt(window);

describe('amp-audio', () => {
  let iframe;
  let ampAudio;
  let sandbox;

  beforeEach(() => {
    sandbox = sinon.sandbox.create();
    return createIframePromise(/* runtimeOff */ true).then(i => {
      iframe = i;
    });
  });

  afterEach(() => {
    sandbox.restore();
    document.body.removeChild(iframe.iframe);
  });

  /**
   * Builds an amp-audio element (not yet attached to the document).
   * @param {!Object} attributes map of attribute name to value.
   * @param {Array<!Object>=} opt_childNodesAttrs child descriptors; each has
   *     a `tag` plus attributes, or `tag: 'text'` with a `text` property.
   * @return {!Element}
   */
  function getAmpAudio(attributes, opt_childNodesAttrs) {
    ampAudio = iframe.doc.createElement('amp-audio');
    for (const key in attributes) {
      ampAudio.setAttribute(key, attributes[key]);
    }
    if (opt_childNodesAttrs) {
      opt_childNodesAttrs.forEach(childNodeAttrs => {
        let child;
        if (childNodeAttrs.tag === 'text') {
          child = iframe.doc.createTextNode(childNodeAttrs.text);
        } else {
          child = iframe.doc.createElement(childNodeAttrs.tag);
          for (const key in childNodeAttrs) {
            if (key !== 'tag') {
              child.setAttribute(key, childNodeAttrs[key]);
            }
          }
        }
        ampAudio.appendChild(child);
      });
    }
    return ampAudio;
  }

  /**
   * Builds an amp-audio element, attaches it to the iframe document and
   * returns a promise for the laid-out element.
   */
  function attachAndRun(attributes, opt_childNodesAttrs) {
    const ampAudio = getAmpAudio(attributes, opt_childNodesAttrs);
    naturalDimensions_['AMP-AUDIO'] = {width: '300px', height: '30px'};
    return iframe.addElement(ampAudio);
  }

  it('should load audio through attribute', () => {
    return attachAndRun({
      src: 'https://origin.com/audio.mp3',
    }).then(a => {
      const audio = a.querySelector('audio');
      expect(audio.tagName).to.equal('AUDIO');
      expect(audio.getAttribute('src'))
          .to.equal('https://origin.com/audio.mp3');
      expect(audio.hasAttribute('controls')).to.be.true;
      expect(a.style.width).to.be.equal('300px');
      expect(a.style.height).to.be.equal('30px');
    });
  });

  it('should load audio through sources', () => {
    return attachAndRun({
      width: 503,
      height: 53,
      autoplay: '',
      muted: '',
      loop: '',
    }, [
      {tag: 'source', src: 'https://origin.com/audio.mp3',
        type: 'audio/mpeg'},
      {tag: 'source', src: 'https://origin.com/audio.ogg', type: 'audio/ogg'},
      {tag: 'text', text: 'Unsupported.'},
    ]).then(a => {
      const audio = a.querySelector('audio');
      expect(audio.tagName).to.equal('AUDIO');
      expect(a.getAttribute('width')).to.be.equal('503');
      expect(a.getAttribute('height')).to.be.equal('53');
      // Fixed: chai's greaterThan expects a number, not a string; the
      // previous '1' only worked through implicit `>` coercion.
      expect(audio.offsetWidth).to.be.greaterThan(1);
      expect(audio.offsetHeight).to.be.greaterThan(1);
      expect(audio.hasAttribute('controls')).to.be.true;
      expect(audio.hasAttribute('autoplay')).to.be.true;
      expect(audio.hasAttribute('muted')).to.be.true;
      expect(audio.hasAttribute('loop')).to.be.true;
      expect(audio.hasAttribute('src')).to.be.false;
      expect(audio.childNodes[0].tagName).to.equal('SOURCE');
      expect(audio.childNodes[0].getAttribute('src'))
          .to.equal('https://origin.com/audio.mp3');
      expect(audio.childNodes[1].tagName).to.equal('SOURCE');
      expect(audio.childNodes[1].getAttribute('src'))
          .to.equal('https://origin.com/audio.ogg');
      expect(audio.childNodes[2].nodeType).to.equal(Node.TEXT_NODE);
      expect(audio.childNodes[2].textContent).to.equal('Unsupported.');
    });
  });

  it('should set its dimensions to the browser natural', () => {
    return attachAndRun({
      src: 'https://origin.com/audio.mp3',
    }).then(a => {
      const audio = a.querySelector('audio');
      expect(a.style.width).to.be.equal('300px');
      expect(a.style.height).to.be.equal('30px');
      if (/Safari|Firefox/.test(navigator.userAgent)) {
        // Safari has default sizes for audio tags that cannot
        // be overridden.
        return;
      }
      expect(audio.offsetWidth).to.be.equal(300);
      expect(audio.offsetHeight).to.be.equal(30);
    });
  });

  it('should set its natural dimension only if not specified', () => {
    return attachAndRun({
      'width': '500',
      src: 'https://origin.com/audio.mp3',
    }).then(a => {
      expect(a.style.width).to.be.equal('500px');
      expect(a.style.height).to.be.equal('30px');
    });
  });

  it('should fallback when not available', () => {
    // Patch document.createElement so the component cannot create a real
    // <audio> element, which should trigger the fallback path.
    const savedCreateElement = document.createElement;
    document.createElement = name => {
      if (name == 'audio') {
        return savedCreateElement.call(document, 'audio2');
      }
      return savedCreateElement.call(document, name);
    };
    const element = document.createElement('div');
    element.toggleFallback = sandbox.spy();
    const audio = new AmpAudio(element);
    let promise;
    try {
      promise = audio.layoutCallback();
    } finally {
      // Fixed: restore even if layoutCallback() throws synchronously, so a
      // failure here cannot leave document.createElement patched and break
      // every subsequent test.
      document.createElement = savedCreateElement;
    }
    return promise.then(() => {
      expect(element.toggleFallback).to.be.calledOnce;
    });
  });

  it('should propagate ARIA attributes', () => {
    return attachAndRun({
      src: 'https://origin.com/audio.mp3',
      'aria-label': 'Hello',
      'aria-labelledby': 'id2',
      'aria-describedby': 'id3',
    }).then(a => {
      const audio = a.querySelector('audio');
      expect(audio.getAttribute('aria-label')).to.equal('Hello');
      expect(audio.getAttribute('aria-labelledby')).to.equal('id2');
      expect(audio.getAttribute('aria-describedby')).to.equal('id3');
    });
  });
});
| {
"content_hash": "0cd29f967c2559e5592d5325f09ce735",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 80,
"avg_line_length": 33.84939759036145,
"alnum_prop": 0.607225484961737,
"repo_name": "sklobovskaya/amphtml",
"id": "457c1f8b6b78ffa06a864ae132fc7b56e707e0d0",
"size": "6246",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "extensions/amp-audio/0.1/test/test-amp-audio.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "73680"
},
{
"name": "Go",
"bytes": "7459"
},
{
"name": "HTML",
"bytes": "876009"
},
{
"name": "Java",
"bytes": "36596"
},
{
"name": "JavaScript",
"bytes": "7538285"
},
{
"name": "Protocol Buffer",
"bytes": "29210"
},
{
"name": "Python",
"bytes": "82782"
},
{
"name": "Ruby",
"bytes": "6079"
},
{
"name": "Shell",
"bytes": "6942"
},
{
"name": "Yacc",
"bytes": "20286"
}
],
"symlink_target": ""
} |
__ _ _____ _
___ _ __ / _/ |___ / __ __(_)_ __ ___
/ __| '_ \| |_| | |_ \ _____\ \ / /| | '_ ` _ \
\__ \ |_) | _| |___) |_____|\ V / | | | | | | |
|___/ .__/|_| |_|____/ \_/ |_|_| |_| |_|
|_|
spf13-vim is a distribution of vim plugins and resources for Vim, Gvim and [MacVim].
It is a good starting point for anyone intending to use VIM for development running equally well on Windows, Linux, \*nix and Mac.
The distribution is completely customisable using a `~/.vimrc.local`, `~/.vimrc.bundles.local`, and `~/.vimrc.before.local` Vim RC files.
![spf13-vim image][spf13-vim-img]
Unlike traditional VIM plugin structure, which similar to UNIX throws all files into common directories, making updating or disabling plugins a real mess, spf13-vim 3 uses the [Vundle] plugin management system to have a well organized vim directory (Similar to mac's app folders). Vundle also ensures that the latest versions of your plugins are installed and makes it easy to keep them up to date.
Great care has been taken to ensure that each plugin plays nicely with others, and optional configuration has been provided for what we believe is the most efficient use.
Lastly (and perhaps, most importantly) It is completely cross platform. It works well on Windows, Linux and OSX without any modifications or additional configurations. If you are using [MacVim] or Gvim additional features are enabled. So regardless of your environment just clone and run.
# Installation
## Requirements
To make all the plugins work, specifically [neocomplete](https://github.com/Shougo/neocomplete.vim), you need [vim with lua](https://github.com/Shougo/neocomplete.vim#requirements).
## Linux, \*nix, Mac OSX Installation
The easiest way to install spf13-vim is to use our [automatic installer](https://j.mp/spf13-vim3) by simply copying and pasting the following line into a terminal. This will install spf13-vim and backup your existing vim configuration. If you are upgrading from a prior version (before 3.0) this is also the recommended installation.
*Requires Git 1.7+ and Vim 7.3+*
```bash
curl https://j.mp/spf13-vim3 -L > spf13-vim.sh && sh spf13-vim.sh
```
If you have a bash-compatible shell you can run the script directly:
```bash
sh <(curl https://j.mp/spf13-vim3 -L)
```
## Installing on Windows
On Windows and \*nix [Git] and [Curl] are required. Also, if you haven't done so already, you'll need to install [Vim].
The quickest option to install the three dependencies ([Git], [Curl], [Vim]) plus [spf13-vim] itself is via [Chocolatey] NuGet. After installing [Chocolatey], execute the following commands on the _command prompt_:
C:\> choco install spf13-vim
_Note: The [spf13-vim package] will install Vim also!_
If you want to install [msysgit], [Curl] and [spf13-vim] individually, follow the directions below.
### Installing dependencies
#### Install [Vim]
After installing Vim you must add its installation directory to your PATH environment variable so that the spf13 installation script can find it.
Open Vim and write the following command, it will show the installed directory:
:echo $VIMRUNTIME
C:\Program Files (X86)\Vim\vim74
Then you need to add it to your environment variable path. After that try execute `vim` within command prompt (press Win-R, type `cmd`, press Enter) and you’ll see the default vim page.
#### Install [msysgit]
After installation try running `git --version` within _command prompt_ (press Win-R, type `cmd`, press Enter) to make sure all good:
C:\> git --version
git version 1.7.4.msysgit.0
#### Setup [Curl]
_Instructions blatantly copied from vundle readme_
Installing Curl on Windows is easy as [Curl] is bundled with [msysgit]!
But before it can be used with [Vundle], you need to make `curl` runnable from the _command prompt_.
The easiest way is to create `curl.cmd` with [this content](https://gist.github.com/912993)
@rem Do not use "echo off" to not affect any child calls.
@setlocal
    @rem Get the absolute path to the parent directory, which is assumed to be the
@rem Git installation root.
@for /F "delims=" %%I in ("%~dp0..") do @set git_install_root=%%~fI
@set PATH=%git_install_root%\bin;%git_install_root%\mingw\bin;%PATH%
@if not exist "%HOME%" @set HOME=%HOMEDRIVE%%HOMEPATH%
@if not exist "%HOME%" @set HOME=%USERPROFILE%
@curl.exe %*
And copy it to `C:\Program Files\Git\cmd\curl.cmd`, assuming [msysgit] was installed to `c:\Program Files\Git`
to verify all good, run:
C:\> curl --version
curl 7.21.1 (i686-pc-mingw32) libcurl/7.21.1 OpenSSL/0.9.8k zlib/1.2.3
Protocols: dict file ftp ftps http https imap imaps ldap ldaps pop3 pop3s rtsp smtp smtps telnet tftp
Features: Largefile NTLM SSL SSPI libz
#### Installing spf13-vim on Windows
The easiest way is to download and run the spf13-vim-windows-install.cmd file. Remember to run this file in **Administrator Mode** if you want the symlinks to be created successfully.
## Updating to the latest version
The simplest (and safest) way to update is to simply rerun the installer. It will completely and non-destructively upgrade to the latest version.
```bash
curl https://j.mp/spf13-vim3 -L -o - | sh
```
Alternatively you can manually perform the following steps. If anything has changed with the structure of the configuration you will need to create the appropriate symlinks.
```bash
cd $HOME/to/spf13-vim/
git pull
vim +BundleInstall! +BundleClean +q
```
### Fork me on GitHub
I'm always happy to take pull requests from others. A good number of people are already [contributors] to [spf13-vim]. Go ahead and fork me.
# A highly optimized .vimrc config file
![spf13-vimrc image][spf13-vimrc-img]
The .vimrc file is suited to programming. It is extremely well organized and folds in sections.
Each section is labeled and each option is commented.
It fixes many of the inconveniences of vanilla vim including
* A single config can be used across Windows, Mac and linux
* Eliminates swap and backup files from littering directories, preferring to store in a central location.
* Fixes common typos like :W, :Q, etc
* Setup a solid set of settings for Formatting (change to meet your needs)
* Setup the interface to take advantage of vim's features including
* omnicomplete
* line numbers
* syntax highlighting
* A better ruler & status line
* & more
* Configuring included plugins
## Customization
Create `~/.vimrc.local` and `~/.gvimrc.local` for any local
customizations.
For example, to override the default color schemes:
```bash
echo colorscheme ir_black >> ~/.vimrc.local
```
### Before File
Create a `~/.vimrc.before.local` file to define any customizations
that get loaded *before* the spf13-vim `.vimrc`.
For example, to prevent autocd into a file directory:
```bash
echo let g:spf13_no_autochdir = 1 >> ~/.vimrc.before.local
```
For a list of available spf13-vim specific customization options, look at the `~/.vimrc.before` file.
### Fork Customization
There is an additional tier of customization available to those who want to maintain a
fork of spf13-vim specialized for a particular group. These users can create `.vimrc.fork`
and `.vimrc.bundles.fork` files in the root of their fork. The load order for the configuration is:
1. `.vimrc.before` - spf13-vim before configuration
2. `.vimrc.before.fork` - fork before configuration
3. `.vimrc.before.local` - before user configuration
4. `.vimrc.bundles` - spf13-vim bundle configuration
5. `.vimrc.bundles.fork` - fork bundle configuration
6. `.vimrc.bundles.local` - local user bundle configuration
6. `.vimrc` - spf13-vim vim configuration
7. `.vimrc.fork` - fork vim configuration
8. `.vimrc.local` - local user configuration
See `.vimrc.bundles` for specifics on what options can be set to override bundle configuration. See `.vimrc.before` for specifics
on what options can be overridden. Most vim configuration options should be set in your `.vimrc.fork` file, bundle configuration
needs to be set in your `.vimrc.bundles.fork` file.
You can specify the default bundles for your fork using `.vimrc.before.fork` file. Here is how to create an example `.vimrc.before.fork` file
in a fork repo for the default bundles.
```bash
echo let g:spf13_bundle_groups=[\'general\', \'programming\', \'misc\', \'youcompleteme\'] >> .vimrc.before.fork
```
Once you have this file in your repo, only the bundles you specified will be installed during the first installation of your fork.
You may also want to update your `README.markdown` file so that the `bootstrap.sh` link points to your repository and your `bootstrap.sh`
file to pull down your fork.
For an example of a fork of spf13-vim that provides customization in this manner see [taxilian's fork](https://github.com/taxilian/spf13-vim).
### Easily Editing Your Configuration
`<Leader>ev` opens a new tab containing the .vimrc configuration files listed above. This makes it easier to get an overview of your
configuration and make customizations.
`<Leader>sv` sources the .vimrc file, instantly applying your customizations to the currently running vim instance.
These two mappings can themselves be customized by setting the following in .vimrc.before.local:
```bash
let g:spf13_edit_config_mapping='<Leader>ev'
let g:spf13_apply_config_mapping='<Leader>sv'
```
# Plugins
spf13-vim contains a curated set of popular vim plugins, colors, snippets and syntaxes. Great care has been made to ensure that these plugins play well together and have optimal configuration.
## Adding new plugins
Create `~/.vimrc.bundles.local` for any additional bundles.
To add a new bundle, just add one line for each bundle you want to install. The line should start with the word "Bundle" followed by a string of either the vim.org project name or the githubusername/githubprojectname. For example, the github project [spf13/vim-colors](https://github.com/spf13/vim-colors) can be added with the following command
```bash
echo Bundle \'spf13/vim-colors\' >> ~/.vimrc.bundles.local
```
Once new plugins are added, they have to be installed.
```bash
vim +BundleInstall! +BundleClean +q
```
## Removing (disabling) an included plugin
Create `~/.vimrc.local` if it doesn't already exist.
Add the UnBundle command to this line. It takes the same input as the Bundle line, so simply copy the line you want to disable and add 'Un' to the beginning.
For example, disabling the 'AutoClose' and 'scrooloose/syntastic' plugins
```bash
echo UnBundle \'AutoClose\' >> ~/.vimrc.bundles.local
echo UnBundle \'scrooloose/syntastic\' >> ~/.vimrc.bundles.local
```
**Remember to run ':BundleClean!' after this to remove the existing directories**
Here are a few of the plugins:
## [Undotree]
If you undo changes and then make a new change, in most editors the changes you undid are gone forever, as their undo-history is a simple list.
Since version 7.0 vim uses an undo-tree instead. If you make a new change after undoing changes, a new branch is created in that tree.
Combined with persistent undo, this is nearly as flexible and safe as git ;-)
Undotree makes that feature more accessible by creating a visual representation of said undo-tree.
**QuickStart** Launch using `<Leader>u`.
## [NERDTree]
NERDTree is a file explorer plugin that provides "project drawer"
functionality to your vim editing. You can learn more about it with
`:help NERDTree`.
**QuickStart** Launch using `<Leader>e`.
**Customizations**:
* Use `<C-E>` to toggle NERDTree
* Use `<leader>e` or `<leader>nt` to load NERDTreeFind which opens NERDTree where the current file is located.
* Hide clutter ('\.pyc', '\.git', '\.hg', '\.svn', '\.bzr')
* Treat NERDTree more like a panel than a split.
## [ctrlp]
Ctrlp replaces the Command-T plugin with a 100% viml plugin. It provides an intuitive and fast mechanism to load files from the file system (with regex and fuzzy find), from open buffers, and from recently used files.
**QuickStart** Launch using `<c-p>`.
## [Surround]
This plugin is a tool for dealing with pairs of "surroundings." Examples
of surroundings include parentheses, quotes, and HTML tags. They are
closely related to what Vim refers to as text-objects. Provided
are mappings to allow for removing, changing, and adding surroundings.
Details follow on the exact semantics, but first, consider the following
examples. An asterisk (*) is used to denote the cursor position.
Old text Command New text ~
"Hello *world!" ds" Hello world!
[123+4*56]/2 cs]) (123+456)/2
"Look ma, I'm *HTML!" cs"<q> <q>Look ma, I'm HTML!</q>
if *x>3 { ysW( if ( x>3 ) {
my $str = *whee!; vllllS' my $str = 'whee!';
For instance, if the cursor was inside `"foo bar"`, you could type
`cs"'` to convert the text to `'foo bar'`.
There's a lot more, check it out at `:help surround`
## [NERDCommenter]
NERDCommenter allows you to wrangle your code comments, regardless of
filetype. View `help :NERDCommenter` or check out my post on [NERDCommenter](http://spf13.com/post/vim-plugins-nerd-commenter).
**QuickStart** Toggle comments using `<Leader>c<space>` in Visual or Normal mode.
## [neocomplete]
Neocomplete is an amazing autocomplete plugin with additional support for snippets. It can complete simultaneously from the dictionary, buffer, omnicomplete and snippets. This is the one true plugin that brings Vim autocomplete on par with the best editors.
**QuickStart** Just start typing, it will autocomplete where possible
**Customizations**:
* Automatically present the autocomplete menu
* Support tab and enter for autocomplete
* `<C-k>` for completing snippets using [Neosnippet](https://github.com/Shougo/neosnippet.vim).
![neocomplete image][autocomplete-img]
## [YouCompleteMe]
YouCompleteMe is another amazing completion engine. It is slightly more involved to set up as it contains a binary component that the user needs to compile before it will work. As a result of this however it is very fast.
To enable YouCompleteMe add `youcompleteme` to your list of groups by overriding it in your `.vimrc.before.local` like so: `let g:spf13_bundle_groups=['general', 'programming', 'misc', 'scala', 'youcompleteme']` This is just an example. Remember to choose the other groups you want here.
Once you have done this you will need to get Vundle to grab the latest code from git. You can do this by calling `:BundleInstall!`. You should see YouCompleteMe in the list.
You will now have the code in your bundles directory and can proceed to compile the core. Change to the directory it has been downloaded to. If you have a vanilla install then `cd ~/.spf13-vim-3/.vim/bundle/YouCompleteMe/` should do the trick. You should see a file in this directory called install.sh. There are a few options to consider before running the installer:
* Do you want clang support (if you don't know what this is then you likely don't need it)?
* Do you want to link against a local libclang or have the installer download the latest for you?
* Do you want support for c# via the omnisharp server?
The plugin is well documented on the site linked above. Be sure to give that a read and make sure you understand the options you require.
For java users wanting to use eclim be sure to add `let g:EclimCompletionMethod = 'omnifunc'` to your .vimrc.local.
## [Syntastic]
Syntastic is a syntax checking plugin that runs buffers through external syntax
checkers as they are saved and opened. If syntax errors are detected, the user
is notified and is happy because they didn't have to compile their code or
execute their script to find them.
## [AutoClose]
AutoClose does what you expect. It's simple, if you open a bracket, paren, brace, quote,
etc, it automatically closes it. It handles curlys correctly and doesn't get in the
way of double curlies for things like jinja and twig.
## [Fugitive]
Fugitive adds pervasive git support to git directories in vim. For more
information, use `:help fugitive`
Use `:Gstatus` to view `git status` and type `-` on any file to stage or
unstage it. Type `p` on a file to enter `git add -p` and stage specific
hunks in the file.
Use `:Gdiff` on an open file to see what changes have been made to that
file
**QuickStart** `<leader>gs` to bring up git status
**Customizations**:
* `<leader>gs` :Gstatus<CR>
* `<leader>gd` :Gdiff<CR>
* `<leader>gc` :Gcommit<CR>
* `<leader>gb` :Gblame<CR>
* `<leader>gl` :Glog<CR>
* `<leader>gp` :Git push<CR>
* `<leader>gw` :Gwrite<CR>
* :Git ___ will pass anything along to git.
![fugitive image][fugitive-img]
## [PIV]
The most feature complete and up to date PHP Integration for Vim with proper support for PHP 5.3+ including latest syntax, functions, better fold support, etc.
PIV provides:
* PHP 5.3 support
* Auto generation of PHP Doc (,pd on (function, variable, class) definition line)
* Autocomplete of classes, functions, variables, constants and language keywords
* Better indenting
* Full PHP documentation manual (hit K on any function for full docs)
![php vim itegration image][phpmanual-img]
## [Ack.vim]
Ack.vim uses ack to search inside the current directory for a pattern.
You can learn more about it with `:help Ack`
**QuickStart** :Ack
## [Tabularize]
Tabularize lets you align statements on their equal signs and other characters
**Customizations**:
* `<Leader>a= :Tabularize /=<CR>`
* `<Leader>a: :Tabularize /:<CR>`
* `<Leader>a:: :Tabularize /:\zs<CR>`
* `<Leader>a, :Tabularize /,<CR>`
* `<Leader>a<Bar> :Tabularize /<Bar><CR>`
## [Tagbar]
spf13-vim includes the Tagbar plugin. This plugin requires exuberant-ctags and will automatically generate tags for your open files. It also provides a panel to navigate easily via tags
**QuickStart** `CTRL-]` while the cursor is on a keyword (such as a function name) to jump to its definition.
**Customizations**: spf13-vim binds `<Leader>tt` to toggle the tagbar panel
![tagbar image][tagbar-img]
**Note**: For full language support, run `brew install ctags` to install
exuberant-ctags.
**Tip**: Check out `:help ctags` for information about VIM's built-in
ctag support. Tag navigation creates a stack which can traversed via
`Ctrl-]` (to find the source of a token) and `Ctrl-T` (to jump back up
one level).
## [EasyMotion]
EasyMotion provides an interactive way to use motions in Vim.
It quickly maps each possible jump destination to a key allowing very fast and
straightforward movement.
**QuickStart** EasyMotion is triggered using the normal movements, but prefixing them with `<leader><leader>`
For example this screen shot demonstrates pressing `,,w`
![easymotion image][easymotion-img]
## [Airline]
Airline provides a lightweight themable statusline with no external dependencies. By default this configuration uses the symbols `‹` and `›` as separators for different statusline sections but can be configured to use the same symbols as [Powerline]. An example first without and then with powerline symbols is shown here:
![airline image][airline-img]
To enable powerline symbols first install one of the [Powerline Fonts] or patch your favorite font using the provided instructions. Configure your terminal, MacVim, or Gvim to use the desired font. Finally add `let g:airline_powerline_fonts=1` to your `.vimrc.before.local`.
## Additional Syntaxes
spf13-vim ships with a few additional syntaxes:
* Markdown (bound to \*.markdown, \*.md, and \*.mk)
* Twig
* Git commits (set your `EDITOR` to `mvim -f`)
## Amazing Colors
spf13-vim includes [solarized] and [spf13 vim color pack](https://github.com/spf13/vim-colors/):
* ir_black
* molokai
* peaksea
Use `:color molokai` to switch to a color scheme.
Terminal Vim users will benefit from solarizing their terminal emulators and setting solarized support to 16 colors:
let g:solarized_termcolors=16
color solarized
Terminal emulator colorschemes:
* http://ethanschoonover.com/solarized (iTerm2, Terminal.app)
* https://github.com/phiggins/konsole-colors-solarized (KDE Konsole)
* https://github.com/sigurdga/gnome-terminal-colors-solarized (Gnome Terminal)
## Snippets
It also contains a very complete set of [snippets](https://github.com/spf13/snipmate-snippets) for use with snipmate or [neocomplete].
# Intro to VIM
Here's some tips if you've never used VIM before:
## Tutorials
* Type `vimtutor` into a shell to go through a brief interactive
tutorial inside VIM.
* Read the slides at [VIM: Walking Without Crutches](https://walking-without-crutches.heroku.com/#1).
## Modes
* VIM has two (common) modes:
* insert mode- stuff you type is added to the buffer
* normal mode- keys you hit are interpreted as commands
* To enter insert mode, hit `i`
* To exit insert mode, hit `<ESC>`
## Useful commands
* Use `:q` to exit vim
* Certain commands are prefixed with a `<Leader>` key, which by default maps to `\`.
Spf13-vim uses `let mapleader = ","` to change this to `,` which is in a consistent and
convenient location.
* Keyboard [cheat sheet](http://www.viemu.com/vi-vim-cheat-sheet.gif).
[](https://github.com/igrigorik/ga-beacon)
[](https://bitdeli.com/free "Bitdeli Badge")
[Git]:http://git-scm.com
[Curl]:http://curl.haxx.se
[Vim]:http://www.vim.org/download.php#pc
[msysgit]:http://msysgit.github.io
[Chocolatey]: http://chocolatey.org/
[spf13-vim package]: https://chocolatey.org/packages/spf13-vim
[MacVim]:http://code.google.com/p/macvim/
[spf13-vim]:https://github.com/spf13/spf13-vim
[contributors]:https://github.com/spf13/spf13-vim/contributors
[Vundle]:https://github.com/VundleVim/Vundle.vim
[PIV]:https://github.com/spf13/PIV
[NERDCommenter]:https://github.com/scrooloose/nerdcommenter
[Undotree]:https://github.com/mbbill/undotree
[NERDTree]:https://github.com/scrooloose/nerdtree
[ctrlp]:https://github.com/kien/ctrlp.vim
[solarized]:https://github.com/altercation/vim-colors-solarized
[neocomplete]:https://github.com/shougo/neocomplete
[Fugitive]:https://github.com/tpope/vim-fugitive
[Surround]:https://github.com/tpope/vim-surround
[Tagbar]:https://github.com/majutsushi/tagbar
[Syntastic]:https://github.com/scrooloose/syntastic
[vim-easymotion]:https://github.com/Lokaltog/vim-easymotion
[YouCompleteMe]:https://github.com/Valloric/YouCompleteMe
[Matchit]:http://www.vim.org/scripts/script.php?script_id=39
[Tabularize]:https://github.com/godlygeek/tabular
[EasyMotion]:https://github.com/Lokaltog/vim-easymotion
[Airline]:https://github.com/bling/vim-airline
[Powerline]:https://github.com/lokaltog/powerline
[Powerline Fonts]:https://github.com/Lokaltog/powerline-fonts
[AutoClose]:https://github.com/spf13/vim-autoclose
[Ack.vim]:https://github.com/mileszs/ack.vim
[spf13-vim-img]:https://i.imgur.com/UKToY.png
[spf13-vimrc-img]:https://i.imgur.com/kZWj1.png
[autocomplete-img]:https://i.imgur.com/90Gg7.png
[tagbar-img]:https://i.imgur.com/cjbrC.png
[fugitive-img]:https://i.imgur.com/4NrxV.png
[nerdtree-img]:https://i.imgur.com/9xIfu.png
[phpmanual-img]:https://i.imgur.com/c0GGP.png
[easymotion-img]:https://i.imgur.com/ZsrVL.png
[airline-img]:https://i.imgur.com/D4ZYADr.png
| {
"content_hash": "a1bd6f671e6a398bfa6040886df21bb5",
"timestamp": "",
"source": "github",
"line_count": 565,
"max_line_length": 398,
"avg_line_length": 41.55929203539823,
"alnum_prop": 0.7333162982837188,
"repo_name": "metcalfc/spf13-vim",
"id": "f97b4802a3382633ccdb2a5cc0e879b437fcfd84",
"size": "23535",
"binary": false,
"copies": "1",
"ref": "refs/heads/3.0",
"path": "README.markdown",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4122"
},
{
"name": "Shell",
"bytes": "5419"
},
{
"name": "Vim script",
"bytes": "50427"
}
],
"symlink_target": ""
} |
<?php
/* vim: set expandtab sw=4 ts=4 sts=4: */
/**
 * Normalization process (temporarily specific to 1NF)
 *
 * @package PhpMyAdmin
 */
/**
 *
 */
require_once 'libraries/common.inc.php';
require_once 'libraries/transformations.lib.php';
require_once 'libraries/normalization.lib.php';
require_once 'libraries/Index.class.php';
// NOTE: $db and $table are globals set up by common.inc.php from the request
// — presumably validated there; verify before reusing this pattern.
// Each branch below answers one AJAX request and calls exit, so the order of
// the isset() checks is the dispatch order.
// AJAX: option list of the current table's string-typed columns, used when
// picking a column to split during the 1NF step.
if (isset($_REQUEST['getColumns'])) {
    $html = '<option selected disabled>' . __('Select one…') . '</option>'
        . '<option value="no_such_col">' . __('No such column') . '</option>';
    //get column whose datatype falls under string category
    $html .= PMA_getHtmlForColumnsList(
        $db,
        $table,
        _pgettext('string types', 'String')
    );
    echo $html;
    exit;
}
// AJAX: form for creating the requested number of new columns when splitting
// a column into atomic parts.
if (isset($_REQUEST['splitColumn'])) {
    $num_fields = $_REQUEST['numFields'];
    $html = PMA_getHtmlForCreateNewColumn($num_fields, $db, $table);
    $html .= PMA_URL_getHiddenInputs($db, $table);
    echo $html;
    exit;
}
// AJAX: form for adding a new auto_increment primary key column named
// "<table>_id".
if (isset($_REQUEST['addNewPrimary'])) {
    $num_fields = 1;
    $columnMeta = array('Field'=>$table . "_id", 'Extra'=>'auto_increment');
    $html = PMA_getHtmlForCreateNewColumn(
        $num_fields, $db, $table, $columnMeta
    );
    $html .= PMA_URL_getHiddenInputs($db, $table);
    echo $html;
    exit;
}
// AJAX: detect partial dependencies (2NF analysis) for the table.
if (isset($_REQUEST['findPdl'])) {
    $html = PMA_findPartialDependencies($table, $db);
    echo $html;
    exit;
}
// AJAX: preview the new tables proposed for 2NF, based on the partial
// dependencies the client sends back as JSON in 'pd'.
if (isset($_REQUEST['getNewTables2NF'])) {
    $partialDependencies = json_decode($_REQUEST['pd']);
    $html = PMA_getHtmlForNewTables2NF($partialDependencies, $table);
    echo $html;
    exit;
}
// AJAX: preview the new tables proposed for 3NF; responds with raw JSON, so
// the normal response machinery is disabled first.
if (isset($_REQUEST['getNewTables3NF'])) {
    $dependencies = json_decode($_REQUEST['pd']);
    $tables = json_decode($_REQUEST['tables']);
    $newTables = PMA_getHtmlForNewTables3NF($dependencies, $tables, $db);
    PMA_Response::getInstance()->disable();
    PMA_headerJSON();
    echo json_encode($newTables);
    exit;
}
// Full-page flow from here on: set up the response and client-side scripts.
$response = PMA_Response::getInstance();
$header = $response->getHeader();
$scripts = $header->getScripts();
$scripts->addFile('normalization.js');
$scripts->addFile('jquery/jquery.uitablefilter.js');
// Target normal form for this run; defaults to 1NF.
$normalForm = '1nf';
if (isset($_REQUEST['normalizeTo'])) {
    $normalForm = $_REQUEST['normalizeTo'];
}
// AJAX: actually create the 2NF tables chosen by the user.
if (isset($_REQUEST['createNewTables2NF'])) {
    $partialDependencies = json_decode($_REQUEST['pd']);
    $tablesName = json_decode($_REQUEST['newTablesName']);
    $res = PMA_createNewTablesFor2NF($partialDependencies, $tablesName, $table, $db);
    $response->addJSON($res);
    exit;
}
// AJAX: actually create the 3NF tables chosen by the user.
if (isset($_REQUEST['createNewTables3NF'])) {
    $newtables = json_decode($_REQUEST['newTables']);
    $res = PMA_createNewTablesFor3NF($newtables, $db);
    $response->addJSON($res);
    exit;
}
// AJAX: move a repeating group of columns out into a new table (1NF step 3).
if (isset($_POST['repeatingColumns'])) {
    $repeatingColumns = $_POST['repeatingColumns'];
    $newTable = $_POST['newTable'];
    $newColumn = $_POST['newColumn'];
    $primary_columns = $_POST['primary_columns'];
    $res = PMA_moveRepeatingGroup(
        $repeatingColumns, $primary_columns, $newTable, $newColumn, $table, $db
    );
    $response->addJSON($res);
    exit;
}
// Wizard step dispatch: 1NF steps 1-4, then 2NF (step 2.1) and 3NF
// (step 3.1); with no step parameter, render the normalization start page.
if (isset($_REQUEST['step1'])) {
    $html = PMA_getHtmlFor1NFStep1($db, $table, $normalForm);
    $response->addHTML($html);
} else if (isset($_REQUEST['step2'])) {
    $res = PMA_getHtmlContentsFor1NFStep2($db, $table);
    $response->addJSON($res);
} else if (isset($_REQUEST['step3'])) {
    $res = PMA_getHtmlContentsFor1NFStep3($db, $table);
    $response->addJSON($res);
} else if (isset ($_REQUEST['step4'])) {
    $res = PMA_getHtmlContentsFor1NFStep4($db, $table);
    $response->addJSON($res);
} else if (isset($_REQUEST['step']) && $_REQUEST['step'] == 2.1) {
    $res = PMA_getHtmlFor2NFstep1($db, $table);
    $response->addJSON($res);
} else if (isset($_REQUEST['step']) && $_REQUEST['step'] == 3.1) {
    $tables = $_REQUEST['tables'];
    $res = PMA_getHtmlFor3NFstep1($db, $tables);
    $response->addJSON($res);
} else {
    $response->addHTML(PMA_getHtmlForNormalizetable());
}
| {
"content_hash": "7529bbc65e3f56281f38bfd676c5f265",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 85,
"avg_line_length": 32.45528455284553,
"alnum_prop": 0.6312625250501002,
"repo_name": "jothamhernandez/ThesisProject",
"id": "4cb956918104c7dac53d97accb9f5e31c380c9db",
"size": "3994",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "db/normalization.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "659"
},
{
"name": "Batchfile",
"bytes": "12934"
},
{
"name": "CSS",
"bytes": "92872"
},
{
"name": "Groff",
"bytes": "58"
},
{
"name": "HTML",
"bytes": "2159883"
},
{
"name": "JavaScript",
"bytes": "9930313"
},
{
"name": "Makefile",
"bytes": "14684"
},
{
"name": "PHP",
"bytes": "17257824"
},
{
"name": "Python",
"bytes": "32580"
},
{
"name": "Shell",
"bytes": "4584"
}
],
"symlink_target": ""
} |
Artwork-Installer LinuxEDU
Trebuie instalat pachetul "ubiquity-slideshow-xubuntu"
sudo apt-get install ubiquity-slideshow-xubuntu
Apoi, editate fișierele din /usr/share/ubiquity-slideshow
Merge testat/dezvoltat în felul următor :
- se ia iso-ul și se rulează într-o mașină virtuală
- se înlocuiesc fișierele din /usr/share/ubiquity-slideshow cu unele personale
- se pornește installerul și se instalează sistemul (aici se vor vedea noul artwork)
| {
"content_hash": "62cd5f26600e78710defe44d4880bf2d",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 84,
"avg_line_length": 40.81818181818182,
"alnum_prop": 0.8151447661469933,
"repo_name": "educatie/installer",
"id": "3b636dd198da67c613df6ed4327c4609954dfd93",
"size": "483",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4643"
},
{
"name": "HTML",
"bytes": "247576"
},
{
"name": "JavaScript",
"bytes": "9025"
}
],
"symlink_target": ""
} |
package org.apache.struts2.convention;
import com.opensymphony.xwork2.ActionInvocation;
import com.opensymphony.xwork2.interceptor.AbstractInterceptor;
/**
 * Minimal interceptor fixture for the Convention plugin tests. It performs no
 * interception work and simply exposes a single bean-style property so tests
 * can verify that interceptor parameters are wired up.
 */
public class TestInterceptor extends AbstractInterceptor {

    /** Property populated through interceptor configuration in tests. */
    private String string1;

    /**
     * No-op interception: never invokes the action and always yields a
     * {@code null} result code.
     */
    @Override
    public String intercept(ActionInvocation invocation) throws Exception {
        return null;
    }

    public void setString1(String value) {
        this.string1 = value;
    }

    public String getString1() {
        return this.string1;
    }
}
| {
"content_hash": "b2573815b016988240ad301652bd0728",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 72,
"avg_line_length": 22.285714285714285,
"alnum_prop": 0.7927350427350427,
"repo_name": "TheTypoMaster/struts-2.3.24",
"id": "12a0988aba831bd6dc95c22e72f1d0cb75ec65c5",
"size": "468",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/plugins/convention/src/test/java/org/apache/struts2/convention/TestInterceptor.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "22970"
},
{
"name": "CSS",
"bytes": "73781"
},
{
"name": "HTML",
"bytes": "1055902"
},
{
"name": "Java",
"bytes": "10166736"
},
{
"name": "JavaScript",
"bytes": "3966249"
},
{
"name": "XSLT",
"bytes": "8112"
}
],
"symlink_target": ""
} |
<h1 id="datasets">Datasets</h1><blockquote><p>Datasets.</p></blockquote><section class="usage"><h2 id="usage">Usage</h2><pre><code class="hljs language-javascript"><span class="hljs-keyword">var</span> datasets = <span class="hljs-built_in">require</span>( <span class="hljs-string">'@stdlib/datasets'</span> );
</code></pre><h4 id="datasets-name-options-">datasets( name[, options] )</h4><p>Returns standard library datasets.</p><pre><code class="hljs language-javascript"><span class="hljs-keyword">var</span> data = datasets( <span class="hljs-string">'MONTH_NAMES_EN'</span> );
<span class="hljs-comment">/* returns
[
'January',
'February',
'March',
'April',
'May',
'June',
'July',
'August',
'September',
'October',
'November',
'December'
]
*/</span>
</code></pre><p>The function forwards provided <code>options</code> to the dataset interface specified by <code>name</code>.</p><pre><code class="hljs language-javascript"><span class="hljs-keyword">var</span> opts = {
<span class="hljs-string">'data'</span>: <span class="hljs-string">'cities'</span>
};
<span class="hljs-keyword">var</span> data = datasets( <span class="hljs-string">'MINARD_NAPOLEONS_MARCH'</span>, opts );
<span class="hljs-comment">/* returns
[
{'lon': 24,'lat': 55,'city': 'Kowno',
{'lon': 25.3,'lat': 54.7,'city': 'Wilna',
{'lon': 26.4,'lat': 54.4,'city': 'Smorgoni',
{'lon': 26.8,'lat': 54.3,'city': 'Molodexno',
{'lon': 27.7,'lat': 55.2,'city': 'Gloubokoe',
{'lon': 27.6,'lat': 53.9,'city': 'Minsk',
{'lon': 28.5,'lat': 54.3,'city': 'Studienska',
{'lon': 28.7,'lat': 55.5,'city': 'Polotzk',
{'lon': 29.2,'lat': 54.4,'city': 'Bobr',
{'lon': 30.2,'lat': 55.3,'city': 'Witebsk',
{'lon': 30.4,'lat': 54.5,'city': 'Orscha',
{'lon': 30.4,'lat': 53.9,'city': 'Mohilow',
{'lon': 32,'lat': 54.8,'city': 'Smolensk',
{'lon': 33.2,'lat': 54.9,'city': 'Dorogobouge',
{'lon': 34.3,'lat': 55.2,'city': 'Wixma',
{'lon': 34.4,'lat': 55.5,'city': 'Chjat',
{'lon': 36,'lat': 55.5,'city': 'Mojaisk',
{'lon': 37.6,'lat': 55.8,'city': 'Moscou',
{'lon': 36.6,'lat': 55.3,'city': 'Tarantino',
{'lon': 36.5,'lat': 55,'city': 'Malo-Jarosewli'
]
*/</span>
</code></pre></section><section class="examples"><h2 id="examples">Examples</h2><pre><code class="hljs language-javascript"><span class="hljs-keyword">var</span> datasets = <span class="hljs-built_in">require</span>( <span class="hljs-string">'@stdlib/datasets'</span> );
<span class="hljs-keyword">var</span> data = datasets( <span class="hljs-string">'MONTH_NAMES_EN'</span> );
<span class="hljs-built_in">console</span>.log( data );
</code></pre></section><hr><section class="cli"><h2 id="cli">CLI</h2><section class="usage"><h3 id="usage-1">Usage</h3><pre><code class="no-highlight language-text">Usage: datasets [options] [--name=<name>]
Options:
-h, --help Print this message.
-V, --version Print the package version.
--name name Dataset name.
--ls List datasets.
</code></pre></section><section class="notes"><h3 id="notes">Notes</h3><ul><li>Dataset specific options should follow two hyphen characters <code>--</code> in order to indicate that those options should not be parsed as normal command-line options.</li></ul></section><section class="examples"><h3 id="examples-1">Examples</h3><pre><code class="hljs language-bash">$ datasets --name MONTH_NAMES_EN
January
February
March
...
</code></pre><p>Use two hyphen characters <code>--</code> to delineate dataset specific options.</p><pre><code class="hljs language-bash">$ datasets --name MINARD_NAPOLEONS_MARCH -- --data army
lon,lat,size,direction,division
24.0,54.9,340000,A,1
24.5,55.0,340000,A,1
25.5,54.5,340000,A,1
...
</code></pre></section></section><section class="related"></section><section class="links"></section> | {
"content_hash": "80616f62e93f77cc5093c21ed7e0ba3c",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 397,
"avg_line_length": 57.267605633802816,
"alnum_prop": 0.6013280865715691,
"repo_name": "stdlib-js/www",
"id": "4a8d6bcc7acc59922ec2eb4003dab2fbff9029d6",
"size": "4066",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "public/docs/api/latest/@stdlib/datasets/index.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "190538"
},
{
"name": "HTML",
"bytes": "158086013"
},
{
"name": "Io",
"bytes": "14873"
},
{
"name": "JavaScript",
"bytes": "5395746994"
},
{
"name": "Makefile",
"bytes": "40479"
},
{
"name": "Shell",
"bytes": "9744"
}
],
"symlink_target": ""
} |
# Base image: balena's Ubuntu "disco" build variant for the Samsung Artik 520 (ARM v7).
FROM balenalib/artik520-ubuntu:disco-build
# Versions of the Node.js runtime and the Yarn package manager installed below.
ENV NODE_VERSION 15.6.0
ENV YARN_VERSION 1.22.4
# Import the Node.js release signing key (trying several keyservers), download the
# prebuilt armv7l Node.js tarball and check its sha256, and unpack it into /usr/local;
# then download Yarn, verify its GPG signature, install it under /opt/yarn and symlink
# yarn/yarnpkg into /usr/local/bin. Finally allow npm to run as root and clean /tmp.
RUN for key in \
	6A010C5166006599AA17F08146C2130DFD2497F5 \
	; do \
		gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
		gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
		gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
	done \
	&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz" \
	&& echo "234871415c54174f91764f332a72631519a6af7b1a87797ad7c729855182f9cd  node-v$NODE_VERSION-linux-armv7l.tar.gz" | sha256sum -c - \
	&& tar -xzf "node-v$NODE_VERSION-linux-armv7l.tar.gz" -C /usr/local --strip-components=1 \
	&& rm "node-v$NODE_VERSION-linux-armv7l.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
	&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& mkdir -p /opt/yarn \
	&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
	&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& npm config set unsafe-perm true -g --unsafe-perm \
	&& rm -rf /tmp/*
# Placeholder command: this is a base image, derived images are expected to set
# their own CMD (the default just points users at the balena docs).
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
# Download and run balena's Node.js stack smoke test, then remove it.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
  && echo "Running test-stack@node" \
  && chmod +x [email protected] \
  && bash [email protected] \
  && rm -rf [email protected] 
# Record image details (arch, OS, preinstalled stack) for the balena-info command.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Ubuntu disco \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v15.6.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
# Replace /bin/sh with a one-shot shim that prints the image info banner on
# first use and then restores the real shell.
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh
"content_hash": "885fb7467817fc7d57d9c5ea62044ca9",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 692,
"avg_line_length": 66.73170731707317,
"alnum_prop": 0.7097953216374269,
"repo_name": "nghiant2710/base-images",
"id": "8570b392e054aacee967fc718c879c76d0cb8816",
"size": "2757",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "balena-base-images/node/artik520/ubuntu/disco/15.6.0/build/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "144558581"
},
{
"name": "JavaScript",
"bytes": "16316"
},
{
"name": "Shell",
"bytes": "368690"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Text;
using wojilu.Common.Menus.Interface;
using wojilu.Web.Context;
using System.Collections;
using wojilu.Web.Url;
using wojilu.Web.Mvc.Routes;
using wojilu.Web.Mvc;
namespace wojilu.Web.Controller.Layouts {
public class MenuHelper {
public static void bindMenuSingle( IBlock block, IMenu menu, MvcContext ctx ) {
block.Set( "menu.Id", menu.Id );
block.Set( "menu.Name", menu.Name );
block.Set( "menu.Style", menu.Style );
block.Set( "menu.Link", UrlConverter.toMenu( menu, ctx ) );
String lnkTarget = menu.OpenNewWindow == 1 ? lnkTarget = " target=\"_blank\"" : "";
block.Set( "menu.LinkTarget", lnkTarget );
block.Next();
}
public static void bindSubMenus( IBlock block, List<IMenu> list, MvcContext ctx ) {
foreach (IMenu menu in list) {
bindMenuSingle( block, menu, ctx );
}
}
public static List<IMenu> getSubMenus( IList menus, IMenu menu ) {
List<IMenu> list = new List<IMenu>();
if (menu == null) return list;
foreach (IMenu m in menus) {
if (m.ParentId == menu.Id) list.Add( m );
}
return list;
}
public static List<IMenu> getRootMenus( IList menus ) {
List<IMenu> list = new List<IMenu>();
foreach (IMenu m in menus) {
if (m.ParentId == 0) list.Add( m );
}
return list;
}
//--------------------------------------------------------------------------------------------------
public static IMenu getCurrentRootMenu( List<IMenu> list, MvcContext ctx ) {
IMenu m = getCurrentMenuByUrl( list, ctx );
if (m != null) {
if (m.ParentId == 0)
return m;
else {
return getParentMenu( list, m, ctx );
}
}
else {
return getRootMenuByAppAndNs( list, ctx );
}
}
//--------------------------------------------
private static IMenu getCurrentMenuByUrl( List<IMenu> list, MvcContext ctx ) {
String currentPath = strUtil.TrimEnd( ctx.url.Path, MvcConfig.Instance.UrlExt );
currentPath = currentPath.TrimStart( '/' );
Boolean isHomepage = false;
if (strUtil.IsNullOrEmpty( currentPath )) isHomepage = true;// 在无后缀名的情况下,首页是空""
foreach (IMenu menu in list) {
if (isHomepage) {
if ("default".Equals( menu.Url )) return menu;
continue;
}
if (currentPath.Equals( menu.Url ) || currentPath.Equals( menu.RawUrl )) { // 未设置友好网址的url也是空
return menu;
}
}
return null;
}
private static IMenu getParentMenu( List<IMenu> list, IMenu menu, MvcContext ctx ) {
foreach (IMenu m in list) {
if (m.Id == menu.ParentId) return m;
}
return null;
}
//--------------------------------------------
private static IMenu getRootMenuByAppAndNs( List<IMenu> list, MvcContext ctx ) {
IMenu menu = getRootMenuByNs( list, ctx );
if (menu == null) return null;
// 如果是app,则还要比较appId
if (ctx.app.Id > 0) {
Route menuRoute = RouteTool.RecognizePath( menu.RawUrl );
if (ctx.app.Id != menuRoute.appId) return null;
}
return menu;
}
private static IMenu getRootMenuByNs( List<IMenu> list, MvcContext ctx ) {
// 先找到同一命名空间的
IMenu menu = getMenuBySameNs( list, ctx );
if (menu == null) return null;
// 找到父菜单
if (menu.ParentId > 0) menu = getParentMenu( list, menu, ctx );
return menu;
}
private static IMenu getMenuBySameNs( List<IMenu> list, MvcContext ctx ) {
foreach (IMenu m in list) {
if (m.RawUrl.StartsWith( "http:" )) continue;
Route rt = RouteTool.RecognizePath( m.RawUrl );
if (!ctx.route.ns.StartsWith( rt.ns )) continue;
if (ctx.app.Id <= 0 || ctx.app.Id == rt.appId) return m;
}
return null;
}
}
}
| {
"content_hash": "c0409690916c4305b286058d1a486550",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 108,
"avg_line_length": 30.18709677419355,
"alnum_prop": 0.477025005343022,
"repo_name": "songboriceboy/cnblogsbywojilu",
"id": "a1c623f2f516718a55d68f4e9f8b1aa548d6dcf7",
"size": "4779",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wojilu.Controller/Layouts/MenuHelper.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "1648"
},
{
"name": "ActionScript",
"bytes": "1034007"
},
{
"name": "Batchfile",
"bytes": "113"
},
{
"name": "C#",
"bytes": "5144510"
},
{
"name": "CSS",
"bytes": "494476"
},
{
"name": "HTML",
"bytes": "1357282"
},
{
"name": "JavaScript",
"bytes": "1469091"
},
{
"name": "PHP",
"bytes": "10164"
}
],
"symlink_target": ""
} |
import { Mob, IMob, IMobTag, MobTag } from '../../core';
/**
 * @name ElderGuardian
 * @description
 * Contract for the elder guardian entity: a hostile mob which only spawns
 * underwater in ocean monuments, and is a stronger variant of the guardian.
 * Adds no members beyond the base IMob contract.
 */
export interface IElderGuardian extends IMob {
}
/**
* @name ElderGuardian
* @description
* The elder guardian is a hostile mob which only spawns underwater in ocean monuments.
* It is a stronger variant of the guardian.
*/
export class ElderGuardian extends Mob implements IElderGuardian {
/**
* @description
* Initializes the Zombie
*/
constructor() {
super('elder_guardian', new MobTag());
}
/**
* Tags which modify the entity with your given values.
*/
public get Tag(): IMobTag {
return this.entityTag as MobTag;
}
} | {
"content_hash": "9f063e63a3d7716d642c0675b6eb80d9",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 87,
"avg_line_length": 24.515151515151516,
"alnum_prop": 0.6823238566131026,
"repo_name": "BrunnerLivio/MinecraftCommandAPI",
"id": "8ad353fafe788f0f0a3ed890884bb94346663397",
"size": "809",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/entities/ElderGuardian/ElderGuardian.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "2577"
},
{
"name": "TypeScript",
"bytes": "55005"
}
],
"symlink_target": ""
} |
<!-- Shared delete-confirmation modal: interpolates the name of the item being
     deleted ({{deleteText}}) and offers Delete / Cancel buttons wired to the
     enclosing scope's remove() and cancel() handlers. -->
<statemodal>
  <h4>Are you sure you want to delete <strong>{{deleteText}}</strong>?</h4>
  <div class="margin-t-20">
    <button id="del_btn" class="btn btn-danger" ng-click="remove()">Delete</button>
    <a class="btn btn-default" ng-click="cancel()">Cancel</a>
  </div>
</statemodal>
| {
"content_hash": "11b9b5bbf0907139f262bbeae8092261",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 83,
"avg_line_length": 40.857142857142854,
"alnum_prop": 0.6503496503496503,
"repo_name": "torgartor21/mfl_admin_web",
"id": "9def26d39e9b1b6e631c493dfe9ff7a04eb4bf08",
"size": "286",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/app/common/tpls/delete.tpl.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "300434"
},
{
"name": "HTML",
"bytes": "524869"
},
{
"name": "JavaScript",
"bytes": "1588217"
},
{
"name": "Python",
"bytes": "1115"
},
{
"name": "Shell",
"bytes": "2084"
}
],
"symlink_target": ""
} |
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Moq;
using Owin;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Owin.Mapping;
using Microsoft.Owin.Extensions;
using System.Threading.Tasks;
using MMMWrapper.Logic;
using MMMWrapper.Logic.Config;
namespace MMMWrapper.Tests
{
    [TestClass]
    public class OwinTests
    {
        /// <summary>
        /// Verifies that Startup.Configuration runs against a mocked
        /// IAppBuilder without throwing and registers the Swagger entry
        /// in the app builder's properties dictionary.
        /// </summary>
        [TestMethod]
        public void StartUpOk()
        {
            // Arrange: mock the OWIN app builder, backing Properties with a
            // real dictionary so registrations made during startup are visible.
            var startup = new Startup();
            var mockedAppBuilder = new Mock<IAppBuilder>();
            var properties = new Dictionary<string, object>();
            mockedAppBuilder.Setup(a => a.Properties).Returns(properties);
            mockedAppBuilder.Setup(a => a.Use(It.IsAny<GlobalExceptionMiddleware>())).Verifiable();
            // New() must be stubbed before the Use(MapMiddleware, ...) setup
            // below, since that setup's return value is produced by calling it.
            mockedAppBuilder.Setup(a => a.New()).Returns(mockedAppBuilder.Object);
            mockedAppBuilder.Setup(a => a.Use(typeof(MapMiddleware), It.IsAny<MapOptions>())).Returns(mockedAppBuilder.Object.New());
            // Act: run the OWIN startup pipeline configuration.
            startup.Configuration(mockedAppBuilder.Object);
            // Assert: startup registered the "swagger" key in the properties.
            Assert.IsTrue(properties.Keys.Any(a => a == "swagger"));
        }
    }
}
| {
"content_hash": "ea37b4d301d2ccc38d6624630f6ef186",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 133,
"avg_line_length": 27.285714285714285,
"alnum_prop": 0.6684118673647469,
"repo_name": "martijnvaandering/MMMWrapper",
"id": "4655460f57020418a94f36d7e4b0eaa8a8e86166",
"size": "1148",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MMMWrapper.Tests/OwinTest.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "185241"
},
{
"name": "CSS",
"bytes": "102034"
},
{
"name": "HTML",
"bytes": "4252"
},
{
"name": "JavaScript",
"bytes": "2229519"
}
],
"symlink_target": ""
} |
'use strict';
import type {SourceMap} from './output/source-map';
import type {Ast} from 'babel-core';
import type {Console} from 'console';
// Node-style callback: invoked either with an Error, or with null/undefined
// followed by up to two result values.
export type Callback<A = void, B = void>
  = (Error => void)
  & ((null | void, A, B) => void);
// One dependency edge of a module: the id as written in the source plus its
// path — presumably the resolved file path; confirm against callers.
type Dependency = {|
  id: string,
  path: string,
|};
// A processed file: its code, optional source map, path and file type.
export type File = {|
  code: string,
  map?: ?Object,
  path: string,
  type: FileTypes,
|};
type FileTypes = 'module' | 'script' | 'asset';
// Builds the module graph from the given entry points for one platform and
// reports the result through the callback.
export type GraphFn = (
  entryPoints: Iterable<string>,
  platform: string,
  options?: ?GraphOptions,
  callback?: Callback<GraphResult>,
) => void;
type GraphOptions = {|
  log?: Console,
  optimize?: boolean,
  // Module paths to exclude from traversal — TODO confirm.
  skip?: Set<string>,
|};
// Result of a graph build: the entry modules plus all reachable modules.
export type GraphResult = {|
  entryModules: Array<Module>,
  modules: Array<Module>,
|};
// Maps an object carrying a module path to that module's numeric id.
export type IdForPathFn = {path: string} => number;
// Loads and processes a single file; the callback receives the processed File
// and an array of strings — presumably its dependency ids; verify.
export type LoadFn = (
  file: string,
  options: LoadOptions,
  callback: Callback<File, Array<string>>,
) => void;
type LoadOptions = {|
  log?: Console,
  optimize?: boolean,
  platform?: string,
|};
// A node of the module graph: the processed file and its dependency edges.
export type Module = {|
  dependencies: Array<Dependency>,
  file: File,
|};
// Serializes a set of modules into bundle code plus a source map.
export type OutputFn = (
  modules: Iterable<Module>,
  filename?: string,
  idForPath: IdForPathFn,
) => OutputResult;
type OutputResult = {|
  code: string,
  map: SourceMap,
|};
// Subset of package.json fields used for module resolution.
export type PackageData = {|
  browser?: Object | string,
  main?: string,
  name?: string,
  'react-native'?: Object | string,
|};
// Resolves a module id (optionally relative to a source file) for a platform,
// passing the resolved path string to the callback.
export type ResolveFn = (
  id: string,
  source: ?string,
  platform: string,
  options?: ResolveOptions,
  callback: Callback<string>,
) => void;
type ResolveOptions = {
  log?: Console,
};
// Output of a single transformer run: optional AST, code and source map.
export type TransformerResult = {|
  ast: ?Ast,
  code: string,
  map: ?SourceMap,
|};
// A source-code transformer (e.g. a Babel wrapper).
export type Transformer = {
  transform: (
    sourceCode: string,
    filename: string,
    options: ?{},
    plugins?: Array<string | Object | [string | Object, any]>,
  ) => {ast: ?Ast, code: string, map: ?SourceMap}
};
// Result of transforming one module variant, including its extracted
// dependency ids.
export type TransformResult = {|
  code: string,
  dependencies: Array<string>,
  dependencyMapName?: string,
  map: ?Object,
|};
// Transform results keyed by variant name.
export type TransformResults = {[string]: TransformResult};
// Named sets of transform options — one entry per build variant.
export type TransformVariants = {[key: string]: Object};
// A fully transformed file with metadata (haste id, package data, per-variant
// transform results).
export type TransformedFile = {
  assetContent: ?string,
  code: string,
  file: string,
  hasteID: ?string,
  package?: PackageData,
  transformed: TransformResults,
  type: FileTypes,
};
| {
"content_hash": "460a40a79026e54ee299f145881fd774",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 62,
"avg_line_length": 18.661417322834644,
"alnum_prop": 0.6535864978902953,
"repo_name": "Maxwell2022/react-native",
"id": "92ecc3ceaab5d041c0f72f41ccc3459083289e9e",
"size": "2690",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packager/src/ModuleGraph/types.flow.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "15392"
},
{
"name": "Awk",
"bytes": "121"
},
{
"name": "Batchfile",
"bytes": "683"
},
{
"name": "C",
"bytes": "203427"
},
{
"name": "C++",
"bytes": "684247"
},
{
"name": "CSS",
"bytes": "40564"
},
{
"name": "HTML",
"bytes": "172601"
},
{
"name": "IDL",
"bytes": "1837"
},
{
"name": "Java",
"bytes": "2839413"
},
{
"name": "JavaScript",
"bytes": "3944826"
},
{
"name": "Makefile",
"bytes": "7585"
},
{
"name": "Objective-C",
"bytes": "1382785"
},
{
"name": "Objective-C++",
"bytes": "237329"
},
{
"name": "Prolog",
"bytes": "287"
},
{
"name": "Python",
"bytes": "137789"
},
{
"name": "Ruby",
"bytes": "7566"
},
{
"name": "Shell",
"bytes": "40461"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>elpi: Not compatible 👼</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.13.0 / elpi - 1.8.2~8.12</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
elpi
<small>
1.8.2~8.12
<span class="label label-info">Not compatible 👼</span>
</small>
</h1>
<p>📅 <em><script>document.write(moment("2022-11-26 03:34:25 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-11-26 03:34:25 UTC)</em><p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-threads base
base-unix base
conf-findutils 1 Virtual package relying on findutils
conf-gmp 4 Virtual package relying on a GMP lib system installation
coq 8.13.0 Formal proof management system
num 1.4 The legacy Num library for arbitrary-precision integer and rational arithmetic
ocaml 4.06.1 The OCaml compiler (virtual package)
ocaml-base-compiler 4.06.1 Official 4.06.1 release
ocaml-config 1 OCaml Switch Configuration
ocamlfind 1.9.5 A library manager for OCaml
zarith 1.12 Implements arithmetic and logical operations over arbitrary-precision integers
# opam file:
opam-version: "2.0"
maintainer: "Enrico Tassi <[email protected]>"
authors: [ "Enrico Tassi" ]
license: "LGPL-2.1-or-later"
homepage: "https://github.com/LPCIC/coq-elpi"
bug-reports: "https://github.com/LPCIC/coq-elpi/issues"
dev-repo: "git+https://github.com/LPCIC/coq-elpi"
build: [ make "COQBIN=%{bin}%/" "ELPIDIR=%{prefix}%/lib/elpi" "OCAMLWARN=" ]
install: [ make "install" "COQBIN=%{bin}%/" "ELPIDIR=%{prefix}%/lib/elpi" ]
depends: [
"elpi" {>= "1.12.0" & < "1.13.0~"}
"coq" {>= "8.12" & < "8.13~" }
]
tags: [ "logpath:elpi" ]
synopsis: "Elpi extension language for Coq"
description: """
Coq-elpi provides a Coq plugin that embeds ELPI.
It also provides a way to embed Coq's terms into λProlog using
the Higher-Order Abstract Syntax approach
and a way to read terms back. In addition to that it exports to ELPI a
set of Coq's primitives, e.g. printing a message, accessing the
environment of theorems and data types, defining a new constant and so on.
For convenience it also provides a quotation and anti-quotation for Coq's
syntax in λProlog. E.g. `{{nat}}` is expanded to the type name of natural
numbers, or `{{A -> B}}` to the representation of a product by unfolding
the `->` notation. Finally it provides a way to define new vernacular commands
and
new tactics."""
url {
src: "https://github.com/LPCIC/coq-elpi/archive/v1.8.2_8.12.tar.gz"
checksum: "sha256=fa7008d75abafa9fcb8d8ed5a26873c4cc00663963425de55e230b7835c9e9a4"
}
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install 🏜️</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-elpi.1.8.2~8.12 coq.8.13.0</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.13.0).
The following dependencies couldn't be met:
- coq-elpi -> coq < 8.13~ -> ocaml < 4.06.0
base of this switch (use `--unlock-base' to force)
Your request can't be satisfied:
- No available version of coq satisfies the constraints
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-elpi.1.8.2~8.12</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install 🚀</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall 🧹</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
| {
"content_hash": "f30fe44eb2a1f3c5f6961a024ac13f3a",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 159,
"avg_line_length": 43.54597701149425,
"alnum_prop": 0.5568166820641415,
"repo_name": "coq-bench/coq-bench.github.io",
"id": "aa58f150916011449454e8a16119e83ac8be698b",
"size": "7604",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clean/Linux-x86_64-4.06.1-2.0.5/released/8.13.0/elpi/1.8.2~8.12.html",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
#ifndef _UAPI__LINUX_NETLINK_H
#define _UAPI__LINUX_NETLINK_H
#include <linux/kernel.h>
#include <linux/socket.h>
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#include <linux/types.h>
#define NETLINK_ROUTE 0
#define NETLINK_UNUSED 1
#define NETLINK_USERSOCK 2
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NETLINK_FIREWALL 3
#define NETLINK_SOCK_DIAG 4
#define NETLINK_NFLOG 5
#define NETLINK_XFRM 6
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NETLINK_SELINUX 7
#define NETLINK_ISCSI 8
#define NETLINK_AUDIT 9
#define NETLINK_FIB_LOOKUP 10
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NETLINK_CONNECTOR 11
#define NETLINK_NETFILTER 12
#define NETLINK_IP6_FW 13
#define NETLINK_DNRTMSG 14
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NETLINK_KOBJECT_UEVENT 15
#define NETLINK_GENERIC 16
#define NETLINK_SCSITRANSPORT 18
#define NETLINK_ECRYPTFS 19
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NETLINK_RDMA 20
#define NETLINK_CRYPTO 21
#define NETLINK_INET_DIAG NETLINK_SOCK_DIAG
#define MAX_LINKS 32
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
/* Socket address for AF_NETLINK sockets. */
struct sockaddr_nl {
 __kernel_sa_family_t nl_family; /* address family, AF_NETLINK */
 unsigned short nl_pad; /* padding, set to zero */
 __u32 nl_pid; /* port id of the socket owner; see netlink(7) */
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
 __u32 nl_groups; /* multicast group subscription bitmask */
};
/* Fixed header that starts every netlink message. */
struct nlmsghdr {
 __u32 nlmsg_len; /* total message length, including this header (see NLMSG_LENGTH) */
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
 __u16 nlmsg_type; /* message content type (NLMSG_* or protocol-specific) */
 __u16 nlmsg_flags; /* NLM_F_* flags defined below */
 __u32 nlmsg_seq; /* sequence number, for matching requests to replies */
 __u32 nlmsg_pid; /* sending port id */
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
};
#define NLM_F_REQUEST 1
#define NLM_F_MULTI 2
#define NLM_F_ACK 4
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NLM_F_ECHO 8
#define NLM_F_DUMP_INTR 16
#define NLM_F_ROOT 0x100
#define NLM_F_MATCH 0x200
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NLM_F_ATOMIC 0x400
#define NLM_F_DUMP (NLM_F_ROOT|NLM_F_MATCH)
#define NLM_F_REPLACE 0x100
#define NLM_F_EXCL 0x200
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NLM_F_CREATE 0x400
#define NLM_F_APPEND 0x800
#define NLMSG_ALIGNTO 4U
#define NLMSG_ALIGN(len) ( ((len)+NLMSG_ALIGNTO-1) & ~(NLMSG_ALIGNTO-1) )
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NLMSG_HDRLEN ((int) NLMSG_ALIGN(sizeof(struct nlmsghdr)))
#define NLMSG_LENGTH(len) ((len) + NLMSG_HDRLEN)
#define NLMSG_SPACE(len) NLMSG_ALIGN(NLMSG_LENGTH(len))
#define NLMSG_DATA(nlh) ((void*)(((char*)nlh) + NLMSG_LENGTH(0)))
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NLMSG_NEXT(nlh,len) ((len) -= NLMSG_ALIGN((nlh)->nlmsg_len), (struct nlmsghdr*)(((char*)(nlh)) + NLMSG_ALIGN((nlh)->nlmsg_len)))
#define NLMSG_OK(nlh,len) ((len) >= (int)sizeof(struct nlmsghdr) && (nlh)->nlmsg_len >= sizeof(struct nlmsghdr) && (nlh)->nlmsg_len <= (len))
#define NLMSG_PAYLOAD(nlh,len) ((nlh)->nlmsg_len - NLMSG_SPACE((len)))
#define NLMSG_NOOP 0x1
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NLMSG_ERROR 0x2
#define NLMSG_DONE 0x3
#define NLMSG_OVERRUN 0x4
#define NLMSG_MIN_TYPE 0x10
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
/* Payload of an NLMSG_ERROR message: a status code followed by the
 * header of the request being answered; see netlink(7) for details. */
struct nlmsgerr {
 int error; /* negative errno, or 0 for an acknowledgement */
 struct nlmsghdr msg; /* header of the message this error refers to */
};
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NETLINK_ADD_MEMBERSHIP 1
#define NETLINK_DROP_MEMBERSHIP 2
#define NETLINK_PKTINFO 3
#define NETLINK_BROADCAST_ERROR 4
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NETLINK_NO_ENOBUFS 5
#define NETLINK_RX_RING 6
#define NETLINK_TX_RING 7
/* Ancillary data delivered when the NETLINK_PKTINFO option is enabled. */
struct nl_pktinfo {
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
 __u32 group; /* multicast group the packet was received on */
};
/* Ring-buffer geometry request used with the NETLINK_RX_RING and
 * NETLINK_TX_RING socket options (memory-mapped netlink I/O). */
struct nl_mmap_req {
 unsigned int nm_block_size; /* size of one contiguous block, in bytes */
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
 unsigned int nm_block_nr; /* number of blocks in the ring */
 unsigned int nm_frame_size; /* size of one frame, in bytes */
 unsigned int nm_frame_nr; /* total number of frames in the ring */
};
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
/* Per-frame header in a memory-mapped netlink ring. */
struct nl_mmap_hdr {
 unsigned int nm_status; /* frame state, one of enum nl_mmap_status */
 unsigned int nm_len; /* length of the message data in this frame */
 __u32 nm_group; /* destination multicast group */
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
 __u32 nm_pid; /* sending port id */
 __u32 nm_uid; /* sender's user id */
 __u32 nm_gid; /* sender's group id */
};
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
/* Lifecycle states of a frame in a memory-mapped netlink ring,
 * stored in nl_mmap_hdr.nm_status to hand frames back and forth
 * between kernel and user space. */
enum nl_mmap_status {
 NL_MMAP_STATUS_UNUSED,
 NL_MMAP_STATUS_RESERVED,
 NL_MMAP_STATUS_VALID,
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
 NL_MMAP_STATUS_COPY,
 NL_MMAP_STATUS_SKIP,
};
#define NL_MMAP_MSG_ALIGNMENT NLMSG_ALIGNTO
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NL_MMAP_MSG_ALIGN(sz) __ALIGN_KERNEL(sz, NL_MMAP_MSG_ALIGNMENT)
#define NL_MMAP_HDRLEN NL_MMAP_MSG_ALIGN(sizeof(struct nl_mmap_hdr))
#define NET_MAJOR 36
/* Netlink socket connection state. */
enum {
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
 NETLINK_UNCONNECTED = 0,
 NETLINK_CONNECTED,
};
/* Header of a netlink attribute (TLV); the payload follows,
 * padded to NLA_ALIGNTO bytes (see the NLA_* macros below). */
struct nlattr {
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
 __u16 nla_len; /* attribute length, including this header */
 __u16 nla_type; /* attribute type; top two bits are NLA_F_* flags */
};
#define NLA_F_NESTED (1 << 15)
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NLA_F_NET_BYTEORDER (1 << 14)
#define NLA_TYPE_MASK ~(NLA_F_NESTED | NLA_F_NET_BYTEORDER)
#define NLA_ALIGNTO 4
#define NLA_ALIGN(len) (((len) + NLA_ALIGNTO - 1) & ~(NLA_ALIGNTO - 1))
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define NLA_HDRLEN ((int) NLA_ALIGN(sizeof(struct nlattr)))
#endif
| {
"content_hash": "e5b6f4d335524d443452cb161a83643e",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 145,
"avg_line_length": 37.39869281045752,
"alnum_prop": 0.7221251310730514,
"repo_name": "efortuna/AndroidSDKClone",
"id": "b5567b01adc40d4c8e3f56793aecff6b9dd46032",
"size": "6696",
"binary": false,
"copies": "95",
"ref": "refs/heads/master",
"path": "ndk_experimental/platforms/android-20/arch-x86/usr/include/linux/netlink.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AppleScript",
"bytes": "0"
},
{
"name": "Assembly",
"bytes": "79928"
},
{
"name": "Awk",
"bytes": "101642"
},
{
"name": "C",
"bytes": "110780727"
},
{
"name": "C++",
"bytes": "62609188"
},
{
"name": "CSS",
"bytes": "318944"
},
{
"name": "Component Pascal",
"bytes": "220"
},
{
"name": "Emacs Lisp",
"bytes": "4737"
},
{
"name": "Groovy",
"bytes": "82931"
},
{
"name": "IDL",
"bytes": "31867"
},
{
"name": "Java",
"bytes": "102919416"
},
{
"name": "JavaScript",
"bytes": "44616"
},
{
"name": "Objective-C",
"bytes": "196166"
},
{
"name": "Perl",
"bytes": "45617403"
},
{
"name": "Prolog",
"bytes": "1828886"
},
{
"name": "Python",
"bytes": "34997242"
},
{
"name": "Rust",
"bytes": "17781"
},
{
"name": "Shell",
"bytes": "1585527"
},
{
"name": "Visual Basic",
"bytes": "962"
},
{
"name": "XC",
"bytes": "802542"
}
],
"symlink_target": ""
} |
// .NAME vtkQuaternion - templated base type for storage of quaternions.
// .SECTION Description
// This class is a templated data type for storing and manipulating
// quaternions. The quaternions have the form [w, x, y, z].
// Given a rotation of angle theta and axis v, the corresponding
// quaternion is [w, x, y, z] = [cos(theta/2), v*sin(theta/2)]
//
// This class implements the Spherical Linear interpolation (SLERP)
// and the Spherical Spline Quaternion interpolation (SQUAD).
// It is advised to use the vtkQuaternionInterpolator when dealing
// with multiple quaternions and or interpolations.
//
// .SECTION See also
// vtkQuaternionInterpolator
#ifndef __vtkQuaternion_h
#define __vtkQuaternion_h
#include "vtkTuple.h"
// Templated quaternion [w, x, y, z] with the axis-angle convention
// described in the file header. All member functions declared here are
// implemented in vtkQuaternion.txx (included at the bottom of this file).
template<typename T> class vtkQuaternion : public vtkTuple<T, 4>
{
public:
  // Description:
  // Default constructor. Creates an identity quaternion.
  vtkQuaternion();
  // Description:
  // Initialize all of the quaternion's elements with the supplied scalar.
  explicit vtkQuaternion(const T& scalar) : vtkTuple<T, 4>(scalar) {}
  // Description:
  // Initialize the quaternion's elements with the elements of the supplied array.
  // Note that the supplied pointer must contain at least as many elements as
  // the quaternion, or it will result in access to out of bounds memory.
  explicit vtkQuaternion(const T* init) : vtkTuple<T, 4>(init) {}
  // Description:
  // Initialize the quaternion element explicitly.
  vtkQuaternion(const T& w, const T& x, const T& y, const T& z);
  // Description:
  // Get the squared norm of the quaternion.
  T SquaredNorm() const;
  // Description:
  // Get the norm of the quaternion, i.e. its length.
  T Norm() const;
  // Description:
  // Set the quaternion to identity in place.
  void ToIdentity();
  // Description:
  // Return the identity quaternion.
  // Note that the default constructor also creates an identity quaternion.
  static vtkQuaternion<T> Identity();
  // Description:
  // Normalize the quaternion in place.
  // Return the norm of the quaternion.
  T Normalize();
  // Description:
  // Return the normalized form of this quaternion.
  vtkQuaternion<T> Normalized() const;
  // Description:
  // Conjugate the quaternion in place.
  void Conjugate();
  // Description:
  // Return the conjugate form of this quaternion.
  vtkQuaternion<T> Conjugated() const;
  // Description:
  // Invert the quaternion in place.
  // This is equivalent to conjugate the quaternion and then divide
  // it by its squared norm.
  void Invert();
  // Description:
  // Return the inverted form of this quaternion.
  vtkQuaternion<T> Inverse() const;
  // Description:
  // Return the dot product of two quaternions.
  T Dot(const vtkQuaternion<T>& q)const;
  // Description:
  // Convert this quaternion to a unit log quaternion.
  // The unit log quaternion is defined by:
  // [w, x, y, z] = [0.0, v*sin(theta)].
  void ToUnitLog();
  // Description:
  // Return the unit log version of this quaternion.
  // The unit log quaternion is defined by:
  // [w, x, y, z] = [0.0, v*sin(theta)].
  vtkQuaternion<T> UnitLog() const;
  // Description:
  // Convert this quaternion to a unit exponential quaternion.
  // The unit exponential quaternion is defined by:
  // [w, x, y, z] = [cos(theta), v*sin(theta)].
  void ToUnitExp();
  // Description:
  // Return the unit exponential version of this quaternion.
  // The unit exponential quaternion is defined by:
  // [w, x, y, z] = [cos(theta), v*sin(theta)].
  vtkQuaternion<T> UnitExp() const;
  // Description:
  // Normalize a quaternion in place and transform it so
  // its angle is in degrees and its axis normalized.
  void NormalizeWithAngleInDegrees();
  // Description:
  // Returns a quaternion normalized and transformed
  // so its angle is in degrees and its axis normalized.
  vtkQuaternion<T> NormalizedWithAngleInDegrees() const;
  // Description:
  // Set/Get the w, x, y and z components of the quaternion.
  void Set(const T& w, const T& x, const T& y, const T& z);
  void Set(T quat[4]);
  void Get(T quat[4]) const;
  // Description:
  // Set/Get the w component of the quaternion, i.e. element 0.
  void SetW(const T& w);
  const T& GetW() const;
  // Description:
  // Set/Get the x component of the quaternion, i.e. element 1.
  void SetX(const T& x);
  const T& GetX() const;
  // Description:
  // Set/Get the y component of the quaternion, i.e. element 2.
  void SetY(const T& y);
  const T& GetY() const;
  // Description:
  // Set/Get the z component of the quaternion, i.e. element 3.
  void SetZ(const T& z);
  const T& GetZ() const;
  // Description:
  // Set/Get the angle (in radians) and the axis corresponding to
  // the axis-angle rotation of this quaternion.
  T GetRotationAngleAndAxis(T axis[3]) const;
  void SetRotationAngleAndAxis(T angle, T axis[3]);
  void SetRotationAngleAndAxis(
    const T& angle, const T& x, const T& y, const T& z);
  // Description:
  // Cast the quaternion to the specified type and return the result.
  template<typename CastTo> vtkQuaternion<CastTo> Cast() const;
  // Description:
  // Convert a quaternion to a 3x3 rotation matrix. The quaternion
  // does not have to be normalized beforehand.
  // @sa FromMatrix3x3()
  void ToMatrix3x3(T A[3][3]) const;
  // Description:
  // Convert a 3x3 matrix into a quaternion. This will provide the
  // best possible answer even if the matrix is not a pure rotation matrix.
  // The method used is that of B.K.P. Horn.
  // @sa ToMatrix3x3()
  void FromMatrix3x3(const T A[3][3]);
  // Description:
  // Interpolate quaternions using spherical linear interpolation between
  // this quaternion and q1 to produce the output.
  // The parametric coordinate t belongs to [0,1] and lies between (this,q1).
  // @sa vtkQuaternionInterpolator
  vtkQuaternion<T> Slerp(T t, const vtkQuaternion<T>& q) const;
  // Description:
  // Interpolates between quaternions, using spherical quadrangle
  // interpolation.
  // @sa vtkQuaternionInterpolator
  vtkQuaternion<T> InnerPoint(const vtkQuaternion<T>& q1,
    const vtkQuaternion<T>& q2) const;
  // Description:
  // Performs the copy of a quaternion of the same basic type.
  void operator=(const vtkQuaternion<T>& q);
  // Description:
  // Performs addition of quaternion of the same basic type.
  vtkQuaternion<T> operator+(const vtkQuaternion<T>& q) const;
  // Description:
  // Performs subtraction of quaternions of the same basic type.
  vtkQuaternion<T> operator-(const vtkQuaternion<T>& q) const;
  // Description:
  // Negate the quaternion by multiplying each component by -1.
  vtkQuaternion<T> operator-() const;
  // Description:
  // Performs multiplication of quaternion of the same basic type.
  vtkQuaternion<T> operator*(const vtkQuaternion<T>& q) const;
  // Description:
  // Performs multiplication of the quaternions by a scalar value.
  vtkQuaternion<T> operator*(const T& scalar) const;
  // Description:
  // Performs in place multiplication of the quaternions by a scalar value.
  // NOTE(review): declared const although it is named as a mutating
  // operator, and the matching operator/= below is non-const. Likely
  // should be non-const, but the declaration must stay in sync with the
  // definition in vtkQuaternion.txx -- change both together if fixed.
  void operator*=(const T& scalar) const;
  // Description:
  // Performs division of quaternions of the same type.
  vtkQuaternion<T> operator/(const vtkQuaternion<T>& q) const;
  // Description:
  // Performs division of the quaternions by a scalar value.
  vtkQuaternion<T> operator/(const T& scalar) const;
  // Description:
  // Performs in place division of the quaternions by a scalar value.
  void operator/=(const T& scalar);
  // Description:
  // Compare two quaternions. Return true if the quaternions are equal, false
  // otherwise.
  bool operator==(const vtkQuaternion<T>& q) const;
};
// Description:
// Several macros to define the various operator overloads for the quaternions.
// These are necessary for the derived classes that are commonly used.
#define vtkQuaternionIdentity(quaternionType, type) \
quaternionType Identity() const \
{ \
return quaternionType(vtkQuaternion<type>::Identity().GetData()); \
}
#define vtkQuaternionNormalized(quaternionType, type) \
quaternionType Normalized() const \
{ \
return quaternionType(vtkQuaternion<type>::Normalized().GetData()); \
}
#define vtkQuaternionConjugated(quaternionType, type) \
quaternionType Conjugated() const \
{ \
return quaternionType(vtkQuaternion<type>::Conjugated().GetData()); \
}
#define vtkQuaternionInverse(quaternionType, type) \
quaternionType Inverse() const \
{ \
return quaternionType(vtkQuaternion<type>::Inverse().GetData()); \
}
#define vtkQuaternionUnitLog(quaternionType, type) \
quaternionType UnitLog() const \
{ \
return quaternionType( \
vtkQuaternion<type>::UnitLog().GetData()); \
}
#define vtkQuaternionUnitExp(quaternionType, type) \
quaternionType UnitExp() const \
{ \
return quaternionType( \
vtkQuaternion<type>::UnitExp().GetData()); \
}
#define vtkQuaternionNormalizedWithAngleInDegrees(quaternionType, type) \
quaternionType NormalizedWithAngleInDegrees() const \
{ \
return quaternionType( \
vtkQuaternion<type>::NormalizedWithAngleInDegrees().GetData()); \
}
#define vtkQuaternionSlerp(quaternionType, type) \
quaternionType Slerp(type t, const quaternionType& q) const \
{ \
return quaternionType( \
vtkQuaternion<type>::Slerp(t, q).GetData()); \
}
#define vtkQuaternionInnerPoint(quaternionType, type) \
quaternionType InnerPoint(const quaternionType& q1, \
const quaternionType& q2) const \
{ \
return quaternionType( \
vtkQuaternion<type>::InnerPoint(q1, q2).GetData()); \
}
#define vtkQuaternionOperatorPlus(quaternionType, type) \
inline quaternionType operator+(const quaternionType& q) const \
{ \
return quaternionType( ( \
static_cast< vtkQuaternion<type> > (*this) + \
static_cast< vtkQuaternion<type> > (q)).GetData()); \
}
#define vtkQuaternionOperatorMinus(quaternionType, type) \
inline quaternionType operator-(const quaternionType& q) const \
{ \
return quaternionType( ( \
static_cast< vtkQuaternion<type> > (*this) - \
static_cast< vtkQuaternion<type> > (q)).GetData()); \
}
#define vtkQuaternionOperatorMultiply(quaternionType, type) \
inline quaternionType operator*(const quaternionType& q) const \
{ \
return quaternionType( ( \
static_cast< vtkQuaternion<type> > (*this) * \
static_cast< vtkQuaternion<type> > (q)).GetData()); \
}
#define vtkQuaternionOperatorMultiplyScalar(quaternionType, type) \
inline quaternionType operator*(const type& scalar) const \
{ \
return quaternionType( ( \
static_cast< vtkQuaternion<type> > (*this) * \
scalar).GetData()); \
}
#define vtkQuaternionOperatorDivide(quaternionType, type) \
inline quaternionType operator/(const quaternionType& q) const \
{ \
return quaternionType( ( \
static_cast< vtkQuaternion<type> > (*this) / \
static_cast< vtkQuaternion<type> > (q)).GetData()); \
}
#define vtkQuaternionOperatorDivideScalar(quaternionType, type) \
inline quaternionType operator/(const type& scalar) const \
{ \
return quaternionType( ( \
static_cast< vtkQuaternion<type> > (*this) / \
scalar).GetData()); \
}
#define vtkQuaternionOperatorMacro(quaternionType, type) \
vtkQuaternionIdentity(quaternionType, type) \
vtkQuaternionNormalized(quaternionType, type) \
vtkQuaternionConjugated(quaternionType, type) \
vtkQuaternionInverse(quaternionType, type) \
vtkQuaternionUnitLog(quaternionType, type) \
vtkQuaternionUnitExp(quaternionType, type) \
vtkQuaternionNormalizedWithAngleInDegrees(quaternionType, type) \
vtkQuaternionSlerp(quaternionType, type) \
vtkQuaternionInnerPoint(quaternionType, type) \
vtkQuaternionOperatorPlus(quaternionType, type) \
vtkQuaternionOperatorMinus(quaternionType, type) \
vtkQuaternionOperatorMultiply(quaternionType, type) \
vtkQuaternionOperatorMultiplyScalar(quaternionType, type) \
vtkQuaternionOperatorDivide(quaternionType, type) \
vtkQuaternionOperatorDivideScalar(quaternionType, type)
// .NAME vtkQuaternionf - Float quaternion type.
//
// .SECTION Description
// This class uses vtkQuaternion with float type data.
// For further description, see the templated class vtkQuaternion.
// The vtkQuaternionOperatorMacro redeclares the operators and named
// operations with covariant vtkQuaternionf return types.
// @sa vtkQuaterniond vtkQuaternion
class vtkQuaternionf : public vtkQuaternion<float>
{
public:
  vtkQuaternionf() {}
  explicit vtkQuaternionf(float w, float x, float y, float z)
    : vtkQuaternion<float>(w, x, y, z) {}
  explicit vtkQuaternionf(float scalar) : vtkQuaternion<float>(scalar) {}
  explicit vtkQuaternionf(const float *init) : vtkQuaternion<float>(init) {}
  vtkQuaternionOperatorMacro(vtkQuaternionf, float)
};
// .NAME vtkQuaterniond - Double quaternion type.
//
// .SECTION Description
// This class uses vtkQuaternion with double type data.
// For further description, see the templated class vtkQuaternion.
// The vtkQuaternionOperatorMacro redeclares the operators and named
// operations with covariant vtkQuaterniond return types.
// @sa vtkQuaternionf vtkQuaternion
class vtkQuaterniond : public vtkQuaternion<double>
{
public:
  vtkQuaterniond() {}
  explicit vtkQuaterniond(double w, double x, double y, double z)
    : vtkQuaternion<double>(w, x, y, z) {}
  explicit vtkQuaterniond(double scalar) : vtkQuaternion<double>(scalar) {}
  explicit vtkQuaterniond(const double *init) : vtkQuaternion<double>(init) {}
  vtkQuaternionOperatorMacro(vtkQuaterniond, double);
};
#include "vtkQuaternion.txx"
#endif // __vtkQuaternion_h
// VTK-HeaderTest-Exclude: vtkQuaternion.h
| {
"content_hash": "7d2aff1292aed1958971f49f91b7e70f",
"timestamp": "",
"source": "github",
"line_count": 387,
"max_line_length": 81,
"avg_line_length": 34.2609819121447,
"alnum_prop": 0.7244136058526284,
"repo_name": "ricortiz/Bender",
"id": "4f7226256310e6cf6bc72ed31d0c34a8ac373255",
"size": "13842",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Libs/VTK/Common/vtkQuaternion.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "81018"
},
{
"name": "C++",
"bytes": "775818"
},
{
"name": "Python",
"bytes": "132283"
},
{
"name": "TypeScript",
"bytes": "63251"
}
],
"symlink_target": ""
} |
/**
* @license Highmaps JS v9.0.1 (2021-02-16)
* @module highcharts/highmaps
*
* (c) 2011-2021 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
// Entry module for the Highmaps bundle: takes the Highcharts core,
// applies the map module, and re-exports it under the Highmaps product name.
import Highcharts from './highcharts.src.js';
// Side-effect import: registers map functionality on the Highcharts object.
import './modules/map.src.js';
Highcharts.product = 'Highmaps';
export default Highcharts;
| {
"content_hash": "b597c4a37e57747d462faf2cf91fe347",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 45,
"avg_line_length": 22.285714285714285,
"alnum_prop": 0.6987179487179487,
"repo_name": "cdnjs/cdnjs",
"id": "9180db67512b922a17a26e92cba3099598c2c4a0",
"size": "312",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ajax/libs/highcharts/9.0.1/es-modules/masters/highmaps.src.js",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
A small study project on the [sbt-native-packager](http://www.scala-sbt.org/sbt-native-packager/).
# Documentation
The following documentation is available:
- [SBT Native Packager Universal Plugin](http://www.scala-sbt.org/sbt-native-packager/formats/universal.html)
- [SBT Native Packager Archetype Cheatsheet](http://www.scala-sbt.org/sbt-native-packager/archetypes/cheatsheet.html)
- [SBT Native Packager Packaging Formats](http://www.scala-sbt.org/sbt-native-packager/formats/index.html)
- [Nepomuk Seiler - SBT Native Packager Examples](https://github.com/muuki88/sbt-native-packager-examples)
## Examples
The following examples are sorted from very simple to complex. The simple projects rely only on the defaults that
come out of the box, while the more complex ones override those default settings with their own configuration
wherever the out-of-the-box behavior is not what we want.
- [simple-javaapp-packaging](https://github.com/dnvriend/sbt-native-packager-demo/tree/master/simple-javaapp-packaging)
- An introduction to packaging a simple Java/Scala application with a minimum of configuration.
- [simple-javaapp-with-config](https://github.com/dnvriend/sbt-native-packager-demo/tree/master/simple-javaapp-with-config)
- A simple Java application that uses the Typesafe Config library to lookup configuration. It introduces how to configure the universal plugin.
## Basic Configuration
- In `project/build.properties` add:
```bash
sbt.version=0.13.9
```
- In `project/plugins.sbt` add:
```scala
// to resolve jars //
resolvers += "bintray-sbt-plugin-releases" at "http://dl.bintray.com/content/sbt/sbt-plugin-releases"
// to package applications //
addSbtPlugin("com.typesafe.sbt" %% "sbt-native-packager" % "1.1.0-M3")
```
- Configure the project with minimum settings:
```scala
name := "helloworld"
version := "1.0.0"
scalaVersion := "2.11.7"
enablePlugins(JavaAppPackaging)
```
- Package your project eg. `universal:packageBin` to create a `zip`, `universal:packageZipTarball` to create a `tgz` or
`docker:publishLocal` to create a docker image in your local repository.
# Available Archetypes
[Archetypes](http://www.scala-sbt.org/sbt-native-packager/gettingstarted.html#archetypes) are __packaging defaults__ that make assumptions how to package our application and make it easy for us to
package our application quickly. Of course, when we don't like (some of) the default behaviors we can override
those by overriding the appropriate key using the [archetype cheatsheet](http://www.scala-sbt.org/sbt-native-packager/archetypes/cheatsheet.html).
Archetypes are enabled in the `build.sbt` file. For example, to enable the packaging defaults for an application
that will be a standalone application, we can use the `JavaAppPackaging` archetype. Just add the line `enablePlugins(JavaAppPackaging)`
to `build.sbt` and you are set.
The following [archetypes](http://www.scala-sbt.org/sbt-native-packager/gettingstarted.html#archetypes) are available:
- [Java Application](http://www.scala-sbt.org/sbt-native-packager/archetypes/java_app/):
- `enablePlugins(JavaAppPackaging)`
- Creates a standalone package, with a `bin/lib` directory structure and an executable bash/bat script.
- [Java Server](http://www.scala-sbt.org/sbt-native-packager/archetypes/java_server/):
- `enablePlugins(JavaServerAppPackaging)`
- Creates a standalone package with an executable bash/bat script and additional configuration and autostart.
# Available Packaging Formats
Believe it or not, the `zip`, `tgz` and `docker` are not the only packaging formats available. The `sbt-native-packager`
also supports the following packaging formats:
__Note:__ Some packaging formats may only be created when the environment SBT runs in supports it, eg. when running on
Ubuntu, the plugin can create the `deb` (Debian) package and packaging `docker` is only supported when `docker` is available
and so on.
- [deb](http://www.scala-sbt.org/sbt-native-packager/formats/debian.html):
- `debian:packageBin`
- Packaging format for Debian based systems like Ubuntu using the `Debian plugin`.
- [rpm](http://www.scala-sbt.org/sbt-native-packager/formats/rpm.html):
- `rpm:packageBin`
- Packaging format for Redhat based systems like RHEL or CentOS using the `Rpm plugin`.
- [msi](http://www.scala-sbt.org/sbt-native-packager/formats/windows.html):
- `windows:packageBin`
- Packaging format for windows systems using the `Windows plugin`.
- [dmg](http://www.scala-sbt.org/sbt-native-packager/formats/universal.html):
- `universal:packageOsxDmg`
- Packaging format for osx based systems using the `Universal plugin`.
- [docker](http://www.scala-sbt.org/sbt-native-packager/formats/docker.html):
- `docker:publishLocal`
- Package your application in a docker container using the `Docker plugin`.
- [zip](http://www.scala-sbt.org/sbt-native-packager/formats/universal.html):
- `universal:packageBin`
- Packaging format for all systems supporting zip using the `Universal plugin`.
- [tar](http://www.scala-sbt.org/sbt-native-packager/formats/universal.html):
  - `universal:packageZipTarball`
- Packaging format for all systems supporting tar using the `Universal plugin`.
- [xz](http://www.scala-sbt.org/sbt-native-packager/formats/universal.html):
- `universal:packageXzTarball`
- Packaging format for all systems supporting xz using the `Universal plugin`.
- [jdkpackager](http://www.scala-sbt.org/sbt-native-packager/formats/jdkpackager.html):
  - `jdkPackager:packageBin`
  - Oracle's javapackager creates packages for your running platform using the `JDK Packager plugin`.
| {
"content_hash": "bc50de36e6c54a60d32c1baea119edb1",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 197,
"avg_line_length": 51.97222222222222,
"alnum_prop": 0.7641190094423659,
"repo_name": "dnvriend/sbt-native-packager-demo",
"id": "daa11caa1993466b3b4c90e8bba5e369069957f0",
"size": "5640",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Scala",
"bytes": "11739"
}
],
"symlink_target": ""
} |
package com.signalcollect.serialization
import org.junit.runner.RunWith
import org.specs2.mock.Mockito
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.runner.JUnitRunner
import com.signalcollect.GraphBuilder
import com.signalcollect.configuration.ActorSystemRegistry
import akka.serialization.SerializationExtension
import com.romix.akka.serialization.kryo.KryoSerializer
/**
 * Verifies that the Kryo serializer registered with the SignalCollect
 * actor system round-trips common Scala and Java types, and that
 * DefaultSerializer round-trips plain values and case classes.
 */
@RunWith(classOf[JUnitRunner])
class SerializerSpec extends SpecificationWithJUnit with Mockito {
  // Each example builds and shuts down its own graph; examples must not interleave.
  sequential
  "Kryo" should {
    "correctly serialize Scala immutable maps" in {
      val g = GraphBuilder.build
      try {
        // Scala uses special representations for small maps.
        kryoSerializeAndDeserialize(Map.empty[Int, Double])
        kryoSerializeAndDeserialize(Map(1 -> 1.5))
        kryoSerializeAndDeserialize(Map(1 -> 1.5, 2 -> 5.4))
        kryoSerializeAndDeserialize(Map(1 -> 1.5, 2 -> 5.4, 3 -> 4.5))
        kryoSerializeAndDeserialize(Map(1 -> 1.5, 2 -> 5.4, 3 -> 4.5, 4 -> 1.2))
        kryoSerializeAndDeserialize(Map(1 -> 1.5, 2 -> 5.4, 3 -> 4.5, 4 -> 1.2, 6 -> 3.2))
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize Scala immutable sets" in {
      val g = GraphBuilder.build
      try {
        // Scala uses special representations for small sets.
        kryoSerializeAndDeserialize(Set.empty[Int])
        kryoSerializeAndDeserialize(Set(1))
        kryoSerializeAndDeserialize(Set(1, 2))
        kryoSerializeAndDeserialize(Set(1, 2, 3))
        kryoSerializeAndDeserialize(Set(1, 2, 3, 4))
        kryoSerializeAndDeserialize(Set(1, 2, 3, 4, 5))
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize Scala None" in {
      val g = GraphBuilder.build
      try {
        kryoSerializeAndDeserialize(None)
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize Scala List" in {
      val g = GraphBuilder.build
      try {
        kryoSerializeAndDeserialize(List.empty[Int])
        kryoSerializeAndDeserialize(List(1))
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize Scala Vector" in {
      val g = GraphBuilder.build
      try {
        kryoSerializeAndDeserialize(Vector.empty[Int])
        kryoSerializeAndDeserialize(Vector(1))
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize Scala Seq" in {
      val g = GraphBuilder.build
      try {
        kryoSerializeAndDeserialize(Seq.empty[Int])
        kryoSerializeAndDeserialize(Seq(1))
        true
      } finally {
        g.shutdown
      }
    }
    // Arrays use the *Special variant because array equality is reference
    // equality; the round-tripped result is compared element-wise via toList.
    "correctly serialize Scala Array" in {
      val g = GraphBuilder.build
      try {
        assert(kryoSerializeAndDeserializeSpecial(Array.empty[Int]).toList == List())
        assert(kryoSerializeAndDeserializeSpecial(Array(1)).toList == List(1))
        assert(kryoSerializeAndDeserializeSpecial(Array(1.0)).toList == List(1.0))
        assert(kryoSerializeAndDeserializeSpecial(Array(1l)).toList == List(1l))
        assert(kryoSerializeAndDeserializeSpecial(Array("abc")).toList == List("abc"))
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize Array[Array[Int]]" in {
      val g = GraphBuilder.build
      try {
        assert(kryoSerializeAndDeserializeSpecial(
          Array(Array(1, 2, 3), Array(3, 4, 5))).map(_.toList).toList == List(List(1, 2, 3), List(3, 4, 5)))
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize integers" in {
      val g = GraphBuilder.build
      try {
        kryoSerializeAndDeserialize(Integer.valueOf(1))
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize longs" in {
      val g = GraphBuilder.build
      try {
        kryoSerializeAndDeserialize(Long.box(1l))
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize floats" in {
      val g = GraphBuilder.build
      try {
        kryoSerializeAndDeserialize(Float.box(1.0f))
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize doubles" in {
      val g = GraphBuilder.build
      try {
        kryoSerializeAndDeserialize(Double.box(1.0d))
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize booleans" in {
      val g = GraphBuilder.build
      try {
        kryoSerializeAndDeserialize(Boolean.box(true))
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize shorts" in {
      val g = GraphBuilder.build
      try {
        kryoSerializeAndDeserialize(Short.box(1))
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize strings" in {
      val g = GraphBuilder.build
      try {
        kryoSerializeAndDeserialize("abc")
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize Java strings" in {
      val g = GraphBuilder.build
      try {
        val javaString: java.lang.String = "abc"
        kryoSerializeAndDeserialize(javaString)
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize Tuple2" in {
      val g = GraphBuilder.build
      try {
        kryoSerializeAndDeserialize((1, "second"))
        true
      } finally {
        g.shutdown
      }
    }
    "correctly serialize Tuple3" in {
      val g = GraphBuilder.build
      try {
        kryoSerializeAndDeserialize((1, "second", 3.0))
        true
      } finally {
        g.shutdown
      }
    }
    // Round-trips `instance` through the Kryo serializer registered with the
    // "SignalCollect" actor system and asserts that the result equals the input.
    def kryoSerializeAndDeserialize(instance: AnyRef) {
      val akka = ActorSystemRegistry.retrieve("SignalCollect").get
      val serialization = SerializationExtension(akka)
      val s = serialization.findSerializerFor(instance)
      // println(s"${s.getClass} for ${instance.getClass}")
      assert(s.isInstanceOf[KryoSerializer])
      val bytes = s.toBinary(instance)
      val b = s.fromBinary(bytes, manifest = None)
      assert(b == instance)
    }
    // Variant that returns the deserialized value instead of asserting
    // equality, for types (arrays) whose equality must be checked element-wise.
    def kryoSerializeAndDeserializeSpecial[T <: AnyRef](instance: T): T = {
      val akka = ActorSystemRegistry.retrieve("SignalCollect").get
      val serialization = SerializationExtension(akka)
      val s = serialization.findSerializerFor(instance)
      assert(s.isInstanceOf[KryoSerializer])
      val bytes = s.toBinary(instance)
      val b = s.fromBinary(bytes, manifest = None).asInstanceOf[T]
      b
    }
  }
  "DefaultSerializer" should {
    "correctly serialize/deserialize a Double" in {
      DefaultSerializer.read[Double](DefaultSerializer.write(1024.0)) === 1024.0
    }
    "correctly serialize/deserialize a job configuration" in {
      val job = new Job(
        100,
        Some(SpreadsheetConfiguration("[email protected]", "somePasswordHere", "someSpreadsheetNameHere", "someWorksheetNameHere")),
        "someUsername",
        "someJobDescription")
      DefaultSerializer.read[Job](DefaultSerializer.write(job)) === job
    }
  }
}
// Spreadsheet access settings; used above to exercise serialization of a
// case class nested inside another case class via Option.
case class SpreadsheetConfiguration(
  gmailAccount: String,
  gmailPassword: String,
  spreadsheetName: String,
  worksheetName: String)
// Mutable-field case class round-tripped by the DefaultSerializer test.
case class Job(
  var jobId: Int,
  var spreadsheetConfiguration: Option[SpreadsheetConfiguration],
  var submittedByUser: String,
  var jobDescription: String)
"content_hash": "2ccce85df82b63b05d517b600f0e95ab",
"timestamp": "",
"source": "github",
"line_count": 274,
"max_line_length": 142,
"avg_line_length": 26.76277372262774,
"alnum_prop": 0.6218464475657984,
"repo_name": "danihegglin/DynDCO",
"id": "d540bf821768e5a33d384027bcb808765aea0150",
"size": "7980",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/scala/com/signalcollect/serialization/SerializerSpec.scala",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "24989"
},
{
"name": "CoffeeScript",
"bytes": "25307"
},
{
"name": "Java",
"bytes": "23644"
},
{
"name": "JavaScript",
"bytes": "184348"
},
{
"name": "Perl",
"bytes": "16001"
},
{
"name": "R",
"bytes": "14767"
},
{
"name": "Scala",
"bytes": "901963"
},
{
"name": "Shell",
"bytes": "1419"
},
{
"name": "TeX",
"bytes": "146135"
}
],
"symlink_target": ""
} |
#include <sys/cdefs.h>
__FBSDID("$FreeBSD: releng/9.3/sys/mips/idt/obio.c 212413 2010-09-10 11:19:03Z avg $");
#include <sys/param.h>
#include <sys/systm.h>
#include <sys/bus.h>
#include <sys/interrupt.h>
#include <sys/kernel.h>
#include <sys/module.h>
#include <sys/rman.h>
#include <sys/malloc.h>
#include <machine/bus.h>
#include <mips/idt/idtreg.h>
#include <mips/idt/obiovar.h>
/*
 * Accessors for the IDT ICU (interrupt controller) and GPIO register
 * blocks.  Registers are reached through KSEG1 (uncached) mappings of
 * the physical base addresses.
 */
#define ICU_REG_READ(o) \
	*((volatile uint32_t *)MIPS_PHYS_TO_KSEG1(IDT_BASE_ICU + (o)))
#define ICU_REG_WRITE(o,v) (ICU_REG_READ(o)) = (v)
#define GPIO_REG_READ(o) \
	*((volatile uint32_t *)MIPS_PHYS_TO_KSEG1(IDT_BASE_GPIO + (o)))
#define GPIO_REG_WRITE(o,v) (GPIO_REG_READ(o)) = (v)
static int obio_activate_resource(device_t, device_t, int, int,
struct resource *);
static device_t obio_add_child(device_t, u_int, const char *, int);
static struct resource *
obio_alloc_resource(device_t, device_t, int, int *, u_long,
u_long, u_long, u_int);
static int obio_attach(device_t);
static int obio_deactivate_resource(device_t, device_t, int, int,
struct resource *);
static struct resource_list *
obio_get_resource_list(device_t, device_t);
static void obio_hinted_child(device_t, const char *, int);
static int obio_intr(void *);
static int obio_probe(device_t);
static int obio_release_resource(device_t, device_t, int, int,
struct resource *);
static int obio_setup_intr(device_t, device_t, struct resource *, int,
driver_filter_t *, driver_intr_t *, void *, void **);
static int obio_teardown_intr(device_t, device_t, struct resource *,
void *);
/*
 * Disable delivery of the given IRQ by setting its bit in the
 * corresponding ICU mask register (read-modify-write).
 */
static void
obio_mask_irq(void *arg)
{
	unsigned int irq = (unsigned int)arg;
	int reg, bit;

	reg = ICU_IRQ_MASK_REG(irq);
	bit = ICU_IP_BIT(irq);
	ICU_REG_WRITE(reg, ICU_REG_READ(reg) | bit);
}
/*
 * Re-enable delivery of the given IRQ by clearing its bit in the
 * corresponding ICU mask register (read-modify-write).
 */
static void
obio_unmask_irq(void *arg)
{
	unsigned int irq = (unsigned int)arg;
	int reg, bit;

	reg = ICU_IRQ_MASK_REG(irq);
	bit = ICU_IP_BIT(irq);
	ICU_REG_WRITE(reg, ICU_REG_READ(reg) & ~bit);
}
/*
 * Probe always succeeds: obio is instantiated from hints under nexus,
 * so there is nothing to detect.
 */
static int
obio_probe(device_t dev)
{
	return (0);
}
/*
 * Attach the on-board I/O bus: initialize the memory and IRQ resource
 * managers, hook the five cascaded CPU interrupt lines (IRQ0..IRQ4),
 * then enumerate and attach hinted children.
 *
 * Fixes: `rid` was previously passed to bus_alloc_resource()
 * uninitialized; the memory rman description read "memeory".
 */
static int
obio_attach(device_t dev)
{
	struct obio_softc *sc = device_get_softc(dev);
	int rid, irq;

	sc->oba_mem_rman.rm_type = RMAN_ARRAY;
	sc->oba_mem_rman.rm_descr = "OBIO memory";
	if (rman_init(&sc->oba_mem_rman) != 0 ||
	    rman_manage_region(&sc->oba_mem_rman, OBIO_MEM_START,
	    OBIO_MEM_START + OBIO_MEM_SIZE) != 0)
		panic("obio_attach: failed to set up I/O rman");

	sc->oba_irq_rman.rm_type = RMAN_ARRAY;
	sc->oba_irq_rman.rm_descr = "OBIO IRQ";
	if (rman_init(&sc->oba_irq_rman) != 0 ||
	    rman_manage_region(&sc->oba_irq_rman, IRQ_BASE, IRQ_END) != 0)
		panic("obio_attach: failed to set up IRQ rman");

	/* Hook up our interrupt handlers. We should handle IRQ0..IRQ4 */
	for (irq = 0; irq < 5; irq++) {
		rid = 0;
		if ((sc->sc_irq[irq] = bus_alloc_resource(dev, SYS_RES_IRQ,
		    &rid, irq, irq, 1, RF_SHAREABLE | RF_ACTIVE)) == NULL) {
			device_printf(dev, "unable to allocate IRQ resource\n");
			return (ENXIO);
		}
		if ((bus_setup_intr(dev, sc->sc_irq[irq], INTR_TYPE_MISC,
		    obio_intr, NULL, sc, &sc->sc_ih[irq]))) {
			device_printf(dev,
			    "WARNING: unable to register interrupt handler\n");
			return (ENXIO);
		}
	}

	bus_generic_probe(dev);
	bus_enumerate_hinted_children(dev);
	bus_generic_attach(dev);
	return (0);
}
/*
 * Allocate a memory or IRQ resource for a child.  Requests from
 * grandchildren are passed straight through to our parent; "default"
 * requests (start == 0, end == ~0) are resolved from the child's
 * resource list before reserving from the appropriate rman.
 * Returns NULL (spelled 0 below) on failure.
 */
static struct resource *
obio_alloc_resource(device_t bus, device_t child, int type, int *rid,
    u_long start, u_long end, u_long count, u_int flags)
{
	struct obio_softc *sc = device_get_softc(bus);
	struct obio_ivar *ivar = device_get_ivars(child);
	struct resource *rv;
	struct resource_list_entry *rle;
	struct rman *rm;
	int isdefault, needactivate, passthrough;

	isdefault = (start == 0UL && end == ~0UL);
	needactivate = flags & RF_ACTIVE;
	passthrough = (device_get_parent(child) != bus);
	rle = NULL;
	if (passthrough)
		return (BUS_ALLOC_RESOURCE(device_get_parent(bus), child, type,
		    rid, start, end, count, flags));
	/*
	 * If this is an allocation of the "default" range for a given RID,
	 * and we know what the resources for this device are (ie. they aren't
	 * maintained by a child bus), then work out the start/end values.
	 */
	if (isdefault) {
		rle = resource_list_find(&ivar->resources, type, *rid);
		if (rle == NULL)
			return (NULL);
		if (rle->res != NULL) {
			panic("%s: resource entry is busy", __func__);
		}
		start = rle->start;
		end = rle->end;
		count = rle->count;
	}
	/* Pick the rman that owns this resource type. */
	switch (type) {
	case SYS_RES_IRQ:
		rm = &sc->oba_irq_rman;
		break;
	case SYS_RES_MEMORY:
		rm = &sc->oba_mem_rman;
		break;
	default:
		printf("%s: unknown resource type %d\n", __func__, type);
		return (0);
	}
	rv = rman_reserve_resource(rm, start, end, count, flags, child);
	if (rv == 0) {
		printf("%s: could not reserve resource\n", __func__);
		return (0);
	}
	rman_set_rid(rv, *rid);
	/* If RF_ACTIVE was requested, activate now and undo on failure. */
	if (needactivate) {
		if (bus_activate_resource(child, type, *rid, rv)) {
			printf("%s: could not activate resource\n", __func__);
			rman_release_resource(rv);
			return (0);
		}
	}
	return (rv);
}
/*
 * Activation is delegated to the parent bus; no local bookkeeping or
 * mapping is performed here.
 */
static int
obio_activate_resource(device_t bus, device_t child, int type, int rid,
    struct resource *r)
{
	/* XXX: should we mask/unmask IRQ here? */
	return (BUS_ACTIVATE_RESOURCE(device_get_parent(bus), child,
	    type, rid, r));
}
/*
 * Deactivation is delegated to the parent bus, mirroring
 * obio_activate_resource().
 */
static int
obio_deactivate_resource(device_t bus, device_t child, int type, int rid,
    struct resource *r)
{
	/* XXX: should we mask/unmask IRQ here? */
	return (BUS_DEACTIVATE_RESOURCE(device_get_parent(bus), child,
	    type, rid, r));
}
/*
 * Release a resource previously handed out by obio_alloc_resource and
 * clear the child's resource-list entry for it.
 *
 * NOTE(review): the entry's cached resource (rle->res) is cleared
 * without checking that it matches `r` — confirm callers never pass a
 * mismatched resource.
 */
static int
obio_release_resource(device_t dev, device_t child, int type,
    int rid, struct resource *r)
{
	struct resource_list *rl;
	struct resource_list_entry *rle;

	rl = obio_get_resource_list(dev, child);
	if (rl == NULL)
		return (EINVAL);
	rle = resource_list_find(rl, type, rid);
	if (rle == NULL)
		return (EINVAL);
	rman_release_resource(r);
	rle->res = NULL;
	return (0);
}
/*
 * Register an interrupt handler for a child device: create (or reuse)
 * the per-IRQ interrupt event, attach the handler, then unmask the
 * source in the ICU.
 *
 * Fixes: errors from intr_event_create() and intr_event_add_handler()
 * were previously ignored (and a failed create left a NULL event in
 * use); the unmask sequence now reuses obio_unmask_irq() instead of
 * duplicating the read-modify-write inline.
 */
static int
obio_setup_intr(device_t dev, device_t child, struct resource *ires,
		int flags, driver_filter_t *filt, driver_intr_t *handler,
		void *arg, void **cookiep)
{
	struct obio_softc *sc = device_get_softc(dev);
	struct intr_event *event;
	int irq, error;

	irq = rman_get_start(ires);
	if (irq >= NIRQS)
		panic("%s: bad irq %d", __func__, irq);

	event = sc->sc_eventstab[irq];
	if (event == NULL) {
		error = intr_event_create(&event, (void *)irq, 0, irq,
		    obio_mask_irq, obio_unmask_irq,
		    NULL, NULL,
		    "obio intr%d:", irq);
		if (error != 0)
			return (error);
		sc->sc_eventstab[irq] = event;
	}

	error = intr_event_add_handler(event, device_get_nameunit(child), filt,
	    handler, arg, intr_priority(flags), flags, cookiep);
	if (error != 0)
		return (error);

	/* Enable the source now that a handler is attached. */
	obio_unmask_irq((void *)irq);

	return (0);
}
/*
 * Remove a previously registered interrupt handler: mask the source in
 * the ICU, detach the handler from the interrupt event and, if no
 * handlers remain, forget the event.
 */
static int
obio_teardown_intr(device_t dev, device_t child, struct resource *ires,
    void *cookie)
{
	struct obio_softc *sc = device_get_softc(dev);
	int irq, result;
	uint32_t mask_register, mask, ip_bit;

	irq = rman_get_start(ires);
	if (irq >= NIRQS)
		panic("%s: bad irq %d", __func__, irq);
	if (sc->sc_eventstab[irq] == NULL)
		panic("Trying to teardown unoccupied IRQ");
	/* mask IRQ (same read-modify-write as obio_mask_irq) */
	mask_register = ICU_IRQ_MASK_REG(irq);
	ip_bit = ICU_IP_BIT(irq);
	mask = ICU_REG_READ(mask_register);
	ICU_REG_WRITE(mask_register, mask | ip_bit);
	result = intr_event_remove_handler(cookie);
	if (!result)
		sc->sc_eventstab[irq] = NULL;
	return (result);
}
/*
 * Filter routine hooked on the CPU interrupt lines.  Walks the ICU
 * interrupt groups 2..6, and for each source that is pending and not
 * masked dispatches the registered interrupt event.
 */
static int
obio_intr(void *arg)
{
	struct obio_softc *sc = arg;
	struct intr_event *event;
	uint32_t irqstat, ipend, imask, xpend;
	int irq, thread, group, i;

	irqstat = 0;
	irq = 0;
	for (group = 2; group <= 6; group++) {
		ipend = ICU_REG_READ(ICU_GROUP_IPEND_REG(group));
		imask = ICU_REG_READ(ICU_GROUP_MASK_REG(group));
		xpend = ipend;
		/* Only dispatch sources that are pending and unmasked. */
		ipend &= ~imask;
		/*
		 * NOTE(review): this loop only recomputes `irq` and then
		 * discards it; `xpend` appears to be dead code — confirm
		 * before removing.
		 */
		while ((i = fls(xpend)) != 0) {
			xpend &= ~(1 << (i - 1));
			irq = IP_IRQ(group, i - 1);
		}
		while ((i = fls(ipend)) != 0) {
			ipend &= ~(1 << (i - 1));
			irq = IP_IRQ(group, i - 1);
			event = sc->sc_eventstab[irq];
			thread = 0;
			if (!event || TAILQ_EMPTY(&event->ie_handlers)) {
				/* TODO: Log stray IRQs */
				continue;
			}
			/* TODO: frame instead of NULL? */
			intr_event_handle(event, NULL);
			/* XXX: Log stray IRQs */
		}
	}
#if 0
	ipend = ICU_REG_READ(ICU_IPEND2);
	printf("ipend2 = %08x!\n", ipend);
	ipend = ICU_REG_READ(ICU_IPEND3);
	printf("ipend3 = %08x!\n", ipend);
	ipend = ICU_REG_READ(ICU_IPEND4);
	printf("ipend4 = %08x!\n", ipend);
	ipend = ICU_REG_READ(ICU_IPEND5);
	printf("ipend5 = %08x!\n", ipend);
	ipend = ICU_REG_READ(ICU_IPEND6);
	printf("ipend6 = %08x!\n", ipend);
#endif
	/*
	 * NOTE(review): irqstat is never set non-zero above, so this loop
	 * never executes — looks like leftover scaffolding.
	 */
	while (irqstat != 0) {
		if ((irqstat & 1) == 1) {
		}
		irq++;
		irqstat >>= 1;
	}
	return (FILTER_HANDLED);
}
/*
 * Instantiate a child declared via hints and wire up its hard-wired
 * memory window and (optional) IRQ as resources with fixed RIDs.
 *
 * Fixes: the return values of resource_long_value()/resource_int_value()
 * for "maddr"/"msize" were ignored, so missing hints left maddr/msize
 * uninitialized before being handed to bus_set_resource().
 */
static void
obio_hinted_child(device_t bus, const char *dname, int dunit)
{
	device_t		child;
	long			maddr;
	int			msize;
	int			irq;
	int			result;

	child = BUS_ADD_CHILD(bus, 0, dname, dunit);

	/*
	 * Set hard-wired resources for hinted child using
	 * specific RIDs.
	 */
	if (resource_long_value(dname, dunit, "maddr", &maddr) == 0 &&
	    resource_int_value(dname, dunit, "msize", &msize) == 0) {
		result = bus_set_resource(child, SYS_RES_MEMORY, 0,
		    maddr, msize);
		if (result != 0)
			device_printf(bus,
			    "warning: bus_set_resource() failed\n");
	}

	if (resource_int_value(dname, dunit, "irq", &irq) == 0) {
		result = bus_set_resource(child, SYS_RES_IRQ, 0, irq, 1);
		if (result != 0)
			device_printf(bus,
			    "warning: bus_set_resource() failed\n");
	}
}
/*
 * Add a child device, allocating the per-child ivars that hold its
 * resource list.  Returns NULL (spelled 0) if the child cannot be
 * created.
 */
static device_t
obio_add_child(device_t bus, u_int order, const char *name, int unit)
{
	device_t child;
	struct obio_ivar *ivar;

	ivar = malloc(sizeof(struct obio_ivar), M_DEVBUF, M_WAITOK | M_ZERO);
	/* NOTE(review): with M_WAITOK malloc(9) does not return NULL, so
	 * this check is dead code (harmless). */
	if (ivar == NULL) {
		printf("Failed to allocate ivar\n");
		return (0);
	}
	resource_list_init(&ivar->resources);
	child = device_add_child_ordered(bus, order, name, unit);
	if (child == NULL) {
		printf("Can't add child %s%d ordered\n", name, unit);
		return (0);
	}
	device_set_ivars(child, ivar);
	return (child);
}
/*
 * Resource-list accessor used by bus_generic_rl_get_resource /
 * bus_generic_rl_set_resource: returns the per-child list stored in
 * the child's ivars.
 */
static struct resource_list *
obio_get_resource_list(device_t dev, device_t child)
{
	struct obio_ivar *ivar = device_get_ivars(child);

	return (&ivar->resources);
}
/*
 * newbus glue: method table covering resource management, interrupt
 * plumbing and hinted-child enumeration for the on-board I/O bus.
 */
static device_method_t obio_methods[] = {
	DEVMETHOD(bus_activate_resource, obio_activate_resource),
	DEVMETHOD(bus_add_child, obio_add_child),
	DEVMETHOD(bus_alloc_resource, obio_alloc_resource),
	DEVMETHOD(bus_deactivate_resource, obio_deactivate_resource),
	DEVMETHOD(bus_get_resource_list, obio_get_resource_list),
	DEVMETHOD(bus_hinted_child, obio_hinted_child),
	DEVMETHOD(bus_release_resource, obio_release_resource),
	DEVMETHOD(bus_setup_intr, obio_setup_intr),
	DEVMETHOD(bus_teardown_intr, obio_teardown_intr),
	DEVMETHOD(device_attach, obio_attach),
	DEVMETHOD(device_probe, obio_probe),
	DEVMETHOD(bus_get_resource, bus_generic_rl_get_resource),
	DEVMETHOD(bus_set_resource, bus_generic_rl_set_resource),
	{0, 0},
};
static driver_t obio_driver = {
	"obio",
	obio_methods,
	sizeof(struct obio_softc),
};
static devclass_t obio_devclass;
/* Attach the obio bus driver directly below nexus. */
DRIVER_MODULE(obio, nexus, obio_driver, obio_devclass, 0, 0);
| {
"content_hash": "063844d89d1b1983f4dea176755879d0",
"timestamp": "",
"source": "github",
"line_count": 456,
"max_line_length": 87,
"avg_line_length": 25.24122807017544,
"alnum_prop": 0.6516941789748045,
"repo_name": "dcui/FreeBSD-9.3_kernel",
"id": "10911ec2d0dfbd97e1f3ec92d2814aca2d57d7af",
"size": "13242",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sys/mips/idt/obio.c",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "1740660"
},
{
"name": "Awk",
"bytes": "135150"
},
{
"name": "Batchfile",
"bytes": "158"
},
{
"name": "C",
"bytes": "189969174"
},
{
"name": "C++",
"bytes": "2113755"
},
{
"name": "DTrace",
"bytes": "19810"
},
{
"name": "Forth",
"bytes": "188128"
},
{
"name": "Groff",
"bytes": "147703"
},
{
"name": "Lex",
"bytes": "65561"
},
{
"name": "Logos",
"bytes": "6310"
},
{
"name": "Makefile",
"bytes": "594606"
},
{
"name": "Mathematica",
"bytes": "9538"
},
{
"name": "Objective-C",
"bytes": "527964"
},
{
"name": "PHP",
"bytes": "2404"
},
{
"name": "Perl",
"bytes": "3348"
},
{
"name": "Python",
"bytes": "7091"
},
{
"name": "Shell",
"bytes": "43402"
},
{
"name": "SourcePawn",
"bytes": "253"
},
{
"name": "Yacc",
"bytes": "160534"
}
],
"symlink_target": ""
} |
'use strict';
// Code points delimiting the ASCII runs (A-Z, a-z, 0-9) that
// isSimpleRange() below uses to recognize "simple" class ranges.
var UPPER_A_CP = 'A'.codePointAt(0);
var UPPER_Z_CP = 'Z'.codePointAt(0);
var LOWER_A_CP = 'a'.codePointAt(0);
var LOWER_Z_CP = 'z'.codePointAt(0);
var DIGIT_0_CP = '0'.codePointAt(0);
var DIGIT_9_CP = '9'.codePointAt(0);
/**
* A regexp-tree plugin to transform coded chars into simple chars.
*
* \u0061 -> a
*/
module.exports = {
Char: function Char(path) {
var node = path.node,
parent = path.parent;
if (isNaN(node.codePoint) || node.kind === 'simple') {
return;
}
if (parent.type === 'ClassRange') {
if (!isSimpleRange(parent)) {
return;
}
}
if (!isPrintableASCIIChar(node.codePoint)) {
return;
}
var symbol = String.fromCodePoint(node.codePoint);
var newChar = {
type: 'Char',
kind: 'simple',
value: symbol,
symbol: symbol,
codePoint: node.codePoint
};
if (needsEscape(symbol, parent.type)) {
newChar.escaped = true;
}
path.replace(newChar);
}
};
/**
 * True when a ClassRange lies entirely inside one of the simple ASCII
 * runs: 0-9, a-z, or A-Z.
 * @param classRange
 * @returns {boolean}
 */
function isSimpleRange(classRange) {
  var from = classRange.from;
  var to = classRange.to;

  function bothWithin(lo, hi) {
    return from.codePoint >= lo && from.codePoint <= hi &&
      to.codePoint >= lo && to.codePoint <= hi;
  }

  return bothWithin(DIGIT_0_CP, DIGIT_9_CP) ||
    bothWithin(UPPER_A_CP, UPPER_Z_CP) ||
    bothWithin(LOWER_A_CP, LOWER_Z_CP);
}
/**
 * True for code points in the printable ASCII range, from space (0x20)
 * through tilde (0x7e); DEL (0x7f) is excluded.
 * @param codePoint
 * @returns {boolean}
 */
function isPrintableASCIIChar(codePoint) {
  return 0x20 <= codePoint && codePoint <= 0x7e;
}
/**
 * Whether `symbol` must be escaped when emitted as a simple char in the
 * given parent context: inside a character class / class range only
 * `] \ ^ -` are special; elsewhere the usual regexp metacharacters are.
 */
function needsEscape(symbol, parentType) {
  var inCharClass = parentType === 'ClassRange' || parentType === 'CharacterClass';
  var special = inCharClass ? /[\]\\^-]/ : /[*[()+?^$./\\|{}]/;
  return special.test(symbol);
}
"content_hash": "a03edf64ebb27b14457ebce8295460b2",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 378,
"avg_line_length": 25.19753086419753,
"alnum_prop": 0.6075453209211171,
"repo_name": "brett-harvey/Smart-Contracts",
"id": "e3a1f4883edb73e25bf7fc609ceaa6b19496ef9a",
"size": "2149",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "Ethereum-based-Roll4Win/node_modules/regexp-tree/dist/optimizer/transforms/char-code-to-simple-char-transform.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "430"
}
],
"symlink_target": ""
} |
layout: page
title: Seattle Police Officer 7674 Milton J. Rodrigue
permalink: /information/agencies/city_of_seattle/seattle_police_department/copbook/7674/
---
**Age as of Feb. 24, 2016:** 37
| {
"content_hash": "9c4decc6ad2b768b7d7bc0b8b96f0459",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 88,
"avg_line_length": 32.166666666666664,
"alnum_prop": 0.7616580310880829,
"repo_name": "seattlepublicrecords/seattlepublicrecords.github.io",
"id": "93bfac2804e52af5f58387028307327751178680",
"size": "197",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "information/agencies/city_of_seattle/seattle_police_department/copbook/7674/index.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "14496"
},
{
"name": "HTML",
"bytes": "14591"
},
{
"name": "JavaScript",
"bytes": "5297"
},
{
"name": "Ruby",
"bytes": "964"
}
],
"symlink_target": ""
} |
require 'test/unit'
require 'churn'
# Unit tests for the churn-report helpers defined in churn.rb:
# month_before, svn_date, header, subsystem_line, asterisks_for,
# extract_change_count_from, churn_line_to_int and
# order_by_descending_change_count.
class ChurnTests < Test::Unit::TestCase
  # "A month before" is defined as exactly 28 days earlier.
  def test_month_before_is_28_days
    assert_equal(Time.local(2005, 1, 1),
                 month_before(Time.local(2005, 1, 29)))
  end
  # Dates are formatted the way Subversion expects them: YYYY-MM-DD.
  def test_svn_date
    assert_equal('2005-03-04',
                 svn_date(Time.local(2005, 3, 4)))
  end
  def test_header_format
    assert_equal("Changes since 2005-08-05:",
                 header(svn_date(month_before(Time.local(2005, 9, 2)))))
  end
  # Each subsystem line shows a bar of asterisks plus the raw count.
  def test_normal_subsystem_line_format
    assert_equal('       audit ********* (45)',
                 subsystem_line("audit", 45))
  end
  # One asterisk per five changes...
  def test_asterisks_for_divides_by_five
    assert_equal('****', asterisks_for(20))
  end
  # ...with standard rounding at the midpoint.
  def test_asterisks_for_rounds_up_and_down
    assert_equal('****', asterisks_for(18))
    assert_equal('***', asterisks_for(17))
  end
  # An "svn log" with only the separator line means zero changes.
  def test_subversion_log_can_have_no_changes
    assert_equal(0, extract_change_count_from("------------------------------------------------------------------------\n"))
  end
  # Change count equals the number of revision entries in the log.
  def test_subversion_log_with_changes
    assert_equal(2, extract_change_count_from("------------------------------------------------------------------------\nr2531 | bem | 2005-07-01 01:11:44 -0500 (Fri, 01 Jul 2005) | 1 line\n\nrevisions up through ch 3 exercises\n------------------------------------------------------------------------\nr2524 | bem | 2005-06-30 18:45:59 -0500 (Thu, 30 Jun 2005) | 1 line\n\nresults of read-through; including renaming mistyping to snapshots\n------------------------------------------------------------------------\n"))
  end
  # Sorting relies only on the parenthesized count at the end of a line.
  def test_churn_line_to_int_extracts_parenthesized_change_count
    assert_equal(19, churn_line_to_int("         ui2 **** (19)"))
    assert_equal(9, churn_line_to_int("         ui ** (9)"))
  end
  def test_order_by_descending_change_count
    original = [ "all that really matters is the number in parens - (1)",
                 "   inventory  (0)",
                 "   ui ** (12)" ]
    expected = [ "   ui ** (12)",
                 "all that really matters is the number in parens - (1)",
                 "   inventory  (0)" ]
    actual = order_by_descending_change_count(original)
    assert_equal(expected, actual)
  end
end
| {
"content_hash": "5b819aa736c1e1c1bfed2130e115f476",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 519,
"avg_line_length": 34.87692307692308,
"alnum_prop": 0.521835024261138,
"repo_name": "Mitali-Sodhi/CodeLingo",
"id": "1efd6e231ec7653101bb9c1703b2eed411b38e5d",
"size": "2470",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Dataset/ruby/churn-tests-re.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9681846"
},
{
"name": "C#",
"bytes": "1741915"
},
{
"name": "C++",
"bytes": "5686017"
},
{
"name": "HTML",
"bytes": "11812193"
},
{
"name": "Java",
"bytes": "11198971"
},
{
"name": "JavaScript",
"bytes": "21693468"
},
{
"name": "M",
"bytes": "61627"
},
{
"name": "Objective-C",
"bytes": "4085820"
},
{
"name": "Perl",
"bytes": "193472"
},
{
"name": "Perl6",
"bytes": "176248"
},
{
"name": "Python",
"bytes": "10296284"
},
{
"name": "Ruby",
"bytes": "1050136"
}
],
"symlink_target": ""
} |
<h4>Metatranscriptomic diversity in Salix roots + rhizophere</h4>
This interactive graphic visualizes the organism diversity by annotation, as found in the roots and rhizosphere of Salix Fish Creek in hydrocarbon-contaminated soil.
Note: DE refers to Differential Expression (including only differentially expressed transcripts). "Full" means all transcripts are considered. Full may run slowly on lower spec computers.
<h3><a target="_blank" href="http://htmlpreview.github.io/?https://github.com/gonzalezem/Dylan-PolyA/blob/master/index.html">GO TO FIGURE</a></h3>
| {
"content_hash": "d9750382f2df99a2056f75834e7c5cbb",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 187,
"avg_line_length": 82.71428571428571,
"alnum_prop": 0.7944732297063903,
"repo_name": "gonzalezem/Dylan-PolyA",
"id": "8b6e426b2a5855eeeacc063fd7e3709e8c9f2691",
"size": "579",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8226"
},
{
"name": "HTML",
"bytes": "5107"
},
{
"name": "JavaScript",
"bytes": "78455"
}
],
"symlink_target": ""
} |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Management.Automation;
using Microsoft.Management.Infrastructure;
using Microsoft.PowerShell.Cim;
using Dbg = System.Management.Automation.Diagnostics;
namespace Microsoft.PowerShell.Cmdletization.Cim
{
/// <summary>
/// Client-side filtering for
/// 1) filtering that cannot be translated into a server-side query (i.e. when CimQuery.WildcardToWqlLikeOperand reports that it cannot translate into WQL)
/// 2) detecting if all expected results have been received and giving friendly user errors otherwise (i.e. could not find process with name='foo'; details in Windows 8 Bugs: #60926)
/// </summary>
internal class ClientSideQuery : QueryBuilder
{
        /// <summary>
        /// Describes a client-side-filter value that matched nothing, and knows how
        /// to produce the user-facing "not found" error message for it.
        /// </summary>
        internal class NotFoundError
        {
            /// <summary>
            /// Generic "not found" error with no specific property/value context.
            /// </summary>
            public NotFoundError()
            {
                this.ErrorMessageGenerator = GetErrorMessageForNotFound;
            }

            /// <summary>
            /// "Not found" error for a specific property/value filter; picks the
            /// wildcard-flavored message only when wildcards are enabled AND the
            /// value is a string that actually contains wildcard characters.
            /// </summary>
            public NotFoundError(string propertyName, object propertyValue, bool wildcardsEnabled)
            {
                this.PropertyName = propertyName;
                this.PropertyValue = propertyValue;
                if (wildcardsEnabled)
                {
                    var propertyValueAsString = propertyValue as string;
                    if ((propertyValueAsString != null) && (WildcardPattern.ContainsWildcardCharacters(propertyValueAsString)))
                    {
                        this.ErrorMessageGenerator =
                            (queryDescription, className) => GetErrorMessageForNotFound_ForWildcard(this.PropertyName, this.PropertyValue, className);
                    }
                    else
                    {
                        this.ErrorMessageGenerator =
                            (queryDescription, className) => GetErrorMessageForNotFound_ForEquality(this.PropertyName, this.PropertyValue, className);
                    }
                }
                else
                {
                    this.ErrorMessageGenerator =
                        (queryDescription, className) => GetErrorMessageForNotFound_ForEquality(this.PropertyName, this.PropertyValue, className);
                }
            }

            public string PropertyName { get; private set; }
            public object PropertyValue { get; private set; }
            // Maps (queryDescription, className) to the formatted error message.
            public Func<string, string, string> ErrorMessageGenerator { get; private set; }

            private static string GetErrorMessageForNotFound(string queryDescription, string className)
            {
                string message = string.Format(
                    CultureInfo.InvariantCulture, // queryDescription should already be in the right format - can use invariant culture here
                    CmdletizationResources.CimJob_NotFound_ComplexCase,
                    queryDescription,
                    className);
                return message;
            }

            private static string GetErrorMessageForNotFound_ForEquality(string propertyName, object propertyValue, string className)
            {
                string message = string.Format(
                    CultureInfo.InvariantCulture, // queryDescription should already be in the right format - can use invariant culture here
                    CmdletizationResources.CimJob_NotFound_SimpleGranularCase_Equality,
                    propertyName,
                    propertyValue,
                    className);
                return message;
            }

            private static string GetErrorMessageForNotFound_ForWildcard(string propertyName, object propertyValue, string className)
            {
                string message = string.Format(
                    CultureInfo.InvariantCulture, // queryDescription should already be in the right format - can use invariant culture here
                    CmdletizationResources.CimJob_NotFound_SimpleGranularCase_Wildcard,
                    propertyName,
                    propertyValue,
                    className);
                return message;
            }
        }
        /// <summary>
        /// Base class for client-side filters over CimInstances.  Tracks whether
        /// the filter ever matched anything so that "not found" errors can be
        /// reported afterwards, depending on BehaviorOnNoMatch.
        /// </summary>
        private abstract class CimInstanceFilterBase
        {
            // Derived classes implement the actual match predicate.
            protected abstract bool IsMatchCore(CimInstance cimInstance);

            protected BehaviorOnNoMatch BehaviorOnNoMatch { get; set; }

            // True once any instance has matched this filter.
            private bool HadMatches { get; set; }

            public bool IsMatch(CimInstance cimInstance)
            {
                bool isMatch = this.IsMatchCore(cimInstance);
                this.HadMatches = this.HadMatches || isMatch;
                return isMatch;
            }

            // Derived classes must resolve BehaviorOnNoMatch.Default themselves;
            // reaching the default arm here indicates a programming error.
            public virtual bool ShouldReportErrorOnNoMatches_IfMultipleFilters()
            {
                switch (this.BehaviorOnNoMatch)
                {
                    case BehaviorOnNoMatch.ReportErrors:
                        return true;

                    case BehaviorOnNoMatch.SilentlyContinue:
                        return false;

                    case BehaviorOnNoMatch.Default:
                    default:
                        Dbg.Assert(false, "BehaviorOnNoMatch.Default should be handled by derived classes");
                        return false;
                }
            }

            // Produces a single generic NotFoundError when errors are requested
            // and nothing ever matched; otherwise no errors.
            public virtual IEnumerable<NotFoundError> GetNotFoundErrors_IfThisIsTheOnlyFilter()
            {
                switch (this.BehaviorOnNoMatch)
                {
                    case BehaviorOnNoMatch.ReportErrors:
                        if (this.HadMatches)
                        {
                            return Enumerable.Empty<NotFoundError>();
                        }
                        else
                        {
                            return new[] { new NotFoundError() };
                        }

                    case BehaviorOnNoMatch.SilentlyContinue:
                        return Enumerable.Empty<NotFoundError>();

                    case BehaviorOnNoMatch.Default:
                    default:
                        Dbg.Assert(false, "BehaviorOnNoMatch.Default should be handled by derived classes");
                        return Enumerable.Empty<NotFoundError>();
                }
            }
        }
        /// <summary>
        /// A filter composed of per-value PropertyValueFilters; an instance
        /// matches when ANY of the value filters matches (logical OR).
        /// </summary>
        private abstract class CimInstancePropertyBasedFilter : CimInstanceFilterBase
        {
            private readonly List<PropertyValueFilter> _propertyValueFilters = new List<PropertyValueFilter>();
            protected IEnumerable<PropertyValueFilter> PropertyValueFilters { get { return _propertyValueFilters; } }

            protected void AddPropertyValueFilter(PropertyValueFilter propertyValueFilter)
            {
                _propertyValueFilters.Add(propertyValueFilter);
            }

            protected override bool IsMatchCore(CimInstance cimInstance)
            {
                bool isMatch = false;
                foreach (PropertyValueFilter propertyValueFilter in this.PropertyValueFilters)
                {
                    if (propertyValueFilter.IsMatch(cimInstance))
                    {
                        isMatch = true;
                        // Only short-circuit when no error reporting is needed;
                        // otherwise keep iterating so every value filter records
                        // whether it had a match (used for "not found" errors).
                        if (this.BehaviorOnNoMatch == BehaviorOnNoMatch.SilentlyContinue)
                        {
                            break;
                        }
                    }
                }

                return isMatch;
            }
        }
        /// <summary>
        /// Inclusion filter over a set of allowed property values (the common
        /// "-PropertyName value1,value2" case), with per-value "not found"
        /// error reporting.
        /// </summary>
        private class CimInstanceRegularFilter : CimInstancePropertyBasedFilter
        {
            public CimInstanceRegularFilter(string propertyName, IEnumerable allowedPropertyValues, bool wildcardsEnabled, BehaviorOnNoMatch behaviorOnNoMatch)
            {
                var valueBehaviors = new HashSet<BehaviorOnNoMatch>();

                foreach (object allowedPropertyValue in allowedPropertyValues)
                {
                    PropertyValueFilter filter =
                        new PropertyValueRegularFilter(
                            propertyName,
                            allowedPropertyValue,
                            wildcardsEnabled,
                            behaviorOnNoMatch);
                    this.AddPropertyValueFilter(filter);

                    valueBehaviors.Add(filter.BehaviorOnNoMatch);
                }

                // If every value filter resolved to the same behavior, adopt it;
                // otherwise keep the caller-supplied (possibly Default) behavior
                // and decide per value filter later.
                if (valueBehaviors.Count == 1)
                {
                    this.BehaviorOnNoMatch = valueBehaviors.Single();
                }
                else
                {
                    this.BehaviorOnNoMatch = behaviorOnNoMatch;
                }
            }

            public override bool ShouldReportErrorOnNoMatches_IfMultipleFilters()
            {
                switch (this.BehaviorOnNoMatch)
                {
                    case BehaviorOnNoMatch.ReportErrors:
                        return true;

                    case BehaviorOnNoMatch.SilentlyContinue:
                        return false;

                    case BehaviorOnNoMatch.Default:
                    default:
                        // Report only when some value filter that wants errors
                        // never matched anything.
                        return this.PropertyValueFilters
                            .Where(f => !f.HadMatch).Any(f => f.BehaviorOnNoMatch == BehaviorOnNoMatch.ReportErrors);
                }
            }

            // One granular NotFoundError per error-reporting value filter that
            // never matched.
            public override IEnumerable<NotFoundError> GetNotFoundErrors_IfThisIsTheOnlyFilter()
            {
                foreach (PropertyValueFilter propertyValueFilter in this.PropertyValueFilters)
                {
                    if (propertyValueFilter.BehaviorOnNoMatch != BehaviorOnNoMatch.ReportErrors)
                    {
                        continue;
                    }

                    if (propertyValueFilter.HadMatch)
                    {
                        continue;
                    }

                    var propertyValueRegularFilter = (PropertyValueRegularFilter)propertyValueFilter;
                    yield return propertyValueRegularFilter.GetGranularNotFoundError();
                }
            }
        }
/// <summary>
/// Exclusion filter: an instance matches when the named property does NOT
/// equal (or wildcard-match) the excluded values.
/// </summary>
private class CimInstanceExcludeFilter : CimInstancePropertyBasedFilter
{
    public CimInstanceExcludeFilter(string propertyName, IEnumerable excludedPropertyValues, bool wildcardsEnabled, BehaviorOnNoMatch behaviorOnNoMatch)
    {
        // Exclusions are best-effort by default: no match is not an error.
        this.BehaviorOnNoMatch = (behaviorOnNoMatch == BehaviorOnNoMatch.Default)
            ? BehaviorOnNoMatch.SilentlyContinue
            : behaviorOnNoMatch;

        foreach (object excludedValue in excludedPropertyValues)
        {
            this.AddPropertyValueFilter(
                new PropertyValueExcludeFilter(
                    propertyName,
                    excludedValue,
                    wildcardsEnabled,
                    behaviorOnNoMatch));
        }
    }
}
/// <summary>
/// Filter that matches instances whose property value is greater than or
/// equal to a given minimum.
/// </summary>
private class CimInstanceMinFilter : CimInstancePropertyBasedFilter
{
    public CimInstanceMinFilter(string propertyName, object minPropertyValue, BehaviorOnNoMatch behaviorOnNoMatch)
    {
        // Range filters are best-effort by default: no match is not an error.
        this.BehaviorOnNoMatch = (behaviorOnNoMatch == BehaviorOnNoMatch.Default)
            ? BehaviorOnNoMatch.SilentlyContinue
            : behaviorOnNoMatch;

        this.AddPropertyValueFilter(
            new PropertyValueMinFilter(propertyName, minPropertyValue, behaviorOnNoMatch));
    }
}
/// <summary>
/// Filter that matches instances whose property value is less than or equal
/// to a given maximum.
/// </summary>
private class CimInstanceMaxFilter : CimInstancePropertyBasedFilter
{
    /// <param name="propertyName">Name of the property to compare.</param>
    /// <param name="maxPropertyValue">Upper bound (inclusive) for a match.
    /// Renamed from the copy-pasted "minPropertyValue" - this is the MAX filter.</param>
    /// <param name="behaviorOnNoMatch">What to do when nothing matches;
    /// Default resolves to SilentlyContinue for range filters.</param>
    public CimInstanceMaxFilter(string propertyName, object maxPropertyValue, BehaviorOnNoMatch behaviorOnNoMatch)
    {
        // Range filters are best-effort by default: no match is not an error.
        this.BehaviorOnNoMatch = (behaviorOnNoMatch == BehaviorOnNoMatch.Default)
            ? BehaviorOnNoMatch.SilentlyContinue
            : behaviorOnNoMatch;

        this.AddPropertyValueFilter(
            new PropertyValueMaxFilter(
                propertyName,
                maxPropertyValue,
                behaviorOnNoMatch));
    }
}
/// <summary>
/// Client-side filter used for "associated instance" queries.  Association
/// traversal happens on the server side, so any instance that reaches this
/// filter is by definition a match.
/// </summary>
private class CimInstanceAssociationFilter : CimInstanceFilterBase
{
    public CimInstanceAssociationFilter(BehaviorOnNoMatch behaviorOnNoMatch)
    {
        // Association queries report errors by default when nothing is found.
        this.BehaviorOnNoMatch = (behaviorOnNoMatch == BehaviorOnNoMatch.Default)
            ? BehaviorOnNoMatch.ReportErrors
            : behaviorOnNoMatch;
    }

    protected override bool IsMatchCore(CimInstance cimInstance)
    {
        // Getting called at all means CIM found an associated instance.
        return true;
    }
}
/// <summary>
/// Base class for client-side filters that compare a single property of a
/// CimInstance against an expected value.  Tracks whether the filter ever
/// matched (consumed later to decide whether "not found" errors are needed).
/// </summary>
internal abstract class PropertyValueFilter
{
    protected PropertyValueFilter(string propertyName, object expectedPropertyValue, BehaviorOnNoMatch behaviorOnNoMatch)
    {
        PropertyName = propertyName;
        _behaviorOnNoMatch = behaviorOnNoMatch;
        OriginalExpectedPropertyValue = expectedPropertyValue;
        // Convert the .NET value up-front so comparisons happen in CIM type space.
        CimTypedExpectedPropertyValue = CimValueConverter.ConvertFromDotNetToCim(expectedPropertyValue);
    }

    /// <summary>
    /// Effective behavior when no matches were found.  A Default value is
    /// resolved lazily via the subclass hook and cached in the backing field.
    /// </summary>
    public BehaviorOnNoMatch BehaviorOnNoMatch
    {
        get
        {
            if (_behaviorOnNoMatch == BehaviorOnNoMatch.Default)
            {
                _behaviorOnNoMatch = this.GetDefaultBehaviorWhenNoMatchesFound(this.CimTypedExpectedPropertyValue);
            }

            return _behaviorOnNoMatch;
        }
    }

    // Subclass hook: what BehaviorOnNoMatch.Default should mean for this kind of filter.
    protected abstract BehaviorOnNoMatch GetDefaultBehaviorWhenNoMatchesFound(object cimTypedExpectedPropertyValue);

    private BehaviorOnNoMatch _behaviorOnNoMatch;

    // Name of the CIM property this filter inspects.
    public string PropertyName { get; }

    // Expected value converted to its CIM-intrinsic representation.
    public object CimTypedExpectedPropertyValue { get; }

    // Expected value exactly as supplied by the caller (used for error reporting).
    public object OriginalExpectedPropertyValue { get; }

    // True once any instance has matched this filter.
    public bool HadMatch { get; private set; }

    /// <summary>
    /// Returns true when <paramref name="o"/> has the filtered property and
    /// its value matches the expected value; records any match in HadMatch.
    /// </summary>
    public bool IsMatch(CimInstance o)
    {
        if (o == null)
        {
            return false;
        }

        CimProperty propertyInfo = o.CimInstanceProperties[PropertyName];
        if (propertyInfo == null)
        {
            return false;
        }

        object actualPropertyValue = propertyInfo.Value;

        if (CimTypedExpectedPropertyValue == null)
        {
            // A null expected value matches only a null actual value.
            HadMatch = HadMatch || (actualPropertyValue == null);
            return actualPropertyValue == null;
        }

        CimValueConverter.AssertIntrinsicCimValue(actualPropertyValue);
        CimValueConverter.AssertIntrinsicCimValue(CimTypedExpectedPropertyValue);

        // Align the actual value's type with the expected value's type before comparing.
        actualPropertyValue = ConvertActualValueToExpectedType(actualPropertyValue, CimTypedExpectedPropertyValue);
        Dbg.Assert(IsSameType(actualPropertyValue, CimTypedExpectedPropertyValue), "Types of actual vs expected property value should always match");

        bool isMatch = this.IsMatchingValue(actualPropertyValue);
        HadMatch = HadMatch || isMatch;
        return isMatch;
    }

    // Subclass hook: the actual comparison (equality, wildcard, min, max, ...).
    protected abstract bool IsMatchingValue(object actualPropertyValue);

    private object ConvertActualValueToExpectedType(object actualPropertyValue, object expectedPropertyValue)
    {
        // A string coming back from the query may encode a non-string CIM
        // value; convert it to the expected type before comparing.
        if ((actualPropertyValue is string) && (!(expectedPropertyValue is string)))
        {
            actualPropertyValue = LanguagePrimitives.ConvertTo(actualPropertyValue, expectedPropertyValue.GetType(), CultureInfo.InvariantCulture);
        }

        if (!IsSameType(actualPropertyValue, expectedPropertyValue))
        {
            var errorMessage = string.Format(
                CultureInfo.InvariantCulture,
                CmdletizationResources.CimJob_MismatchedTypeOfPropertyReturnedByQuery,
                PropertyName,
                actualPropertyValue.GetType().FullName,
                expectedPropertyValue.GetType().FullName);
            throw CimJobException.CreateWithoutJobContext(
                errorMessage,
                "CimJob_PropertyTypeUnexpectedByClientSideQuery",
                ErrorCategory.InvalidType);
        }

        return actualPropertyValue;
    }

    private static bool IsSameType(object actualPropertyValue, object expectedPropertyValue)
    {
        // Nulls are compatible with everything.
        if (actualPropertyValue == null)
        {
            return true;
        }

        if (expectedPropertyValue == null)
        {
            return true;
        }

        // TimeSpan and DateTime are treated as interchangeable here (both
        // sides may carry either representation of a CIM datetime).
        if (actualPropertyValue is TimeSpan || actualPropertyValue is DateTime)
        {
            return expectedPropertyValue is TimeSpan || expectedPropertyValue is DateTime;
        }

        return actualPropertyValue.GetType() == expectedPropertyValue.GetType();
    }
}
/// <summary>
/// Equality filter for a single expected value.  Comparison is
/// case-insensitive for chars/strings and can optionally honor PowerShell
/// wildcard patterns.
/// </summary>
internal class PropertyValueRegularFilter : PropertyValueFilter
{
    private readonly bool _wildcardsEnabled;

    public PropertyValueRegularFilter(string propertyName, object expectedPropertyValue, bool wildcardsEnabled, BehaviorOnNoMatch behaviorOnNoMatch)
        : base(propertyName, expectedPropertyValue, behaviorOnNoMatch)
    {
        _wildcardsEnabled = wildcardsEnabled;
    }

    protected override BehaviorOnNoMatch GetDefaultBehaviorWhenNoMatchesFound(object cimTypedExpectedPropertyValue)
    {
        if (!_wildcardsEnabled)
        {
            return BehaviorOnNoMatch.ReportErrors;
        }
        else
        {
            // A pattern containing wildcards is exploratory - finding nothing
            // is not an error; a literal value is expected to exist.
            string expectedPropertyValueAsString = cimTypedExpectedPropertyValue as string;
            if (expectedPropertyValueAsString != null && WildcardPattern.ContainsWildcardCharacters(expectedPropertyValueAsString))
            {
                return BehaviorOnNoMatch.SilentlyContinue;
            }
            else
            {
                return BehaviorOnNoMatch.ReportErrors;
            }
        }
    }

    /// <summary>
    /// Builds a NotFoundError describing the specific property/value pair
    /// that never matched.
    /// </summary>
    internal NotFoundError GetGranularNotFoundError()
    {
        return new NotFoundError(this.PropertyName, this.OriginalExpectedPropertyValue, _wildcardsEnabled);
    }

    protected override bool IsMatchingValue(object actualPropertyValue)
    {
        if (_wildcardsEnabled)
        {
            return WildcardEqual(this.PropertyName, actualPropertyValue, this.CimTypedExpectedPropertyValue);
        }
        else
        {
            return NonWildcardEqual(this.PropertyName, actualPropertyValue, this.CimTypedExpectedPropertyValue);
        }
    }

    private static bool NonWildcardEqual(string propertyName, object actualPropertyValue, object expectedPropertyValue)
    {
        // perform .NET-based, case-insensitive equality test for 1) characters and 2) strings
        if (expectedPropertyValue is char)
        {
            expectedPropertyValue = expectedPropertyValue.ToString();
            actualPropertyValue = actualPropertyValue.ToString();
        }

        var expectedPropertyValueAsString = expectedPropertyValue as string;
        if (expectedPropertyValueAsString != null)
        {
            var actualPropertyValueAsString = (string)actualPropertyValue;
            return actualPropertyValueAsString.Equals(expectedPropertyValueAsString, StringComparison.OrdinalIgnoreCase);
        }

        // perform .NET based equality for everything else
        return actualPropertyValue.Equals(expectedPropertyValue);
    }

    private static bool WildcardEqual(string propertyName, object actualPropertyValue, object expectedPropertyValue)
    {
        // Both sides must be convertible to string for a wildcard comparison;
        // otherwise the value simply does not match.
        string actualPropertyValueAsString;
        string expectedPropertyValueAsString;
        if (!LanguagePrimitives.TryConvertTo(actualPropertyValue, out actualPropertyValueAsString))
        {
            return false;
        }

        if (!LanguagePrimitives.TryConvertTo(expectedPropertyValue, out expectedPropertyValueAsString))
        {
            return false;
        }

        return WildcardPattern.Get(expectedPropertyValueAsString, WildcardOptions.IgnoreCase).IsMatch(actualPropertyValueAsString);
    }
}
/// <summary>
/// Negation of <see cref="PropertyValueRegularFilter"/>: matches when the
/// property value does NOT equal (or wildcard-match) the excluded value.
/// </summary>
internal class PropertyValueExcludeFilter : PropertyValueRegularFilter
{
    public PropertyValueExcludeFilter(string propertyName, object expectedPropertyValue, bool wildcardsEnabled, BehaviorOnNoMatch behaviorOnNoMatch)
        : base(propertyName, expectedPropertyValue, wildcardsEnabled, behaviorOnNoMatch)
    {
    }

    // Exclusions never report errors when nothing matched.
    protected override BehaviorOnNoMatch GetDefaultBehaviorWhenNoMatchesFound(object cimTypedExpectedPropertyValue) => BehaviorOnNoMatch.SilentlyContinue;

    // Invert the inherited equality/wildcard test.
    protected override bool IsMatchingValue(object actualPropertyValue) => !base.IsMatchingValue(actualPropertyValue);
}
/// <summary>
/// Matches when the actual property value is greater than or equal to the
/// expected (minimum) value.
/// </summary>
internal class PropertyValueMinFilter : PropertyValueFilter
{
    public PropertyValueMinFilter(string propertyName, object expectedPropertyValue, BehaviorOnNoMatch behaviorOnNoMatch)
        : base(propertyName, expectedPropertyValue, behaviorOnNoMatch)
    {
    }

    // A missing match against a lower bound is not an error.
    protected override BehaviorOnNoMatch GetDefaultBehaviorWhenNoMatchesFound(object cimTypedExpectedPropertyValue)
    {
        return BehaviorOnNoMatch.SilentlyContinue;
    }

    protected override bool IsMatchingValue(object actualPropertyValue)
    {
        return ActualValueGreaterThanOrEqualToExpectedValue(this.PropertyName, actualPropertyValue, this.CimTypedExpectedPropertyValue);
    }

    private static bool ActualValueGreaterThanOrEqualToExpectedValue(string propertyName, object actualPropertyValue, object expectedPropertyValue)
    {
        var comparableExpected = expectedPropertyValue as IComparable;
        if (comparableExpected == null)
        {
            return false;
        }

        try
        {
            // expected <= actual  <=>  actual >= expected
            return comparableExpected.CompareTo(actualPropertyValue) <= 0;
        }
        catch (ArgumentException)
        {
            // Incomparable types - treat as no match.
            return false;
        }
    }
}
/// <summary>
/// Matches when the actual property value is less than or equal to the
/// expected (maximum) value.
/// </summary>
internal class PropertyValueMaxFilter : PropertyValueFilter
{
    public PropertyValueMaxFilter(string propertyName, object expectedPropertyValue, BehaviorOnNoMatch behaviorOnNoMatch)
        : base(propertyName, expectedPropertyValue, behaviorOnNoMatch)
    {
    }

    // A missing match against an upper bound is not an error.
    protected override BehaviorOnNoMatch GetDefaultBehaviorWhenNoMatchesFound(object cimTypedExpectedPropertyValue)
    {
        return BehaviorOnNoMatch.SilentlyContinue;
    }

    protected override bool IsMatchingValue(object actualPropertyValue)
    {
        return ActualValueLessThanOrEqualToExpectedValue(this.PropertyName, actualPropertyValue, this.CimTypedExpectedPropertyValue);
    }

    private static bool ActualValueLessThanOrEqualToExpectedValue(string propertyName, object actualPropertyValue, object expectedPropertyValue)
    {
        var comparableActual = actualPropertyValue as IComparable;
        if (comparableActual == null)
        {
            return false;
        }

        try
        {
            // actual <= expected
            return comparableActual.CompareTo(expectedPropertyValue) <= 0;
        }
        catch (ArgumentException)
        {
            // Incomparable types - treat as no match.
            return false;
        }
    }
}
// Total number of results received from the MI layer (matching or not).
private int _numberOfResultsFromMi;
// Number of results that passed all client-side filters.
private int _numberOfMatchingResults;
// All client-side filters registered through the QueryBuilder interface.
private readonly List<CimInstanceFilterBase> _filters = new List<CimInstanceFilterBase>();
// Guards the counters above against concurrent result callbacks.
private readonly object _myLock = new object();
#region "Public" interface for client-side filtering
/// <summary>
/// Records an incoming result and returns true when it passes every
/// registered client-side filter.  Thread-safe.
/// </summary>
internal bool IsResultMatchingClientSideQuery(CimInstance result)
{
    lock (_myLock)
    {
        _numberOfResultsFromMi++;

        // Note: evaluating every filter also lets each one record HadMatch.
        bool matchesAllFilters = _filters.All(f => f.IsMatch(result));
        if (matchesAllFilters)
        {
            _numberOfMatchingResults++;
        }

        return matchesAllFilters;
    }
}
/// <summary>
/// Produces the "not found" errors to surface after the query completed:
/// a single generic error when multiple filters jointly found nothing, or
/// granular per-value errors when there is exactly one filter.
/// </summary>
internal IEnumerable<NotFoundError> GenerateNotFoundErrors()
{
    if (_filters.Count > 1)
    {
        bool anyResultMatched = _numberOfMatchingResults > 0;
        bool anyFilterWantsError = _filters.Any(f => f.ShouldReportErrorOnNoMatches_IfMultipleFilters());
        if (anyResultMatched || !anyFilterWantsError)
        {
            return Enumerable.Empty<NotFoundError>();
        }

        return new[] { new NotFoundError() };
    }

    CimInstanceFilterBase onlyFilter = _filters.SingleOrDefault();
    return (onlyFilter == null)
        ? Enumerable.Empty<NotFoundError>()
        : onlyFilter.GetNotFoundErrors_IfThisIsTheOnlyFilter();
}
#endregion
#region QueryBuilder interface
// QueryBuilder callback: include only instances whose property equals
// (or wildcard-matches) one of the allowed values.
public override void FilterByProperty(string propertyName, IEnumerable allowedPropertyValues, bool wildcardsEnabled, BehaviorOnNoMatch behaviorOnNoMatch)
{
    _filters.Add(new CimInstanceRegularFilter(propertyName, allowedPropertyValues, wildcardsEnabled, behaviorOnNoMatch));
}
// QueryBuilder callback: drop instances whose property equals
// (or wildcard-matches) one of the excluded values.
public override void ExcludeByProperty(string propertyName, IEnumerable excludedPropertyValues, bool wildcardsEnabled, BehaviorOnNoMatch behaviorOnNoMatch)
{
    _filters.Add(new CimInstanceExcludeFilter(propertyName, excludedPropertyValues, wildcardsEnabled, behaviorOnNoMatch));
}
// QueryBuilder callback: keep instances whose property value >= minPropertyValue.
public override void FilterByMinPropertyValue(string propertyName, object minPropertyValue, BehaviorOnNoMatch behaviorOnNoMatch)
{
    _filters.Add(new CimInstanceMinFilter(propertyName, minPropertyValue, behaviorOnNoMatch));
}
// QueryBuilder callback: keep instances whose property value <= maxPropertyValue.
public override void FilterByMaxPropertyValue(string propertyName, object maxPropertyValue, BehaviorOnNoMatch behaviorOnNoMatch)
{
    _filters.Add(new CimInstanceMaxFilter(propertyName, maxPropertyValue, behaviorOnNoMatch));
}
// QueryBuilder callback for association queries.  Only the behavior argument
// is used client-side - the association itself is resolved by the CIM server,
// so any instance delivered to the filter already satisfies the association.
public override void FilterByAssociatedInstance(object associatedInstance, string associationName, string sourceRole, string resultRole, BehaviorOnNoMatch behaviorOnNoMatch)
{
    _filters.Add(new CimInstanceAssociationFilter(behaviorOnNoMatch));
}
#endregion
}
}
| {
"content_hash": "6ec03f859e472b9e1ac2412738ba1b10",
"timestamp": "",
"source": "github",
"line_count": 702,
"max_line_length": 187,
"avg_line_length": 40.48860398860399,
"alnum_prop": 0.5685184533652324,
"repo_name": "Cowmonaut/PowerShell",
"id": "084d61ecab0a31798638ff1acff9085deb3de2bd",
"size": "28520",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Microsoft.PowerShell.Commands.Management/cimSupport/cmdletization/cim/clientSideQuery.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PowerShell",
"bytes": "15610"
}
],
"symlink_target": ""
} |
/******************************************************************************
* Product of NIST/ITL Advanced Networking Technologies Division (ANTD). *
******************************************************************************/
package gov.nist.javax.sip.stack;
import gov.nist.core.CommonLogger;
import gov.nist.core.LogWriter;
import gov.nist.core.StackLogger;
import gov.nist.javax.sip.header.CSeq;
import gov.nist.javax.sip.header.CallID;
import gov.nist.javax.sip.header.ContentLength;
import gov.nist.javax.sip.header.From;
import gov.nist.javax.sip.header.RequestLine;
import gov.nist.javax.sip.header.StatusLine;
import gov.nist.javax.sip.header.To;
import gov.nist.javax.sip.header.Via;
import gov.nist.javax.sip.message.SIPMessage;
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.Socket;
import java.text.ParseException;
/*
* Ahmet Uyar <[email protected]>sent in a bug report for TCP operation of the JAIN sipStack.
* Niklas Uhrberg suggested that a mechanism be added to limit the number of simultaneous open
* connections. The TLS Adaptations were contributed by Daniel Martinez. Hagai Sela contributed a
* bug fix for symmetric nat. Jeroen van Bemmel added compensation for buggy clients ( Microsoft
* RTC clients ). Bug fixes by [email protected], Joost Yervante Damand
*/
/**
* This is a stack abstraction for TCP connections. This abstracts a stream of
* parsed messages. The SIP sipStack starts this from the main SIPStack class
* for each connection that it accepts. It starts a message parser in its own
* thread and talks to the message parser via a pipe. The message parser calls
* back via the parseError or processMessage functions that are defined as part
* of the SIPMessageListener interface.
*
* @see gov.nist.javax.sip.parser.PipelinedMsgParser
*
*
* @author M. Ranganathan <br/>
*
* @version 1.2 $Revision: 1.83 $ $Date: 2010-12-02 22:44:53 $
*/
public class TCPMessageChannel extends ConnectionOrientedMessageChannel {
// Shared logger for all TCP message channels.
private static StackLogger logger = CommonLogger.getLogger(TCPMessageChannel.class);

// Output stream of the underlying TCP socket; outgoing messages are written here.
protected OutputStream myClientOutputStream;
/**
 * Creates a channel that is not yet bound to a socket; the connection is
 * established later (e.g. on the first send).
 *
 * @param sipStack the SIP stack that owns this channel.
 */
protected TCPMessageChannel(SIPTransactionStack sipStack) {
    super(sipStack);
}
/**
 * Constructor - gets called from the SIPStack class with a socket on
 * accepting a new client. All the processing of the message is done here
 * with the sipStack being freed up to handle new connections. The sock
 * input is the socket that is returned from the accept. Global data that is
 * shared by all threads is accessible in the Server structure.
 *
 * @param sock
 *            Socket from which to read and write messages. The socket is
 *            already connected (was created as a result of an accept).
 *
 * @param sipStack
 *            Ptr to SIP Stack
 * @param msgProcessor
 *            the TCP message processor that accepted this connection;
 *            supplies the local address/port for the channel.
 * @param threadName
 *            name given to the reader thread started by this constructor.
 * @throws IOException
 *             if the socket's streams cannot be obtained.
 */
protected TCPMessageChannel(Socket sock, SIPTransactionStack sipStack,
        TCPMessageProcessor msgProcessor, String threadName) throws IOException {
    super(sipStack);
    if (logger.isLoggingEnabled(LogWriter.TRACE_DEBUG)) {
        logger.logDebug(
                "creating new TCPMessageChannel ");
        logger.logStackTrace();
    }
    mySock = sock;
    peerAddress = mySock.getInetAddress();
    myAddress = msgProcessor.getIpAddress().getHostAddress();
    myClientInputStream = mySock.getInputStream();
    myClientOutputStream = mySock.getOutputStream();
    // Reader thread is a daemon so it does not keep the JVM alive on shutdown.
    mythread = new Thread(this);
    mythread.setDaemon(true);
    mythread.setName(threadName);
    this.peerPort = mySock.getPort();
    // Channel key of the form "tcp:host:port", used by the stack's channel cache.
    this.key = MessageChannel.getKey(peerAddress, peerPort, "TCP");
    this.myPort = msgProcessor.getPort();
    // Bug report by Vishwashanti Raj Kadiayl
    super.messageProcessor = msgProcessor;
    // Can drop this after response is sent potentially.
    mythread.start();
}
/**
 * Constructor - connects to the given inet address. Acknowledgement --
 * Lamine Brahimi (IBM Zurich) sent in a bug fix for this method. A thread
 * was being uncessarily created.
 *
 * Note: no socket is opened here - the channel is configured with the peer
 * coordinates and the actual connection happens on first send.
 *
 * @param inetAddr
 *            inet address to connect to.
 * @param port
 *            remote port to connect to.
 * @param sipStack
 *            is the sip sipStack from which we are created.
 * @param messageProcessor
 *            processor supplying the local address/port.
 * @throws IOException
 *             if we cannot connect.
 */
protected TCPMessageChannel(InetAddress inetAddr, int port,
        SIPTransactionStack sipStack, TCPMessageProcessor messageProcessor)
        throws IOException {
    super(sipStack);
    if (logger.isLoggingEnabled(LogWriter.TRACE_DEBUG)) {
        logger.logDebug(
                "creating new TCPMessageChannel ");
        logger.logStackTrace();
    }
    this.peerAddress = inetAddr;
    this.peerPort = port;
    this.myPort = messageProcessor.getPort();
    this.peerProtocol = "TCP";
    this.myAddress = messageProcessor.getIpAddress().getHostAddress();
    // Bug report by Vishwashanti Raj Kadiayl
    this.key = MessageChannel.getKey(peerAddress, peerPort, "TCP");
    super.messageProcessor = messageProcessor;
}
/**
 * Close the message channel.
 *
 * @param removeSocket
 *            whether the socket should also be removed from the stack's
 *            IOHandler cache (pass false when the IOHandler has already
 *            replaced the cached socket).
 * @param stopKeepAliveTask
 *            whether any pending ping keep-alive timeout task should be
 *            cancelled as well.
 */
public void close(boolean removeSocket, boolean stopKeepAliveTask) {
    isRunning = false;
    // we need to close everything because the socket may be closed by the other end
    // like in LB scenarios sending OPTIONS and killing the socket after it gets the response
    if (mySock != null) {
        if (logger.isLoggingEnabled(LogWriter.TRACE_DEBUG))
            logger.logDebug("Closing socket " + key);
        try {
            mySock.close();
            mySock = null;
        } catch (IOException ex) {
            // Best-effort close; failure is only logged.
            if (logger.isLoggingEnabled(LogWriter.TRACE_DEBUG))
                logger.logDebug("Error closing socket " + ex);
        }
    }
    if(myParser != null) {
        if (logger.isLoggingEnabled(LogWriter.TRACE_DEBUG))
            logger.logDebug("Closing my parser " + myParser);
        myParser.close();
    }
    // no need to close myClientInputStream since myParser.close() above will do it
    if(myClientOutputStream != null) {
        if (logger.isLoggingEnabled(LogWriter.TRACE_DEBUG))
            logger.logDebug("Closing client output stream " + myClientOutputStream);
        try {
            myClientOutputStream.close();
        } catch (IOException ex) {
            if (logger.isLoggingEnabled(LogWriter.TRACE_DEBUG))
                logger.logDebug("Error closing client output stream" + ex);
        }
    }
    if(removeSocket) {
        // remove the "tcp:" part of the key to cleanup the ioHandler hashmap
        String ioHandlerKey = key.substring(4);
        if (logger.isLoggingEnabled(LogWriter.TRACE_DEBUG))
            logger.logDebug("Closing TCP socket " + ioHandlerKey);
        // Issue 358 : remove socket and semaphore on close to avoid leaking
        sipStack.ioHandler.removeSocket(ioHandlerKey);
        if (logger.isLoggingEnabled(LogWriter.TRACE_DEBUG)) {
            logger.logDebug("Closing message Channel (key = " + key +")" + this);
        }
    } else {
        if (logger.isLoggingEnabled(LogWriter.TRACE_DEBUG)) {
            String ioHandlerKey = key.substring(4);
            logger.logDebug("not removing socket key from the cached map since it has already been updated by the iohandler.sendBytes " + ioHandlerKey);
        }
    }
    if(stopKeepAliveTask) {
        cancelPingKeepAliveTimeoutTaskIfStarted();
    }
}
/**
 * get the transport string.
 *
 * @return "TCP" (note: upper case, used e.g. when building channel keys).
 */
public String getTransport() {
    return "TCP";
}
/**
 * Send message to whoever is connected to us. Uses the topmost via address
 * to send to.
 *
 * NOTE(review): the actual IOHandler send (and its catch clause) is
 * commented out below, so both 'sock' and 'problem' remain null.  When no
 * advertised address/port is available, 'throw problem' therefore throws
 * a NullPointerException rather than an IOException - confirm whether the
 * commented-out path is pending reinstatement (e.g. a NIO migration).
 *
 * @param msg
 *            is the message to send.
 * @param isClient
 *            whether this channel is acting as the client side of the
 *            connection (passed through to the IO layer).
 */
protected synchronized void sendMessage(byte[] msg, boolean isClient) throws IOException {
    if ( logger.isLoggingEnabled(LogWriter.TRACE_DEBUG)) {
        logger.logDebug("sendMessage isClient = " + isClient);
    }

    Socket sock = null;
    IOException problem = null;
    /* try {
    // sock = this.sipStack.ioHandler.sendBytes(this.messageProcessor.getIpAddress(),
    // this.peerAddress, this.peerPort, this.peerProtocol, msg, isClient, this);
    } catch (IOException any) {
    problem = any;
    logger.logWarning("Failed to connect " + this.peerAddress + ":" + this.peerPort +" but trying the advertised port=" + this.peerPortAdvertisedInHeaders + " if it's different than the port we just failed on");
    }*/
    if(sock == null) { // http://java.net/jira/browse/JSIP-362 If we couldn't connect to the host, try the advertised host and port as failsafe
        if(peerAddressAdvertisedInHeaders != null && peerPortAdvertisedInHeaders > 0) {
            if (logger.isLoggingEnabled(LogWriter.TRACE_WARN)) {
                logger.logWarning("Couldn't connect to peerAddress = " + peerAddress + " peerPort = " + peerPort
                        + " key = " + key + " retrying on peerPortAdvertisedInHeaders "
                        + peerPortAdvertisedInHeaders);
            }
            InetAddress address = InetAddress.getByName(peerAddressAdvertisedInHeaders);
            // sock = this.sipStack.ioHandler.sendBytes(this.messageProcessor.getIpAddress(),
            // address, this.peerPortAdvertisedInHeaders, this.peerProtocol, msg, isClient, this);
            // On success, rebind the channel to the advertised coordinates.
            this.peerPort = this.peerPortAdvertisedInHeaders;
            this.peerAddress = address;
            this.key = MessageChannel.getKey(peerAddress, peerPort, "TCP");
            if (logger.isLoggingEnabled(LogWriter.TRACE_WARN)) {
                logger.logWarning("retry suceeded to peerAddress = " + peerAddress
                        + " peerPortAdvertisedInHeaders = " + peerPortAdvertisedInHeaders + " key = " + key);
            }
        } else {
            throw problem; // throw the original exception we had from the first attempt
        }
    }

    // Created a new socket so close the old one and stick the new
    // one in its place but dont do this if it is a datagram socket.
    // (could have replied via udp but received via tcp!).
    // if (mySock == null && s != null) {
    // this.uncache();
    // } else
    if (sock != mySock && sock != null) {
        if (mySock != null) {
            if(logger.isLoggingEnabled(LogWriter.TRACE_WARN)) {
                logger.logWarning(
                        "Old socket different than new socket on channel " + key);
                logger.logStackTrace();
                logger.logWarning(
                        "Old socket local ip address " + mySock.getLocalSocketAddress());
                logger.logWarning(
                        "Old socket remote ip address " + mySock.getRemoteSocketAddress());
                logger.logWarning(
                        "New socket local ip address " + sock.getLocalSocketAddress());
                logger.logWarning(
                        "New socket remote ip address " + sock.getRemoteSocketAddress());
            }
            // Keep the cached IOHandler entry (already replaced) and keep any keep-alive task.
            close(false, false);
        }
        if(problem == null) {
            if(mySock != null) {
                if(logger.isLoggingEnabled(LogWriter.TRACE_WARN)) {
                    logger.logWarning(
                            "There was no exception for the retry mechanism so creating a new thread based on the new socket for incoming " + key);
                }
            }
            mySock = sock;
            this.myClientInputStream = mySock.getInputStream();
            this.myClientOutputStream = mySock.getOutputStream();
            // Start a new reader thread on the replacement socket.
            Thread thread = new Thread(this);
            thread.setDaemon(true);
            thread.setName("TCPMessageChannelThread");
            thread.start();
        } else {
            if(logger.isLoggingEnabled(LogWriter.TRACE_WARN)) {
                logger.logWarning(
                        "There was an exception for the retry mechanism so not creating a new thread based on the new socket for incoming " + key);
            }
            mySock = sock;
        }
    }
}
/**
 * Send a message to a specified address.
 *
 * NOTE(review): as in sendMessage(byte[], boolean), the IOHandler send is
 * commented out, so 'sock' and 'problem' stay null and 'throw problem' can
 * raise a NullPointerException when no advertised address is available -
 * confirm whether this is an in-progress refactoring.
 *
 * @param message
 *            Pre-formatted message to send.
 * @param receiverAddress
 *            Address to send it to.
 * @param receiverPort
 *            Receiver port.
 * @param retry
 *            whether the IO layer should retry the send on failure.
 * @throws IOException
 *             If there is a problem connecting or sending.
 */
public synchronized void sendMessage(byte message[], InetAddress receiverAddress,
        int receiverPort, boolean retry) throws IOException {
    if (message == null || receiverAddress == null)
        throw new IllegalArgumentException("Null argument");

    // Remember the first port we are told about, falling back to the SIP
    // default (5060) when the caller passes a non-positive port.
    if(peerPortAdvertisedInHeaders <= 0) {
        if(logger.isLoggingEnabled(LogWriter.TRACE_DEBUG)) {
            logger.logDebug("receiver port = " + receiverPort + " for this channel " + this + " key " + key);
        }
        if(receiverPort <=0) {
            // if port is 0 we assume the default port for TCP
            this.peerPortAdvertisedInHeaders = 5060;
        } else {
            this.peerPortAdvertisedInHeaders = receiverPort;
        }
        if(logger.isLoggingEnabled(LogWriter.TRACE_DEBUG)) {
            logger.logDebug("2.Storing peerPortAdvertisedInHeaders = " + peerPortAdvertisedInHeaders + " for this channel " + this + " key " + key);
        }
    }

    Socket sock = null;
    IOException problem = null;
    /* try {
    // sock = this.sipStack.ioHandler.sendBytes(this.messageProcessor.getIpAddress(),
    // receiverAddress, receiverPort, "TCP", message, retry, this);
    } catch (IOException any) {
    problem = any;
    logger.logWarning("Failed to connect " + this.peerAddress + ":" + receiverPort +" but trying the advertised port=" + this.peerPortAdvertisedInHeaders + " if it's different than the port we just failed on");
    logger.logError("Error is ", any);
    }*/
    if(sock == null) { // http://java.net/jira/browse/JSIP-362 If we couldn't connect to the host, try the advertised host:port as failsafe
        if(peerAddressAdvertisedInHeaders != null && peerPortAdvertisedInHeaders > 0) {
            if (logger.isLoggingEnabled(LogWriter.TRACE_WARN)) {
                logger.logWarning("Couldn't connect to receiverAddress = " + receiverAddress
                        + " receiverPort = " + receiverPort + " key = " + key
                        + " retrying on peerPortAdvertisedInHeaders " + peerPortAdvertisedInHeaders);
            }
            InetAddress address = InetAddress.getByName(peerAddressAdvertisedInHeaders);
            // sock = this.sipStack.ioHandler.sendBytes(this.messageProcessor.getIpAddress(),
            // address, this.peerPortAdvertisedInHeaders, "TCP", message, retry, this);
            // On success, rebind the channel to the advertised coordinates.
            this.peerPort = this.peerPortAdvertisedInHeaders;
            this.peerAddress = address;
            this.key = MessageChannel.getKey(peerAddress, peerPort, "TCP");
            if (logger.isLoggingEnabled(LogWriter.TRACE_WARN)) {
                logger.logWarning("retry suceeded to peerAddress = " + peerAddress
                        + " peerPort = " + peerPort + " key = " + key);
            }
        } else {
            throw problem; // throw the original exception we had from the first attempt
        }
    }

    if (sock != mySock && sock != null) {
        if (mySock != null) {
            if(logger.isLoggingEnabled(LogWriter.TRACE_WARN)) {
                logger.logWarning(
                        "Old socket different than new socket on channel " + key);
                logger.logStackTrace();
                logger.logWarning(
                        "Old socket local ip address " + mySock.getLocalSocketAddress());
                logger.logWarning(
                        "Old socket remote ip address " + mySock.getRemoteSocketAddress());
                logger.logWarning(
                        "New socket local ip address " + sock.getLocalSocketAddress());
                logger.logWarning(
                        "New socket remote ip address " + sock.getRemoteSocketAddress());
            }
            // Keep the cached IOHandler entry (already replaced) and keep any keep-alive task.
            close(false, false);
        }
        if(problem == null) {
            if (mySock != null) {
                if(logger.isLoggingEnabled(LogWriter.TRACE_WARN)) {
                    logger.logWarning(
                            "There was no exception for the retry mechanism so creating a new thread based on the new socket for incoming " + key);
                }
            }
            mySock = sock;
            this.myClientInputStream = mySock.getInputStream();
            this.myClientOutputStream = mySock.getOutputStream();
            // start a new reader on this end of the pipe.
            Thread mythread = new Thread(this);
            mythread.setDaemon(true);
            mythread.setName("TCPMessageChannelThread");
            mythread.start();
        } else {
            if(logger.isLoggingEnabled(LogWriter.TRACE_WARN)) {
                logger.logWarning(
                        "There was an exception for the retry mechanism so not creating a new thread based on the new socket for incoming " + key);
            }
            mySock = sock;
        }
    }
}
/**
 * Exception processor for exceptions detected from the parser. (This is
 * invoked by the parser when an error is detected).
 *
 * @param sipMessage
 *            -- the message that incurred the error.
 * @param ex
 *            -- parse exception detected by the parser.
 * @param header
 *            -- header that caused the error.
 * @param hdrClass
 *            -- class of the offending header, or null if unknown.
 * @param message
 *            -- descriptive text from the parser (not used here).
 * @throws ParseException
 *             Thrown if we want to reject the message.
 */
public void handleException(ParseException ex, SIPMessage sipMessage,
        Class hdrClass, String header, String message)
        throws ParseException {
    if (logger.isLoggingEnabled())
        logger.logException(ex);
    // Log the bad message for later reference.
    // A parse failure in any mandatory header is fatal for the message;
    // failures in other headers are tolerated (header kept as "unparsed").
    if ((hdrClass != null)
            && (hdrClass.equals(From.class) || hdrClass.equals(To.class)
                    || hdrClass.equals(CSeq.class)
                    || hdrClass.equals(Via.class)
                    || hdrClass.equals(CallID.class)
                    || hdrClass.equals(ContentLength.class)
                    || hdrClass.equals(RequestLine.class) || hdrClass
                    .equals(StatusLine.class))) {
        if (logger.isLoggingEnabled(LogWriter.TRACE_DEBUG)) {
            logger.logDebug(
                    "Encountered Bad Message \n" + sipMessage.toString());
        }

        // JvB: send a 400 response for requests (except ACK)
        // Currently only UDP, @todo also other transports
        String msgString = sipMessage.toString();
        if (!msgString.startsWith("SIP/") && !msgString.startsWith("ACK ")) {
            if(mySock != null)
            {
                if (logger.isLoggingEnabled(LogWriter.TRACE_ERROR)) {
                    logger.logError("Malformed mandatory headers: closing socket! :" + mySock.toString());
                }

                try
                {
                    mySock.close();
                } catch(IOException ie)
                {
                    if (logger.isLoggingEnabled(LogWriter.TRACE_ERROR)) {
                        logger.logError("Exception while closing socket! :" + mySock.toString() + ":" + ie.toString());
                    }
                }
            }
        }

        throw ex;
    } else {
        // Non-mandatory header: keep the raw text on the message and continue.
        sipMessage.addUnparsed(header);
    }
}
/**
 * Equality is based on the underlying socket: two channels are equal when
 * they are of the exact same class and wrap the same socket instance.
 *
 * The previous implementation dereferenced {@code other} unconditionally
 * and threw a NullPointerException for a null argument, violating the
 * {@link Object#equals(Object)} contract; it also overrode equals without
 * a matching hashCode.
 *
 * @param other the object to compare ourselves to for equals
 * @return true iff {@code other} is a channel of the same class over the
 *         same socket instance.
 */
@Override
public boolean equals(Object other) {
    if (other == null || !this.getClass().equals(other.getClass())) {
        return false;
    }
    TCPMessageChannel that = (TCPMessageChannel) other;
    // Reference comparison on purpose: channels are equal only when they
    // share the very same socket object (mySock may be null after close()).
    return this.mySock == that.mySock;
}

/**
 * Kept consistent with {@link #equals(Object)}: channels wrapping the same
 * socket instance hash alike (identity hash of the socket; 0 when null).
 */
@Override
public int hashCode() {
    return System.identityHashCode(mySock);
}
/**
 * TCP Is not a secure protocol.
 *
 * @return always false (TLS channels are handled by a different class).
 */
public boolean isSecure() {
    return false;
}
}
| {
"content_hash": "8d9c5c246eb71648156a875643cc03b6",
"timestamp": "",
"source": "github",
"line_count": 488,
"max_line_length": 216,
"avg_line_length": 42.86065573770492,
"alnum_prop": 0.5848154522853318,
"repo_name": "fhg-fokus-nubomedia/signaling-plane",
"id": "b552e244a02854e9f0d05e6248df7088d8fb58eb",
"size": "22000",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/lib-sip/src/main/java/gov/nist/javax/sip/stack/TCPMessageChannel.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "12152"
},
{
"name": "Groff",
"bytes": "22"
},
{
"name": "HTML",
"bytes": "2637100"
},
{
"name": "Java",
"bytes": "5622899"
},
{
"name": "JavaScript",
"bytes": "3448641"
},
{
"name": "Python",
"bytes": "161709"
},
{
"name": "Shell",
"bytes": "8658"
}
],
"symlink_target": ""
} |
using content::NotificationService;
// Constructs a tab model for |profile| (may be NULL).  Determines whether
// this model represents an off-the-record (incognito) session and registers
// for profile-destruction notifications so the cached pointer can be cleared.
TabModel::TabModel(Profile* profile)
    : profile_(profile),
      synced_window_delegate_(
          new browser_sync::SyncedWindowDelegateAndroid(this)),
      toolbar_model_(new ToolbarModelImpl(this)) {
  if (profile) {
    // A normal Profile creates an OTR profile if it does not exist when
    // GetOffTheRecordProfile() is called, so we guard it with
    // HasOffTheRecordProfile(). An OTR profile returns itself when you call
    // GetOffTheRecordProfile().
    is_off_the_record_ = (profile->HasOffTheRecordProfile() &&
        profile == profile->GetOffTheRecordProfile());
    // A profile can be destroyed, for example in the case of closing all
    // incognito tabs. We therefore must listen for when this happens, and
    // remove our pointer to the profile accordingly.
    registrar_.Add(this, chrome::NOTIFICATION_PROFILE_DESTROYED,
                   content::Source<Profile>(profile_));
  } else {
    is_off_the_record_ = false;
  }
}
// Profile-less constructor.  NOTE(review): unlike the Profile* overload,
// this one does not initialize toolbar_model_ - confirm callers of
// GetToolbarModel() never use a model created this way.
TabModel::TabModel()
    : profile_(NULL),
      is_off_the_record_(false),
      synced_window_delegate_(
          new browser_sync::SyncedWindowDelegateAndroid(this)) {
}
// Defined out-of-line; owned members clean up via their own destructors.
TabModel::~TabModel() {
}
// Returns the WebContents of the currently active tab, or NULL when there is
// no valid active tab.
content::WebContents* TabModel::GetActiveWebContents() const {
  // Valid tab indices are [0, GetTabCount()).  The previous check used
  // 'index > count', which let an index equal to the tab count through and
  // caused an out-of-range access in GetWebContentsAt().
  int index = GetActiveIndex();
  if (GetTabCount() == 0 || index < 0 || index >= GetTabCount())
    return NULL;
  return GetWebContentsAt(index);
}
// Returns the profile backing this model; NULL after the profile has been
// destroyed (see Observe()).
Profile* TabModel::GetProfile() const {
  return profile_;
}
// True when this model was created for an off-the-record (incognito)
// profile; computed once in the constructor.
bool TabModel::IsOffTheRecord() const {
  return is_off_the_record_;
}
// Returns the sync window delegate created in the constructor; this model
// retains ownership.
browser_sync::SyncedWindowDelegate* TabModel::GetSyncedWindowDelegate() const {
  return synced_window_delegate_.get();
}
// Returns the session window id identifying this model.
SessionID::id_type TabModel::GetSessionId() const {
  return session_id_.id();
}
// Emits NOTIFICATION_SESSION_RESTORE_COMPLETE sourced from this model's
// profile. Deliberately a no-op when profile_ is NULL (see TODO below).
void TabModel::BroadcastSessionRestoreComplete() {
  if (profile_) {
    NotificationService::current()->Notify(
        chrome::NOTIFICATION_SESSION_RESTORE_COMPLETE,
        content::Source<Profile>(profile_),
        NotificationService::NoDetails());
  } else {
    // TODO(nyquist): Uncomment this once downstream Android uses new
    // constructor that takes a Profile* argument. See crbug.com/159704.
    // NOTREACHED();
  }
}
// Returns the toolbar model owned by this tab model.
// NOTE(review): toolbar_model_ is only initialized by the Profile-taking
// constructor; presumably NULL when the legacy constructor was used —
// confirm callers.
ToolbarModel* TabModel::GetToolbarModel() {
  return toolbar_model_.get();
}
// Convenience accessor for the security level reported by the toolbar model.
// NOTE(review): dereferences toolbar_model_, which the parameterless
// constructor never initializes — confirm only the Profile constructor is
// used on paths reaching this.
ToolbarModel::SecurityLevel TabModel::GetSecurityLevelForCurrentTab() {
  return toolbar_model_->GetSecurityLevel();
}
// content::NotificationObserver implementation. The only notification this
// model registers for is NOTIFICATION_PROFILE_DESTROYED (see the
// constructor), which tells us to drop the raw profile pointer.
void TabModel::Observe(
    int type,
    const content::NotificationSource& source,
    const content::NotificationDetails& details) {
  if (type == chrome::NOTIFICATION_PROFILE_DESTROYED) {
    // Our profile just got destroyed, so we delete our pointer to it.
    profile_ = NULL;
  } else {
    NOTREACHED();
  }
}
| {
"content_hash": "06d85c09aa644c28e58d2d73449a5eb6",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 79,
"avg_line_length": 29.365591397849464,
"alnum_prop": 0.6935188575613328,
"repo_name": "zcbenz/cefode-chromium",
"id": "7bd5d7305061ebd98c912aad0ca1c378011b3cee",
"size": "3314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chrome/browser/ui/android/tab_model/tab_model.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "853"
},
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "1174304"
},
{
"name": "Awk",
"bytes": "9519"
},
{
"name": "C",
"bytes": "76026099"
},
{
"name": "C#",
"bytes": "1132"
},
{
"name": "C++",
"bytes": "157904700"
},
{
"name": "DOT",
"bytes": "1559"
},
{
"name": "F#",
"bytes": "381"
},
{
"name": "Java",
"bytes": "3225038"
},
{
"name": "JavaScript",
"bytes": "18180217"
},
{
"name": "Logos",
"bytes": "4517"
},
{
"name": "Matlab",
"bytes": "5234"
},
{
"name": "Objective-C",
"bytes": "7139426"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "Perl",
"bytes": "932901"
},
{
"name": "Python",
"bytes": "8654916"
},
{
"name": "R",
"bytes": "262"
},
{
"name": "Ragel in Ruby Host",
"bytes": "3621"
},
{
"name": "Shell",
"bytes": "1533012"
},
{
"name": "Tcl",
"bytes": "277077"
},
{
"name": "XML",
"bytes": "13493"
}
],
"symlink_target": ""
} |
import os
from collections import defaultdict
from .smb_utils import smb_connect, get_netbios_name, NameError
from smb.base import SharedDevice
# Default timeout passed to the SMB layer (smb_connect / retrieveFile);
# presumably seconds — confirm against smb_utils.
DEFAULT_TIMEOUT = 30
# Default name of the SMB share exposed by the instrument.
DEFAULT_SHARE = 'data'
class IfcbConnectionError(Exception):
    """Raised when the SMB connection to an IFCB is absent or cannot be made."""
def do_nothing(*args, **kw):
    """No-op default callback: accepts any arguments and ignores them."""
    return None
class RemoteIfcb(object):
    """SMB client for a remote IFCB instrument.

    Lists, transfers and deletes raw filesets (the .hdr/.adc/.roi triples
    the instrument writes) on the instrument's SMB share. Usable as a
    context manager; pass connect=False to defer opening the connection.
    """

    def __init__(self, addr, username, password, netbios_name=None, timeout=DEFAULT_TIMEOUT,
            share=DEFAULT_SHARE, directory='', connect=True):
        """Record connection parameters; no network traffic happens here.

        :param addr: host name or IP address of the instrument
        :param username: SMB user name
        :param password: SMB password
        :param netbios_name: NetBIOS name, or None to resolve when needed
        :param timeout: timeout passed to the SMB layer
        :param share: name of the SMB share holding the data
        :param directory: directory within the share
        :param connect: whether __enter__ should open the connection
        """
        self.addr = addr
        self.username = username
        self.password = password
        self.timeout = timeout
        self.share = share
        self.connect = connect
        self.netbios_name = netbios_name
        self.directory = directory
        self._c = None

    def open(self):
        """Open the SMB connection; no-op if already open.

        :raises IfcbConnectionError: if the connection cannot be established
        """
        if self._c is not None:
            return
        try:
            self._c = smb_connect(self.addr, self.username, self.password, self.netbios_name, self.timeout)
        except Exception:
            # narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
            # still propagate
            raise IfcbConnectionError('unable to connect to IFCB')

    def close(self):
        """Close the SMB connection if open; safe to call repeatedly."""
        if self._c is not None:
            self._c.close()
            self._c = None

    def __enter__(self):
        if self.connect:
            self.open()
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def ensure_connected(self):
        """Raise IfcbConnectionError unless open() has succeeded."""
        if self._c is None:
            raise IfcbConnectionError('IFCB is not connected')

    def is_responding(self):
        """Return True if the IFCB appears to be reachable."""
        # tries to get NetBIOS name to see if IFCB is responding
        if self.netbios_name is not None:
            return True  # FIXME determine connection state
        if self._c is not None:
            return True
        try:
            get_netbios_name(self.addr, timeout=self.timeout)
            return True
        except Exception:
            return False

    def list_shares(self):
        """Yield the names of the disk shares the instrument exposes."""
        self.ensure_connected()
        for share in self._c.listShares():
            if share.type == SharedDevice.DISK_TREE:
                yield share.name

    def share_exists(self):
        """Return True if self.share exists (case-insensitive match)."""
        self.ensure_connected()
        for share in self.list_shares():
            if share.lower() == self.share.lower():
                return True
        return False

    def list_filesets(self):
        """List complete fileset lids (hdr+adc+roi all present), most recent first."""
        self.ensure_connected()
        fs = defaultdict(lambda: 0)
        for f in self._c.listPath(self.share, self.directory):
            if f.isDirectory:
                continue
            lid, ext = os.path.splitext(f.filename)
            if ext in ['.hdr', '.roi', '.adc']:
                fs[lid] += 1
        complete_sets = []
        for lid, c in fs.items():
            if c == 3:  # all three extensions present -> complete fileset
                complete_sets.append(lid)
        return sorted(complete_sets, reverse=True)

    def transfer_fileset(self, lid, local_directory, skip_existing=True, create_directories=True):
        """Copy one fileset (hdr/adc/roi) into local_directory.

        Each file is downloaded to a '.temp_download' path and renamed into
        place afterwards, so an interrupted download never shadows a
        complete file. With skip_existing, a local file is skipped only if
        its size matches the remote file's size.

        :returns: True if at least one file was copied
        """
        self.ensure_connected()
        if create_directories:
            os.makedirs(local_directory, exist_ok=True)
        n_copied = 0
        for ext in ['hdr', 'adc', 'roi']:
            fn = '{}.{}'.format(lid, ext)
            local_path = os.path.join(local_directory, fn)
            remote_path = os.path.join(self.directory, fn)
            temp_local_path = local_path + '.temp_download'
            if skip_existing and os.path.exists(local_path):
                lf_size = os.path.getsize(local_path)
                rf = self._c.getAttributes(self.share, remote_path)
                if lf_size == rf.file_size:
                    continue
            with open(temp_local_path, 'wb') as fout:
                self._c.retrieveFile(self.share, remote_path, fout, timeout=self.timeout)
            os.rename(temp_local_path, local_path)
            n_copied += 1
        return n_copied > 0

    def delete_fileset(self, lid):
        """Delete all three files of a fileset from the instrument."""
        self.ensure_connected()
        for ext in ['hdr', 'adc', 'roi']:
            self._c.deleteFiles(self.share, '{}.{}'.format(lid, ext))

    def sync(self, local_directory, progress_callback=do_nothing, fileset_callback=do_nothing):
        """Copy every complete fileset to local storage.

        local_directory can be
        * a path, or
        * a callable returning a path when passed a bin lid

        progress_callback is invoked after each fileset with a dict holding
        'total', 'copied', 'failed' and the current 'lid'; fileset_callback
        is invoked with the lid of each fileset actually copied.
        """
        self.ensure_connected()
        fss = self.list_filesets()
        copied = []
        failed = []
        for lid in fss:
            try:
                if callable(local_directory):
                    destination_directory = local_directory(lid)
                else:
                    # BUG FIX: destination_directory was previously never set
                    # for a plain-path local_directory, raising NameError
                    # (swallowed by a bare except, so every fileset was
                    # silently recorded as failed)
                    destination_directory = local_directory
                was_copied = self.transfer_fileset(lid, destination_directory, skip_existing=True)
                if was_copied:
                    copied.append(lid)
                    fileset_callback(lid)
            except Exception:
                failed.append(lid)
            progress_callback({
                'total': len(fss),
                'copied': copied,
                'failed': failed,
                'lid': lid
            })
| {
"content_hash": "ae8b7173cfc845dfc3495c866c982732",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 107,
"avg_line_length": 36,
"alnum_prop": 0.5489130434782609,
"repo_name": "joefutrelle/pyifcb",
"id": "0c37ea582882e6065887aeec5c9a54eeaf1ac60d",
"size": "4968",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ifcb/data/transfer/remote.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "161062"
}
],
"symlink_target": ""
} |
/****************************************************************************
** Meta object code from reading C++ file 'mintingview.h'
**
** Created by: The Qt Meta Object Compiler version 67 (Qt 5.9.5)
**
** WARNING! All changes made in this file will be lost!
*****************************************************************************/
// NOTE(review): machine-generated by moc — regenerate from mintingview.h
// rather than hand-editing; only comments were added here.
#include "../src/qt/mintingview.h"
#include <QtCore/qbytearray.h>
#include <QtCore/qmetatype.h>
#if !defined(Q_MOC_OUTPUT_REVISION)
#error "The header file 'mintingview.h' doesn't include <QObject>."
#elif Q_MOC_OUTPUT_REVISION != 67
#error "This file was generated using the moc from 5.9.5. It"
#error "cannot be used with the include files from this version of Qt."
#error "(The moc has changed too much.)"
#endif
QT_BEGIN_MOC_NAMESPACE
QT_WARNING_PUSH
QT_WARNING_DISABLE_DEPRECATED
// Interned-string table: names of the class, its slots and parameters,
// addressed by byte offsets into stringdata0.
struct qt_meta_stringdata_MintingView_t {
    QByteArrayData data[5];
    char stringdata0[53];
};
#define QT_MOC_LITERAL(idx, ofs, len) \
    Q_STATIC_BYTE_ARRAY_DATA_HEADER_INITIALIZER_WITH_OFFSET(len, \
    qptrdiff(offsetof(qt_meta_stringdata_MintingView_t, stringdata0) + ofs \
        - idx * sizeof(QByteArrayData)) \
    )
static const qt_meta_stringdata_MintingView_t qt_meta_stringdata_MintingView = {
    {
QT_MOC_LITERAL(0, 0, 11), // "MintingView"
QT_MOC_LITERAL(1, 12, 13), // "exportClicked"
QT_MOC_LITERAL(2, 26, 0), // ""
QT_MOC_LITERAL(3, 27, 21), // "chooseMintingInterval"
QT_MOC_LITERAL(4, 49, 3) // "idx"
    },
    "MintingView\0exportClicked\0\0"
    "chooseMintingInterval\0idx"
};
#undef QT_MOC_LITERAL
// Meta-method table: declares two public slots, exportClicked() and
// chooseMintingInterval(int idx).
static const uint qt_meta_data_MintingView[] = {
 // content:
       7,       // revision
       0,       // classname
       0,    0, // classinfo
       2,   14, // methods
       0,    0, // properties
       0,    0, // enums/sets
       0,    0, // constructors
       0,       // flags
       0,       // signalCount
 // slots: name, argc, parameters, tag, flags
       1,    0,   24,    2, 0x0a /* Public */,
       3,    1,   25,    2, 0x0a /* Public */,
 // slots: parameters
    QMetaType::Void,
    QMetaType::Void, QMetaType::Int,    4,
       0        // eod
};
// Dispatches an InvokeMetaMethod call to the slot with local index _id.
void MintingView::qt_static_metacall(QObject *_o, QMetaObject::Call _c, int _id, void **_a)
{
    if (_c == QMetaObject::InvokeMetaMethod) {
        MintingView *_t = static_cast<MintingView *>(_o);
        Q_UNUSED(_t)
        switch (_id) {
        case 0: _t->exportClicked(); break;
        case 1: _t->chooseMintingInterval((*reinterpret_cast< int(*)>(_a[1]))); break;
        default: ;
        }
    }
}
const QMetaObject MintingView::staticMetaObject = {
    { &QWidget::staticMetaObject, qt_meta_stringdata_MintingView.data,
      qt_meta_data_MintingView, qt_static_metacall, nullptr, nullptr}
};
const QMetaObject *MintingView::metaObject() const
{
    return QObject::d_ptr->metaObject ? QObject::d_ptr->dynamicMetaObject() : &staticMetaObject;
}
void *MintingView::qt_metacast(const char *_clname)
{
    if (!_clname) return nullptr;
    if (!strcmp(_clname, qt_meta_stringdata_MintingView.stringdata0))
        return static_cast<void*>(this);
    return QWidget::qt_metacast(_clname);
}
int MintingView::qt_metacall(QMetaObject::Call _c, int _id, void **_a)
{
    _id = QWidget::qt_metacall(_c, _id, _a);
    if (_id < 0)
        return _id;
    if (_c == QMetaObject::InvokeMetaMethod) {
        if (_id < 2)
            qt_static_metacall(this, _c, _id, _a);
        _id -= 2; // 2 = number of methods this class introduces
    } else if (_c == QMetaObject::RegisterMethodArgumentMetaType) {
        if (_id < 2)
            *reinterpret_cast<int*>(_a[0]) = -1;
        _id -= 2;
    }
    return _id;
}
QT_WARNING_POP
QT_END_MOC_NAMESPACE
| {
"content_hash": "20c90970bdc11fd38080fa59e582151a",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 96,
"avg_line_length": 30.722689075630253,
"alnum_prop": 0.5905361050328227,
"repo_name": "FourTwentyOne/421",
"id": "c11d890b1546d7dcd35d4a631f53221566698ad1",
"size": "3656",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "build/moc_mintingview.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "8565"
},
{
"name": "C++",
"bytes": "1593460"
},
{
"name": "Makefile",
"bytes": "89377"
},
{
"name": "NSIS",
"bytes": "6074"
},
{
"name": "Objective-C",
"bytes": "858"
},
{
"name": "Objective-C++",
"bytes": "3537"
},
{
"name": "Python",
"bytes": "50532"
},
{
"name": "QMake",
"bytes": "15241"
},
{
"name": "Roff",
"bytes": "12841"
},
{
"name": "Shell",
"bytes": "3859"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>equations: Not compatible 👼</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.5.1 / equations - 1.0~beta2+8.7</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
equations
<small>
1.0~beta2+8.7
<span class="label label-info">Not compatible 👼</span>
</small>
</h1>
<p>📅 <em><script>document.write(moment("2022-09-12 23:10:41 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-09-12 23:10:41 UTC)</em><p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-num base Num library distributed with the OCaml compiler
base-threads base
base-unix base
camlp5 7.14 Preprocessor-pretty-printer of OCaml
conf-findutils 1 Virtual package relying on findutils
conf-perl 2 Virtual package relying on perl
coq 8.5.1 Formal proof management system
num 0 The Num library for arbitrary-precision integer and rational arithmetic
ocaml 4.05.0 The OCaml compiler (virtual package)
ocaml-base-compiler 4.05.0 Official 4.05.0 release
ocaml-config 1 OCaml Switch Configuration
# opam file:
opam-version: "2.0"
authors: [ "Matthieu Sozeau <[email protected]>" "Cyprien Mangin <[email protected]>" ]
dev-repo: "git+https://github.com/mattam82/Coq-Equations.git"
maintainer: "[email protected]"
homepage: "https://mattam82.github.io/Coq-Equations"
bug-reports: "https://github.com/mattam82/Coq-Equations/issues"
license: "LGPL 2.1"
build: [
["coq_makefile" "-f" "_CoqProject" "-o" "Makefile"]
[make "-j%{jobs}%"]
]
install: [
[make "install"]
]
remove: ["rm" "-R" "%{lib}%/coq/user-contrib/Equations"]
depends: [
"ocaml"
"coq" {>= "8.7" & < "8.8"}
]
synopsis: "A function definition package for Coq"
flags: light-uninstall
url {
src:
"https://github.com/mattam82/Coq-Equations/archive/v1.0-8.7-beta2.tar.gz"
checksum: "md5=d281835d0762424b23c9aebf4a6d8921"
}
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install 🏜️</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-equations.1.0~beta2+8.7 coq.8.5.1</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.5.1).
The following dependencies couldn't be met:
- coq-equations -> coq >= 8.7 -> ocaml >= 4.09.0
base of this switch (use `--unlock-base' to force)
Your request can't be satisfied:
- No available version of coq satisfies the constraints
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-equations.1.0~beta2+8.7</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install 🚀</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall 🧹</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
| {
"content_hash": "3a91f30612ba018377ea94059cda81eb",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 159,
"avg_line_length": 40.964285714285715,
"alnum_prop": 0.537198488811392,
"repo_name": "coq-bench/coq-bench.github.io",
"id": "112da7912cefe589186f9d69146761328aa40be2",
"size": "6907",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clean/Linux-x86_64-4.05.0-2.0.1/released/8.5.1/equations/1.0~beta2+8.7.html",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
google-site-verification: google75f5f31b628404a4.html | {
"content_hash": "9021dddaf18c4ff6d1126bc08131a824",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 53,
"avg_line_length": 53,
"alnum_prop": 0.9056603773584906,
"repo_name": "vmcosta/vmcosta.github.io",
"id": "c275d104670e511e1091404795b68cc1536bd796",
"size": "53",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "google75f5f31b628404a4.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "2519"
},
{
"name": "CSS",
"bytes": "52103"
},
{
"name": "HTML",
"bytes": "17693"
},
{
"name": "JavaScript",
"bytes": "20894"
},
{
"name": "Nginx",
"bytes": "2342"
},
{
"name": "PHP",
"bytes": "711369"
}
],
"symlink_target": ""
} |
using System;
using System.Web;
using System.IO;
using System.Collections;
using System.Collections.Specialized;
using System.Security.Principal;
using System.Threading;
using log4net;
using FluorineFx.Messaging;
using FluorineFx.Messaging.Api;
using FluorineFx.Messaging.Messages;
using FluorineFx.Security;
using FluorineFx.Messaging.Rtmp;
namespace FluorineFx.Context
{
/// <summary>
/// This type supports the Fluorine infrastructure and is not intended to be used directly from your code.
/// </summary>
    sealed class FluorineRtmpContext : FluorineContext
    {
        private static readonly ILog log = LogManager.GetLogger(typeof(FluorineRtmpContext));

        /// <summary>
        /// Captures connection, session and client from the given RTMP
        /// connection; renews the client lease when one is present.
        /// </summary>
        private FluorineRtmpContext(IConnection connection)
        {
            _connection = connection;
            _session = connection.Session;
            _client = connection.Client;
            if (_client != null)
                _client.Renew();
        }

        /// <summary>
        /// Creates a context for <paramref name="connection"/> and installs
        /// it into the call context so it is reachable for the duration of
        /// the current logical call.
        /// </summary>
        internal static void Initialize(IConnection connection)
        {
            FluorineRtmpContext fluorineContext = new FluorineRtmpContext(connection);
            WebSafeCallContext.SetData(FluorineContext.FluorineContextKey, fluorineContext);
            if (log.IsDebugEnabled)
                log.Debug(__Res.GetString(__Res.Context_Initialized, connection.ConnectionId, connection.Client != null ? connection.Client.Id : "[not set]", connection.Session != null ? connection.Session.Id : "[not set]"));
        }

        // NOTE(review): public parameterless constructor leaves all context
        // members unset; presumably required for serialization/infrastructure
        // use — confirm before removing.
        public FluorineRtmpContext()
        {
        }
        /// <summary>
        /// Gets the physical drive path of the application directory for the application hosted in the current application domain.
        /// </summary>
        public override string RootPath
        {
            get
            {
                //return HttpRuntime.AppDomainAppPath;
                return AppDomain.CurrentDomain.BaseDirectory;
            }
        }
        /// <summary>
        /// Gets the virtual path of the current request. Always null for RTMP connections.
        /// </summary>
        public override string RequestPath
        {
            get { return null; }
        }
        /// <summary>
        /// Gets the ASP.NET application's virtual application root path on the server. Always null for RTMP connections.
        /// </summary>
        public override string RequestApplicationPath
        {
            get { return null; }
        }
        /// <summary>
        /// Gets the application path. Always null for RTMP connections.
        /// </summary>
        public override string ApplicationPath
        {
            get
            {
                return null;
            }
        }
        /// <summary>
        /// Gets the absolute URI from the URL of the current request. Always null for RTMP connections.
        /// </summary>
        public override string AbsoluteUri
        {
            get{ return null; }
        }
        /// <summary>
        /// Gets the activation mode. Always null for RTMP connections.
        /// </summary>
        public override string ActivationMode
        {
            get
            {
                return null;
            }
        }
        /// <summary>
        /// Gets the physical application path, i.e. the current AppDomain's base directory.
        /// </summary>
        public override string PhysicalApplicationPath
        {
            get
            {
                //return HttpRuntime.AppDomainAppPath;
                return AppDomain.CurrentDomain.BaseDirectory;
            }
        }
    }
}
| {
"content_hash": "8683b7a57251ccc3b7a99d0a4b75011c",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 225,
"avg_line_length": 26.066037735849058,
"alnum_prop": 0.6572566051393413,
"repo_name": "gspark/PmsAssistant",
"id": "4b53c176c7fbbce84afc5951d9db690f510114de",
"size": "3592",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "FluorineFx/Context/FluorineRtmpContext.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "5383235"
}
],
"symlink_target": ""
} |
// Utilities for dealing with XLA primitive types.
#ifndef TENSORFLOW_COMPILER_XLA_PRIMITIVE_UTIL_H_
#define TENSORFLOW_COMPILER_XLA_PRIMITIVE_UTIL_H_
#include <type_traits>
#include "tensorflow/compiler/xla/types.h"
#include "tensorflow/compiler/xla/xla_data.pb.h"
namespace xla {
namespace primitive_util {
// Returns the XLA primitive type (eg, F32) corresponding to the given
// template parameter native type (eg, float).
template <typename NativeT>
PrimitiveType NativeToPrimitiveType() {
  // Make the expression depend on the template parameter NativeT so
  // that this compile-time error only apperas if this function is
  // instantiated with some concrete type that is not specialized
  // below.
  static_assert(!std::is_same<NativeT, NativeT>::value,
                "Cannot map native type to primitive type.");
  return PRIMITIVE_TYPE_INVALID;
}
// Declarations of specializations for each native type which correspond to a
// XLA primitive type.
template <>
PrimitiveType NativeToPrimitiveType<bool>();
// Unsigned integer
template <>
PrimitiveType NativeToPrimitiveType<uint8>();
template <>
PrimitiveType NativeToPrimitiveType<uint16>();
template <>
PrimitiveType NativeToPrimitiveType<uint32>();
template <>
PrimitiveType NativeToPrimitiveType<uint64>();
// Signed integer
template <>
PrimitiveType NativeToPrimitiveType<int8>();
template <>
PrimitiveType NativeToPrimitiveType<int16>();
template <>
PrimitiveType NativeToPrimitiveType<int32>();
template <>
PrimitiveType NativeToPrimitiveType<int64>();
// Floating point
template <>
PrimitiveType NativeToPrimitiveType<float>();
template <>
PrimitiveType NativeToPrimitiveType<double>();
template <>
PrimitiveType NativeToPrimitiveType<half>();
// Complex
template <>
PrimitiveType NativeToPrimitiveType<complex64>();
// Category predicates over PrimitiveType values.
bool IsFloatingPointType(PrimitiveType type);
bool IsComplexType(PrimitiveType type);
bool IsSignedIntegralType(PrimitiveType type);
bool IsUnsignedIntegralType(PrimitiveType type);
bool IsIntegralType(PrimitiveType type);
// Returns the number of bits in the representation for a given type.
int BitWidth(PrimitiveType type);
// Returns the real, imag component type underlying the given complex type.
// LOG(FATAL)'s if complex_type is not complex.
PrimitiveType ComplexComponentType(PrimitiveType complex_type);
// Returns the native type (eg, float) corresponding to the given template
// parameter XLA primitive type (eg, F32).
// The primary template is declared but not defined, so instantiating it
// with an unmapped PrimitiveType fails to compile.
template <PrimitiveType>
struct PrimitiveTypeToNative;
// Declarations of specializations for each native type which correspond to a
// XLA primitive type.
template <>
struct PrimitiveTypeToNative<PRED> {
  using type = bool;
};
// Unsigned integer
template <>
struct PrimitiveTypeToNative<U8> {
  using type = uint8;
};
template <>
struct PrimitiveTypeToNative<U16> {
  using type = uint16;
};
template <>
struct PrimitiveTypeToNative<U32> {
  using type = uint32;
};
template <>
struct PrimitiveTypeToNative<U64> {
  using type = uint64;
};
// Signed integer
template <>
struct PrimitiveTypeToNative<S8> {
  using type = int8;
};
template <>
struct PrimitiveTypeToNative<S16> {
  using type = int16;
};
template <>
struct PrimitiveTypeToNative<S32> {
  using type = int32;
};
template <>
struct PrimitiveTypeToNative<S64> {
  using type = int64;
};
// Floating point
template <>
struct PrimitiveTypeToNative<F32> {
  using type = float;
};
template <>
struct PrimitiveTypeToNative<F64> {
  using type = double;
};
template <>
struct PrimitiveTypeToNative<F16> {
  using type = half;
};
// Complex
template <>
struct PrimitiveTypeToNative<C64> {
  using type = complex64;
};
}  // namespace primitive_util
}  // namespace xla
#endif  // TENSORFLOW_COMPILER_XLA_PRIMITIVE_UTIL_H_
| {
"content_hash": "0fcdee3e0ffde7b72929d83dda1f72fb",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 77,
"avg_line_length": 22.666666666666668,
"alnum_prop": 0.7593582887700535,
"repo_name": "dyoung418/tensorflow",
"id": "a49c8b86fcfe156ea3733ce05c0fb7337cf60dce",
"size": "4408",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/compiler/xla/primitive_util.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "155915"
},
{
"name": "C++",
"bytes": "9052366"
},
{
"name": "CMake",
"bytes": "29372"
},
{
"name": "CSS",
"bytes": "1297"
},
{
"name": "HTML",
"bytes": "763492"
},
{
"name": "Java",
"bytes": "38854"
},
{
"name": "JavaScript",
"bytes": "10779"
},
{
"name": "Jupyter Notebook",
"bytes": "1772913"
},
{
"name": "Protocol Buffer",
"bytes": "110178"
},
{
"name": "Python",
"bytes": "6032114"
},
{
"name": "Shell",
"bytes": "165125"
},
{
"name": "TypeScript",
"bytes": "403037"
}
],
"symlink_target": ""
} |
namespace _01.Last_3_Consecutive_Equal_Strings
{
using System;
using System.Linq;
public class LastThreeConsecutiveEqualStrings
{
public static void Main()
{
//var text = "one one one one two hi hi my echo last last last pi";
var words = Console.ReadLine()
.Split(new char[] { ' ' },
StringSplitOptions.RemoveEmptyEntries)
.ToArray();
LastThreeEqualStrings(words);
}
static void LastThreeEqualStrings(string[] words)
{
var len = words.Length;
var count = 1;
for (int i = len - 1; i > 0; i--)
{
var word = words[i];
var compare = words[i-1];
if (word == compare)
{
count++;
if (count == 3)
{
for (int j = 0; j < 3; j++)
{
Console.Write($"{word} ");
}
Console.WriteLine();
break;
}
}
else
{
count = 1;
}
}
}
}
}
| {
"content_hash": "c225cff5686dfc85da981186e812bdcd",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 79,
"avg_line_length": 25.92156862745098,
"alnum_prop": 0.3585476550680787,
"repo_name": "1ooIL40/FundamentalsExtendetRepo",
"id": "3b22e2251f4ec8917a00471ef15d72489d6e70e0",
"size": "1324",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Simple Arrays - More Exercises/01. Last 3 Consecutive Equal Strings/LastThreeConsecutiveEqualStrings.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "505444"
}
],
"symlink_target": ""
} |
const START_SPEAKING_ACTIVITY = 'WEB_CHAT/START_SPEAKING';

// Action creator: builds the action that tells Web Chat to start speaking.
function startSpeakingActivity() {
  return { type: START_SPEAKING_ACTIVITY };
}

export default startSpeakingActivity;
export { START_SPEAKING_ACTIVITY };
| {
"content_hash": "7eac3be90cd7e5f6f17d023aab21ef31",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 58,
"avg_line_length": 22.11111111111111,
"alnum_prop": 0.7336683417085427,
"repo_name": "billba/botchat",
"id": "79122bd510580d9de64a80d13912e05cfbba5876",
"size": "199",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/core/src/actions/startSpeakingActivity.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10340"
},
{
"name": "HTML",
"bytes": "1438"
},
{
"name": "JavaScript",
"bytes": "1845029"
},
{
"name": "TypeScript",
"bytes": "66519"
}
],
"symlink_target": ""
} |
import hassapi as hass
import globals
#
# App to send notification when door opened or closed
#
# Args:
#
# sensor: sensor to monitor e.g. input_binary.hall
#
# Release Notes
#
# Version 1.0:
# Initial Version
class DoorNotification(hass.Hass):
    """AppDaemon app that sends a notification when a monitored door
    sensor changes between open and closed.

    Args:
        sensor: sensor(s) to monitor, e.g. input_binary.hall; when omitted,
                the whole "binary_sensor" domain is watched.
    """

    def initialize(self):
        # Register the state_change callback for each configured sensor.
        if "sensor" in self.args:
            for sensor in self.split_device_list(self.args["sensor"]):
                self.listen_state(self.state_change, sensor)
        else:
            # BUG FIX: this previously registered self.motion, which is not
            # defined on this class and raised AttributeError; the intended
            # callback is state_change.
            self.listen_state(self.state_change, "binary_sensor")

    def state_change(self, entity, attribute, old, new, kwargs):
        # Map the raw sensor state onto a human-readable open/closed label.
        if new == "on" or new == "open":
            state = "open"
        else:
            state = "closed"
        self.log("{} is {}".format(self.friendly_name(entity), state))
        self.notify("{} is {}".format(self.friendly_name(entity), state), name=globals.notify)
| {
"content_hash": "13a323a4821dcedf6eff673c8c4cbc91",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 94,
"avg_line_length": 27.70967741935484,
"alnum_prop": 0.6123399301513388,
"repo_name": "acockburn/appdaemon",
"id": "90c00182e0eda9c5000fef827c9ad91ce04397f5",
"size": "859",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "conf/example_apps/door_notification.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "96201"
},
{
"name": "Shell",
"bytes": "1768"
}
],
"symlink_target": ""
} |
<?php
/**
* @see Zend_Db_Profiler_TestCommon
*/
require_once 'Zend/Db/Profiler/TestCommon.php';
/**
* @category Zend
* @package Zend_Db
* @subpackage UnitTests
* @copyright Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @group Zend_Db
* @group Zend_Db_Profiler
*/
class Zend_Db_Profiler_Db2Test extends Zend_Db_Profiler_TestCommon
{
    /**
     * Name of the database adapter driver exercised by this test case.
     *
     * @return string
     */
    public function getDriver()
    {
        return 'Db2';
    }

    /**
     * Skipped: parameter binding is problematic on this driver.
     */
    public function testProfilerPreparedStatementWithBoundParams()
    {
        $this->markTestIncomplete($this->getDriver() . ' is having trouble with binding params');
    }
}
| {
"content_hash": "c9afa57487a3e4542a8dabc3192386c6",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 97,
"avg_line_length": 19.97222222222222,
"alnum_prop": 0.6564673157162726,
"repo_name": "djozsef/zf1",
"id": "98c7c7931006e0c1d81a53c86b87dafb071838fd",
"size": "1481",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "tests/Zend/Db/Profiler/Db2Test.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
namespace XbyakSharp
{
    public class CPUID : CodeGenerator
    {
        // Signature of the JIT-emitted stub: writes EAX/EBX/ECX/EDX for the
        // requested CPUID leaf into the 4-element result array.
        [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
        private delegate void GetCPUIDDelegate(int level, int[] result);
        private static CPUID instance = null;
        private static GetCPUIDDelegate func = null;
        // Emit the stub once per process and cache the callable delegate.
        static CPUID()
        {
            instance = new CPUID();
            func = instance.GetDelegate<GetCPUIDDelegate>();
        }
        // Emits the machine code for the stub. Statement order is the emitted
        // instruction order — do not reorder.
        private CPUID()
        {
            if (Environment.Is64BitProcess)
            {
                // NOTE(review): assumes the Win-x64 convention (rcx = level,
                // rdx = result pointer) — confirm against CodeGenerator.
                // rbx is callee-saved, so it is stashed in r10 around cpuid.
                mov(r9, rdx);
                mov(r10, rbx);
                mov(rax, rcx);
                cpuid();
                // Store the four result registers into the int[] via r9.
                mov(dword[r9], eax);
                mov(dword[r9 + 4], ebx);
                mov(dword[r9 + 8], ecx);
                mov(dword[r9 + 12], edx);
                mov(rbx, r10);
            }
            else
            {
                // 32-bit cdecl: args on the stack; ebx/esi are preserved.
                push(ebx);
                push(esi);
                mov(eax, dword[esp + 8 + 4]);
                cpuid();
                mov(esi, dword[esp + 8 + 8]);
                mov(dword[esi], eax);
                mov(dword[esi + 4], ebx);
                mov(dword[esi + 8], ecx);
                mov(dword[esi + 12], edx);
                pop(esi);
                pop(ebx);
            }
            ret();
        }
        /// <summary>
        /// Executes CPUID for the given leaf and returns
        /// [EAX, EBX, ECX, EDX].
        /// </summary>
        public static int[] Exec(int level)
        {
            int[] result = new int[4];
            func(level, result);
            return result;
        }
    }
}
| {
"content_hash": "9734a1089e913cc1f8bdddb3325d15f0",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 72,
"avg_line_length": 27.112903225806452,
"alnum_prop": 0.4265318262938727,
"repo_name": "mes51/XbyakSharp",
"id": "39fa78c67caec82e918fe5c3a71e3294690660a5",
"size": "1683",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "XbyakSharp/CPUID.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C#",
"bytes": "468913"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Text;
using MinorShift.Emuera.GameProc;
using MinorShift.Emuera.GameData.Variable;
namespace MinorShift.Emuera.GameData.Function
{
	/// <summary>
	/// Describes a user-defined function used as a reference method:
	/// its name, return type and argument type list, plus the resolved
	/// target function once <see cref="SetReference"/> has been called.
	/// </summary>
	internal sealed class UserDefinedRefMethod
	{
		public CalledFunction CalledFunction { get; private set; }
		public string Name { get; private set; }
		public Type RetType { get; private set; }
		public UserDifinedFunctionDataArgType[] ArgTypeList { get; private set; }
		/// <summary>
		/// Builds a UserDefinedRefMethod from parsed function data: the
		/// return type is string when TypeIsStr, Int64 otherwise.
		/// </summary>
		internal static UserDefinedRefMethod Create(UserDefinedFunctionData funcData)
		{
			UserDefinedRefMethod ret = new UserDefinedRefMethod();
			ret.Name = funcData.Name;
			if (funcData.TypeIsStr)
				ret.RetType = typeof(string);
			else
				ret.RetType = typeof(Int64);
			ret.ArgTypeList = funcData.ArgList;
			return ret;
		}
		/// <summary>
		/// Requires an exact match of the return type and of the number and
		/// types of the arguments.
		/// </summary>
		/// <param name="call">Called function to compare against.</param>
		/// <returns>true if they match.</returns>
		internal bool MatchType(CalledFunction call)
		{
			FunctionLabelLine label = call.TopLabel;
			if (label.IsError)
				return false;
			if (RetType != label.MethodType)
				return false;
			if (ArgTypeList.Length != label.Arg.Length)
				return false;
			for (int i = 0; i < ArgTypeList.Length; i++)
			{
				VariableToken vToken = label.Arg[i].Identifier;
				if (vToken.IsReference)
				{
					// Rebuild the expected flag value for a reference
					// argument: __Ref + dimension, plus the Int/Str bit.
					UserDifinedFunctionDataArgType type = UserDifinedFunctionDataArgType.__Ref;
					type += vToken.Dimension;
					if (vToken.IsInteger)
						type |= UserDifinedFunctionDataArgType.Int;
					else
						type |= UserDifinedFunctionDataArgType.Str;
					if (ArgTypeList[i] != type)
						return false;
				}
				else
				{
					if (vToken.IsInteger && ArgTypeList[i] != UserDifinedFunctionDataArgType.Int)
						return false;
					if (vToken.IsString && ArgTypeList[i] != UserDifinedFunctionDataArgType.Str)
						return false;
				}
			}
			return true;
		}
		/// <summary>
		/// Requires an exact match of the return type and of the number and
		/// types of the arguments.
		/// </summary>
		/// <param name="rother">The other reference method to compare against.</param>
		/// <returns>true if they match.</returns>
		internal bool MatchType(UserDefinedRefMethod rother)
		{
			if (RetType != rother.RetType)
				return false;
			if (ArgTypeList.Length != rother.ArgTypeList.Length)
				return false;
			for (int i = 0; i < ArgTypeList.Length; i++)
			{
				if (ArgTypeList[i] != rother.ArgTypeList[i])
					return false;
			}
			return true;
		}
		/// <summary>
		/// Records the resolved target function for this reference method.
		/// </summary>
		internal void SetReference(CalledFunction call)
		{
			CalledFunction = call;
		}
	}
}
| {
"content_hash": "addf252ad33d70eb33116468cae80775",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 83,
"avg_line_length": 26.67032967032967,
"alnum_prop": 0.6814997939843428,
"repo_name": "Riey/EmueraFramework",
"id": "3405767ac10e582a5bfca221af80116573de469e",
"size": "2517",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Emuera/GameData/Function/UserDefinedRefMethod.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "1565329"
}
],
"symlink_target": ""
} |
<?php
require_once __DIR__.'/Base.php';
use Model\Config;
use Model\Task;
use Model\TaskCreation;
use Model\TaskModification;
use Model\Project;
use Model\Comment;
use Subscriber\WebhookSubscriber;
class WebhookTest extends Base
{
    public function testTaskCreation()
    {
        $c = new Config($this->container);
        $p = new Project($this->container);
        $tc = new TaskCreation($this->container);

        $this->container['dispatcher']->addSubscriber(new WebhookSubscriber($this->container));
        $c->save(array('webhook_url' => 'http://localhost/?task-creation'));

        $this->assertEquals(1, $p->create(array('name' => 'test')));
        $this->assertEquals(1, $tc->create(array('project_id' => 1, 'title' => 'test')));

        // The webhook token is appended to the configured URL.
        $this->assertStringStartsWith('http://localhost/?task-creation&token=', $this->container['httpClient']->getUrl());

        $this->assertTaskEvent($this->container['httpClient']->getData(), 'task.create');
    }

    public function testTaskModification()
    {
        $c = new Config($this->container);
        $p = new Project($this->container);
        $tc = new TaskCreation($this->container);
        $tm = new TaskModification($this->container);

        $this->container['dispatcher']->addSubscriber(new WebhookSubscriber($this->container));
        $c->save(array('webhook_url' => 'http://localhost/modif/'));

        $this->assertEquals(1, $p->create(array('name' => 'test')));
        $this->assertEquals(1, $tc->create(array('project_id' => 1, 'title' => 'test')));
        $this->assertTrue($tm->update(array('id' => 1, 'title' => 'test update')));

        $this->assertStringStartsWith('http://localhost/modif/?token=', $this->container['httpClient']->getUrl());

        $this->assertTaskEvent($this->container['httpClient']->getData(), 'task.update');
    }

    public function testCommentCreation()
    {
        $c = new Config($this->container);
        $p = new Project($this->container);
        $tc = new TaskCreation($this->container);
        $cm = new Comment($this->container);

        $this->container['dispatcher']->addSubscriber(new WebhookSubscriber($this->container));
        $c->save(array('webhook_url' => 'http://localhost/comment'));

        $this->assertEquals(1, $p->create(array('name' => 'test')));
        $this->assertEquals(1, $tc->create(array('project_id' => 1, 'title' => 'test')));
        $this->assertEquals(1, $cm->create(array('task_id' => 1, 'comment' => 'test comment', 'user_id' => 1)));

        $this->assertStringStartsWith('http://localhost/comment?token=', $this->container['httpClient']->getUrl());

        $event = $this->container['httpClient']->getData();
        $this->assertNotEmpty($event);
        $this->assertArrayHasKey('event_name', $event);
        $this->assertArrayHasKey('event_data', $event);
        $this->assertEquals('comment.create', $event['event_name']);
        $this->assertNotEmpty($event['event_data']);
        $this->assertArrayHasKey('task_id', $event['event_data']);
        $this->assertArrayHasKey('user_id', $event['event_data']);
        $this->assertArrayHasKey('comment', $event['event_data']);
        $this->assertArrayHasKey('id', $event['event_data']);
        $this->assertEquals('test comment', $event['event_data']['comment']);
    }

    /**
     * Asserts the common shape of a task webhook payload.
     *
     * Shared by the task-creation and task-modification tests, which
     * previously duplicated these assertions line for line.
     *
     * @param mixed  $event             Decoded webhook payload.
     * @param string $expectedEventName Expected value of the 'event_name' key.
     */
    private function assertTaskEvent($event, $expectedEventName)
    {
        $this->assertNotEmpty($event);
        $this->assertArrayHasKey('event_name', $event);
        $this->assertArrayHasKey('event_data', $event);
        $this->assertEquals($expectedEventName, $event['event_name']);
        $this->assertNotEmpty($event['event_data']);

        $expectedKeys = array(
            'project_id', 'task_id', 'title', 'column_id', 'color_id',
            'swimlane_id', 'date_creation', 'date_modification',
            'date_moved', 'position',
        );
        foreach ($expectedKeys as $key) {
            $this->assertArrayHasKey($key, $event['event_data']);
        }
    }
}
| {
"content_hash": "3788a859779f569b20cfc995f9a3202e",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 122,
"avg_line_length": 46.294642857142854,
"alnum_prop": 0.6167791706846673,
"repo_name": "fabiano-pereira/kanboard",
"id": "946d744c8c961a97c93275564e2d8916b76b7664",
"size": "5185",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "tests/units/WebhookTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "230"
},
{
"name": "CSS",
"bytes": "29876"
},
{
"name": "JavaScript",
"bytes": "54130"
},
{
"name": "Makefile",
"bytes": "3923"
},
{
"name": "PHP",
"bytes": "2968349"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
Index Fungorum
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "5d238ec4cfda5e2db77aeed1ef6015c8",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 18,
"avg_line_length": 8.384615384615385,
"alnum_prop": 0.6788990825688074,
"repo_name": "mdoering/backbone",
"id": "e0c509f9499321d6633330b1bbc43018b83e0214",
"size": "154",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Ascomycota/Lecanoromycetes/Lecanorales/Catillariaceae/Catillaria/Catillaria chroolepus/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
<head>
<meta http-equiv="Content-Type" content="application/xhtml+xml; charset=UTF-8" />
<meta name="generator" content="AsciiDoc 8.6.6" />
<title>git-remote-helpers</title>
<style type="text/css">
/* Shared CSS for AsciiDoc xhtml11 and html5 backends */
/* Default font. */
body {
font-family: Georgia,serif;
}
/* Title font. */
h1, h2, h3, h4, h5, h6,
div.title, caption.title,
thead, p.table.header,
#toctitle,
#author, #revnumber, #revdate, #revremark,
#footer {
font-family: Arial,Helvetica,sans-serif;
}
body {
margin: 1em 5% 1em 5%;
}
a {
color: blue;
text-decoration: underline;
}
a:visited {
color: fuchsia;
}
em {
font-style: italic;
color: navy;
}
strong {
font-weight: bold;
color: #083194;
}
h1, h2, h3, h4, h5, h6 {
color: #527bbd;
margin-top: 1.2em;
margin-bottom: 0.5em;
line-height: 1.3;
}
h1, h2, h3 {
border-bottom: 2px solid silver;
}
h2 {
padding-top: 0.5em;
}
h3 {
float: left;
}
h3 + * {
clear: left;
}
h5 {
font-size: 1.0em;
}
div.sectionbody {
margin-left: 0;
}
hr {
border: 1px solid silver;
}
p {
margin-top: 0.5em;
margin-bottom: 0.5em;
}
ul, ol, li > p {
margin-top: 0;
}
ul > li { color: #aaa; }
ul > li > * { color: black; }
pre {
padding: 0;
margin: 0;
}
#author {
color: #527bbd;
font-weight: bold;
font-size: 1.1em;
}
#email {
}
#revnumber, #revdate, #revremark {
}
#footer {
font-size: small;
border-top: 2px solid silver;
padding-top: 0.5em;
margin-top: 4.0em;
}
#footer-text {
float: left;
padding-bottom: 0.5em;
}
#footer-badges {
float: right;
padding-bottom: 0.5em;
}
#preamble {
margin-top: 1.5em;
margin-bottom: 1.5em;
}
div.imageblock, div.exampleblock, div.verseblock,
div.quoteblock, div.literalblock, div.listingblock, div.sidebarblock,
div.admonitionblock {
margin-top: 1.0em;
margin-bottom: 1.5em;
}
div.admonitionblock {
margin-top: 2.0em;
margin-bottom: 2.0em;
margin-right: 10%;
color: #606060;
}
div.content { /* Block element content. */
padding: 0;
}
/* Block element titles. */
div.title, caption.title {
color: #527bbd;
font-weight: bold;
text-align: left;
margin-top: 1.0em;
margin-bottom: 0.5em;
}
div.title + * {
margin-top: 0;
}
td div.title:first-child {
margin-top: 0.0em;
}
div.content div.title:first-child {
margin-top: 0.0em;
}
div.content + div.title {
margin-top: 0.0em;
}
div.sidebarblock > div.content {
background: #ffffee;
border: 1px solid #dddddd;
border-left: 4px solid #f0f0f0;
padding: 0.5em;
}
div.listingblock > div.content {
border: 1px solid #dddddd;
border-left: 5px solid #f0f0f0;
background: #f8f8f8;
padding: 0.5em;
}
div.quoteblock, div.verseblock {
padding-left: 1.0em;
margin-left: 1.0em;
margin-right: 10%;
border-left: 5px solid #f0f0f0;
color: #888;
}
div.quoteblock > div.attribution {
padding-top: 0.5em;
text-align: right;
}
div.verseblock > pre.content {
font-family: inherit;
font-size: inherit;
}
div.verseblock > div.attribution {
padding-top: 0.75em;
text-align: left;
}
/* DEPRECATED: Pre version 8.2.7 verse style literal block. */
div.verseblock + div.attribution {
text-align: left;
}
div.admonitionblock .icon {
vertical-align: top;
font-size: 1.1em;
font-weight: bold;
text-decoration: underline;
color: #527bbd;
padding-right: 0.5em;
}
div.admonitionblock td.content {
padding-left: 0.5em;
border-left: 3px solid #dddddd;
}
div.exampleblock > div.content {
border-left: 3px solid #dddddd;
padding-left: 0.5em;
}
div.imageblock div.content { padding-left: 0; }
span.image img { border-style: none; }
a.image:visited { color: white; }
dl {
margin-top: 0.8em;
margin-bottom: 0.8em;
}
dt {
margin-top: 0.5em;
margin-bottom: 0;
font-style: normal;
color: navy;
}
dd > *:first-child {
margin-top: 0.1em;
}
ul, ol {
list-style-position: outside;
}
ol.arabic {
list-style-type: decimal;
}
ol.loweralpha {
list-style-type: lower-alpha;
}
ol.upperalpha {
list-style-type: upper-alpha;
}
ol.lowerroman {
list-style-type: lower-roman;
}
ol.upperroman {
list-style-type: upper-roman;
}
div.compact ul, div.compact ol,
div.compact p, div.compact p,
div.compact div, div.compact div {
margin-top: 0.1em;
margin-bottom: 0.1em;
}
tfoot {
font-weight: bold;
}
td > div.verse {
white-space: pre;
}
div.hdlist {
margin-top: 0.8em;
margin-bottom: 0.8em;
}
div.hdlist tr {
padding-bottom: 15px;
}
dt.hdlist1.strong, td.hdlist1.strong {
font-weight: bold;
}
td.hdlist1 {
vertical-align: top;
font-style: normal;
padding-right: 0.8em;
color: navy;
}
td.hdlist2 {
vertical-align: top;
}
div.hdlist.compact tr {
margin: 0;
padding-bottom: 0;
}
.comment {
background: yellow;
}
.footnote, .footnoteref {
font-size: 0.8em;
}
span.footnote, span.footnoteref {
vertical-align: super;
}
#footnotes {
margin: 20px 0 20px 0;
padding: 7px 0 0 0;
}
#footnotes div.footnote {
margin: 0 0 5px 0;
}
#footnotes hr {
border: none;
border-top: 1px solid silver;
height: 1px;
text-align: left;
margin-left: 0;
width: 20%;
min-width: 100px;
}
div.colist td {
padding-right: 0.5em;
padding-bottom: 0.3em;
vertical-align: top;
}
div.colist td img {
margin-top: 0.3em;
}
@media print {
#footer-badges { display: none; }
}
#toc {
margin-bottom: 2.5em;
}
#toctitle {
color: #527bbd;
font-size: 1.1em;
font-weight: bold;
margin-top: 1.0em;
margin-bottom: 0.1em;
}
div.toclevel1, div.toclevel2, div.toclevel3, div.toclevel4 {
margin-top: 0;
margin-bottom: 0;
}
div.toclevel2 {
margin-left: 2em;
font-size: 0.9em;
}
div.toclevel3 {
margin-left: 4em;
font-size: 0.9em;
}
div.toclevel4 {
margin-left: 6em;
font-size: 0.9em;
}
span.aqua { color: aqua; }
span.black { color: black; }
span.blue { color: blue; }
span.fuchsia { color: fuchsia; }
span.gray { color: gray; }
span.green { color: green; }
span.lime { color: lime; }
span.maroon { color: maroon; }
span.navy { color: navy; }
span.olive { color: olive; }
span.purple { color: purple; }
span.red { color: red; }
span.silver { color: silver; }
span.teal { color: teal; }
span.white { color: white; }
span.yellow { color: yellow; }
span.aqua-background { background: aqua; }
span.black-background { background: black; }
span.blue-background { background: blue; }
span.fuchsia-background { background: fuchsia; }
span.gray-background { background: gray; }
span.green-background { background: green; }
span.lime-background { background: lime; }
span.maroon-background { background: maroon; }
span.navy-background { background: navy; }
span.olive-background { background: olive; }
span.purple-background { background: purple; }
span.red-background { background: red; }
span.silver-background { background: silver; }
span.teal-background { background: teal; }
span.white-background { background: white; }
span.yellow-background { background: yellow; }
span.big { font-size: 2em; }
span.small { font-size: 0.6em; }
span.underline { text-decoration: underline; }
span.overline { text-decoration: overline; }
span.line-through { text-decoration: line-through; }
/*
* xhtml11 specific
*
* */
tt {
font-family: monospace;
font-size: inherit;
color: navy;
}
div.tableblock {
margin-top: 1.0em;
margin-bottom: 1.5em;
}
div.tableblock > table {
border: 3px solid #527bbd;
}
thead, p.table.header {
font-weight: bold;
color: #527bbd;
}
p.table {
margin-top: 0;
}
/* Because the table frame attribute is overriden by CSS in most browsers. */
div.tableblock > table[frame="void"] {
border-style: none;
}
div.tableblock > table[frame="hsides"] {
border-left-style: none;
border-right-style: none;
}
div.tableblock > table[frame="vsides"] {
border-top-style: none;
border-bottom-style: none;
}
/*
* html5 specific
*
* */
.monospaced {
font-family: monospace;
font-size: inherit;
color: navy;
}
table.tableblock {
margin-top: 1.0em;
margin-bottom: 1.5em;
}
thead, p.tableblock.header {
font-weight: bold;
color: #527bbd;
}
p.tableblock {
margin-top: 0;
}
table.tableblock {
border-width: 3px;
border-spacing: 0px;
border-style: solid;
border-color: #527bbd;
border-collapse: collapse;
}
th.tableblock, td.tableblock {
border-width: 1px;
padding: 4px;
border-style: solid;
border-color: #527bbd;
}
table.tableblock.frame-topbot {
border-left-style: hidden;
border-right-style: hidden;
}
table.tableblock.frame-sides {
border-top-style: hidden;
border-bottom-style: hidden;
}
table.tableblock.frame-none {
border-style: hidden;
}
th.tableblock.halign-left, td.tableblock.halign-left {
text-align: left;
}
th.tableblock.halign-center, td.tableblock.halign-center {
text-align: center;
}
th.tableblock.halign-right, td.tableblock.halign-right {
text-align: right;
}
th.tableblock.valign-top, td.tableblock.valign-top {
vertical-align: top;
}
th.tableblock.valign-middle, td.tableblock.valign-middle {
vertical-align: middle;
}
th.tableblock.valign-bottom, td.tableblock.valign-bottom {
vertical-align: bottom;
}
/*
* manpage specific
*
* */
body.manpage h1 {
padding-top: 0.5em;
padding-bottom: 0.5em;
border-top: 2px solid silver;
border-bottom: 2px solid silver;
}
body.manpage h2 {
border-style: none;
}
body.manpage div.sectionbody {
margin-left: 3em;
}
@media print {
body.manpage div#toc { display: none; }
}
</style>
<script type="text/javascript">
/*<+'])');
// Function that scans the DOM tree for header elements (the DOM2
// nodeIterator API would be a better technique but not supported by all
// browsers).
var iterate = function (el) {
for (var i = el.firstChild; i != null; i = i.nextSibling) {
if (i.nodeType == 1 /* Node.ELEMENT_NODE */) {
var mo = re.exec(i.tagName);
if (mo && (i.getAttribute("class") || i.getAttribute("className")) != "float") {
result[result.length] = new TocEntry(i, getText(i), mo[1]-1);
}
iterate(i);
}
}
}
iterate(el);
return result;
}
var toc = document.getElementById("toc");
if (!toc) {
return;
}
// Delete existing TOC entries in case we're reloading the TOC.
var tocEntriesToRemove = [];
var i;
for (i = 0; i < toc.childNodes.length; i++) {
var entry = toc.childNodes[i];
if (entry.nodeName == 'div'
&& entry.getAttribute("class")
&& entry.getAttribute("class").match(/^toclevel/))
tocEntriesToRemove.push(entry);
}
for (i = 0; i < tocEntriesToRemove.length; i++) {
toc.removeChild(tocEntriesToRemove[i]);
}
// Rebuild TOC entries.
var entries = tocEntries(document.getElementById("content"), toclevels);
for (var i = 0; i < entries.length; ++i) {
var entry = entries[i];
if (entry.element.id == "")
entry.element.id = "_toc_" + i;
var a = document.createElement("a");
a.href = "#" + entry.element.id;
a.appendChild(document.createTextNode(entry.text));
var div = document.createElement("div");
div.appendChild(a);
div.className = "toclevel" + entry.toclevel;
toc.appendChild(div);
}
if (entries.length == 0)
toc.parentNode.removeChild(toc);
},
/////////////////////////////////////////////////////////////////////
// Footnotes generator
/////////////////////////////////////////////////////////////////////
/* Based on footnote generation code from:
* http://www.brandspankingnew.net/archive/2005/07/format_footnote.html
*/
footnotes: function () {
// Delete existing footnote entries in case we're reloading the footnodes.
var i;
var noteholder = document.getElementById("footnotes");
if (!noteholder) {
return;
}
var entriesToRemove = [];
for (i = 0; i < noteholder.childNodes.length; i++) {
var entry = noteholder.childNodes[i];
if (entry.nodeName == 'div' && entry.getAttribute("class") == "footnote")
entriesToRemove.push(entry);
}
for (i = 0; i < entriesToRemove.length; i++) {
noteholder.removeChild(entriesToRemove[i]);
}
// Rebuild footnote entries.
var cont = document.getElementById("content");
var spans = cont.getElementsByTagName("span");
var refs = {};
var n = 0;
for (i=0; i<spans.length; i++) {
if (spans[i].className == "footnote") {
n++;
var note = spans[i].getAttribute("data-note");
if (!note) {
// Use [\s\S] in place of . so multi-line matches work.
// Because JavaScript has no s (dotall) regex flag.
note = spans[i].innerHTML.match(/\s*\[([\s\S]*)]\s*/)[1];
spans[i].innerHTML =
"[<a id='_footnoteref_" + n + "' href='#_footnote_" + n +
"' title='View footnote' class='footnote'>" + n + "</a>]";
spans[i].setAttribute("data-note", note);
}
noteholder.innerHTML +=
"<div class='footnote' id='_footnote_" + n + "'>" +
"<a href='#_footnoteref_" + n + "' title='Return to text'>" +
n + "</a>. " + note + "</div>";
var id =spans[i].getAttribute("id");
if (id != null) refs["#"+id] = n;
}
}
if (n == 0)
noteholder.parentNode.removeChild(noteholder);
else {
// Process footnoterefs.
for (i=0; i<spans.length; i++) {
if (spans[i].className == "footnoteref") {
var href = spans[i].getElementsByTagName("a")[0].getAttribute("href");
href = href.match(/#.*/)[0]; // Because IE return full URL.
n = refs[href];
spans[i].innerHTML =
"[<a href='#_footnote_" + n +
"' title='View footnote' class='footnote'>" + n + "</a>]";
}
}
}
},
install: function(toclevels) {
var timerId;
function reinstall() {
asciidoc.footnotes();
if (toclevels) {
asciidoc.toc(toclevels);
}
}
function reinstallAndRemoveTimer() {
clearInterval(timerId);
reinstall();
}
timerId = setInterval(reinstall, 500);
if (document.addEventListener)
document.addEventListener("DOMContentLoaded", reinstallAndRemoveTimer, false);
else
window.onload = reinstallAndRemoveTimer;
}
}
asciidoc.install();
/*]]>*/
</script>
</head>
<body class="article">
<div id="header">
<h1>git-remote-helpers</h1>
</div>
<div id="content">
<div id="preamble">
<div class="sectionbody">
<div class="paragraph"><p>This document has been moved to <a href="gitremote-helpers.html">gitremote-helpers(1)</a>.</p></div>
<div class="paragraph"><p>Please let the owners of the referring site know so that they can update the
link you clicked to get here.</p></div>
<div class="paragraph"><p>Thanks.</p></div>
</div>
</div>
</div>
<div id="footnotes"><hr /></div>
<div id="footer">
<div id="footer-text">
Last updated 2013-08-20 08:40:27 PDT
</div>
</div>
</body>
</html>
| {
"content_hash": "ecd331af92af3974e3886a6f8d5257e6",
"timestamp": "",
"source": "github",
"line_count": 757,
"max_line_length": 126,
"avg_line_length": 21.18758256274769,
"alnum_prop": 0.6322713386121329,
"repo_name": "padamshrestha/portable_nodejs_git",
"id": "194bfead8756d1eccf282d2823e760cfefcc6cc7",
"size": "16039",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Git/doc/git/html/git-remote-helpers.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "18617"
},
{
"name": "C",
"bytes": "275803"
},
{
"name": "C++",
"bytes": "164357"
},
{
"name": "CSS",
"bytes": "15143"
},
{
"name": "Emacs Lisp",
"bytes": "30222"
},
{
"name": "HTML",
"bytes": "6835201"
},
{
"name": "JavaScript",
"bytes": "77298"
},
{
"name": "M4",
"bytes": "193907"
},
{
"name": "Makefile",
"bytes": "2531"
},
{
"name": "NewLisp",
"bytes": "37316"
},
{
"name": "Perl",
"bytes": "5146825"
},
{
"name": "Perl6",
"bytes": "473997"
},
{
"name": "PowerShell",
"bytes": "991"
},
{
"name": "Prolog",
"bytes": "553295"
},
{
"name": "Ruby",
"bytes": "28952"
},
{
"name": "Shell",
"bytes": "273882"
},
{
"name": "Smalltalk",
"bytes": "25677"
},
{
"name": "SystemVerilog",
"bytes": "27798"
},
{
"name": "Tcl",
"bytes": "2257519"
},
{
"name": "VimL",
"bytes": "680966"
},
{
"name": "Visual Basic",
"bytes": "691"
},
{
"name": "XSLT",
"bytes": "50637"
}
],
"symlink_target": ""
} |
package com.linecorp.armeria.server;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.function.Function;
import org.junit.jupiter.api.Test;
import com.google.common.collect.ImmutableList;
import com.linecorp.armeria.client.ClientRequestContext;
import com.linecorp.armeria.common.HttpMethod;
import com.linecorp.armeria.common.HttpRequest;
import com.linecorp.armeria.common.QueryParams;
import com.linecorp.armeria.common.RequestContext;
import com.linecorp.armeria.common.annotation.Nullable;
import com.linecorp.armeria.common.util.SafeCloseable;
// Verifies the thread-local "current context" semantics of ServiceRequestContext:
// current()/currentOrNull()/mapCurrent() lookups, push()/pop nesting rules when
// client contexts are interleaved, and query-parameter access.
class ServiceRequestContextTest {
// current() throws when nothing is pushed, still resolves to the service context
// while a client context derived inside it is on top, and rejects a stack whose
// top-level context is client-side.
@Test
void current() {
assertThatThrownBy(ServiceRequestContext::current).isInstanceOf(IllegalStateException.class)
.hasMessageContaining("unavailable");
final ServiceRequestContext sctx = serviceRequestContext();
try (SafeCloseable unused = sctx.push()) {
assertThat(ServiceRequestContext.current()).isSameAs(sctx);
// cctx is created while sctx is pushed, so its root is presumably sctx
// (same setup as pushWithOldClientCtxWhoseRootIsThisServiceCtx below).
final ClientRequestContext cctx = clientRequestContext();
try (SafeCloseable unused1 = cctx.push()) {
assertThat(ServiceRequestContext.current()).isSameAs(sctx);
assertThat(ClientRequestContext.current()).isSameAs(cctx);
assertThat((ClientRequestContext) RequestContext.current()).isSameAs(cctx);
}
assertCurrentCtx(sctx);
}
assertCurrentCtx(null);
try (SafeCloseable unused = clientRequestContext().push()) {
assertThatThrownBy(ServiceRequestContext::current)
.isInstanceOf(IllegalStateException.class)
.hasMessageContaining("not a server-side context");
}
}
// Same matrix as current(), but the no-context and client-only cases
// yield null instead of throwing.
@Test
void currentOrNull() {
assertThat(ServiceRequestContext.currentOrNull()).isNull();
final ServiceRequestContext sctx = serviceRequestContext();
try (SafeCloseable unused = sctx.push()) {
assertThat(ServiceRequestContext.currentOrNull()).isSameAs(sctx);
final ClientRequestContext cctx = clientRequestContext();
try (SafeCloseable unused1 = cctx.push()) {
assertThat(ServiceRequestContext.currentOrNull()).isSameAs(sctx);
assertThat(ClientRequestContext.current()).isSameAs(cctx);
assertThat((ClientRequestContext) RequestContext.current()).isSameAs(cctx);
}
assertCurrentCtx(sctx);
}
assertCurrentCtx(null);
try (SafeCloseable unused = clientRequestContext().push()) {
assertThat(ServiceRequestContext.currentOrNull()).isNull();
}
}
// mapCurrent() applies the mapper to the current service context and falls
// back to the supplier (or null) when no context is pushed; a client-only
// stack makes the service-side overload throw.
@Test
void mapCurrent() {
assertThat(ServiceRequestContext.mapCurrent(ctx -> "foo", () -> "defaultValue"))
.isEqualTo("defaultValue");
assertThat(ServiceRequestContext.mapCurrent(Function.identity(), null)).isNull();
final ServiceRequestContext sctx = serviceRequestContext();
try (SafeCloseable unused = sctx.push()) {
assertThat(ServiceRequestContext.mapCurrent(c -> c == sctx ? "foo" : "bar",
() -> "defaultValue"))
.isEqualTo("foo");
assertThat(ServiceRequestContext.mapCurrent(Function.identity(), null)).isSameAs(sctx);
final ClientRequestContext cctx = clientRequestContext();
try (SafeCloseable unused1 = cctx.push()) {
assertThat(ServiceRequestContext.mapCurrent(c -> c == sctx ? "foo" : "bar",
() -> "defaultValue"))
.isEqualTo("foo");
assertThat(ClientRequestContext.mapCurrent(c -> c == cctx ? "baz" : "qux",
() -> "defaultValue"))
.isEqualTo("baz");
assertThat(ServiceRequestContext.mapCurrent(Function.identity(), null)).isSameAs(sctx);
assertThat(ClientRequestContext.mapCurrent(Function.identity(), null)).isSameAs(cctx);
assertThat(RequestContext.mapCurrent(Function.identity(), null)).isSameAs(cctx);
}
assertCurrentCtx(sctx);
}
assertCurrentCtx(null);
try (SafeCloseable unused = clientRequestContext().push()) {
assertThatThrownBy(() -> ServiceRequestContext.mapCurrent(c -> "foo", () -> "bar"))
.isInstanceOf(IllegalStateException.class)
.hasMessageContaining("not a server-side context");
}
}
// Pushing the same service context twice is allowed and pops back level by level.
@Test
void pushReentrance() {
final ServiceRequestContext ctx = serviceRequestContext();
try (SafeCloseable ignored = ctx.push()) {
assertCurrentCtx(ctx);
try (SafeCloseable ignored2 = ctx.push()) {
assertCurrentCtx(ctx);
}
assertCurrentCtx(ctx);
}
assertCurrentCtx(null);
}
// A service context may be re-pushed on top of a client context that was
// derived from it; each pop restores the previous top.
@Test
void pushWithOldClientCtxWhoseRootIsThisServiceCtx() {
final ServiceRequestContext sctx = serviceRequestContext();
try (SafeCloseable ignored = sctx.push()) {
assertCurrentCtx(sctx);
// The root of ClientRequestContext is sctx.
final ClientRequestContext cctx = clientRequestContext();
try (SafeCloseable ignored1 = cctx.push()) {
assertCurrentCtx(cctx);
try (SafeCloseable ignored2 = sctx.push()) {
assertCurrentCtx(sctx);
}
assertCurrentCtx(cctx);
}
assertCurrentCtx(sctx);
}
assertCurrentCtx(null);
}
// Pushing a service context over an unrelated client context is rejected.
@Test
void pushWithOldIrrelevantClientCtx() {
final ClientRequestContext cctx = clientRequestContext();
try (SafeCloseable ignored = cctx.push()) {
assertCurrentCtx(cctx);
final ServiceRequestContext sctx = serviceRequestContext();
assertThatThrownBy(sctx::push).isInstanceOf(IllegalStateException.class);
}
assertCurrentCtx(null);
}
// Pushing a second, unrelated service context over an active one is rejected.
@Test
void pushWithOldIrrelevantServiceCtx() {
final ServiceRequestContext sctx1 = serviceRequestContext();
final ServiceRequestContext sctx2 = serviceRequestContext();
try (SafeCloseable ignored = sctx1.push()) {
assertCurrentCtx(sctx1);
assertThatThrownBy(sctx2::push).isInstanceOf(IllegalStateException.class);
}
assertCurrentCtx(null);
}
// queryParam()/queryParams() are case-sensitive and return the first value /
// all values / null or empty for absent names.
@Test
void queryParams() {
final String path = "/foo";
final QueryParams queryParams = QueryParams.of("param1", "value1",
"param1", "value2",
"Param1", "Value3",
"PARAM1", "VALUE4");
final String pathAndQuery = path + '?' + queryParams.toQueryString();
final ServiceRequestContext ctx = ServiceRequestContext.of(HttpRequest.of(HttpMethod.GET,
pathAndQuery));
assertThat(ctx.queryParams()).isEqualTo(queryParams);
assertThat(ctx.queryParam("param1")).isEqualTo("value1");
assertThat(ctx.queryParam("Param1")).isEqualTo("Value3");
assertThat(ctx.queryParam("PARAM1")).isEqualTo("VALUE4");
assertThat(ctx.queryParam("Not exist")).isNull();
assertThat(ctx.queryParams("param1")).isEqualTo(ImmutableList.of("value1", "value2"));
assertThat(ctx.queryParams("Param1")).isEqualTo(ImmutableList.of("Value3"));
assertThat(ctx.queryParams("PARAM1")).isEqualTo(ImmutableList.of("VALUE4"));
assertThat(ctx.queryParams("Not exist")).isEmpty();
}
// Asserts that the current thread-local context is exactly ctx (or absent when null).
private static void assertCurrentCtx(@Nullable RequestContext ctx) {
final RequestContext current = RequestContext.currentOrNull();
assertThat(current).isSameAs(ctx);
}
// Fresh service-side context for a GET / request.
private static ServiceRequestContext serviceRequestContext() {
return ServiceRequestContext.of(HttpRequest.of(HttpMethod.GET, "/"));
}
// Fresh client-side context for a GET / request; when built while a service
// context is pushed, that service context becomes its root.
private static ClientRequestContext clientRequestContext() {
return ClientRequestContext.of(HttpRequest.of(HttpMethod.GET, "/"));
}
}
| {
"content_hash": "0201dd7bf52a4510a8c57864116dc372",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 103,
"avg_line_length": 43.38461538461539,
"alnum_prop": 0.6112293144208037,
"repo_name": "minwoox/armeria",
"id": "b4c157cc9ab7474c6f50c33f5b7db2965b99adde",
"size": "9093",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/src/test/java/com/linecorp/armeria/server/ServiceRequestContextTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "7197"
},
{
"name": "HTML",
"bytes": "1222"
},
{
"name": "Java",
"bytes": "16194263"
},
{
"name": "JavaScript",
"bytes": "26702"
},
{
"name": "Kotlin",
"bytes": "90127"
},
{
"name": "Less",
"bytes": "34341"
},
{
"name": "Scala",
"bytes": "209950"
},
{
"name": "Shell",
"bytes": "2062"
},
{
"name": "Thrift",
"bytes": "192676"
},
{
"name": "TypeScript",
"bytes": "243099"
}
],
"symlink_target": ""
} |
package sample.session;
import java.time.Duration;
import java.util.Base64;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseCookie;
import org.springframework.web.reactive.function.client.ClientResponse;
import org.springframework.web.reactive.function.client.WebClient;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Integration tests for {@link SampleSessionWebFluxApplication}.
 *
 * <p>The server is started on a random port with the session timeout forced
 * down to 2 seconds (see the {@code @SpringBootTest} properties) so that
 * session expiry can be observed within the test.
 *
 * @author Vedran Pavic
 */
@SpringBootTest(properties = "server.servlet.session.timeout:2",
webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
class SampleSessionWebFluxApplicationTests {
// Random port chosen by the embedded server.
@LocalServerPort
private int port;
@Autowired
private WebClient.Builder webClientBuilder;
// Flow: authenticate -> capture the SESSION cookie and the session id from the
// body -> replaying the cookie yields the same session id -> after sleeping past
// the 2-second timeout the cookie is rejected with 401.
@Test
void userDefinedMappingsSecureByDefault() throws Exception {
WebClient webClient = this.webClientBuilder
.baseUrl("http://localhost:" + this.port + "/").build();
ClientResponse response = webClient.get().header("Authorization", getBasicAuth())
.exchange().block(Duration.ofSeconds(30));
assertThat(response.statusCode()).isEqualTo(HttpStatus.OK);
// NOTE(review): assumes the server always sets a "SESSION" cookie on first
// authentication -- sessionCookie would be null otherwise.
ResponseCookie sessionCookie = response.cookies().getFirst("SESSION");
String sessionId = response.bodyToMono(String.class)
.block(Duration.ofSeconds(30));
response = webClient.get().cookie("SESSION", sessionCookie.getValue()).exchange()
.block(Duration.ofSeconds(30));
assertThat(response.statusCode()).isEqualTo(HttpStatus.OK);
assertThat(response.bodyToMono(String.class).block(Duration.ofSeconds(30)))
.isEqualTo(sessionId);
// Wait for the 2-second session timeout configured above to elapse.
Thread.sleep(2000);
response = webClient.get().cookie("SESSION", sessionCookie.getValue()).exchange()
.block(Duration.ofSeconds(30));
assertThat(response.statusCode()).isEqualTo(HttpStatus.UNAUTHORIZED);
}
// Basic-auth header for the sample's fixed user:password credentials.
private String getBasicAuth() {
return "Basic " + Base64.getEncoder().encodeToString("user:password".getBytes());
}
}
| {
"content_hash": "e147f0acde58a3ccf396432c6fa58e38",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 83,
"avg_line_length": 35.36666666666667,
"alnum_prop": 0.7794533459000943,
"repo_name": "lburgazzoli/spring-boot",
"id": "018d03267a4f6fe57dca328cafb8f5162889c101",
"size": "2743",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spring-boot-samples/spring-boot-sample-session-webflux/src/test/java/sample/session/SampleSessionWebFluxApplicationTests.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "6954"
},
{
"name": "CSS",
"bytes": "5769"
},
{
"name": "FreeMarker",
"bytes": "2134"
},
{
"name": "Groovy",
"bytes": "49512"
},
{
"name": "HTML",
"bytes": "69689"
},
{
"name": "Java",
"bytes": "11602150"
},
{
"name": "JavaScript",
"bytes": "37789"
},
{
"name": "Ruby",
"bytes": "1307"
},
{
"name": "Shell",
"bytes": "27916"
},
{
"name": "Smarty",
"bytes": "3276"
},
{
"name": "XSLT",
"bytes": "34105"
}
],
"symlink_target": ""
} |
// Generated by CoffeeScript 1.8.0
// NOTE(review): compiled artifact -- prefer editing the CoffeeScript source.
(function() {
  // Controller for the Chromecast list view: loads the known Chromecasts and
  // tracks whether a cast receiver is currently available.
  angular.module("GScreen").controller("Chromecasts", function($scope, Chromecast, castAway) {
    // Start the cast plumbing before reading availability.
    castAway.initialize();
    // Chromecast.query() presumably returns an async-populated collection -- confirm.
    $scope.chromecasts = Chromecast.query();
    $scope.chromecastAvailable = castAway.available;
    console.log("Initial Chromecast available", $scope.chromecastAvailable);
    // The availability event fires outside Angular's digest cycle, so the
    // flag is flipped inside $apply to refresh the view.
    return castAway.on("receivers:available", function() {
      $scope.$apply(function() {
        $scope.chromecastAvailable = true;
        return $scope.chromecastAvailable;
      });
      return console.log("Chromecast is available", $scope.chromecastAvailable);
    });
  });
}).call(this);
| {
"content_hash": "90348abefa22deff1fb2c2579e1dac38",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 96,
"avg_line_length": 30.25,
"alnum_prop": 0.6101928374655647,
"repo_name": "vitorismart/ChromeCast",
"id": "358d281b8168a86ebbfc9726f831f4fef7505325",
"size": "2210",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "target/client/controllers/chromecasts.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "14339"
},
{
"name": "CoffeeScript",
"bytes": "744"
},
{
"name": "Gnuplot",
"bytes": "5822"
},
{
"name": "JavaScript",
"bytes": "123435"
},
{
"name": "Perl",
"bytes": "21591"
},
{
"name": "Python",
"bytes": "1347"
},
{
"name": "Shell",
"bytes": "3465"
}
],
"symlink_target": ""
} |
#ifndef __MEDIA_MEDIARECORDERIMPL_H
#define __MEDIA_MEDIARECORDERIMPL_H
#include <thread>
#include <mutex>
#include <condition_variable>
#include <queue>
#include <atomic>
#include <functional>
#include <iostream>
#include <fstream>
#include <tinyalsa/tinyalsa.h>
#include <media/OutputDataSource.h>
#include "OutputHandler.h"
#include "MediaQueue.h"
#include "RecorderObserverWorker.h"
using namespace std;
namespace media {
/**
* @brief current state of MediaRecorder
* @details @b #include <media/MediaRecorder.h>
* @since TizenRT v2.0
*/
typedef enum recorder_state_e {
	/** MediaRecorder object was created */
	RECORDER_STATE_NONE,
	/** MediaRecorder worker object was created */
	RECORDER_STATE_IDLE,
	/** MediaRecorder datasource configured */
	RECORDER_STATE_CONFIGURED,
	/** MediaRecorder ready to record */
	RECORDER_STATE_READY,
	/** MediaRecorder do recording */
	RECORDER_STATE_RECORDING,
	/** MediaRecorder pause to record */
	RECORDER_STATE_PAUSED
} recorder_state_t;

/**
 * Printable names for recorder_state_t values, indexed by the enum;
 * the two lists must stay in sync.
 */
const char *const recorder_state_names[] = {
	"RECORDER_STATE_NONE",
	"RECORDER_STATE_IDLE",
	"RECORDER_STATE_CONFIGURED",
	"RECORDER_STATE_READY",
	"RECORDER_STATE_RECORDING",
	"RECORDER_STATE_PAUSED",
};

/**
 * Commands passed to MediaRecorderImpl::notifyObserver() to describe
 * recorder events.
 * NOTE(review): "FINISHIED" is a typo, but renaming the enumerator would
 * break existing callers, so it is kept as-is.
 */
typedef enum recorder_observer_command_e {
	RECORDER_OBSERVER_COMMAND_STARTED,
	RECORDER_OBSERVER_COMMAND_PAUSED,
	RECORDER_OBSERVER_COMMAND_FINISHIED,
	RECORDER_OBSERVER_COMMAND_START_ERROR,
	RECORDER_OBSERVER_COMMAND_PAUSE_ERROR,
	RECORDER_OBSERVER_COMMAND_STOP_ERROR,
	RECORDER_OBSERVER_COMMAND_BUFFER_OVERRUN,
	RECORDER_OBSERVER_COMMAND_BUFFER_UNDERRUN,
	RECORDER_OBSERVER_COMMAND_BUFFER_DATAREACHED,
} recorder_observer_command_t;
/**
 * Implementation object behind the public media::MediaRecorder facade.
 * Holds the recorder state machine, the output pipeline and the registered
 * observer; member declaration order is intentionally unchanged.
 */
class MediaRecorderImpl : public enable_shared_from_this<MediaRecorderImpl>
{
public:
	MediaRecorderImpl(MediaRecorder& recorder);
	~MediaRecorderImpl();

	/* Lifecycle operations mirroring the public MediaRecorder API. */
	recorder_result_t create();
	recorder_result_t destroy();
	recorder_result_t prepare();
	recorder_result_t unprepare();
	recorder_result_t start();
	recorder_result_t pause();
	recorder_result_t stop();

	/* Volume control. */
	recorder_result_t getVolume(uint8_t *vol);
	recorder_result_t getMaxVolume(uint8_t *vol);
	recorder_result_t setVolume(uint8_t vol);

	recorder_result_t setDataSource(std::unique_ptr<stream::OutputDataSource> dataSource);
	recorder_state_t getState();
	recorder_result_t setObserver(std::shared_ptr<MediaRecorderObserverInterface> observer);
	bool isRecording();
	/* Optional recording limits (seconds / bytes). */
	recorder_result_t setDuration(int second);
	recorder_result_t setFileSize(int byte);
	void notifySync();
	void notifyObserver(recorder_observer_command_t cmd, ...);
	void capture();

private:
	/* Internal counterparts of the public operations; results are passed
	 * back through the out-parameter where present. */
	void createRecorder(recorder_result_t& ret);
	void destroyRecorder(recorder_result_t& ret);
	void prepareRecorder(recorder_result_t& ret);
	void unprepareRecorder(recorder_result_t& ret);
	void startRecorder();
	void pauseRecorder();
	void stopRecorder(recorder_result_t ret);
	void getRecorderVolume(uint8_t *vol, recorder_result_t& ret);
	void getRecorderMaxVolume(uint8_t *vol, recorder_result_t& ret);
	void setRecorderVolume(uint8_t vol, recorder_result_t& ret);
	void setRecorderObserver(std::shared_ptr<MediaRecorderObserverInterface> observer);
	void setRecorderDataSource(std::shared_ptr<stream::OutputDataSource> dataSource, recorder_result_t& ret);
	void setRecorderDuration(int second, recorder_result_t& ret);
	void setRecorderFileSize(int byte, recorder_result_t& ret);

private:
	std::atomic<recorder_state_t> mCurState;  /* current recorder state */
	stream::OutputHandler mOutputHandler;     /* output pipeline handler */
	std::shared_ptr<MediaRecorderObserverInterface> mRecorderObserver;
	MediaRecorder& mRecorder;                 /* back-reference to the facade */
	unsigned char* mBuffer;                   /* capture buffer */
	int mBuffSize;
	mutex mCmdMtx;                            /* command mutex */
	std::condition_variable mSyncCv;
	int mDuration;                            /* duration limit, seconds */
	int mFileSize;                            /* size limit, bytes */
	uint32_t mTotalFrames;
	uint32_t mCapturedFrames;
};
} // namespace media
#endif
| {
"content_hash": "a3f4b35da3f0d9eeb5e5e785f8f3f4e4",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 106,
"avg_line_length": 29.761904761904763,
"alnum_prop": 0.7722666666666667,
"repo_name": "chanijjani/TizenRT",
"id": "a61f21d7476dda0b91e503552b6f78e01c5241ad",
"size": "4510",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "framework/src/media/MediaRecorderImpl.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "415941"
},
{
"name": "Batchfile",
"bytes": "42646"
},
{
"name": "C",
"bytes": "65318395"
},
{
"name": "C++",
"bytes": "1995766"
},
{
"name": "HTML",
"bytes": "2990"
},
{
"name": "Java",
"bytes": "63595"
},
{
"name": "Makefile",
"bytes": "737534"
},
{
"name": "Objective-C",
"bytes": "42504"
},
{
"name": "Perl",
"bytes": "4361"
},
{
"name": "PowerShell",
"bytes": "8511"
},
{
"name": "Python",
"bytes": "180702"
},
{
"name": "Roff",
"bytes": "4401"
},
{
"name": "Shell",
"bytes": "222522"
},
{
"name": "Tcl",
"bytes": "163693"
}
],
"symlink_target": ""
} |
package org.openehealth.ipf.platform.camel.ihe.fhir.iti83;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import org.hl7.fhir.r4.model.OperationOutcome;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertThrows;
/**
*
*/
public class TestIti83UnknownTarget extends AbstractTestIti83 {
    // Spring context descriptor wiring the ITI-83 route against an unknown target.
    private static final String CONTEXT_DESCRIPTOR = "iti-83-unknown-target.xml";
    @BeforeAll
    public static void setUpClass() {
        startServer(CONTEXT_DESCRIPTOR);
    }
    // A valid PIXm query against this context must be rejected: the caught
    // ForbiddenOperationException is checked to carry OperationOutcome issue
    // type CODEINVALID, then rethrown so assertThrows still observes it.
    @Test
    public void testSendManualPixm() {
        assertThrows(ForbiddenOperationException.class, ()->{
            try {
                sendManuallyOnType(validQueryParameters());
            } catch (ForbiddenOperationException e) {
                assertAndRethrow(e, OperationOutcome.IssueType.CODEINVALID);
            }
        });
    }
}
| {
"content_hash": "42c6ae6c56be1b2e7b9d27202f9ddf4f",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 81,
"avg_line_length": 27.41176470588235,
"alnum_prop": 0.6952789699570815,
"repo_name": "oehf/ipf",
"id": "348b7c0cf5809773aea5af87d39b90d59ff2b07b",
"size": "1548",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "platform-camel/ihe/fhir/r4/pixpdq/src/test/java/org/openehealth/ipf/platform/camel/ihe/fhir/iti83/TestIti83UnknownTarget.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2381"
},
{
"name": "Groovy",
"bytes": "1232218"
},
{
"name": "HTML",
"bytes": "11417"
},
{
"name": "Java",
"bytes": "6734450"
},
{
"name": "Kotlin",
"bytes": "87953"
},
{
"name": "Shell",
"bytes": "553"
},
{
"name": "XQuery",
"bytes": "15044"
},
{
"name": "XSLT",
"bytes": "567639"
}
],
"symlink_target": ""
} |
# Debian Sid (run variant) base image for the BeagleBone Pocket.
FROM balenalib/beaglebone-pocket-debian:sid-run

# Go release installed below.
ENV GO_VERSION 1.15.8

# gcc for cgo
RUN apt-get update && apt-get install -y --no-install-recommends \
		g++ \
		gcc \
		libc6-dev \
		make \
		pkg-config \
		git \
	&& rm -rf /var/lib/apt/lists/*

# Fetch the prebuilt armv7hf Go toolchain, verify its checksum and unpack it
# into /usr/local/go.
RUN set -x \
	&& fetchDeps=' \
	curl \
	' \
	&& apt-get update && apt-get install -y $fetchDeps --no-install-recommends && rm -rf /var/lib/apt/lists/* \
	&& mkdir -p /usr/local/go \
	&& curl -SLO "http://resin-packages.s3.amazonaws.com/golang/v$GO_VERSION/go$GO_VERSION.linux-armv7hf.tar.gz" \
	&& echo "bde22202576c3920ff5646fb1d19877cedc19501939d6ccd7b16ff89071abd0a  go$GO_VERSION.linux-armv7hf.tar.gz" | sha256sum -c - \
	&& tar -xzf "go$GO_VERSION.linux-armv7hf.tar.gz" -C /usr/local/go --strip-components=1 \
	&& rm -f go$GO_VERSION.linux-armv7hf.tar.gz

# Standard Go environment.
ENV GOROOT /usr/local/go
ENV GOPATH /go
ENV PATH $GOPATH/bin:/usr/local/go/bin:$PATH

RUN mkdir -p "$GOPATH/src" "$GOPATH/bin" && chmod -R 777 "$GOPATH"
WORKDIR $GOPATH

CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]

# Run balena's stack smoke test against the freshly installed toolchain.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
	&& echo "Running test-stack@golang" \
	&& chmod +x [email protected] \
	&& bash [email protected] \
	&& rm -rf [email protected]

# Record image details for the balena-info command.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Debian Sid \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nGo v1.15.8 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | {
"content_hash": "c7d10d79b576c6f86f046a0f9b1401cb",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 669,
"avg_line_length": 50.73913043478261,
"alnum_prop": 0.7065124250214224,
"repo_name": "nghiant2710/base-images",
"id": "e3414823c6f451adaa5da3263b3837a93f305117",
"size": "2355",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "balena-base-images/golang/beaglebone-pocket/debian/sid/1.15.8/run/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "144558581"
},
{
"name": "JavaScript",
"bytes": "16316"
},
{
"name": "Shell",
"bytes": "368690"
}
],
"symlink_target": ""
} |
package org.apache.wicket.protocol.ws.api.registry;
import java.util.Collection;
import org.apache.wicket.Application;
import org.apache.wicket.protocol.ws.api.IWebSocketConnection;
/**
* Tracks all currently connected WebSocket clients
*
* @since 6.0
*/
public interface IWebSocketConnectionRegistry
{
	/**
	 * Looks up a single client connection.
	 *
	 * @param application
	 *            the web application to look in
	 * @param sessionId
	 *            the http session id
	 * @param key
	 *            the web socket client key
	 * @return the web socket connection used by a client from the specified coordinates
	 */
	IWebSocketConnection getConnection(Application application, String sessionId, IKey key);

	/**
	 * Collects every connection belonging to one http session.
	 *
	 * @param application
	 *            the web application to look in
	 * @param sessionId
	 *            the http session id
	 * @return collection of web socket connection used by a client with the given session id
	 */
	Collection<IWebSocketConnection> getConnections(Application application, String sessionId);

	/**
	 * Collects every connection of the given application.
	 *
	 * @param application
	 *            the web application to look in
	 * @return collection of web socket connection used by any client connected to specified application
	 */
	Collection<IWebSocketConnection> getConnections(Application application);

	/**
	 * Adds a new connection into the registry at the specified coordinates
	 * (application + session + key).
	 *
	 * @param application
	 *            the web application to look in
	 * @param sessionId
	 *            the http session id
	 * @param key
	 *            the web socket client key
	 * @param connection
	 *            the web socket connection to add
	 */
	void setConnection(Application application, String sessionId, IKey key, IWebSocketConnection connection);

	/**
	 * Removes a web socket connection from the registry at the specified
	 * coordinates (application + session + key).
	 *
	 * @param application
	 *            the web application to look in
	 * @param sessionId
	 *            the http session id
	 * @param key
	 *            the web socket client key
	 */
	void removeConnection(Application application, String sessionId, IKey key);
}
| {
"content_hash": "1e10e3874f02fd863830d409351b0882",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 109,
"avg_line_length": 29.768115942028984,
"alnum_prop": 0.7156767283349562,
"repo_name": "aldaris/wicket",
"id": "2782a93b01c0113e19fa6629c68ead94761e7760",
"size": "2856",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "wicket-native-websocket/wicket-native-websocket-core/src/main/java/org/apache/wicket/protocol/ws/api/registry/IWebSocketConnectionRegistry.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "131341"
},
{
"name": "Dockerfile",
"bytes": "163"
},
{
"name": "HTML",
"bytes": "897448"
},
{
"name": "Java",
"bytes": "12154156"
},
{
"name": "JavaScript",
"bytes": "540078"
},
{
"name": "Logos",
"bytes": "146"
},
{
"name": "Python",
"bytes": "1547"
},
{
"name": "Shell",
"bytes": "26094"
},
{
"name": "XSLT",
"bytes": "2162"
}
],
"symlink_target": ""
} |
# Join table linking a child dynamic field to the parent field (and the
# parent value) it depends on.
class CreateDynamicFieldDependencies < ActiveRecord::Migration
  def self.up
    create_table :dynamic_field_dependencies do |t|
      t.integer :child_id
      t.integer :parent_id
      t.string  :dependent_value
    end

    # A child may depend on a given parent at most once.
    add_index :dynamic_field_dependencies,
              [:child_id, :parent_id],
              :unique => true,
              :name => 'index_df_dependencies'
  end

  def self.down
    drop_table :dynamic_field_dependencies
  end
end
| {
"content_hash": "ad4f6db3eb28e808ed33cf01636a97e1",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 117,
"avg_line_length": 30.846153846153847,
"alnum_prop": 0.7057356608478803,
"repo_name": "netmediagroup/dynamic_form_builder",
"id": "201b9979709014313b9f3d482a1cf02cf1e8d2e2",
"size": "401",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "db/migrate/20090415200001_create_dynamic_field_dependencies.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "87687"
}
],
"symlink_target": ""
} |
module.exports = angular.module('trafficPortal.form.user.new', [])
.controller('FormNewUserController', require('./FormNewUserController'));
| {
"content_hash": "cdbaa50786d7b41ff04ffdcb6a9886c3",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 77,
"avg_line_length": 36.75,
"alnum_prop": 0.7414965986394558,
"repo_name": "serDrem/incubator-trafficcontrol",
"id": "cdd08177f8a76ce07555ce4e96b4935448c29481",
"size": "956",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "traffic_portal/app/src/common/modules/form/user/new/index.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "21929"
},
{
"name": "CSS",
"bytes": "188879"
},
{
"name": "Go",
"bytes": "1377260"
},
{
"name": "HTML",
"bytes": "723492"
},
{
"name": "Java",
"bytes": "1232975"
},
{
"name": "JavaScript",
"bytes": "1598022"
},
{
"name": "Makefile",
"bytes": "1047"
},
{
"name": "PLSQL",
"bytes": "4308"
},
{
"name": "PLpgSQL",
"bytes": "70798"
},
{
"name": "Perl",
"bytes": "3472258"
},
{
"name": "Perl 6",
"bytes": "25530"
},
{
"name": "Python",
"bytes": "92267"
},
{
"name": "Roff",
"bytes": "4011"
},
{
"name": "Ruby",
"bytes": "4090"
},
{
"name": "SQLPL",
"bytes": "67758"
},
{
"name": "Shell",
"bytes": "166073"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Web.Http;
using Xpressive.Home.Contracts.Rooms;
namespace Xpressive.Home.WebApi.Controllers
{
[RoutePrefix("api/v1/roomscriptgroup")]
public class RoomScriptGroupController : ApiController
{
private readonly IRoomRepository _roomRepository;
private readonly IRoomScriptGroupRepository _repository;
public RoomScriptGroupController(IRoomScriptGroupRepository repository, IRoomRepository roomRepository)
{
_repository = repository;
_roomRepository = roomRepository;
}
[HttpGet, Route("{id}")]
public async Task<IHttpActionResult> Get(string id)
{
Guid guid;
if (Guid.TryParse(id, out guid))
{
var group = await _repository.GetAsync(guid);
if (group != null)
{
return Ok(group);
}
}
return NotFound();
}
[HttpGet, Route("")]
public async Task<IEnumerable<RoomScriptGroup>> GetByRoom([FromUri] string roomId)
{
var rooms = await _roomRepository.GetAsync();
var room = rooms.SingleOrDefault(r => r.Id.Equals(new Guid(roomId)));
if (room == null)
{
return Enumerable.Empty<RoomScriptGroup>();
}
var groups = await _repository.GetAsync(room);
return groups;
}
[HttpPost, Route("{roomId}")]
public async Task<RoomScriptGroup> Create(string roomId, [FromBody] RoomScriptGroup group)
{
var rooms = await _roomRepository.GetAsync();
var room = rooms.SingleOrDefault(r => r.Id.Equals(new Guid(roomId)));
if (room == null)
{
return null;
}
group = new RoomScriptGroup
{
Name = group.Name,
Icon = string.Empty,
RoomId = room.Id
};
await _repository.SaveAsync(group);
return group;
}
[HttpPost, Route("")]
public async Task<IHttpActionResult> Save([FromBody] RoomScriptGroup group)
{
if (group == null || group.Id == Guid.Empty)
{
return NotFound();
}
var existing = await _repository.GetAsync(group.Id);
if (existing == null)
{
return NotFound();
}
existing.Icon = group.Icon;
existing.Name = group.Name;
existing.SortOrder = group.SortOrder;
await _repository.SaveAsync(existing);
return Ok();
}
}
}
| {
"content_hash": "9b6943e9af36cce0541fdc324d115681",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 111,
"avg_line_length": 29.27,
"alnum_prop": 0.5165698667577725,
"repo_name": "xpressive-websolutions/Xpressive.Home",
"id": "c436ed5d161078475f4dcc53ced8f23eed3dcb9c",
"size": "2927",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Xpressive.Home.WebApi/Controllers/RoomScriptGroupController.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "557265"
},
{
"name": "CSS",
"bytes": "23005"
},
{
"name": "HTML",
"bytes": "59277"
},
{
"name": "JavaScript",
"bytes": "80498"
},
{
"name": "PowerShell",
"bytes": "6096"
}
],
"symlink_target": ""
} |
! Copyright (c) 2015 Alex Kramer <[email protected]>
! See the LICENSE.txt file at the top-level directory of this distribution.
! Top-level driver: initialize the calculation, run the wavefunction
! propagation, then write the results.
program scatter
  use progvars
  use setup, only: init
  use wfunc_prop, only: propagate
  use output, only: write_output
  implicit none
  ! Initialize program state (setup::init).
  call init
  ! Propagate the wavefunction (wfunc_prop::propagate).
  call propagate
  ! Write results (output::write_output).
  call write_output
end program scatter
| {
"content_hash": "839211f8e8d25178883459f95dd478dc",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 75,
"avg_line_length": 21.9375,
"alnum_prop": 0.7492877492877493,
"repo_name": "kramer314/qm-scattering",
"id": "e72ec27421d98a75d5fc57dd7123df60f39e26f2",
"size": "351",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scatter.f90",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "FORTRAN",
"bytes": "11654"
},
{
"name": "Python",
"bytes": "374"
}
],
"symlink_target": ""
} |
<metadata__cache__entry>
<version>5.2.0.0</version>
<connectorName>Twitter__Configuration</connectorName>
<metadataIo class="tree-map">
<entry>
<string>http://www.mulesoft.org/schema/mule/twitter/get-user-timeline-by-screen-name</string>
<map>
<entry>
<string>__default__</string>
<org.mule.tooling.metadata.cache.DefaultMetadataCacheEntry>
<output class="org.mule.common.metadata.DefaultMetaData">
<payload class="org.mule.common.metadata.DefaultListMetaDataModel">
<dataType>LIST</dataType>
<metaDataModelPropertiesManager>
<properties/>
</metaDataModelPropertiesManager>
<model class="org.mule.tooling.metadata.api.TypeRefMetaDataModel">
<connectorName>Twitter__Configuration</connectorName>
<typeId>twitter4j.Status</typeId>
</model>
<isArray>false</isArray>
</payload>
<properties class="tree-map">
<entry>
<org.mule.common.metadata.MetaDataPropertyScope>FLOW</org.mule.common.metadata.MetaDataPropertyScope>
<org.mule.common.metadata.MetaDataProperties>
<fields class="sorted-set">
<comparator class="org.mule.common.metadata.MetaDataProperties$MetaDataFieldComparator"/>
</fields>
</org.mule.common.metadata.MetaDataProperties>
</entry>
<entry>
<org.mule.common.metadata.MetaDataPropertyScope>INBOUND</org.mule.common.metadata.MetaDataPropertyScope>
<org.mule.common.metadata.MetaDataProperties>
<fields class="sorted-set">
<comparator class="org.mule.common.metadata.MetaDataProperties$MetaDataFieldComparator"/>
</fields>
</org.mule.common.metadata.MetaDataProperties>
</entry>
<entry>
<org.mule.common.metadata.MetaDataPropertyScope>OUTBOUND</org.mule.common.metadata.MetaDataPropertyScope>
<org.mule.common.metadata.MetaDataProperties>
<fields class="sorted-set">
<comparator class="org.mule.common.metadata.MetaDataProperties$MetaDataFieldComparator"/>
</fields>
</org.mule.common.metadata.MetaDataProperties>
</entry>
<entry>
<org.mule.common.metadata.MetaDataPropertyScope>SESSION</org.mule.common.metadata.MetaDataPropertyScope>
<org.mule.common.metadata.MetaDataProperties>
<fields class="sorted-set">
<comparator class="org.mule.common.metadata.MetaDataProperties$MetaDataFieldComparator"/>
</fields>
</org.mule.common.metadata.MetaDataProperties>
</entry>
<entry>
<org.mule.common.metadata.MetaDataPropertyScope>RECORD</org.mule.common.metadata.MetaDataPropertyScope>
<org.mule.common.metadata.MetaDataProperties>
<fields class="sorted-set">
<comparator class="org.mule.common.metadata.MetaDataProperties$MetaDataFieldComparator"/>
</fields>
</org.mule.common.metadata.MetaDataProperties>
</entry>
</properties>
</output>
</org.mule.tooling.metadata.cache.DefaultMetadataCacheEntry>
</entry>
</map>
</entry>
<entry>
<string>http://www.mulesoft.org/schema/mule/twitter/update-status</string>
<map>
<entry>
<string>__default__</string>
<org.mule.tooling.metadata.cache.DefaultMetadataCacheEntry>
<output class="org.mule.common.metadata.DefaultMetaData">
<payload class="org.mule.tooling.metadata.api.TypeRefMetaDataModel">
<connectorName>Twitter__Configuration</connectorName>
<typeId>twitter4j.Status</typeId>
</payload>
<properties class="tree-map">
<entry>
<org.mule.common.metadata.MetaDataPropertyScope>FLOW</org.mule.common.metadata.MetaDataPropertyScope>
<org.mule.common.metadata.MetaDataProperties>
<fields class="sorted-set">
<comparator class="org.mule.common.metadata.MetaDataProperties$MetaDataFieldComparator"/>
</fields>
</org.mule.common.metadata.MetaDataProperties>
</entry>
<entry>
<org.mule.common.metadata.MetaDataPropertyScope>INBOUND</org.mule.common.metadata.MetaDataPropertyScope>
<org.mule.common.metadata.MetaDataProperties>
<fields class="sorted-set">
<comparator class="org.mule.common.metadata.MetaDataProperties$MetaDataFieldComparator"/>
</fields>
</org.mule.common.metadata.MetaDataProperties>
</entry>
<entry>
<org.mule.common.metadata.MetaDataPropertyScope>OUTBOUND</org.mule.common.metadata.MetaDataPropertyScope>
<org.mule.common.metadata.MetaDataProperties>
<fields class="sorted-set">
<comparator class="org.mule.common.metadata.MetaDataProperties$MetaDataFieldComparator"/>
</fields>
</org.mule.common.metadata.MetaDataProperties>
</entry>
<entry>
<org.mule.common.metadata.MetaDataPropertyScope>SESSION</org.mule.common.metadata.MetaDataPropertyScope>
<org.mule.common.metadata.MetaDataProperties>
<fields class="sorted-set">
<comparator class="org.mule.common.metadata.MetaDataProperties$MetaDataFieldComparator"/>
</fields>
</org.mule.common.metadata.MetaDataProperties>
</entry>
<entry>
<org.mule.common.metadata.MetaDataPropertyScope>RECORD</org.mule.common.metadata.MetaDataPropertyScope>
<org.mule.common.metadata.MetaDataProperties>
<fields class="sorted-set">
<comparator class="org.mule.common.metadata.MetaDataProperties$MetaDataFieldComparator"/>
</fields>
</org.mule.common.metadata.MetaDataProperties>
</entry>
</properties>
</output>
</org.mule.tooling.metadata.cache.DefaultMetadataCacheEntry>
</entry>
</map>
</entry>
</metadataIo>
</metadata__cache__entry> | {
"content_hash": "4431f6652592414c8c10d999f62e7d09",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 123,
"avg_line_length": 53.51162790697674,
"alnum_prop": 0.5780095610604086,
"repo_name": "vageeshs7/mule-esb-examples",
"id": "360277b0b548f70e078b2ff90297771914c075ca",
"size": "6903",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "varys-knows-twitter-connector/catalog/Twitter__Configuration__md__io__.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<?php
namespace Hubzero\Form\Fields;
use Hubzero\Html\Builder\Behavior;
use Document;
use Route;
use Lang;
/**
* Supports a URL text field
*/
/**
 * Renders an ORCID iD entry field: a text input for the identifier plus an
 * ORCID "Create or Connect" OAuth link assembled from the com_members
 * configuration, followed by a short explanatory footer.
 */
class Orcid extends Text
{
	/**
	 * The form field type.
	 *
	 * @var  string
	 */
	protected $type = 'Orcid';

	/**
	 * Method to get the field input markup.
	 *
	 * @return  string  The field input markup.
	 */
	protected function getInput()
	{
		// Collect the <input> attributes. Empty entries are dropped below,
		// except "value", which is always emitted.
		$attributes = array(
			'type'         => 'text',
			'value'        => htmlspecialchars($this->value, ENT_COMPAT, 'UTF-8'),
			'name'         => $this->name,
			'id'           => $this->id,
			'size'         => ($this->element['size'] ? (int) $this->element['size'] : ''),
			'maxlength'    => ($this->element['maxlength'] ? (int) $this->element['maxlength'] : ''),
			// Bug fix: extra classes were concatenated directly onto "orcid"
			// (producing e.g. "orcidfoo"); separate them with a space.
			'class'        => 'orcid' . ($this->element['class'] ? ' ' . (string) $this->element['class'] : ''),
			'autocomplete' => ((string) $this->element['autocomplete'] == 'off' ? 'off' : ''),
			'readonly'     => ((string) $this->element['readonly'] == 'true' ? 'readonly' : ''),
			'disabled'     => ((string) $this->element['disabled'] == 'true' ? 'disabled' : ''),
			'onchange'     => ($this->element['onchange'] ? (string) $this->element['onchange'] : '')
		);

		$attr = array();
		foreach ($attributes as $key => $value)
		{
			if ($key != 'value' && !$value)
			{
				continue;
			}
			$attr[] = $key . '="' . $value . '"';
		}
		$attr = implode(' ', $attr);

		$html = array();
		$html[] = '<div class="grid">';
		$html[] = ' <div class="col span9">';
		$html[] = ' <input ' . $attr . ' placeholder="####-####-####-####" />';
		$html[] = ' <input type="hidden" name="base_uri" id="base_uri" value="' . rtrim(Request::base(true), '/') . '" />';
		$html[] = ' </div>';
		$html[] = ' <div class="col span3 omega">';

		// Build the ORCID Create or Connect hyperlink
		$config = Component::params('com_members');
		$srv = $config->get('orcid_service', 'members');
		$clientID = $config->get('orcid_' . $srv . '_client_id', '');
		$redirectURI = $config->get('orcid_' . $srv . '_redirect_uri', '');
		$html[] = ' <a id="create-orcid" class="btn" href="https://';
		if ($srv == 'sandbox')
		{
			$html[] = 'sandbox.';
		}
		$html[] = 'orcid.org/oauth/authorize?client_id=' . $clientID . htmlspecialchars('&') . 'response_type=code' . htmlspecialchars('&') . 'scope=/authenticate' . htmlspecialchars('&'). 'redirect_uri=' . urlencode($redirectURI)
			. '" rel="nofollow external">' . '<img src="' . Request::root(true) . 'core/components/com_members/site/assets/img/orcid_16x16.png" class="logo" width="20" height="20" alt="iD"/>'
			. Lang::txt('COM_MEMBERS_PROFILE_ORCID_CREATE_OR_CONNECT') . '</a>';
		$html[] = ' </div>';
		$html[] = '</div>';
		$html[] = '<p><img src="' . Request::root(true) . 'core/components/com_members/site/assets/img/orcid-logo.png" width="80" alt="ORCID" /> ' . Lang::txt('COM_MEMBERS_PROFILE_ORCID_ABOUT') . '</p>';

		Behavior::framework(true);
		Behavior::modal();

		// Attach the client-side ORCID helper script, cache-busted by mtime.
		$path = dirname(dirname(__DIR__)) . '/site/assets/js/orcid.js';
		if (file_exists($path))
		{
			Document::addScript(Request::root(true) . 'core/components/com_members/site/assets/js/orcid.js?t=' . filemtime($path));
		}

		return implode($html);
	}
}
| {
"content_hash": "b5e3062e7cdf7f6390049ebd8b42e5a6",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 227,
"avg_line_length": 35.6,
"alnum_prop": 0.562093435836783,
"repo_name": "zooley/hubzero-cms",
"id": "33d37b00adb2096cfee8173861a234b37af053ab",
"size": "3530",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "core/components/com_members/models/fields/orcid.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "171251"
},
{
"name": "AngelScript",
"bytes": "1638"
},
{
"name": "CSS",
"bytes": "2719736"
},
{
"name": "HTML",
"bytes": "1289374"
},
{
"name": "JavaScript",
"bytes": "12613354"
},
{
"name": "PHP",
"bytes": "24941743"
},
{
"name": "Shell",
"bytes": "10678"
},
{
"name": "TSQL",
"bytes": "572"
}
],
"symlink_target": ""
} |
/*
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
/*
*
*
*
*
*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
*/
package java.util.concurrent.atomic;
import sun.misc.Unsafe;
/**
* An {@code int} value that may be updated atomically. See the
* {@link java.util.concurrent.atomic} package specification for
* description of the properties of atomic variables. An
* {@code AtomicInteger} is used in applications such as atomically
* incremented counters, and cannot be used as a replacement for an
* {@link java.lang.Integer}. However, this class does extend
* {@code Number} to allow uniform access by tools and utilities that
* deal with numerically-based classes.
*
* @since 1.5
* @author Doug Lea
*/
public class AtomicInteger extends Number implements java.io.Serializable {
    private static final long serialVersionUID = 6214790243416807050L;
    // setup to use Unsafe.compareAndSwapInt for updates
    private static final Unsafe unsafe = Unsafe.getUnsafe();
    // Byte offset of the "value" field, resolved once so the CAS operations
    // below can address the field directly through Unsafe.
    private static final long valueOffset;
    static {
        try {
            valueOffset = unsafe.objectFieldOffset
                (AtomicInteger.class.getDeclaredField("value"));
        } catch (Exception ex) { throw new Error(ex); }
    }
    // The current value; volatile so plain get()/set() carry volatile
    // read/write memory semantics.
    private volatile int value;
    /**
     * Creates a new AtomicInteger with the given initial value.
     *
     * @param initialValue the initial value
     */
    public AtomicInteger(int initialValue) {
        value = initialValue;
    }
    /**
     * Creates a new AtomicInteger with initial value {@code 0}.
     */
    public AtomicInteger() {
        // value defaults to 0 (Java int field default); no work needed.
    }
    /**
     * Gets the current value.
     *
     * @return the current value
     */
    public final int get() {
        return value;
    }
    /**
     * Sets to the given value.
     *
     * @param newValue the new value
     */
    public final void set(int newValue) {
        value = newValue;
    }
    /**
     * Eventually sets to the given value.
     *
     * @param newValue the new value
     * @since 1.6
     */
    public final void lazySet(int newValue) {
        // Ordered write via Unsafe.putOrderedInt: weaker than a volatile
        // write, so other threads may briefly observe the previous value
        // (matching the "eventually sets" contract above).
        unsafe.putOrderedInt(this, valueOffset, newValue);
    }
/**
* Atomically sets to the given value and returns the old value.
*
* @param newValue the new value
* @return the previous value
*/
public final int getAndSet(int newValue) {
for (;;) {
int current = get();
if (compareAndSet(current, newValue))
return current;
}
}
/**
* Atomically sets the value to the given updated value
* if the current value {@code ==} the expected value.
*
* @param expect the expected value
* @param update the new value
* @return true if successful. False return indicates that
* the actual value was not equal to the expected value.
*/
public final boolean compareAndSet(int expect, int update) {
return unsafe.compareAndSwapInt(this, valueOffset, expect, update);
}
/**
* Atomically sets the value to the given updated value
* if the current value {@code ==} the expected value.
*
* <p>May <a href="package-summary.html#Spurious">fail spuriously</a>
* and does not provide ordering guarantees, so is only rarely an
* appropriate alternative to {@code compareAndSet}.
*
* @param expect the expected value
* @param update the new value
* @return true if successful.
*/
public final boolean weakCompareAndSet(int expect, int update) {
return unsafe.compareAndSwapInt(this, valueOffset, expect, update);
}
/**
* Atomically increments by one the current value.
*
* @return the previous value
*/
public final int getAndIncrement() {
for (;;) {
int current = get();
int next = current + 1;
if (compareAndSet(current, next))
return current;
}
}
/**
* Atomically decrements by one the current value.
*
* @return the previous value
*/
public final int getAndDecrement() {
for (;;) {
int current = get();
int next = current - 1;
if (compareAndSet(current, next))
return current;
}
}
/**
* Atomically adds the given value to the current value.
*
* @param delta the value to add
* @return the previous value
*/
public final int getAndAdd(int delta) {
for (;;) {
int current = get();
int next = current + delta;
if (compareAndSet(current, next))
return current;
}
}
/**
* Atomically increments by one the current value.
*
* @return the updated value
*/
public final int incrementAndGet() {
for (;;) {
int current = get();
int next = current + 1;
if (compareAndSet(current, next))
return next;
}
}
/**
* Atomically decrements by one the current value.
*
* @return the updated value
*/
public final int decrementAndGet() {
for (;;) {
int current = get();
int next = current - 1;
if (compareAndSet(current, next))
return next;
}
}
/**
* Atomically adds the given value to the current value.
*
* @param delta the value to add
* @return the updated value
*/
public final int addAndGet(int delta) {
for (;;) {
int current = get();
int next = current + delta;
if (compareAndSet(current, next))
return next;
}
}
/**
* Returns the String representation of the current value.
* @return the String representation of the current value.
*/
public String toString() {
return Integer.toString(get());
}
public int intValue() {
return get();
}
public long longValue() {
return (long)get();
}
public float floatValue() {
return (float)get();
}
public double doubleValue() {
return (double)get();
}
}
| {
"content_hash": "d13dd470698c748a69263bf8ebea3c66",
"timestamp": "",
"source": "github",
"line_count": 265,
"max_line_length": 75,
"avg_line_length": 24.19622641509434,
"alnum_prop": 0.5790704928259514,
"repo_name": "ZhaoX/jdk-1.7-annotated",
"id": "07366aa2e1e24f31c8b175c64ec45dc5c9b00117",
"size": "6412",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/java/util/concurrent/atomic/AtomicInteger.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "23352747"
}
],
"symlink_target": ""
} |
/* Outer container for the cascading-list widget. */
.cascading-list
{
    border: 1px solid #999;
    color: #333;
}
/*Clear fix*/
.cascading-list:before, .cascading-list:after { content: "\0020"; display: block; height: 0; overflow: hidden; }
.cascading-list:after { clear: both; }
.cascading-list { zoom: 1; } /* triggers hasLayout for old IE */
/* Each column of the cascade floats left inside the bordered container. */
.cascading-list .selective
{
    float: left;
    border: none;
}
/* List items show a right-aligned arrow image (expand indicator). */
.selective li
{
    background-image: url('images/arrow-gradient.png');
    background-position: 99% center;
    background-repeat: no-repeat;
    overflow: hidden;
}
/* Arrow variants: disabled list, selected item, focused list. */
.selective-disabled li.selected
{
    background-image: url('images/arrow-disabled.png');
}
.selective li.selected
{
    background-image: url('images/arrow-solidgrey.png');
}
.selective:focus li.selected
{
    background-image: url('images/arrow-white.png');
}
/* Leaf items (no children) get no arrow at all. */
.selective li.no-children, .selective:focus li.no-children
{
    background-image: none;
}
/* Highlight for the active item. */
.selective li.active
{
    background-color: #fff2ab;
}
| {
"content_hash": "c0a326d4c42371ae1726b873571eecb0",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 112,
"avg_line_length": 19.632653061224488,
"alnum_prop": 0.6444906444906445,
"repo_name": "jondkoon/jquery.cascadingList",
"id": "0e986699ccfa718b425b424ac3d991dff4b30fcf",
"size": "964",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jquery.cascadingList/jquery.cascadingList.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "21363"
}
],
"symlink_target": ""
} |
goog.provide("trapeze.font.HmtxTable");
trapeze.font.HmtxTable = function(ttf) {
	// The glyph count stored in the maxp table may be incorrect for
	// subsetted fonts produced by some PDF generators, so it is treated
	// as an upper bound and reads stop when the data runs out.
	var maxp = ttf.getTable("maxp");
	var numGlyphs = maxp.numGlyphs;
	var hhea = ttf.getTable("hhea");
	var numOfLongHorMetrics = hhea.numOfLongHorMetrics;
	this.advanceWidths = []; // advance width per glyph id (first numOfLongHorMetrics entries)
	this.leftSideBearings = []; // left side bearing per glyph id
	/**
	 * Populates the metric arrays from the raw table bytes.
	 * @param data reader exposing hasRemaining() and getShort()
	 */
	this.setData = function(data) {
		var i;
		// Only read as much data as is available.
		for (i = 0; i < numGlyphs && data.hasRemaining(); i++) {
			if (i < numOfLongHorMetrics) {
				this.advanceWidths[i] = data.getShort();
			}
			this.leftSideBearings[i] = data.getShort();
		}
		// Zero-fill the entries the (possibly truncated) data did not cover.
		// BUG FIX: the original loops indexed with `i` instead of the loop
		// variable, so only a single slot was ever written.
		for (var j = i; j < numOfLongHorMetrics; j++) {
			this.advanceWidths[j] = 0;
		}
		for (var k = i; k < numGlyphs; k++) {
			this.leftSideBearings[k] = 0;
		}
	};
	/**
	 * Returns the advance width for a glyph id. Per the TrueType spec, glyphs
	 * beyond the hMetrics array reuse the last recorded advance width.
	 */
	this.getAdvance = function(glyphID) {
		if (glyphID < this.advanceWidths.length) {
			return this.advanceWidths[glyphID];
		} else {
			return this.advanceWidths[this.advanceWidths.length - 1];
		}
	};
};
"content_hash": "178062854137bf658c6d997380944c8b",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 95,
"avg_line_length": 41.319148936170215,
"alnum_prop": 0.621524201853759,
"repo_name": "AKamanjha/trapeze-reader",
"id": "21795c2696790096ca86b4e99ff4096eb419aac3",
"size": "1942",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "src/font/HmtxTable.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2571"
},
{
"name": "HTML",
"bytes": "8176"
},
{
"name": "JavaScript",
"bytes": "333536"
},
{
"name": "PHP",
"bytes": "5564"
}
],
"symlink_target": ""
} |
package org.apereo.portal.io.xml.portlettype;
import org.apereo.portal.security.IPermission;
/**
* Set of supported permissions that can be used in the <permissions></permissions> element of a
* portlet definition.
*
* @since 4.2
*/
public enum ExternalPermissionDefinition {
    SUBSCRIBE(IPermission.PORTAL_SUBSCRIBE, IPermission.PORTLET_SUBSCRIBER_ACTIVITY, false),
    BROWSE(IPermission.PORTAL_SUBSCRIBE, IPermission.PORTLET_BROWSE_ACTIVITY, true);

    /** Permission system (owner) identifier. */
    private final String system;
    /** Permission activity identifier. */
    private final String activity;
    // Made final for consistency with the other fields; it is never reassigned.
    /** Whether this permission is emitted when exporting a portlet definition. */
    private final boolean exportForPortletDef;

    ExternalPermissionDefinition(final String system, final String activity, final boolean export) {
        this.system = system;
        this.activity = activity;
        this.exportForPortletDef = export;
    }

    public String getSystem() {
        return system;
    }

    public String getActivity() {
        return activity;
    }

    public boolean getExportForPortletDef() {
        return exportForPortletDef;
    }

    @Override
    public String toString() {
        return system + "." + activity;
    }

    /**
     * Given a system and activity, attempt to lookup a matching ExternalPermissionDefinition.
     *
     * @param system the system to lookup (case-insensitive)
     * @param activity the activity to lookup (case-insensitive)
     * @return the matching permission if one can be found, otherwise null
     */
    public static ExternalPermissionDefinition find(String system, String activity) {
        for (ExternalPermissionDefinition perm : ExternalPermissionDefinition.values()) {
            if (perm.system.equalsIgnoreCase(system) && perm.activity.equalsIgnoreCase(activity)) {
                return perm;
            }
        }

        return null;
    }
}
| {
"content_hash": "195b15d3728c49916f30624a65bd1675",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 100,
"avg_line_length": 29.440677966101696,
"alnum_prop": 0.6827864133563616,
"repo_name": "stalele/uPortal",
"id": "4da8c08a1eca6301713c55d38b6508287809fff9",
"size": "2526",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "uPortal-io/uPortal-io-types/src/main/java/org/apereo/portal/io/xml/portlettype/ExternalPermissionDefinition.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "238557"
},
{
"name": "Groovy",
"bytes": "56453"
},
{
"name": "HTML",
"bytes": "223563"
},
{
"name": "Java",
"bytes": "9899148"
},
{
"name": "JavaScript",
"bytes": "811252"
},
{
"name": "Perl",
"bytes": "1769"
},
{
"name": "Shell",
"bytes": "3135"
},
{
"name": "XSLT",
"bytes": "259363"
}
],
"symlink_target": ""
} |
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
// Static configuration for the admin panel: routing, shell chrome defaults,
// and auth endpoints. (Compiled ES module output — structure is data only.)
var AppConfigSettings = {
  name: 'Admin Panel',
  basename: 'http://localhost:8786',
  adminPath: '/r-admin',
  routerHistory: 'browserHistory',
  hot_reload: false,
  // Core data sets fetched during startup.
  includeCoreData: {
    manifest: true,
    navigation: true
  },
  allHistoryOptions: 'browserHistory|hashHistory|createMemoryHistory',
  application: {
    environment: 'development',
    use_offline_cache: false
  },
  // Presentation defaults for header/footer/sidebar and notifications.
  ui: {
    initialization: {
      show_header: false,
      show_footer: false,
      show_sidebar_overlay: true,
      refresh_manifests: true,
      refresh_navigation: true,
      refresh_components: true
    },
    notifications: {
      // Timeouts are in milliseconds.
      error_timeout: 10000,
      timed_timeout: 10000,
      hide_login_notification: false,
      supressResourceErrors: false
    },
    fixedSidebar: true,
    sidebarBG: '#ffffff',
    header: {
      isBold: true,
      color: 'isBlack',
      buttonColor: 'isWhite',
      useGlobalSearch: false,
      useHeaderLogout: false,
      customButton: false,
      navLabelStyle: {},
      containerStyle: {},
      userNameStyle: {}
    },
    footer: {
      navStyle: {}
    },
    sidebar: {
      containerStyle: {},
      use_floating_nav: false
    }
  },
  // Redirect targets after login/logout.
  auth: {
    logged_in_homepage: '/r-admin/dashboard',
    logged_out_path: '/login'
  },
  // JWT token endpoint plus the request options used to call it.
  login: {
    url: 'http://localhost:8786/api/jwt/token',
    devurl: 'http://localhost:8786/api/jwt/token',
    options: {
      method: 'POST',
      headers: {
        Accept: 'application/json',
        clientid: 'fbff80bd23de5b1699cb595167370a1a',
        entitytype: 'account'
      }
    }
  },
  // JWT profile endpoint plus the request options used to call it.
  userprofile: {
    url: 'http://localhost:8786/api/jwt/profile',
    devurl: 'http://localhost:8786/api/jwt/profile',
    options: {
      method: 'POST',
      headers: {
        Accept: 'application/json',
        'Content-Type': 'application/json',
        clientid: 'fbff80bd23de5b1699cb595167370a1a',
        clientid_default: 'clientIDNEEDED',
        entitytype: 'account'
      }
    }
  }
};
// Action-type constants grouped by feature area. The jwt_token/cache keys are
// prefixed with AppConfigSettings.name so multiple apps can share storage.
exports.default = {
  pages: {
    LOAD_PAGE_ACTION: 'load page component',
    INITIAL_APP_LOADED: 'loaded initial app state',
    RESET_APP_LOADED: 'resetting initial app state',
    ASYNCSTORAGE_KEY: 'current_view',
    UPDATE_APP_DIMENSIONS: 'update dimensions state'
  },
  tabBarExtensions: {
    SET_EXTENSIONS_ACTION: 'set tabBarExtensions'
  },
  // Generic request/failure/success triple for data fetches.
  fetchData: {
    FETCH_DATA_REQUEST: 'fetching data request',
    FETCH_DATA_FAILURE: 'fetching data failed',
    FETCH_DATA_SUCCESS: 'fetching data succeeded'
  },
  user: {
    LOGIN_DATA_REQUEST: 'user logining data request',
    USER_DATA_FAILURE: 'user fetching data failed',
    LOGIN_DATA_SUCCESS: 'user login fetching data succeeded',
    SAVE_DATA_SUCCESS: 'user profile saving data succeeded',
    UPDATE_PROFILE_SUCCESS: 'user profile data updated',
    LOGOUT_REQUEST: 'user logout request',
    LOGOUT_SUCCESS: 'user logout succeeded',
    LOGOUT_FAILURE: 'user logout failed',
    PREFERENCE_LOAD_SUCCESS: 'preferences loaded',
    PREFERENCE_LOAD_ERROR: 'preferences failed',
    // NOTE(review): 'perferences' is misspelled, but consumers may match this
    // exact string — confirm before correcting it.
    PREFERENCE_REQUEST: 'perferences request',
    NAVIGATION_LOAD_SUCCESS: 'navigation loaded',
    NAVIGATION_LOAD_ERROR: 'navigation failed',
    NAVIGATION_REQUEST: 'navigation request',
    MFA_AUTHENTICATED: 'mfa authenticated'
    // CURRENT_USER_STATUS:'get current login status',
  },
  clientCacheData: {
    CLIENT_CACHE_DATA_REQUEST: 'client cache data save request',
    CLIENT_CACHE_DATA_FAILURE: 'client cache data failed',
    CLIENT_CACHE_DATA_SUCCESS: 'client cache data succeeded'
  },
  dynamic: {
    SET_DYNAMIC_DATA: 'set dynamic data'
    // SHOW_ERROR:'show error notification',
  },
  output: {
    OUTPUT_FILE_DATA_SUCCESS: 'output data to file',
    OUTPUT_FILE_DATA_ERROR: 'error outputing data to file'
    // SHOW_ERROR:'show error notification',
  },
  // Storage keys (not action types) for the JWT token and cached profile.
  jwt_token: {
    TOKEN_NAME: AppConfigSettings.name + '_jwt_token',
    TOKEN_DATA: AppConfigSettings.name + '_jwt_token_data',
    PROFILE_JSON: AppConfigSettings.name + '_jwt_profile'
  },
  cache: {
    CONFIGURATION_CACHE: AppConfigSettings.name + '_configuration'
  },
  manifest: {
    MANIFEST_DATA_REQUEST: 'manifest data request',
    MANIFEST_DATA_FAILURE: 'manifest data failed',
    MANIFEST_DATA_SUCCESS: 'manifest data succeeded',
    UNAUTHENTICATED_MANIFEST_DATA_REQUEST: 'unauthenticated manifest data request',
    UNAUTHENTICATED_MANIFEST_DATA_FAILURE: 'unauthenticated manifest data failed',
    UNAUTHENTICATED_MANIFEST_DATA_SUCCESS: 'unauthenticated manifest data succeeded'
  },
  notification: {
    SHOW_TIMED_NOTIFICATION: 'show timed notification',
    SHOW_STATIC_NOTIFICATION: 'show static notification',
    HIDE_NOTIFICATION: 'hide notification',
    FAILED_NOTIFICATION_CREATION: 'failed to create notification',
    SHOW_MODAL: 'show modal',
    HIDE_MODAL: 'hide modal'
  },
  ui: {
    TOGGLE_SIDEBAR: 'toggle side menu',
    OPEN_SIDEBAR: 'open side menu',
    CLOSE_SIDEBAR: 'close side menu',
    SET_UI_LOADED: 'set ui loaded state',
    SET_NAV_LABEL: 'set navigation label',
    LOAD_NAV_DATA_SUCCESS: 'set nav ui loaded state',
    LOGIN_COMPONENT: 'fetchLoginComponent',
    MAIN_COMPONENT: 'fetchMainComponent',
    ERROR_COMPONENTS: 'fetchErrorComponents',
    SET_SELECTED_NAV_STATE: 'making nav item active'
    // GET_APP_STATE:'get current app state',
  },
  settings: {
    UPDATE_APP_SETTINGS: 'update application settings'
  }
};
"content_hash": "f65b217a086242ffe3f42f9acda08267",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 84,
"avg_line_length": 31.164772727272727,
"alnum_prop": 0.6641750227894258,
"repo_name": "typesettin/periodicjs.ext.reactadmin",
"id": "cda69b05de0d82cc0e8b53bc044b90e0979939f0",
"size": "5485",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "adminclient/_src/constants/index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3282"
},
{
"name": "HTML",
"bytes": "148597"
},
{
"name": "JavaScript",
"bytes": "1262207"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Weather screen: a full-screen image behind a DrawerLayout whose main
     content is a pull-to-refresh scroll of weather cards; the drawer panel
     hosts the area-chooser fragment. -->
<FrameLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:background="@color/colorPrimary">
    <!-- Backdrop image; id suggests it is filled with Bing's daily picture
         at runtime — confirm in the hosting activity. -->
    <ImageView
        android:id="@+id/bing_pic_img"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:scaleType="centerCrop" />
    <android.support.v4.widget.DrawerLayout
        android:id="@+id/drawer_layout"
        android:layout_width="match_parent"
        android:layout_height="match_parent">
        <!-- Pull-to-refresh wrapper around the scrolling weather content. -->
        <android.support.v4.widget.SwipeRefreshLayout
            android:id="@+id/swipe_refresh"
            android:layout_width="match_parent"
            android:layout_height="match_parent">
            <ScrollView
                android:id="@+id/weather_layout"
                android:layout_width="match_parent"
                android:layout_height="match_parent"
                android:scrollbars="none"
                android:overScrollMode="never">
                <LinearLayout
                    android:orientation="vertical"
                    android:layout_width="match_parent"
                    android:layout_height="wrap_content"
                    android:fitsSystemWindows="true">
                    <!-- Stacked weather cards, one include per card layout. -->
                    <include layout="@layout/title" />
                    <include layout="@layout/now" />
                    <include layout="@layout/forecast" />
                    <include layout="@layout/aqi" />
                    <include layout="@layout/suggestion" />
                </LinearLayout>
            </ScrollView>
        </android.support.v4.widget.SwipeRefreshLayout>
        <!-- Start-side drawer panel: area/city chooser. -->
        <fragment
            android:id="@+id/choose_area_fragment"
            android:name="com.huntertzty.tianyu.coolweather.ChooseAreaFragment"
            android:layout_width="match_parent"
            android:layout_height="match_parent"
            android:layout_gravity="start"
            />
    </android.support.v4.widget.DrawerLayout>
</FrameLayout>
| {
"content_hash": "3da76472e2930d57e8097033db9e9889",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 79,
"avg_line_length": 33.12698412698413,
"alnum_prop": 0.5764254911356014,
"repo_name": "DemonHunterzty/CoolWeather",
"id": "544519142c753b959392f5b406be088a1ec2eb14",
"size": "2087",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/res/layout/activity_weather.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "33698"
}
],
"symlink_target": ""
} |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Management.Automation.Internal;
using System.Management.Automation.Language;
using System.Management.Automation.Runspaces;
using System.Management.Automation.Tracing;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Security;
using System.Text;
using System.Xml;
using Microsoft.Management.Infrastructure;
using Microsoft.Management.Infrastructure.Serialization;
using Microsoft.PowerShell.Commands;
using Dbg = System.Management.Automation.Diagnostics;
using System.Management.Automation.Remoting;
#if CORECLR
// Use stubs for SerializableAttribute and ISerializable related types
using Microsoft.PowerShell.CoreClr.Stubs;
#else
using MailAddress = System.Net.Mail.MailAddress;
#endif
namespace System.Management.Automation
{
    /// <summary>
    /// Bitwise flags that tweak how objects are serialized to CliXml.
    /// </summary>
    [Flags]
    internal enum SerializationOptions
    {
        /// <summary>No special behavior.</summary>
        None = 0,
        /// <summary>Let types.ps1xml SerializationDepth settings override the requested depth.</summary>
        UseDepthFromTypes = 1,
        /// <summary>Do not emit the root element (used for remoting fragments).</summary>
        NoRootElement = 2,
        /// <summary>Do not emit the CliXml namespace.</summary>
        NoNamespace = 4,
        // Presumably suppresses object RefId reuse — confirm in InternalSerializer.
        NoObjectRefIds = 8,
        /// <summary>Keep the serialization setting of the original object.</summary>
        PreserveSerializationSettingOfOriginal = 16,
        /// <summary>Combination used by the remoting layer.</summary>
        RemotingOptions = UseDepthFromTypes | NoRootElement | NoNamespace | PreserveSerializationSettingOfOriginal,
    }
internal class SerializationContext
{
private const int DefaultSerializationDepth = 2;
internal SerializationContext()
: this(DefaultSerializationDepth, true)
{
}
internal SerializationContext(int depth, bool useDepthFromTypes)
: this(
depth,
(useDepthFromTypes ? SerializationOptions.UseDepthFromTypes : SerializationOptions.None) |
SerializationOptions.PreserveSerializationSettingOfOriginal,
null)
{
}
internal SerializationContext(int depth, SerializationOptions options, PSRemotingCryptoHelper cryptoHelper)
{
if (depth < 1)
{
throw PSTraceSource.NewArgumentException("writer", Serialization.DepthOfOneRequired);
}
this.depth = depth;
this.options = options;
this.cryptoHelper = cryptoHelper;
}
internal readonly int depth;
internal readonly SerializationOptions options;
internal readonly PSRemotingCryptoHelper cryptoHelper;
internal readonly CimClassSerializationCache<CimClassSerializationId> cimClassSerializationIdCache = new CimClassSerializationCache<CimClassSerializationId>();
}
/// <summary>
/// This class provides public functionality for serializing a PSObject
/// </summary>
public class PSSerializer
{
internal PSSerializer() { }
/// <summary>
/// Serializes an object into PowerShell CliXml
/// </summary>
/// <param name="source">The input object to serialize. Serializes to a default depth of 1</param>
/// <returns>The serialized object, as CliXml</returns>
public static string Serialize(Object source)
{
return Serialize(source, s_mshDefaultSerializationDepth);
}
/// <summary>
/// Serializes an object into PowerShell CliXml
/// </summary>
/// <param name="source">The input object to serialize</param>
/// <param name="depth">The depth of the members to serialize</param>
/// <returns>The serialized object, as CliXml</returns>
public static string Serialize(Object source, int depth)
{
// Create an xml writer
StringBuilder sb = new StringBuilder();
XmlWriterSettings xmlSettings = new XmlWriterSettings();
xmlSettings.CloseOutput = true;
xmlSettings.Encoding = System.Text.Encoding.Unicode;
xmlSettings.Indent = true;
xmlSettings.OmitXmlDeclaration = true;
XmlWriter xw = XmlWriter.Create(sb, xmlSettings);
// Serialize the objects
Serializer serializer = new Serializer(xw, depth, true);
serializer.Serialize(source);
serializer.Done();
serializer = null;
// Return the output
return sb.ToString();
}
/// <summary>
/// Deserializes PowerShell CliXml into an object.
/// </summary>
/// <param name="source">The CliXml the represents the object to deserialize.</param>
/// <returns>An object that represents the serialized content</returns>
public static object Deserialize(string source)
{
Object[] results = DeserializeAsList(source);
// Return the results
if (results.Length == 0)
{
return null;
}
else if (results.Length == 1)
{
return results[0];
}
else
{
return results;
}
}
/// <summary>
/// Deserializes PowerShell CliXml into a list of objects.
/// </summary>
/// <param name="source">The CliXml the represents the object to deserialize.</param>
/// <returns>An object array represents the serialized content</returns>
public static object[] DeserializeAsList(string source)
{
List<object> results = new List<object>();
// Create the text reader to hold the content
TextReader textReader = new StringReader(source);
XmlReader xmlReader = XmlReader.Create(textReader, InternalDeserializer.XmlReaderSettingsForCliXml);
// Deserialize the content
Deserializer deserializer = new Deserializer(xmlReader);
while (!deserializer.Done())
{
object result = deserializer.Deserialize();
results.Add(result);
}
return results.ToArray();
}
/// <summary>
/// Default depth of serialization
/// </summary>
private static int s_mshDefaultSerializationDepth = 1;
}
/// <summary>
/// This class provides functionality for serializing a PSObject
/// </summary>
internal class Serializer
{
#region constructor
private readonly InternalSerializer _serializer;
/// <summary>
/// Creates a Serializer using default serialization context
/// </summary>
/// <param name="writer">writer to be used for serialization</param>
internal Serializer(XmlWriter writer)
: this(writer, new SerializationContext())
{
}
/// <summary>
/// Creates a Serializer using specified serialization context
/// </summary>
/// <param name="writer">writer to be used for serialization</param>
/// <param name="depth">depth of serialization</param>
/// <param name="useDepthFromTypes">
/// if <c>true</c> then types.ps1xml can override depth
/// for a particular types (using SerializationDepth property)
/// </param>
internal Serializer(XmlWriter writer, int depth, bool useDepthFromTypes)
: this(writer, new SerializationContext(depth, useDepthFromTypes))
{
}
/// <summary>
/// Creates a Serializer using specified serialization context
/// </summary>
/// <param name="writer">writer to be used for serialization</param>
/// <param name="context">serialization context</param>
internal Serializer(XmlWriter writer, SerializationContext context)
{
if (writer == null)
{
throw PSTraceSource.NewArgumentException("writer");
}
if (context == null)
{
throw PSTraceSource.NewArgumentException("context");
}
_serializer = new InternalSerializer(writer, context);
_serializer.Start();
}
#endregion constructor
#region public methods / properties
/// <summary>
/// Used by Remoting infrastructure. This TypeTable instance
/// will be used by Serializer if ExecutionContext is not
/// available (to get the ExecutionContext's TypeTable)
/// </summary>
internal TypeTable TypeTable
{
get { return _serializer.TypeTable; }
set { _serializer.TypeTable = value; }
}
/// <summary>
/// Serializes the object
/// </summary>
/// <param name="source">object to be serialized</param>
/// <remarks>
/// Please note that this method shouldn't throw any exceptions.
/// If it throws - please open a bug.
/// </remarks>
internal void Serialize(object source)
{
Serialize(source, null);
}
/// <summary>
/// Serializes passed in object
/// </summary>
/// <param name="source">
/// object to be serialized
/// </param>
/// <param name="streamName">
/// Stream to which this object belong. Ex: Output, Error etc.
/// </param>
/// <remarks>
/// Please note that this method shouldn't throw any exceptions.
/// If it throws - please open a bug.
/// </remarks>
internal void Serialize(object source, string streamName)
{
_serializer.WriteOneTopLevelObject(source, streamName);
}
/// <summary>
/// Write the end of root element
/// </summary>
internal void Done()
{
_serializer.End();
}
internal void Stop()
{
_serializer.Stop();
}
#endregion
}
    /// <summary>
    /// Bitwise flags that tweak how CliXml is deserialized. Values start at
    /// 256 to avoid overlapping SerializationOptions (helps catch mix-ups).
    /// </summary>
    [Flags]
    internal enum DeserializationOptions
    {
        /// <summary>No special behavior.</summary>
        None = 0,
        NoRootElement = 256, // I start at 256 to try not to overlap
        NoNamespace = 512, // with SerializationOptions and to catch bugs early
        /// <summary>Recreate script blocks instead of deserializing them as text.</summary>
        DeserializeScriptBlocks = 1024,
        /// <summary>Combination used by the remoting layer.</summary>
        RemotingOptions = NoRootElement | NoNamespace,
    }
internal class DeserializationContext
{
internal DeserializationContext()
: this(DeserializationOptions.None, null)
{
}
internal DeserializationContext(DeserializationOptions options, PSRemotingCryptoHelper cryptoHelper)
{
this.options = options;
this.cryptoHelper = cryptoHelper;
}
/// <summary>
/// Limits the total data processed by the deserialization context. Deserialization context
/// is used by PriorityReceivedDataCollection (remoting) to process incoming data from the
/// remote end. A value of Null means that the max memory is unlimited.
/// </summary>
internal Nullable<int> MaximumAllowedMemory { set; get; }
/// <summary>
/// Logs that memory used by deserialized objects is not related to the size of input xml.
/// Used mainly to account for memory usage of cloned TypeNames when calculating memory quota usage.
/// </summary>
/// <param name="amountOfExtraMemory"></param>
internal void LogExtraMemoryUsage(int amountOfExtraMemory)
{
if (amountOfExtraMemory < 0)
{
return;
}
if (MaximumAllowedMemory.HasValue)
{
if (amountOfExtraMemory > (MaximumAllowedMemory.Value - _totalDataProcessedSoFar))
{
string message = StringUtil.Format(Serialization.DeserializationMemoryQuota, ((double)MaximumAllowedMemory.Value) / (1 << 20),
ConfigurationDataFromXML.MAXRCVDOBJSIZETOKEN_CamelCase,
ConfigurationDataFromXML.MAXRCVDCMDSIZETOKEN_CamelCase);
throw new XmlException(message);
}
_totalDataProcessedSoFar = _totalDataProcessedSoFar + amountOfExtraMemory;
}
}
private int _totalDataProcessedSoFar;
internal readonly DeserializationOptions options;
internal readonly PSRemotingCryptoHelper cryptoHelper;
internal static int MaxItemsInCimClassCache = 100;
internal readonly CimClassDeserializationCache<CimClassSerializationId> cimClassSerializationIdCache = new CimClassDeserializationCache<CimClassSerializationId>();
}
internal class CimClassDeserializationCache<TKey>
{
private readonly Dictionary<TKey, CimClass> _cimClassIdToClass = new Dictionary<TKey, CimClass>();
internal void AddCimClassToCache(TKey key, CimClass cimClass)
{
if (_cimClassIdToClass.Count >= DeserializationContext.MaxItemsInCimClassCache)
{
_cimClassIdToClass.Clear();
}
_cimClassIdToClass.Add(key, cimClass);
/* PRINTF DEBUG
Console.WriteLine("Contents of deserialization cache (after a call to AddCimClassToCache ({0})):", key);
Console.WriteLine(" Count = {0}", this._cimClassIdToClass.Count);
foreach (var t in this._cimClassIdToClass.Keys)
{
Console.WriteLine(" {0}", t);
}
*/
}
internal CimClass GetCimClassFromCache(TKey key)
{
CimClass cimClass;
if (_cimClassIdToClass.TryGetValue(key, out cimClass))
{
/* PRINTF DEBUG
Console.WriteLine("GetCimClassFromCache - class found: {0}", key);
*/
return cimClass;
}
/* PRINTF DEBUG
Console.WriteLine("GetCimClassFromCache - class NOT found: {0}", key);
*/
return null;
}
}
internal class CimClassSerializationCache<TKey>
{
private readonly HashSet<TKey> _cimClassesHeldByDeserializer = new HashSet<TKey>(EqualityComparer<TKey>.Default);
internal bool DoesDeserializerAlreadyHaveCimClass(TKey key)
{
return _cimClassesHeldByDeserializer.Contains(key);
}
internal void AddClassToCache(TKey key)
{
Dbg.Assert(!_cimClassesHeldByDeserializer.Contains(key), "This method should not be called for classes already in the cache");
if (_cimClassesHeldByDeserializer.Count >= DeserializationContext.MaxItemsInCimClassCache)
{
_cimClassesHeldByDeserializer.Clear();
}
_cimClassesHeldByDeserializer.Add(key);
/* PRINTF DEBUG
Console.WriteLine("Contents of serialization cache (after adding {0}):", key);
Console.WriteLine(" Count = {0}", this._cimClassesHeldByDeserializer.Count);
foreach (var t in _cimClassesHeldByDeserializer)
{
Console.WriteLine(" {0}", t);
}
*/
}
}
internal class CimClassSerializationId : Tuple<string, string, string, int>
{
public CimClassSerializationId(string className, string namespaceName, string computerName, int hashCode)
: base(className, namespaceName, computerName, hashCode)
{
}
public string ClassName { get { return this.Item1; } }
public string NamespaceName { get { return this.Item2; } }
public string ComputerName { get { return this.Item3; } }
public int ClassHashCode { get { return this.Item4; } }
}
/// <summary>
/// This class provides functionality for deserializing a PSObject
/// </summary>
internal class Deserializer
{
#region constructor
private readonly XmlReader _reader;
private readonly InternalDeserializer _deserializer;
private readonly DeserializationContext _context;
        /// <summary>
        /// Creates a Deserializer using the default deserialization context.
        /// </summary>
        /// <param name="reader">reader to be used for deserialization</param>
        /// <exception cref="XmlException">
        /// Thrown when the xml is in an incorrect format
        /// </exception>
        internal Deserializer(XmlReader reader)
            : this(reader, new DeserializationContext())
        {
        }
        /// <summary>
        /// Creates a Deserializer using the specified deserialization context
        /// and immediately reads the root element.
        /// </summary>
        /// <param name="reader">reader to be used for deserialization</param>
        /// <param name="context">deserialization context</param>
        /// <exception cref="XmlException">
        /// Thrown when the xml is in an incorrect format
        /// </exception>
        internal Deserializer(XmlReader reader, DeserializationContext context)
        {
            if (reader == null)
            {
                throw PSTraceSource.NewArgumentNullException("reader");
            }

            _reader = reader;
            _context = context;
            _deserializer = new InternalDeserializer(_reader, _context);

            try
            {
                // Reading the root element can fail on malformed input;
                // log to ETW before letting the exception propagate.
                Start();
            }
            catch (XmlException exception)
            {
                ReportExceptionForETW(exception);
                throw;
            }
        }
#endregion constructor
#region public method / properties
        /// <summary>
        /// Writes an analytic ETW error event for an XmlException hit while
        /// deserializing (line/position plus the full exception text).
        /// </summary>
        private static void ReportExceptionForETW(XmlException exception)
        {
            PSEtwLog.LogAnalyticError(
                PSEventId.Serializer_XmlExceptionWhenDeserializing, PSOpcode.Exception, PSTask.Serialization,
                PSKeyword.Serializer | PSKeyword.UseAlwaysAnalytic,
                exception.LineNumber, exception.LinePosition, exception.ToString());
        }
private bool _done = false;
        /// <summary>
        /// Used by Remoting infrastructure. This TypeTable instance
        /// will be used by the Deserializer if ExecutionContext is not
        /// available (to get the ExecutionContext's TypeTable).
        /// </summary>
        internal TypeTable TypeTable
        {
            get { return _deserializer.TypeTable; }
            set { _deserializer.TypeTable = value; }
        }
        /// <summary>
        /// Reads the root element tag and positions the cursor at the start
        /// tag of the first object; also picks up and validates the schema
        /// version attribute.
        /// </summary>
        private void Start()
        {
            Dbg.Assert(_reader.ReadState == ReadState.Initial, "When deserialization starts we should have XmlReader.ReadState == Initial");
            Dbg.Assert(_reader.NodeType == XmlNodeType.None, "When deserialization starts we should have XmlReader.NodeType == None");
            _reader.Read();

            // If version is not provided, we assume it is the default.
            string version = InternalSerializer.DefaultVersion;
            if (DeserializationOptions.NoRootElement == (_context.options & DeserializationOptions.NoRootElement))
            {
                // Root-less stream (remoting fragments): done as soon as the
                // reader is exhausted.
                _done = _reader.EOF;
            }
            else
            {
                // Make sure the reader is positioned on the root ( <Objs> ) element (not on XmlDeclaration for example)
                _reader.MoveToContent();
                Dbg.Assert(_reader.EOF || (_reader.NodeType == XmlNodeType.Element), "When deserialization starts reading we should have XmlReader.NodeType == Element");

                // Read version attribute and validate it.
                string versionAttribute = _reader.GetAttribute(SerializationStrings.VersionAttribute);
                if (versionAttribute != null)
                {
                    version = versionAttribute;
                }

                // If the root element tag is empty, there are no objects to read.
                if (!_deserializer.ReadStartElementAndHandleEmpty(SerializationStrings.RootElementTag))
                {
                    _done = true;
                }
            }

            _deserializer.ValidateVersion(version);
        }
/// <summary>
/// Returns true when all objects have been consumed from the underlying reader.
/// May advance the reader (consumes the root end element on first detection).
/// </summary>
internal bool Done()
{
    if (_done == false)
    {
        if (DeserializationOptions.NoRootElement == (_context.options & DeserializationOptions.NoRootElement))
        {
            // Root-less stream: done exactly when the reader reaches EOF.
            _done = _reader.EOF;
        }
        else
        {
            if (_reader.NodeType == XmlNodeType.EndElement)
            {
                // Cursor is on the root end element: consume it and mark complete.
                try
                {
                    _reader.ReadEndElement();
                }
                catch (XmlException exception)
                {
                    ReportExceptionForETW(exception);
                    throw;
                }
                _done = true;
            }
        }
    }
    return _done;
}
/// <summary>
/// Forwards the stop request to the inner deserializer.
/// </summary>
internal void Stop()
{
    _deserializer.Stop();
}
/// <summary>
/// Deserializes next object.
/// </summary>
/// <exception cref="XmlException">
/// Thrown when the xml is in an incorrect format
/// </exception>
internal object Deserialize()
{
    // This overload does not care which stream the object belongs to,
    // so the out-value from the main overload is simply discarded.
    string discardedStreamName;
    return this.Deserialize(out discardedStreamName);
}
/// <summary>
/// Deserializes next object.
/// </summary>
/// <param name="streamName">stream the object belongs to (i.e. "Error", "Output", etc.)</param>
/// <exception cref="XmlException">
/// Thrown when the xml is in an incorrect format
/// </exception>
internal object Deserialize(out string streamName)
{
    if (Done())
    {
        // Reading past the end of the object stream is a caller bug.
        throw PSTraceSource.NewInvalidOperationException(Serialization.ReadCalledAfterDone);
    }
    try
    {
        return _deserializer.ReadOneObject(out streamName);
    }
    catch (XmlException exception)
    {
        // Trace malformed input to ETW before propagating.
        ReportExceptionForETW(exception);
        throw;
    }
}
#endregion public methods
#region Helper methods for dealing with "Deserialized." prefix
/// <summary>
/// Adds "Deserialized." prefix to passed in argument if not already present
/// </summary>
/// <param name="type"></param>
internal static void AddDeserializationPrefix(ref string type)
{
    Dbg.Assert(type != null, "caller should validate the parameter");
    // Prepend "Deserialized." only when it is not already there (case-insensitive check).
    bool alreadyPrefixed = type.StartsWith(Deserializer.DeserializationTypeNamePrefix, StringComparison.OrdinalIgnoreCase);
    if (!alreadyPrefixed)
    {
        type = Deserializer.DeserializationTypeNamePrefix + type;
    }
}
/// <summary>
/// Checks if an object <paramref name="o"/> is either a live or deserialized instance of class <paramref name="type"/> or one of its subclasses.
/// </summary>
/// <param name="o"></param>
/// <param name="type"></param>
/// <returns><c>true</c> if <paramref name="o"/> is either a live or deserialized instance of class <paramref name="type"/> or one of its subclasses; <c>false</c> otherwise</returns>
internal static bool IsInstanceOfType(object o, Type type)
{
    if (type == null)
    {
        throw PSTraceSource.NewArgumentNullException("type");
    }
    if (o == null)
    {
        return false;
    }
    // Live check runs against the base object (unwrapping any PSObject);
    // otherwise fall back to matching the "Deserialized.<FullName>" type name.
    return type.IsInstanceOfType(PSObject.Base(o)) || IsDeserializedInstanceOfType(o, type);
}
/// <summary>
/// Checks if an object <paramref name="o"/> is a deserialized instance of class <paramref name="type"/> or one of its subclasses.
/// </summary>
/// <param name="o"></param>
/// <param name="type"></param>
/// <returns><c>true</c> if <paramref name="o"/> is a deserialized instance of class <paramref name="type"/> or one of its subclasses; <c>false</c> otherwise</returns>
internal static bool IsDeserializedInstanceOfType(object o, Type type)
{
    if (type == null)
    {
        throw PSTraceSource.NewArgumentNullException("type");
    }
    if (o == null)
    {
        return false;
    }
    PSObject pso = o as PSObject;
    if (pso == null)
    {
        // Only PSObjects can carry a "Deserialized." type-name list.
        return false;
    }
    IEnumerable<string> typeNames = pso.InternalTypeNames;
    if (typeNames == null)
    {
        return false;
    }
    // Match "Deserialized.<FullName>" case-insensitively by comparing total
    // length plus prefix and suffix; this avoids allocating the concatenation.
    int expectedLength = Deserializer.DeserializationTypeNamePrefix.Length + type.FullName.Length;
    foreach (string typeName in typeNames)
    {
        if (typeName.Length == expectedLength &&
            typeName.StartsWith(Deserializer.DeserializationTypeNamePrefix, StringComparison.OrdinalIgnoreCase) &&
            typeName.EndsWith(type.FullName, StringComparison.OrdinalIgnoreCase))
        {
            return true;
        }
    }
    // No type name in the hierarchy matched.
    return false;
}
// Strips a leading "Deserialized." (case-insensitive) from a single type name;
// names without the prefix (and null) are returned unchanged.
internal static string MaskDeserializationPrefix(string typeName)
{
    if (typeName == null)
    {
        return null;
    }
    if (!typeName.StartsWith(Deserializer.DeserializationTypeNamePrefix, StringComparison.OrdinalIgnoreCase))
    {
        return typeName;
    }
    return typeName.Substring(Deserializer.DeserializationTypeNamePrefix.Length);
}
/// <summary>
/// Gets a new collection of typenames without "Deserialization." prefix
/// in the typename. This allows mapping the type info/format info of the original type
/// for deserialized objects.
/// </summary>
/// <param name="typeNames"></param>
/// <returns>
/// Null if no type with "Deserialized." prefix is found.
/// Otherwise <paramref name="typeNames"/> with the prefix removed if any.
/// </returns>
internal static Collection<string> MaskDeserializationPrefix(Collection<string> typeNames)
{
    Dbg.Assert(null != typeNames, "typeNames cannot be null");
    Collection<string> strippedNames = new Collection<string>();
    bool foundDeserializedType = false;
    foreach (string typeName in typeNames)
    {
        if (typeName.StartsWith(Deserializer.DeserializationTypeNamePrefix,
            StringComparison.OrdinalIgnoreCase))
        {
            // Remove *only* the leading prefix; the remainder of the name is kept verbatim.
            foundDeserializedType = true;
            strippedNames.Add(typeName.Substring(Deserializer.DeserializationTypeNamePrefix.Length));
        }
        else
        {
            strippedNames.Add(typeName);
        }
    }
    // Contract: return null when no name carried the prefix, so callers can
    // distinguish "nothing to mask" from a genuinely stripped list.
    return foundDeserializedType ? strippedNames : null;
}
/// <summary>
/// Used to prefix a typename for deserialization.
/// </summary>
private const string DeserializationTypeNamePrefix = "Deserialized.";
#endregion
}
/// <summary>
/// Types of known type container supported by monad
/// </summary>
internal enum ContainerType
{
    // IDictionary sources; serialized via WriteDictionary with SerializationStrings.DictionaryTag.
    Dictionary,
    Queue,
    Stack,
    List,
    // Any other enumerable; serialized with SerializationStrings.CollectionTag.
    Enumerable,
    // Source is not a recognized container.
    None
};
/// <summary>
/// This internal helper class provides methods for serializing mshObject.
/// </summary>
internal class InternalSerializer
{
#region constructor
internal const string DefaultVersion = "1.1.0.1";
/// <summary>
/// Xml writer to be used
/// </summary>
private readonly XmlWriter _writer;
/// <summary>
/// Serialization context
/// </summary>
private readonly SerializationContext _context;
/// Used by Remoting infrastructure. This TypeTable instance
/// will be used by Serializer if ExecutionContext is not
/// available (to get the ExecutionContext's TypeTable)
private TypeTable _typeTable;
/// <summary>
/// Depth below top level - used to prevent infinitely deep serialization
/// (without this protection it would be possible i.e. with SerializationDepth and recursion)
/// </summary>
private int _depthBelowTopLevel;
private const int MaxDepthBelowTopLevel = 50;
private readonly ReferenceIdHandlerForSerializer<object> _objectRefIdHandler;
private readonly ReferenceIdHandlerForSerializer<ConsolidatedString> _typeRefIdHandler;
internal InternalSerializer(XmlWriter writer, SerializationContext context)
{
    Dbg.Assert(writer != null, "caller should validate the parameter");
    Dbg.Assert(context != null, "caller should validate the parameter");
    _writer = writer;
    _context = context;
    IDictionary<object, ulong> objectRefIdDictionary = null;
    if ((_context.options & SerializationOptions.NoObjectRefIds) == 0)
    {
        // Ref-ids enabled: track already-serialized objects via weak references
        // so the lookup table does not keep them alive.
        objectRefIdDictionary = new WeakReferenceDictionary<UInt64>();
    }
    // A null dictionary disables object ref-id emission entirely.
    _objectRefIdHandler = new ReferenceIdHandlerForSerializer<object>(objectRefIdDictionary);
    // Type-name hierarchies are always deduplicated by reference id.
    _typeRefIdHandler = new ReferenceIdHandlerForSerializer<ConsolidatedString>(
        new Dictionary<ConsolidatedString, UInt64>(ConsolidatedString.EqualityComparer));
}
#endregion
/// <summary>
/// Used by Remoting infrastructure. This TypeTable instance
/// will be used by Serializer if ExecutionContext is not
/// available (to get the ExecutionContext's TypeTable)
/// </summary>
internal TypeTable TypeTable
{
    // Set by the remoting infrastructure; used when no ExecutionContext is available.
    get { return _typeTable; }
    set { _typeTable = value; }
}
/// <summary>
/// Writes the start of root element
/// </summary>
internal void Start()
{
    if (SerializationOptions.NoRootElement != (_context.options & SerializationOptions.NoRootElement))
    {
        // Emit the root element with the schema version unless the caller
        // asked for root-less output.
        this.WriteStartElement(SerializationStrings.RootElementTag);
        this.WriteAttribute(SerializationStrings.VersionAttribute, InternalSerializer.DefaultVersion);
    }
}
/// <summary>
/// Writes the end of root element
/// </summary>
internal void End()
{
    if (SerializationOptions.NoRootElement != (_context.options & SerializationOptions.NoRootElement))
    {
        // Close the root element opened in Start().
        _writer.WriteEndElement();
    }
    // Flush in both modes so buffered output reaches the underlying sink.
    _writer.Flush();
}
// Cooperative stop flag. NOTE(review): written from another thread without
// synchronization - appears to rely on eventual visibility; confirm intended.
private bool _isStopping = false;
/// <summary>
/// Called from a separate thread will stop the serialization process
/// </summary>
internal void Stop()
{
    _isStopping = true;
}
// Throws once Stop() has been requested; polled by WriteOneObject on every pass.
private void CheckIfStopping()
{
    if (_isStopping)
    {
        throw PSTraceSource.NewInvalidOperationException(Serialization.Stopping);
    }
}
internal static bool IsPrimitiveKnownType(Type input)
{
    // A type is "primitive known" exactly when the known-types table
    // has serialization info registered for it.
    return KnownTypes.GetTypeSerializationInfo(input) != null;
}
/// <summary>
/// This writes one object.
/// </summary>
/// <param name="source">
/// source to be serialized.
/// </param>
/// <param name="streamName">
/// Stream to which source belongs
/// </param>
internal void WriteOneTopLevelObject
(
    object source,
    string streamName,
    // Top-level objects start at the context's configured depth and have no
    // enclosing property name.
)
{
    Dbg.Assert(_depthBelowTopLevel == 0, "InternalSerializer.depthBelowTopLevel should be 0 at top-level");
    WriteOneObject(source, streamName, null, _context.depth);
}
// Core dispatch for serializing a single object. Strategies are tried in
// strict order; each Handle* method returns true when it has fully written
// the object, at which point we stop.
private void WriteOneObject
(
    object source,
    string streamName,
    string property,
    int depth
)
{
    Dbg.Assert(depth >= 0, "depth should always be greater or equal to zero");
    // Honor a pending Stop() request before doing any work.
    this.CheckIfStopping();
    if (source == null)
    {
        WriteNull(streamName, property);
        return;
    }
    try
    {
        // Guard against pathological nesting (possible via SerializationDepth + recursion).
        _depthBelowTopLevel++;
        Dbg.Assert(_depthBelowTopLevel <= MaxDepthBelowTopLevel, "depthBelowTopLevel should be <= MaxDepthBelowTopLevel");
        if (HandleMaxDepth(source, streamName, property))
        {
            return;
        }
        depth = GetDepthOfSerialization(source, depth);
        if (HandlePrimitiveKnownTypeByConvertingToPSObject(source, streamName, property, depth))
        {
            return;
        }
        //Object is not of primitive known type. Check if this has
        //already been serialized.
        string refId = _objectRefIdHandler.GetRefId(source);
        if (refId != null)
        {
            // Seen before: emit a back-reference instead of re-serializing.
            WritePSObjectReference(streamName, property, refId);
            return;
        }
        if (HandlePrimitiveKnownTypePSObject(source, streamName, property, depth))
        {
            return;
        }
        //Note: We do not use containers in depth calculation. i.e even if the
        //current depth is zero, we serialize the container. All contained items will
        //get serialized with depth zero.
        if (HandleKnownContainerTypes(source, streamName, property, depth))
        {
            return;
        }
        PSObject mshSource = PSObject.AsPSObject(source);
        //If depth is zero, complex type should be serialized as string.
        if (depth == 0 || SerializeAsString(mshSource))
        {
            HandlePSObjectAsString(mshSource, streamName, property, depth);
            return;
        }
        // Fallback: full complex-object serialization with properties.
        HandleComplexTypePSObject(source, streamName, property, depth);
        return;
    }
    finally
    {
        _depthBelowTopLevel--;
        Dbg.Assert(_depthBelowTopLevel >= 0, "depthBelowTopLevel should be >= 0");
    }
}
private bool HandleMaxDepth(object source, string streamName, string property)
{
    // Guard clause: below the hard limit nothing needs to happen.
    if (_depthBelowTopLevel != MaxDepthBelowTopLevel)
    {
        return false;
    }
    // assert commented out because of clashes with Wei's tests
    // Dbg.Assert(false, "We should never reach MaxDepthBelowTopLevel with non-malicious input");
    PSEtwLog.LogAnalyticError(PSEventId.Serializer_MaxDepthWhenSerializing, PSOpcode.Exception,
        PSTask.Serialization, PSKeyword.Serializer, source.GetType().AssemblyQualifiedName, property, _depthBelowTopLevel);
    // Substitute a "too deep" marker string for the over-deep object instead of throwing.
    string content = Serialization.DeserializationTooDeep;
    HandlePrimitiveKnownType(content, streamName, property);
    return true;
}
/// <summary>
/// Serializes Primitive Known Types.
/// </summary>
/// <returns>
/// true if source is handled, else false.
/// </returns>
private bool HandlePrimitiveKnownType
(
    object source,
    string streamName,
    string property
)
{
    Dbg.Assert(source != null, "caller should validate the parameter");
    // Only types registered in the known-types table are handled here.
    TypeSerializationInfo pktInfo = KnownTypes.GetTypeSerializationInfo(source.GetType());
    if (pktInfo == null)
    {
        return false;
    }
    WriteOnePrimitiveKnownType(this, streamName, property, source, pktInfo);
    return true;
}
/// <summary>
/// Handles primitive known type by first converting it to a PSObject.In W8, extended
/// property data is stored external to PSObject. By converting to PSObject, we will
/// be able to retrieve and serialize the extended properties. This is tracked by
/// Win8: 414042
/// </summary>
/// <param name="source"></param>
/// <param name="streamName"></param>
/// <param name="property"></param>
/// <param name="depth"></param>
/// <returns></returns>
private bool HandlePrimitiveKnownTypeByConvertingToPSObject(
    object source,
    string streamName,
    string property,
    int depth
)
{
    Dbg.Assert(source != null, "caller should validate the parameter");
    // Bail out early when the source is not a primitive known type.
    if (KnownTypes.GetTypeSerializationInfo(source.GetType()) == null)
    {
        return false;
    }
    // Wrap in a PSObject so extended property data (stored outside the object
    // since Win8) is picked up by the PSObject serialization path.
    PSObject pktInfoPSObject = PSObject.AsPSObject(source);
    return HandlePrimitiveKnownTypePSObject(pktInfoPSObject, streamName, property, depth);
}
/// <summary>
///
/// </summary>
/// <param name="source"></param>
/// <param name="streamName"></param>
/// <param name="property"></param>
/// <returns></returns>
// Serializes a SecureString (bare, or wrapped in a PSObject) as an encrypted
// <SS> element. Returns true when the element was written, false when the
// source is not a SecureString or encryption failed (in which case nothing
// is written, per the "serialization never throws" principle).
private bool HandleSecureString(object source, string streamName, string property)
{
    Dbg.Assert(source != null, "caller should validate the parameter");
    SecureString secureString = source as SecureString;
    PSObject moSource;
    if (secureString != null)
    {
        moSource = PSObject.AsPSObject(secureString);
    }
    else
    {
        moSource = source as PSObject;
    }
    if (moSource != null && !moSource.immediateBaseObjectIsEmpty)
    {
        // check if source is of type secure string
        secureString = moSource.ImmediateBaseObject as SecureString;
        if (secureString != null)
        {
            // the principle used in serialization is that serialization
            // never throws, and if something can't be serialized nothing
            // is written. So we write the elements only if encryption succeeds
            try
            {
                String encryptedString;
                if (_context.cryptoHelper != null)
                {
                    // Remoting case: encrypt with the session's crypto helper.
                    encryptedString = _context.cryptoHelper.EncryptSecureString(secureString);
                }
                else
                {
                    encryptedString = Microsoft.PowerShell.SecureStringHelper.Protect(secureString);
                }
                // The start element is the same in both cases; only the optional
                // Name attribute differs (previously duplicated across branches).
                WriteStartElement(SerializationStrings.SecureStringTag);
                if (property != null)
                {
                    WriteNameAttribute(property);
                }
                if (streamName != null)
                {
                    WriteAttribute(SerializationStrings.StreamNameAttribute, streamName);
                }
                //Note: We do not use WriteRaw for serializing secure string. WriteString
                //does necessary escaping which may be needed for certain
                //characters.
                _writer.WriteString(encryptedString);
                _writer.WriteEndElement();
                return true;
            }
            catch (PSCryptoException)
            {
                // Best-effort: encryption failure means nothing is emitted and the
                // caller falls through to other serialization strategies.
            }
        }
    }
    return false;
}
/// <summary>
/// Serializes PSObject whose base objects are of primitive known type
/// </summary>
/// <param name="source"></param>
/// <param name="streamName"></param>
/// <param name="property"></param>
/// <param name="depth"></param>
/// <returns>
/// true if source is handled, else false.
/// </returns>
private bool HandlePrimitiveKnownTypePSObject
(
    object source,
    string streamName,
    string property,
    int depth
)
{
    Dbg.Assert(source != null, "caller should validate the parameter");
    PSObject moSource = source as PSObject;
    if (moSource == null || moSource.immediateBaseObjectIsEmpty)
    {
        // Not a PSObject, or a pure property bag with no base object.
        return false;
    }
    // The wrapped base object must itself be a primitive known type.
    object baseObject = moSource.ImmediateBaseObject;
    TypeSerializationInfo pktInfo = KnownTypes.GetTypeSerializationInfo(baseObject.GetType());
    if (pktInfo == null)
    {
        return false;
    }
    WritePrimitiveTypePSObject(moSource, baseObject, pktInfo, streamName, property, depth);
    return true;
}
// Serializes dictionaries, stacks, queues, lists and other enumerables.
// Returns true when the source was a recognized container and was written.
private bool HandleKnownContainerTypes
(
    object source,
    string streamName,
    string property,
    int depth
)
{
    Dbg.Assert(source != null, "caller should validate the parameter");
    ContainerType ct = ContainerType.None;
    PSObject mshSource = source as PSObject;
    IEnumerable enumerable = null;
    IDictionary dictionary = null;
    //If passed in object is PSObject with no baseobject, return false.
    if (mshSource != null && mshSource.immediateBaseObjectIsEmpty)
    {
        return false;
    }
    //Check if source (or baseobject in mshSource) is known container type
    SerializationUtilities.GetKnownContainerTypeInfo(mshSource != null ? mshSource.ImmediateBaseObject : source, out ct,
        out dictionary, out enumerable);
    if (ct == ContainerType.None)
        return false;
    // Register the container so a later occurrence serializes as a reference.
    string refId = _objectRefIdHandler.SetRefId(source);
    WriteStartOfPSObject(
        mshSource ?? PSObject.AsPSObject(source),
        streamName,
        property,
        refId,
        true, // always write TypeNames information for known container types
        null); // never write ToString information for known container types
    // Each container kind gets its own element tag.
    switch (ct)
    {
        case ContainerType.Dictionary:
            WriteDictionary(dictionary, SerializationStrings.DictionaryTag, depth);
            break;
        case ContainerType.Stack:
            WriteEnumerable(enumerable, SerializationStrings.StackTag, depth);
            break;
        case ContainerType.Queue:
            WriteEnumerable(enumerable, SerializationStrings.QueueTag, depth);
            break;
        case ContainerType.List:
            WriteEnumerable(enumerable, SerializationStrings.ListTag, depth);
            break;
        case ContainerType.Enumerable:
            WriteEnumerable(enumerable, SerializationStrings.CollectionTag, depth);
            break;
        default:
            Dbg.Assert(false, "All containers should be handled in the switch");
            break;
    }
    if (depth != 0)
    {
        // An object which is original enumerable becomes an PSObject with ArrayList on deserialization.
        // So on roundtrip it will show up as List.
        // We serialize properties of enumerable and on deserialization mark the object as Deserialized.
        // So if object is marked deserialized, we should write properties.
        if (ct == ContainerType.Enumerable || (mshSource != null && mshSource.isDeserialized))
        {
            PSObject sourceAsPSObject = PSObject.AsPSObject(source);
            PSMemberInfoInternalCollection<PSPropertyInfo> specificPropertiesToSerialize = SerializationUtilities.GetSpecificPropertiesToSerialize(sourceAsPSObject, AllPropertiesCollection, _typeTable);
            WritePSObjectProperties(sourceAsPSObject, depth, specificPropertiesToSerialize);
            SerializeExtendedProperties(sourceAsPSObject, depth, specificPropertiesToSerialize);
        }
        // always serialize instance properties if there are any
        else if (mshSource != null)
        {
            SerializeInstanceProperties(mshSource, depth);
        }
    }
    _writer.WriteEndElement();
    return true;
}
#region Write PSObject
/// <summary>
/// Writes PSObject Reference Element
/// </summary>
private void WritePSObjectReference
(
    string streamName,
    string property,
    string refId
)
{
    Dbg.Assert(!string.IsNullOrEmpty(refId), "caller should validate the parameter");
    WriteStartElement(SerializationStrings.ReferenceTag);
    // Stream and property attributes mirror those written for a full object.
    if (streamName != null)
    {
        WriteAttribute(SerializationStrings.StreamNameAttribute, streamName);
    }
    if (property != null)
    {
        WriteNameAttribute(property);
    }
    // The RefId points back at the previously serialized object.
    WriteAttribute(SerializationStrings.ReferenceIdAttribute, refId);
    _writer.WriteEndElement();
}
/// <summary>
/// Returns true if the PSObject's current type-name list differs (in count or
/// case-insensitive content) from the list its adapter reports for the base object.
/// </summary>
private static bool PSObjectHasModifiedTypesCollection(PSObject pso)
{
    ConsolidatedString currentTypes = pso.InternalTypeNames;
    Collection<string> originalTypes = pso.InternalAdapter.BaseGetTypeNameHierarchy(pso.ImmediateBaseObject);
    if (currentTypes.Count != originalTypes.Count)
    {
        return true;
    }
    // Counts match; compare element-wise, case-insensitively.
    IEnumerator<string> currentEnumerator = currentTypes.GetEnumerator();
    IEnumerator<string> originalEnumerator = originalTypes.GetEnumerator();
    while (currentEnumerator.MoveNext() && originalEnumerator.MoveNext())
    {
        if (!currentEnumerator.Current.Equals(originalEnumerator.Current, StringComparison.OrdinalIgnoreCase))
        {
            return true;
        }
    }
    return false;
}
/// <summary>
/// Serializes a PSObject whose base object is of primitive type.
/// </summary>
/// <param name="source">
/// source from which notes are written
/// </param>
/// <param name="primitive">
/// primitive object which is written as base object. In most cases it
/// is same source.ImmediateBaseObject. When PSObject is serialized as string,
/// it can be different. <see cref="HandlePSObjectAsString"/> for more info.
/// </param>
/// <param name="pktInfo">
/// TypeSerializationInfo for the primitive.
/// </param>
/// <param name="streamName"></param>
/// <param name="property"></param>
/// <param name="depth"></param>
private void WritePrimitiveTypePSObject
(
    PSObject source,
    object primitive,
    TypeSerializationInfo pktInfo,
    string streamName,
    string property,
    int depth
)
{
    Dbg.Assert(source != null, "Caller should validate source != null");
    // A non-null ToString value means the PSObject carries ToString information
    // that must be preserved (e.g. it came from deserialization).
    string toStringValue = SerializationUtilities.GetToStringForPrimitiveObject(source);
    bool hasModifiedTypesCollection = PSObjectHasModifiedTypesCollection(source);
    bool hasNotes = PSObjectHasNotes(source);
    bool hasModifiedToString = (toStringValue != null);
    if (hasNotes || hasModifiedTypesCollection || hasModifiedToString)
    {
        // Extra state present: emit a full PSObject wrapper around the primitive.
        WritePrimitiveTypePSObjectWithNotes(
            source,
            primitive,
            hasModifiedTypesCollection,
            toStringValue,
            pktInfo,
            streamName,
            property,
            depth);
        return;
    }
    if (primitive == null)
    {
        WriteNull(streamName, property);
        return;
    }
    // Plain primitive: write it directly without a wrapper element.
    WriteOnePrimitiveKnownType(this, streamName, property, primitive, pktInfo);
}
/// <summary>
/// Serializes a PSObject whose base object is of primitive type
/// and which has notes.
/// </summary>
/// <param name="source">
/// source from which notes are written
/// </param>
/// <param name="primitive">
/// primitive object which is written as base object. In most cases it
/// is same source.ImmediateBaseObject. When PSObject is serialized as string,
/// it can be different. <see cref="HandlePSObjectAsString"/> for more info.
/// </param>
/// <param name="hasModifiedTypesCollection"></param>
/// <param name="toStringValue"></param>
/// <param name="pktInfo">
/// TypeSerializationInfo for the primitive.
/// </param>
/// <param name="streamName"></param>
/// <param name="property"></param>
/// <param name="depth"></param>
private void WritePrimitiveTypePSObjectWithNotes
(
    PSObject source,
    object primitive,
    bool hasModifiedTypesCollection,
    string toStringValue,
    TypeSerializationInfo pktInfo,
    string streamName,
    string property,
    int depth
)
{
    Dbg.Assert(source != null, "caller should validate the parameter");
    Dbg.Assert(pktInfo != null, "Caller should validate pktInfo != null");
    string refId = _objectRefIdHandler.SetRefId(source);
    WriteStartOfPSObject(
        source,
        streamName,
        property,
        refId,
        hasModifiedTypesCollection, // preserve TypeNames information if different from the primitive type
        toStringValue); // preserve ToString information only if got it from deserialization or overridden by PSObject
    // (example where preservation of TypeNames and ToString is needed: enums serialized as ints, help string with custom type names (HelpInfoShort))
    // Defensive runtime check: the assert above is debug-only, so keep the
    // null guard for release builds.
    if (pktInfo != null)
    {
        WriteOnePrimitiveKnownType(this, streamName, null, primitive, pktInfo);
    }
    // serialize only instance properties - members from type table are
    // always going to be available for known primitive types
    SerializeInstanceProperties(source, depth);
    _writer.WriteEndElement();
}
// Serializes an arbitrary complex object: writes the PSObject envelope, then
// dispatches on the kind of base object (CIM instance, error/informational
// record, enum, nested PSObject, or plain object with properties).
private void HandleComplexTypePSObject
(
    object source,
    string streamName,
    string property,
    int depth
)
{
    Dbg.Assert(source != null, "caller should validate the parameter");
    PSObject mshSource = PSObject.AsPSObject(source);
    // Figure out what kind of object we are dealing with
    bool isErrorRecord = false;
    bool isInformationalRecord = false;
    bool isEnum = false;
    bool isPSObject = false;
    bool isCimInstance = false;
    if (!mshSource.immediateBaseObjectIsEmpty)
    {
        do // false loop - runs once; "break" short-circuits the remaining checks
        {
            CimInstance cimInstance = mshSource.ImmediateBaseObject as CimInstance;
            if (cimInstance != null)
            {
                isCimInstance = true;
                break;
            }
            ErrorRecord errorRecord = mshSource.ImmediateBaseObject as ErrorRecord;
            if (errorRecord != null)
            {
                // Flattens the record's data into mshSource's properties for remoting.
                errorRecord.ToPSObjectForRemoting(mshSource);
                isErrorRecord = true;
                break;
            }
            InformationalRecord informationalRecord = mshSource.ImmediateBaseObject as InformationalRecord;
            if (informationalRecord != null)
            {
                informationalRecord.ToPSObjectForRemoting(mshSource);
                isInformationalRecord = true;
                break;
            }
            isEnum = mshSource.ImmediateBaseObject is Enum;
            isPSObject = mshSource.ImmediateBaseObject is PSObject;
        } while (false);
    }
    bool writeToString = true;
    if (mshSource.ToStringFromDeserialization == null) // continue to write ToString from deserialized objects, but...
    {
        if (mshSource.immediateBaseObjectIsEmpty) // ... don't write ToString for property bags
        {
            writeToString = false;
        }
    }
    string refId = _objectRefIdHandler.SetRefId(source);
    WriteStartOfPSObject(
        mshSource,
        streamName,
        property,
        refId,
        true, // always write TypeNames for complex objects
        writeToString ? SerializationUtilities.GetToString(mshSource) : null);
    PSMemberInfoInternalCollection<PSPropertyInfo> specificPropertiesToSerialize = SerializationUtilities.GetSpecificPropertiesToSerialize(mshSource, AllPropertiesCollection, _typeTable);
    if (isEnum)
    {
        // Enums are serialized as their underlying integral value.
        object baseObject = mshSource.ImmediateBaseObject;
        WriteOneObject(System.Convert.ChangeType(baseObject, Enum.GetUnderlyingType(baseObject.GetType()), System.Globalization.CultureInfo.InvariantCulture), null, null, depth);
    }
    else if (isPSObject)
    {
        WriteOneObject(mshSource.ImmediateBaseObject, null, null, depth);
    }
    else if (isErrorRecord || isInformationalRecord)
    {
        // nothing to do - ToPSObjectForRemoting already populated mshSource
    }
    else
    {
        WritePSObjectProperties(mshSource, depth, specificPropertiesToSerialize);
    }
    if (isCimInstance)
    {
        // Attach class/instance metadata notes so the receiving side can rehydrate.
        CimInstance cimInstance = mshSource.ImmediateBaseObject as CimInstance;
        PrepareCimInstanceForSerialization(mshSource, cimInstance);
    }
    SerializeExtendedProperties(mshSource, depth, specificPropertiesToSerialize);
    _writer.WriteEndElement();
}
// Lazily-created, shared CimSerializer used to serialize CIM class metadata to MI XML.
private static Lazy<CimSerializer> s_cimSerializer = new Lazy<CimSerializer>(CimSerializer.Create);
// Attaches hidden note properties to a CIM instance's PSObject wrapper so the
// deserializing side can rebuild the instance: per-class metadata (MI XML,
// deduplicated via a cache) and the names of properties whose values were modified.
private void PrepareCimInstanceForSerialization(PSObject psObject, CimInstance cimInstance)
{
    Queue<CimClassSerializationId> serializedClasses = new Queue<CimClassSerializationId>();
    //
    // CREATE SERIALIZED FORM OF THE CLASS METADATA
    //
    ArrayList psoClasses = new ArrayList();
    // Walk the class hierarchy from the instance's class up through its superclasses.
    for (CimClass cimClass = cimInstance.CimClass; cimClass != null; cimClass = cimClass.CimSuperClass)
    {
        PSObject psoClass = new PSObject();
        psoClass.TypeNames.Clear();
        psoClasses.Add(psoClass);
        psoClass.Properties.Add(new PSNoteProperty(InternalDeserializer.CimClassNameProperty, cimClass.CimSystemProperties.ClassName));
        psoClass.Properties.Add(new PSNoteProperty(InternalDeserializer.CimNamespaceProperty, cimClass.CimSystemProperties.Namespace));
        psoClass.Properties.Add(new PSNoteProperty(InternalDeserializer.CimServerNameProperty, cimClass.CimSystemProperties.ServerName));
        psoClass.Properties.Add(new PSNoteProperty(InternalDeserializer.CimHashCodeProperty, cimClass.GetHashCode()));
        CimClassSerializationId cimClassSerializationId = new CimClassSerializationId(
            cimClass.CimSystemProperties.ClassName,
            cimClass.CimSystemProperties.Namespace,
            cimClass.CimSystemProperties.ServerName,
            cimClass.GetHashCode());
        if (_context.cimClassSerializationIdCache.DoesDeserializerAlreadyHaveCimClass(cimClassSerializationId))
        {
            // The deserializer already has this class (and therefore its ancestors):
            // stop walking; the identifying note properties above suffice.
            break;
        }
        serializedClasses.Enqueue(cimClassSerializationId);
        // Full MI XML is attached only for classes the deserializer has not seen yet.
        byte[] miXmlBytes = s_cimSerializer.Value.Serialize(cimClass, ClassSerializationOptions.None);
        string miXmlString = Encoding.Unicode.GetString(miXmlBytes, 0, miXmlBytes.Length);
        psoClass.Properties.Add(new PSNoteProperty(InternalDeserializer.CimMiXmlProperty, miXmlString));
    }
    // Reverse so the classes are listed base-class-first.
    psoClasses.Reverse();
    //
    // UPDATE CLASSDECL CACHE
    //
    foreach (CimClassSerializationId serializedClassId in serializedClasses)
    {
        _context.cimClassSerializationIdCache.AddClassToCache(serializedClassId);
    }
    //
    // ATTACH CLASS METADATA TO THE OBJECT BEING SERIALIZED
    //
    PSPropertyInfo classMetadataProperty = psObject.Properties[InternalDeserializer.CimClassMetadataProperty];
    if (classMetadataProperty != null)
    {
        classMetadataProperty.Value = psoClasses;
    }
    else
    {
        // Hidden so the note does not show up in normal property enumeration.
        PSNoteProperty classMetadataNote = new PSNoteProperty(
            InternalDeserializer.CimClassMetadataProperty,
            psoClasses);
        classMetadataNote.IsHidden = true;
        psObject.Properties.Add(classMetadataNote);
    }
    // ATTACH INSTANCE METADATA TO THE OBJECT BEING SERIALIZED
    List<string> namesOfModifiedProperties = cimInstance
        .CimInstanceProperties
        .Where(p => p.IsValueModified)
        .Select(p => p.Name)
        .ToList();
    if (namesOfModifiedProperties.Count != 0)
    {
        PSObject instanceMetadata = new PSObject();
        PSPropertyInfo instanceMetadataProperty = psObject.Properties[InternalDeserializer.CimInstanceMetadataProperty];
        if (instanceMetadataProperty != null)
        {
            instanceMetadataProperty.Value = instanceMetadata;
        }
        else
        {
            PSNoteProperty instanceMetadataNote = new PSNoteProperty(InternalDeserializer.CimInstanceMetadataProperty, instanceMetadata);
            instanceMetadataNote.IsHidden = true;
            psObject.Properties.Add(instanceMetadataNote);
        }
        instanceMetadata.InternalTypeNames = ConsolidatedString.Empty;
        // Modified property names are stored as a single space-separated string.
        instanceMetadata.Properties.Add(
            new PSNoteProperty(
                InternalDeserializer.CimModifiedProperties,
                string.Join(" ", namesOfModifiedProperties)));
    }
}
/// <summary>
/// Writes start element, attributes and typeNames for PSObject.
/// </summary>
/// <param name="mshObject"></param>
/// <param name="streamName"></param>
/// <param name="property"></param>
/// <param name="refId"></param>
/// <param name="writeTypeNames">if true, TypeName information is written, else not.</param>
/// <param name="toStringValue">if not null then ToString information is written</param>
private void WriteStartOfPSObject
(
    PSObject mshObject,
    string streamName,
    string property,
    string refId,
    bool writeTypeNames,
    string toStringValue
)
{
    Dbg.Assert(mshObject != null, "caller should validate the parameter");
    //Write PSObject start element.
    WriteStartElement(SerializationStrings.PSObjectTag)
    ;
    if (streamName != null)
    {
        WriteAttribute(SerializationStrings.StreamNameAttribute, streamName);
    }
    if (property != null)
    {
        WriteNameAttribute(property);
    }
    if (refId != null)
    {
        WriteAttribute(SerializationStrings.ReferenceIdAttribute, refId);
    }
    if (writeTypeNames)
    {
        //Write TypeNames
        ConsolidatedString typeNames = mshObject.InternalTypeNames;
        if (typeNames.Count > 0)
        {
            // Type-name hierarchies are deduplicated: the first occurrence
            // writes the full list with a fresh RefId, later occurrences
            // write only a reference to that RefId.
            string typeNameHierarchyReferenceId = _typeRefIdHandler.GetRefId(typeNames);
            if (typeNameHierarchyReferenceId == null)
            {
                WriteStartElement(SerializationStrings.TypeNamesTag);
                //Create a new refId and write it as attribute
                string tnRefId = _typeRefIdHandler.SetRefId(typeNames);
                Dbg.Assert(tnRefId != null, "SetRefId should always succeed for strings");
                WriteAttribute(SerializationStrings.ReferenceIdAttribute, tnRefId);
                foreach (string type in typeNames)
                {
                    WriteEncodedElementString(SerializationStrings.TypeNamesItemTag, type);
                }
                _writer.WriteEndElement();
            }
            else
            {
                WriteStartElement(SerializationStrings.TypeNamesReferenceTag);
                WriteAttribute(SerializationStrings.ReferenceIdAttribute, typeNameHierarchyReferenceId);
                _writer.WriteEndElement();
            }
        }
    }
    if (toStringValue != null)
    {
        WriteEncodedElementString(SerializationStrings.ToStringElementTag, toStringValue);
    }
}
#region membersets
/// <summary>
/// Returns true if PSObject has notes.
/// </summary>
/// <param name="source"></param>
/// <returns>
/// </returns>
private static bool PSObjectHasNotes(PSObject source)
{
    Dbg.Assert(source != null, "Caller should validate the parameter");
    // "Notes" means any instance-level members attached to this particular object.
    return source.InstanceMembers != null && source.InstanceMembers.Count > 0;
}
// Cached answer; null until the property is first read.
private bool? _canUseDefaultRunspaceInThreadSafeManner;
// Lazily caches Runspace.CanUseDefaultRunspace so the check is performed at
// most once per serializer instance.
private bool CanUseDefaultRunspaceInThreadSafeManner
{
    get
    {
        if (!_canUseDefaultRunspaceInThreadSafeManner.HasValue)
        {
            _canUseDefaultRunspaceInThreadSafeManner = Runspace.CanUseDefaultRunspace;
        }
        return _canUseDefaultRunspaceInThreadSafeManner.Value;
    }
}
/// <summary>
/// Serializes a collection of members. The enclosing MemberSet element is
/// written lazily: only once the first serializable member value is found,
/// and only when writeEnclosingMemberSetElementTag is true, so an empty
/// (or fully filtered-out) collection produces no output at all.
/// </summary>
/// <param name="me">
/// enumerable containing members
/// </param>
/// <param name="depth">depth available to instance members; non-instance members get one less</param>
/// <param name="writeEnclosingMemberSetElementTag">
/// if this is true, write an enclosing "&lt;memberset&gt;&lt;/memberset&gt;" tag.
/// </param>
private void WriteMemberInfoCollection
(
    IEnumerable<PSMemberInfo> me,
    int depth,
    bool writeEnclosingMemberSetElementTag
)
{
    Dbg.Assert(me != null, "caller should validate the parameter");
    bool enclosingTagWritten = false;
    foreach (PSMemberInfo info in me)
    {
        if (!info.ShouldSerialize)
        {
            continue;
        }
        // Non-instance members are serialized one level shallower than instance members.
        int depthOfMember = info.IsInstance ? depth : depth - 1;
        if (info.MemberType == (info.MemberType & PSMemberTypes.Properties))
        {
            bool gotValue;
            object value = SerializationUtilities.GetPropertyValueInThreadSafeManner((PSPropertyInfo)info, this.CanUseDefaultRunspaceInThreadSafeManner, out gotValue);
            if (gotValue)
            {
                // Open the enclosing element only when there is something to write.
                if (writeEnclosingMemberSetElementTag && !enclosingTagWritten)
                {
                    enclosingTagWritten = true;
                    WriteStartElement(SerializationStrings.MemberSet);
                }
                WriteOneObject(value, null, info.Name, depthOfMember);
            }
        }
        else if (info.MemberType == PSMemberTypes.MemberSet)
        {
            // Nested member sets are serialized recursively via WriteMemberSet.
            if (writeEnclosingMemberSetElementTag && !enclosingTagWritten)
            {
                enclosingTagWritten = true;
                WriteStartElement(SerializationStrings.MemberSet);
            }
            WriteMemberSet((PSMemberSet)info, depthOfMember);
        }
    }
    // Close the enclosing element only if it was actually opened above.
    if (enclosingTagWritten)
    {
        _writer.WriteEndElement();
    }
}
/// <summary>
/// Serializes a PSMemberSet: its name attribute plus all nested members.
/// Sets that should not be serialized produce no output.
/// </summary>
/// <param name="set">member set to serialize; must not be null</param>
/// <param name="depth">depth available for each nested member</param>
private void WriteMemberSet
(
    PSMemberSet set,
    int depth
)
{
    Dbg.Assert(set != null, "Caller should validate the parameter");

    if (set.ShouldSerialize)
    {
        WriteStartElement(SerializationStrings.MemberSet);
        WriteNameAttribute(set.Name);
        WriteMemberInfoCollection(set.Members, depth, false);
        _writer.WriteEndElement();
    }
}
#endregion membersets
#region properties
/// <summary>
/// Serializes the (adapted) properties of a PSObject.
/// </summary>
/// <param name="source">object whose properties are serialized; must not be null</param>
/// <param name="depth">depth available for the object; each property gets one less</param>
/// <param name="specificPropertiesToSerialize">
/// when non-null, only these properties are serialized instead of the object's
/// full adapted property set
/// </param>
private void WritePSObjectProperties
(
    PSObject source,
    int depth,
    IEnumerable<PSPropertyInfo> specificPropertiesToSerialize
)
{
    Dbg.Assert(source != null, "caller should validate the information");

    // Depth available for each property is one less than for the object itself.
    --depth;
    Dbg.Assert(depth >= 0, "depth should be greater or equal to zero");

    if (specificPropertiesToSerialize != null)
    {
        SerializeProperties(specificPropertiesToSerialize, SerializationStrings.AdapterProperties, depth);
        return;
    }

    if (source.ShouldSerializeAdapter())
    {
        IEnumerable<PSPropertyInfo> adapterCollection = source.GetAdaptedProperties();
        if (adapterCollection != null)
        {
            SerializeProperties(adapterCollection, SerializationStrings.AdapterProperties, depth);
        }
    }
}
/// <summary>
/// Serializes the instance members of a PSObject, wrapped in an enclosing
/// MemberSet element (written lazily by WriteMemberInfoCollection).
/// </summary>
/// <param name="source">object whose instance members are serialized; must not be null</param>
/// <param name="depth">depth available for each member</param>
private void SerializeInstanceProperties
(
    PSObject source,
    int depth
)
{
    Dbg.Assert(source != null, "caller should validate the information");

    PSMemberInfoCollection<PSMemberInfo> members = source.InstanceMembers;
    if (members != null)
    {
        WriteMemberInfoCollection(members, depth, true);
    }
}
// Lazily-created description of the Extended member view; see ExtendedMembersCollection.
private Collection<CollectionEntry<PSMemberInfo>> _extendedMembersCollection;

/// <summary>
/// Member-collection description for the Extended view, created on first
/// use from this serializer's type table and cached afterwards.
/// </summary>
private Collection<CollectionEntry<PSMemberInfo>> ExtendedMembersCollection
{
    get
    {
        if (_extendedMembersCollection == null)
        {
            _extendedMembersCollection = PSObject.GetMemberCollection(PSMemberViewTypes.Extended, _typeTable);
        }

        return _extendedMembersCollection;
    }
}
// Lazily-created description of the All-properties view; see AllPropertiesCollection.
private Collection<CollectionEntry<PSPropertyInfo>> _allPropertiesCollection;

/// <summary>
/// Property-collection description for the All view, created on first
/// use from this serializer's type table and cached afterwards.
/// </summary>
private Collection<CollectionEntry<PSPropertyInfo>> AllPropertiesCollection
{
    get
    {
        if (_allPropertiesCollection == null)
        {
            _allPropertiesCollection = PSObject.GetPropertyCollection(PSMemberViewTypes.All, _typeTable);
        }

        return _allPropertiesCollection;
    }
}
/// <summary>
/// Serializes the extended (ETS) members of a PSObject.
/// </summary>
/// <param name="source">object whose extended members are serialized; must not be null</param>
/// <param name="depth">depth available for each member</param>
/// <param name="specificPropertiesToSerialize">
/// when non-null, the instance members plus the listed non-instance,
/// non-PSProperty members are serialized instead of the full extended view
/// </param>
private void SerializeExtendedProperties
(
    PSObject source,
    int depth,
    IEnumerable<PSPropertyInfo> specificPropertiesToSerialize
)
{
    Dbg.Assert(source != null, "caller should validate the information");

    IEnumerable<PSMemberInfo> membersToWrite = null;
    if (specificPropertiesToSerialize == null)
    {
        // Pull every serializable extended member (including hidden ones) from the object.
        PSMemberInfoIntegratingCollection<PSMemberInfo> searchable =
            new PSMemberInfoIntegratingCollection<PSMemberInfo>(source, ExtendedMembersCollection);
        membersToWrite = searchable.Match(
            "*",
            PSMemberTypes.Properties | PSMemberTypes.PropertySet | PSMemberTypes.MemberSet,
            MshMemberMatchOptions.IncludeHidden | MshMemberMatchOptions.OnlySerializable);
    }
    else
    {
        // Start from the instance members, then append the requested members,
        // skipping instance members (already included) and PSProperty entries.
        List<PSMemberInfo> picked = new List<PSMemberInfo>(source.InstanceMembers);
        foreach (PSMemberInfo member in specificPropertiesToSerialize)
        {
            if (!member.IsInstance && !(member is PSProperty))
            {
                picked.Add(member);
            }
        }
        membersToWrite = picked;
    }

    if (membersToWrite != null)
    {
        WriteMemberInfoCollection(membersToWrite, depth, true);
    }
}
/// <summary>
/// Serializes the PSProperty entries of a property collection under a single
/// enclosing element. The element is opened lazily, so a collection with no
/// PSProperty entries produces no output at all.
/// </summary>
/// <param name="propertyCollection">
/// Collection of properties to serialize
/// </param>
/// <param name="name">
/// Name for enclosing element tag
/// </param>
/// <param name="depth">
/// depth to which each property should be serialized
/// </param>
private void SerializeProperties
(
    IEnumerable<PSPropertyInfo> propertyCollection,
    string name,
    int depth
)
{
    Dbg.Assert(propertyCollection != null, "caller should validate the parameter");

    bool wroteEnclosingElement = false;
    foreach (PSMemberInfo member in propertyCollection)
    {
        PSProperty property = member as PSProperty;
        if (property == null)
        {
            continue;
        }

        // Open the enclosing element only once a PSProperty is actually found.
        if (!wroteEnclosingElement)
        {
            WriteStartElement(name);
            wroteEnclosingElement = true;
        }

        bool gotValue;
        object value = SerializationUtilities.GetPropertyValueInThreadSafeManner(property, this.CanUseDefaultRunspaceInThreadSafeManner, out gotValue);
        if (gotValue)
        {
            WriteOneObject(value, null, property.Name, depth);
        }
    }

    if (wroteEnclosingElement)
    {
        _writer.WriteEndElement();
    }
}
#endregion base properties
#endregion WritePSObject
#region enumerable and dictionary
/// <summary>
/// Serializes an IEnumerable: each element is written via WriteOneObject inside
/// an enclosing element with the given tag. Exceptions thrown by the (possibly
/// third-party) enumerator are logged and end the enumeration; the enclosing
/// element is still closed, so the output stays well-formed.
/// </summary>
/// <param name="enumerable">
/// enumerable which is serialized
/// </param>
/// <param name="tag">enclosing element tag; must be non-empty</param>
/// <param name="depth">depth at which each element is serialized</param>
private void WriteEnumerable
(
    IEnumerable enumerable,
    string tag,
    int depth
)
{
    Dbg.Assert(enumerable != null, "caller should validate the parameter");
    Dbg.Assert(!string.IsNullOrEmpty(tag), "caller should validate the parameter");
    //Start element
    WriteStartElement(tag);
    IEnumerator enumerator = null;
    try
    {
        enumerator = enumerable.GetEnumerator();
        try
        {
            // Rewind in case the caller handed us a partially-consumed enumerator.
            enumerator.Reset();
        }
        catch (System.NotSupportedException)
        {
            //ignore exceptions thrown when the enumerator doesn't support Reset() method as in win8:948569
        }
    }
    catch (Exception exception)
    {
        // Catch-all OK. This is a third-party call-out.
        CommandProcessorBase.CheckForSevereException(exception);
        // Log the failure and continue with a null enumerator (empty output).
        PSEtwLog.LogAnalyticWarning(
            PSEventId.Serializer_EnumerationFailed, PSOpcode.Exception, PSTask.Serialization,
            PSKeyword.Serializer | PSKeyword.UseAlwaysAnalytic,
            enumerable.GetType().AssemblyQualifiedName,
            exception.ToString());
        enumerator = null;
    }
    //AD has incorrect implementation of IEnumerable where they returned null
    //for GetEnumerator instead of empty enumerator
    if (enumerator != null)
    {
        while (true)
        {
            object item = null;
            try
            {
                // MoveNext/Current are third-party calls and may throw;
                // a throw here stops the enumeration but not the serializer.
                if (!enumerator.MoveNext())
                {
                    break;
                }
                else
                {
                    item = enumerator.Current;
                }
            }
            catch (Exception exception)
            {
                // Catch-all OK. This is a third-party call-out.
                CommandProcessorBase.CheckForSevereException(exception);
                PSEtwLog.LogAnalyticWarning(
                    PSEventId.Serializer_EnumerationFailed, PSOpcode.Exception, PSTask.Serialization,
                    PSKeyword.Serializer | PSKeyword.UseAlwaysAnalytic,
                    enumerable.GetType().AssemblyQualifiedName,
                    exception.ToString());
                break;
            }
            WriteOneObject(item, null, null, depth);
        }
    }
    //End element
    _writer.WriteEndElement();
}
/// <summary>
/// Serializes an IDictionary: each entry is written as a DictionaryEntry element
/// containing the key and value. Exceptions thrown by the (possibly third-party)
/// enumerator are logged and end the enumeration; the enclosing element is still
/// closed, so the output stays well-formed.
/// </summary>
/// <param name="dictionary">dictionary which is serialized</param>
/// <param name="tag">enclosing element tag; must be non-empty</param>
/// <param name="depth">depth at which each key and value is serialized</param>
private void WriteDictionary
(
    IDictionary dictionary,
    string tag,
    int depth
)
{
    Dbg.Assert(dictionary != null, "caller should validate the parameter");
    Dbg.Assert(!string.IsNullOrEmpty(tag), "caller should validate the parameter");
    //Start element
    WriteStartElement(tag);
    IDictionaryEnumerator dictionaryEnum = null;
    try
    {
        dictionaryEnum = dictionary.GetEnumerator();
    }
    catch (Exception exception) // ignore non-severe exceptions
    {
        // Catch-all OK. This is a third-party call-out.
        CommandProcessorBase.CheckForSevereException(exception);
        // Log the failure; dictionaryEnum stays null and no entries are written.
        PSEtwLog.LogAnalyticWarning(
            PSEventId.Serializer_EnumerationFailed, PSOpcode.Exception, PSTask.Serialization,
            PSKeyword.Serializer | PSKeyword.UseAlwaysAnalytic,
            dictionary.GetType().AssemblyQualifiedName,
            exception.ToString());
    }
    if (dictionaryEnum != null)
    {
        while (true)
        {
            object key = null;
            object value = null;
            try
            {
                // MoveNext/Key/Value are third-party calls and may throw;
                // a throw here stops the enumeration but not the serializer.
                if (!dictionaryEnum.MoveNext())
                {
                    break;
                }
                else
                {
                    key = dictionaryEnum.Key;
                    value = dictionaryEnum.Value;
                }
            }
            catch (Exception exception)
            {
                // Catch-all OK. This is a third-party call-out.
                CommandProcessorBase.CheckForSevereException(exception);
                PSEtwLog.LogAnalyticWarning(
                    PSEventId.Serializer_EnumerationFailed, PSOpcode.Exception, PSTask.Serialization,
                    PSKeyword.Serializer | PSKeyword.UseAlwaysAnalytic,
                    dictionary.GetType().AssemblyQualifiedName,
                    exception.ToString());
                break;
            }
            Dbg.Assert(key != null, "Dictionary keys should never be null");
            // Defense in depth for release builds: a null key ends the loop.
            if (key == null) break;
            WriteStartElement(SerializationStrings.DictionaryEntryTag);
            WriteOneObject(key, null, SerializationStrings.DictionaryKey, depth);
            WriteOneObject(value, null, SerializationStrings.DictionaryValue, depth);
            _writer.WriteEndElement();
        }
    }
    //End element
    _writer.WriteEndElement();
}
#endregion enumerable and dictionary
#region serialize as string
/// <summary>
/// Serializes a PSObject as its string representation (the types.ps1xml-driven
/// string obtained from GetSerializationString).
/// </summary>
/// <param name="source">PSObject to serialize; must not be null</param>
/// <param name="streamName">name of the stream; not written if null</param>
/// <param name="property">name of property; null for an item</param>
/// <param name="depth">serialization depth</param>
private void HandlePSObjectAsString(
    PSObject source,
    string streamName,
    string property,
    int depth)
{
    Dbg.Assert(source != null, "caller should validate the information");

    string stringValue = GetSerializationString(source);

    // Only look up known-type info when a string was actually produced;
    // otherwise the type info stays null.
    TypeSerializationInfo typeInfo = null;
    if (stringValue != null)
    {
        typeInfo = KnownTypes.GetTypeSerializationInfo(stringValue.GetType());
    }

    WritePrimitiveTypePSObject(source, stringValue, typeInfo, streamName, property, depth);
}
/// <summary>
/// Gets the string from a PSObject using the information from types.ps1xml.
/// This string is used for serializing the PSObject at depth 0 or when
/// pso.SerializationMethod == SerializationMethod.String.
/// </summary>
/// <param name="source">PSObject to be converted to string; must not be null</param>
/// <returns>
/// string value to use for serializing this PSObject; null when a
/// types.ps1xml serialization source exists but yields no usable value.
/// </returns>
private string GetSerializationString(PSObject source)
{
    Dbg.Assert(source != null, "caller should have validated the information");

    PSPropertyInfo serializationSource = null;
    try
    {
        serializationSource = source.GetStringSerializationSource(_typeTable);
    }
    catch (ExtendedTypeSystemException e)
    {
        // Log the ETS failure; serializationSource stays null so we fall back
        // to the object's own ToString below.
        PSEtwLog.LogAnalyticWarning(
            PSEventId.Serializer_ToStringFailed, PSOpcode.Exception, PSTask.Serialization,
            PSKeyword.Serializer | PSKeyword.UseAlwaysAnalytic,
            source.GetType().AssemblyQualifiedName,
            e.InnerException != null ? e.InnerException.ToString() : e.ToString());
    }

    if (serializationSource == null)
    {
        return SerializationUtilities.GetToString(source);
    }

    bool gotValue;
    object sourceValue = SerializationUtilities.GetPropertyValueInThreadSafeManner(serializationSource, this.CanUseDefaultRunspaceInThreadSafeManner, out gotValue);
    return (gotValue && sourceValue != null)
        ? SerializationUtilities.GetToString(sourceValue)
        : null;
}
/// <summary>
/// Determines whether a PSObject should be serialized as a string, based on
/// its serialization method from the type table.
/// </summary>
/// <param name="source">PSObject to be serialized</param>
/// <returns>true if the object needs to be serialized as a string</returns>
private bool SerializeAsString(PSObject source)
{
    if (source.GetSerializationMethod(_typeTable) != SerializationMethod.String)
    {
        return false;
    }

    // Record that the serialization mode was overridden to String for this type.
    PSEtwLog.LogAnalyticVerbose(
        PSEventId.Serializer_ModeOverride, PSOpcode.SerializationSettings, PSTask.Serialization,
        PSKeyword.Serializer | PSKeyword.UseAlwaysAnalytic,
        source.InternalTypeNames.Key,
        (UInt32)(SerializationMethod.String));
    return true;
}
#endregion serialize as string
/// <summary>
/// Computes the serialization depth to use for a PSObject instance subtree.
/// </summary>
/// <param name="source">object whose serialization depth has to be computed; must not be null</param>
/// <param name="depth">depth requested by the caller</param>
/// <returns>the depth to use when serializing <paramref name="source"/></returns>
private int GetDepthOfSerialization(object source, int depth)
{
    Dbg.Assert(source != null, "Caller should verify source != null");

    PSObject pso = PSObject.AsPSObject(source);
    if (pso == null)
    {
        return depth;
    }

    // A handful of well-known base types always get a fixed depth,
    // regardless of what the caller asked for.
    object baseObject = pso.BaseObject;
    if (baseObject is CimInstance)
    {
        return 1;
    }

    if (baseObject is PSCredential)
    {
        return 1;
    }

    if (baseObject is PSSenderInfo)
    {
        return 4;
    }

    if (baseObject is SwitchParameter)
    {
        return 1;
    }

    if ((_context.options & SerializationOptions.UseDepthFromTypes) != 0)
    {
        // Honor a per-type depth configured in types.ps1xml.
        // A value <= 0 means the depth was not set there.
        int depthFromTypes = pso.GetSerializationDepth(_typeTable);
        if (depthFromTypes > 0 && depthFromTypes != depth)
        {
            PSEtwLog.LogAnalyticVerbose(
                PSEventId.Serializer_DepthOverride, PSOpcode.SerializationSettings, PSTask.Serialization,
                PSKeyword.Serializer | PSKeyword.UseAlwaysAnalytic,
                pso.InternalTypeNames.Key, depth, depthFromTypes, _depthBelowTopLevel);
            return depthFromTypes;
        }
    }

    if ((_context.options & SerializationOptions.PreserveSerializationSettingOfOriginal) != 0)
    {
        // Re-serializing an already-deserialized object keeps at least one level.
        if (pso.isDeserialized && depth <= 0)
        {
            return 1;
        }
    }

    return depth;
}
/// <summary>
/// Writes the element that represents a null value, with optional
/// stream and name attributes.
/// </summary>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name attribute; skipped when null</param>
private void WriteNull(string streamName, string property)
{
    WriteStartElement(SerializationStrings.NilTag);
    if (streamName != null)
    {
        WriteAttribute(SerializationStrings.StreamNameAttribute, streamName);
    }
    if (property != null)
    {
        WriteNameAttribute(property);
    }
    _writer.WriteEndElement();
}
#region known type serialization
/// <summary>
/// Writes a raw string as item or property in Monad namespace. The string is
/// emitted via XmlWriter.WriteRaw, so it must already be valid XML text.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">name of the stream to write. Do not write if null.</param>
/// <param name="property">name of property. Pass null for item</param>
/// <param name="raw">string to write</param>
/// <param name="entry">serialization information</param>
private static void WriteRawString
(
    InternalSerializer serializer,
    string streamName,
    string property,
    string raw,
    TypeSerializationInfo entry
)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(raw != null, "caller should have validated the information");
    Dbg.Assert(entry != null, "caller should have validated the information");

    // Property values get the property tag plus a name attribute; items get the item tag.
    if (property == null)
    {
        serializer.WriteStartElement(entry.ItemTag);
    }
    else
    {
        serializer.WriteStartElement(entry.PropertyTag);
        serializer.WriteNameAttribute(property);
    }

    if (streamName != null)
    {
        serializer.WriteAttribute(SerializationStrings.StreamNameAttribute, streamName);
    }

    serializer._writer.WriteRaw(raw);
    serializer._writer.WriteEndElement();
}
/// <summary>
/// Writes a primitive known-type value as an item or property in Monad namespace.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. Pass null for item</param>
/// <param name="source">object to be written</param>
/// <param name="entry">serialization information about source</param>
private static void WriteOnePrimitiveKnownType
(
    InternalSerializer serializer,
    string streamName,
    string property,
    object source,
    TypeSerializationInfo entry
)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(entry != null, "caller should have validated the information");

    if (entry.Serializer != null)
    {
        // The entry supplies a dedicated serializer delegate; let it do the work.
        entry.Serializer(serializer, streamName, property, source, entry);
    }
    else
    {
        // No dedicated serializer: fall back to the invariant-culture string form.
        // (We are not using GetToString because ToString for primitive types
        // is assumed never to throw.)
        string value = Convert.ToString(source, CultureInfo.InvariantCulture);
        Dbg.Assert(value != null, "ToString shouldn't return null for primitive types");
        WriteRawString(serializer, streamName, property, value, entry);
    }
}
/// <summary>
/// Writes a DateTime as item or property, using XML round-trip format.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. pass null for item</param>
/// <param name="source">DateTime to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteDateTime(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(entry != null, "caller should have validated the information");

    // RoundtripKind preserves the DateTimeKind across serialize/deserialize.
    string text = XmlConvert.ToString((DateTime)source, XmlDateTimeSerializationMode.RoundtripKind);
    WriteRawString(serializer, streamName, property, text, entry);
}
/// <summary>
/// Writes a Version as item or property, using its invariant-culture string form.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. pass null for item</param>
/// <param name="source">Version to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteVersion(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(source is Version, "Caller should verify that typeof(source) is Version");
    Dbg.Assert(entry != null, "caller should have validated the information");

    string text = Convert.ToString(source, CultureInfo.InvariantCulture);
    WriteRawString(serializer, streamName, property, text, entry);
}
/// <summary>
/// Writes a SemanticVersion as item or property, using its invariant-culture string form.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. pass null for item</param>
/// <param name="source">SemanticVersion to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteSemanticVersion(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    // Fixed copy-paste error: the assert message previously claimed "Version".
    Dbg.Assert(source is SemanticVersion, "Caller should verify that typeof(source) is SemanticVersion");
    Dbg.Assert(entry != null, "caller should have validated the information");
    WriteRawString(serializer, streamName, property, Convert.ToString(source, CultureInfo.InvariantCulture), entry);
}
/// <summary>
/// Serializes a ScriptBlock as item or property, as its encoded string form.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. pass null for item</param>
/// <param name="source">scriptblock to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteScriptBlock(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(source is ScriptBlock, "Caller should verify that typeof(source) is ScriptBlock");
    Dbg.Assert(entry != null, "caller should have validated the information");

    string text = Convert.ToString(source, CultureInfo.InvariantCulture);
    WriteEncodedString(serializer, streamName, property, text, entry);
}
/// <summary>
/// Serializes a Uri as item or property, as its encoded string form.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. pass null for item</param>
/// <param name="source">URI to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteUri(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(source is Uri, "Caller should verify that typeof(source) is Uri");
    Dbg.Assert(entry != null, "caller should have validated the information");

    string text = Convert.ToString(source, CultureInfo.InvariantCulture);
    WriteEncodedString(serializer, streamName, property, text, entry);
}
/// <summary>
/// Serializes a string as item or property. The value goes through
/// EncodeString plus XmlWriter.WriteString, so characters the XML writer
/// would reject are escaped and markup characters are entity-encoded.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. pass null for item</param>
/// <param name="source">string to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteEncodedString(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(source is string, "caller should have validated the information");
    Dbg.Assert(entry != null, "caller should have validated the information");

    // Property values get the property tag plus a name attribute; items get the item tag.
    if (property == null)
    {
        serializer.WriteStartElement(entry.ItemTag);
    }
    else
    {
        serializer.WriteStartElement(entry.PropertyTag);
        serializer.WriteNameAttribute(property);
    }

    if (streamName != null)
    {
        serializer.WriteAttribute(SerializationStrings.StreamNameAttribute, streamName);
    }

    //Note: We do not use WriteRaw for serializing string. WriteString
    //does necessary escaping which may be needed for certain
    //characters.
    Dbg.Assert(source is string, "Caller should verify that typeof(source) is String");
    string encoded = EncodeString((string)source);
    serializer._writer.WriteString(encoded);
    serializer._writer.WriteEndElement();
}
/// <summary>
/// Writes a Double as item or property, using XmlConvert formatting.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. pass null for item</param>
/// <param name="source">Double to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteDouble(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(entry != null, "caller should have validated the information");

    string text = XmlConvert.ToString((Double)source);
    WriteRawString(serializer, streamName, property, text, entry);
}
/// <summary>
/// Writes a Char as item or property. Char is defined as unsigned short in
/// the schema, so the character is written as its UInt16 code point.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. pass null for item</param>
/// <param name="source">Char to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteChar(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(entry != null, "caller should have validated the information");

    string text = XmlConvert.ToString((UInt16)(Char)source);
    WriteRawString(serializer, streamName, property, text, entry);
}
/// <summary>
/// Writes a Boolean as item or property, using XmlConvert formatting.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. pass null for item</param>
/// <param name="source">Boolean to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteBoolean(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(entry != null, "caller should have validated the information");

    string text = XmlConvert.ToString((Boolean)source);
    WriteRawString(serializer, streamName, property, text, entry);
}
/// <summary>
/// Writes a Single as item or property, using XmlConvert formatting.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. pass null for item</param>
/// <param name="source">single to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteSingle(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(entry != null, "caller should have validated the information");

    string text = XmlConvert.ToString((Single)source);
    WriteRawString(serializer, streamName, property, text, entry);
}
/// <summary>
/// Writes a TimeSpan as item or property, using XmlConvert formatting.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. pass null for item</param>
/// <param name="source">TimeSpan to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteTimeSpan(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(entry != null, "caller should have validated the information");
    WriteRawString(serializer, streamName, property, XmlConvert.ToString((TimeSpan)source), entry);
}
/// <summary>
/// Writes a byte array as item or property, Base64-encoded via WriteBase64.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. pass null for item</param>
/// <param name="source">bytearray to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteByteArray(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(entry != null, "caller should have validated the information");
    Byte[] bytes = (Byte[])source;
    // Property values get the property tag plus a name attribute; items get the item tag.
    if (property != null)
    {
        serializer.WriteStartElement(entry.PropertyTag);
        serializer.WriteNameAttribute(property);
    }
    else
    {
        serializer.WriteStartElement(entry.ItemTag);
    }
    if (streamName != null)
    {
        serializer.WriteAttribute(SerializationStrings.StreamNameAttribute, streamName);
    }
    serializer._writer.WriteBase64(bytes, 0, bytes.Length);
    serializer._writer.WriteEndElement();
}
/// <summary>
/// Writes an XmlDocument as item or property: its OuterXml text is
/// serialized as an encoded string.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. pass null for item</param>
/// <param name="source">XmlDocument to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteXmlDocument(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(entry != null, "caller should have validated the information");

    XmlDocument document = (XmlDocument)source;
    WriteEncodedString(serializer, streamName, property, document.OuterXml, entry);
}
/// <summary>
/// Writes a ProgressRecord as a sequence of encoded child elements
/// (activity, ids, percent complete, record type, seconds remaining,
/// status description); CurrentOperation goes through WriteOneObject.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name attribute; skipped when null</param>
/// <param name="source">ProgressRecord to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteProgressRecord(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(serializer != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(entry != null, "caller should have validated the information");
    ProgressRecord rec = (ProgressRecord)source;
    // NOTE(review): unlike the other writers, PropertyTag is used even when
    // property is null (no ItemTag fallback) — confirm this is intentional.
    serializer.WriteStartElement(entry.PropertyTag);
    if (property != null)
    {
        serializer.WriteNameAttribute(property);
    }
    if (streamName != null)
    {
        serializer.WriteAttribute(SerializationStrings.StreamNameAttribute, streamName);
    }
    // Numeric fields use the invariant culture so the output is machine-readable.
    serializer.WriteEncodedElementString(SerializationStrings.ProgressRecordActivity, rec.Activity);
    serializer.WriteEncodedElementString(SerializationStrings.ProgressRecordActivityId, rec.ActivityId.ToString(CultureInfo.InvariantCulture));
    serializer.WriteOneObject(rec.CurrentOperation, null, null, 1);
    serializer.WriteEncodedElementString(SerializationStrings.ProgressRecordParentActivityId, rec.ParentActivityId.ToString(CultureInfo.InvariantCulture));
    serializer.WriteEncodedElementString(SerializationStrings.ProgressRecordPercentComplete, rec.PercentComplete.ToString(CultureInfo.InvariantCulture));
    serializer.WriteEncodedElementString(SerializationStrings.ProgressRecordType, rec.RecordType.ToString());
    serializer.WriteEncodedElementString(SerializationStrings.ProgressRecordSecondsRemaining, rec.SecondsRemaining.ToString(CultureInfo.InvariantCulture));
    serializer.WriteEncodedElementString(SerializationStrings.ProgressRecordStatusDescription, rec.StatusDescription);
    serializer._writer.WriteEndElement();
}
/// <summary>
/// Writes a SecureString as item or property by delegating to the
/// serializer's secure-string handler.
/// </summary>
/// <param name="serializer">The serializer to which the object is serialized.</param>
/// <param name="streamName">stream attribute; skipped when null</param>
/// <param name="property">name of property. pass null for item</param>
/// <param name="source">SecureString to write</param>
/// <param name="entry">serialization information about source</param>
internal static void WriteSecureString(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry)
{
    Dbg.Assert(entry != null, "caller should have validated the information");
    Dbg.Assert(source != null, "caller should have validated the information");
    Dbg.Assert(serializer != null, "caller should have validated the information");

    serializer.HandleSecureString(source, streamName, property);
}
#endregion known type serialization
#region misc
/// <summary>
/// Writes a start element: in the Monad namespace by default, or with no
/// namespace when SerializationOptions.NoNamespace is set on the context.
/// </summary>
/// <param name="elementTag">tag of element; must be non-empty</param>
private void WriteStartElement(string elementTag)
{
    Dbg.Assert(!string.IsNullOrEmpty(elementTag), "Caller should validate the parameter");

    bool noNamespace = (_context.options & SerializationOptions.NoNamespace) == SerializationOptions.NoNamespace;
    if (noNamespace)
    {
        _writer.WriteStartElement(elementTag);
    }
    else
    {
        _writer.WriteStartElement(elementTag, SerializationStrings.MonadNamespace);
    }
}
/// <summary>
/// Writes an attribute (no explicit namespace) on the current element.
/// </summary>
/// <param name="name">name of attribute; must be non-empty</param>
/// <param name="value">value of attribute; must not be null</param>
private void WriteAttribute(string name, string value)
{
    Dbg.Assert(!string.IsNullOrEmpty(name), "Caller should validate the parameter");
    Dbg.Assert(value != null, "Caller should validate the parameter");
    _writer.WriteAttributeString(name, value);
}
/// <summary>
/// Writes the N (name) attribute on the currently open element, encoding the
/// value so characters illegal in XML survive the round trip.
/// </summary>
/// <param name="value">attribute value; must be non-empty</param>
private void WriteNameAttribute(string value)
{
    Dbg.Assert(!string.IsNullOrEmpty(value), "Caller should validate the parameter");

    string encodedName = EncodeString(value);
    WriteAttribute(SerializationStrings.NameAttribute, encodedName);
}
/// <summary>
/// Encodes a string so that characters which would make XmlWriter.WriteString
/// throw (control characters, unpaired-surrogate-range characters) survive
/// serialization.  Output is reversible via XmlConvert.DecodeName (or
/// InternalDeserializer.DecodeString).
/// </summary>
/// <param name="s">string to encode</param>
/// <returns>encoded string; the original instance when nothing needs encoding</returns>
/// <remarks>
/// This method is deliberately split in two for performance: this half scans
/// for the first character that needs escaping and, in the common case where
/// none exists, returns the original string without allocating.  Only when an
/// encodable character is found does it call EncodeString(string, int), which
/// resumes at that index and builds the escaped copy.
/// Unlike XmlConvert.EncodeName, whitespace and punctuation are left alone,
/// producing much shorter output.
/// </remarks>
internal static string EncodeString(string s)
{
    Dbg.Assert(s != null, "Caller should validate the parameter");

    int length = s.Length;
    for (int index = 0; index < length; index++)
    {
        char current = s[index];
        // Control characters live in 0x00-0x1F and 0x7F-0x9F.
        bool isControl = current <= 0x1F || (current >= 0x7F && current <= 0x9F);
        // Surrogate code units (0xD800-0xDFFF) are escaped individually.
        bool isSurrogate = current >= 0xD800 && current <= 0xDFFF;
        // The escape introducer '_' (0x5F) only needs escaping when followed
        // by 'x'/'X', i.e. when it could be mistaken for an escape sequence.
        bool isEscapePrefix = current == 0x5F
            && index + 1 < length
            && (s[index + 1] == 'x' || s[index + 1] == 'X');

        if (isControl || isSurrogate || isEscapePrefix)
        {
            // Hand off to the allocating half, skipping the verified prefix.
            return EncodeString(s, index);
        }
    }

    // Nothing to escape - return the original string untouched.
    return s;
}
// Nibble-to-uppercase-hex-digit lookup table used by EncodeString(string, int)
// when emitting _xHHHH_ escape sequences.
private static readonly char[] s_hexlookup = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' };
/// <summary>
/// This is the real workhorse that encodes strings.
/// See <see cref="EncodeString(string)" /> for more information.
/// </summary>
/// <param name="s">string to encode</param>
/// <param name="indexOfFirstEncodableCharacter">index of the first character that needs
/// escaping; all characters before this index are known-safe and copied verbatim</param>
/// <returns>encoded string</returns>
private static string EncodeString(string s, int indexOfFirstEncodableCharacter)
{
    Dbg.Assert(s != null, "Caller should validate the 's' parameter");
    Dbg.Assert(indexOfFirstEncodableCharacter >= 0, "Caller should verify validity of indexOfFirstEncodableCharacter");
    Dbg.Assert(indexOfFirstEncodableCharacter < s.Length, "Caller should verify validity of indexOfFirstEncodableCharacter");
    int slen = s.Length;
    // Worst case: every remaining character expands to the 7-character escape "_xHHHH_".
    char[] result = new char[indexOfFirstEncodableCharacter + (slen - indexOfFirstEncodableCharacter) * 7];
    s.CopyTo(0, result, 0, indexOfFirstEncodableCharacter);
    int rlen = indexOfFirstEncodableCharacter;
    for (int i = indexOfFirstEncodableCharacter; i < slen; ++i)
    {
        char c = s[i];
        // Pass through anything that is NOT:
        //   - a control character (0x00-0x1F or 0x7F-0x9F),
        //   - a surrogate code unit (0xD800-0xDFFF),
        //   - the escape character '_' (0x5F) followed by 'x'/'X'.
        if (c > 0x1F
            && (c < 0x7F || c > 0x9F)
            && (c < 0xD800 || c > 0xDFFF)
            && (c != 0x5F || ((i + 1 >= slen) ||
               ((s[i + 1] != 'x') && (s[i + 1] != 'X'))
               )))
        {
            result[rlen++] = c;
        }
        else if (c == 0x5F)
        {
            // Special-case the escape character itself: '_' becomes "_x005F_".
            result[rlen + 0] = '_';
            result[rlen + 1] = 'x';
            result[rlen + 2] = '0';
            result[rlen + 3] = '0';
            result[rlen + 4] = '5';
            result[rlen + 5] = 'F';
            result[rlen + 6] = '_';
            rlen += 7;
        }
        else
        {
            // Control character or surrogate: emit "_xHHHH_" with the code
            // unit's hex digits written most-significant-first.
            result[rlen + 0] = '_';
            result[rlen + 1] = 'x';
            result[rlen + 2 + 3] = s_hexlookup[c & 0x0F];
            c >>= 4;
            result[rlen + 2 + 2] = s_hexlookup[c & 0x0F];
            c >>= 4;
            result[rlen + 2 + 1] = s_hexlookup[c & 0x0F];
            c >>= 4;
            result[rlen + 2 + 0] = s_hexlookup[c & 0x0F];
            result[rlen + 6] = '_';
            rlen += 7;
        }
    }
    // rlen is the number of characters actually written; the buffer was sized
    // for the worst case and may be longer.
    return new String(result, 0, rlen);
}
/// <summary>
/// Writes a complete element whose text content is the encoded value,
/// namespace-qualified unless the context requested namespace-free output.
/// Also honors a pending Stop() request before writing.
/// </summary>
/// <param name="name">element name; must be non-empty</param>
/// <param name="value">element text; must not be null (encoded before writing)</param>
private void WriteEncodedElementString(string name, string value)
{
    Dbg.Assert(!string.IsNullOrEmpty(name), "Caller should validate the parameter");
    Dbg.Assert(value != null, "Caller should validate the parameter");

    this.CheckIfStopping();

    string encodedValue = EncodeString(value);
    bool omitNamespace = (_context.options & SerializationOptions.NoNamespace) == SerializationOptions.NoNamespace;
    if (omitNamespace)
    {
        _writer.WriteElementString(name, encodedValue);
    }
    else
    {
        _writer.WriteElementString(name, SerializationStrings.MonadNamespace, encodedValue);
    }
}
#endregion misc
}
/// <summary>
/// This internal class provides methods for de-serializing mshObject.
/// </summary>
internal class InternalDeserializer
{
#region constructor
/// <summary>
/// XmlReader from which object is deserialized
/// </summary>
private readonly XmlReader _reader;
/// <summary>
/// Deserialization context
/// </summary>
private readonly DeserializationContext _context;
/// <summary>
/// Used by Remoting infrastructure. This TypeTable instance
/// will be used by the Deserializer if ExecutionContext is not
/// available (to get the ExecutionContext's TypeTable).
/// </summary>
private TypeTable _typeTable;
/// <summary>
/// If true, unknown tags are allowed (skipped) inside a PSObject element.
/// </summary>
private bool UnknownTagsAllowed
{
    get
    {
        Dbg.Assert(_version.Major <= 1, "Deserializer assumes clixml version is <= 1.1");

        // A minor version above 1 means the producer may emit element tags this
        // deserializer does not know about; those elements must be skipped
        // rather than treated as errors.
        bool allowed = _version.Minor > 1;
        return allowed;
    }
}
/// <summary>
/// If true, duplicate RefId attributes in the incoming clixml are tolerated
/// (the later definition silently wins) instead of being rejected.
/// In release builds this is always true, to gracefully accept v1 output.
/// </summary>
private bool DuplicateRefIdsAllowed
{
    get
    {
#if DEBUG
        Dbg.Assert(_version.Major <= 1, "Deserializer assumes clixml version is <= 1.1");
        // Versions before 1.1.0.1 could legitimately emit duplicate RefId-s.
        Version boundaryVersion = new Version(1, 1, 0, 1);
        return (_version < boundaryVersion);
#else
        return true; // handle v1 stuff gracefully
#endif
    }
}
/// <summary>
/// Depth below top level - used to prevent stack overflow during deserialization
/// </summary>
private int _depthBelowTopLevel;
/// <summary>
/// Version declared by the clixml being read
/// </summary>
private Version _version;
// Hard recursion limit enforced by ReadOneObject.
private const int MaxDepthBelowTopLevel = 50;
// Maps RefId attribute values to already-deserialized objects (for Ref elements).
private readonly ReferenceIdHandlerForDeserializer<object> _objectRefIdHandler;
// Maps RefId attribute values to already-deserialized type-name lists (for TNRef elements).
private readonly ReferenceIdHandlerForDeserializer<ConsolidatedString> _typeRefIdHandler;
/// <summary>
/// Creates an InternalDeserializer that reads clixml from the given reader.
/// </summary>
/// <param name="reader">XmlReader positioned on the serialized stream; must not be null</param>
/// <param name="context">deserialization context (options, memory accounting, CIM class cache)</param>
internal InternalDeserializer(XmlReader reader, DeserializationContext context)
{
    Dbg.Assert(reader != null, "caller should validate the parameter");
    _reader = reader;
    _context = context;
    // Fresh RefId maps per deserializer instance - references cannot span documents.
    _objectRefIdHandler = new ReferenceIdHandlerForDeserializer<object>();
    _typeRefIdHandler = new ReferenceIdHandlerForDeserializer<ConsolidatedString>();
}
#endregion constructor
#region deserialization
/// <summary>
/// Used by Remoting infrastructure. This TypeTable instance
/// will be used by the Deserializer if ExecutionContext is not
/// available (to get the ExecutionContext's TypeTable).
/// Consulted when rehydrating deserialized objects into user-defined types.
/// </summary>
internal TypeTable TypeTable
{
    get { return _typeTable; }
    set { _typeTable = value; }
}
/// <summary>
/// Parses the clixml version string and validates that this deserializer can
/// read it.  Throws an xml exception when the string is not a valid version
/// or when the major version is not 1.
/// </summary>
/// <param name="version">
/// version in string format
/// </param>
internal void ValidateVersion(string version)
{
    Dbg.Assert(version != null, "Caller should validate the parameter");

    _version = null;
    try
    {
        _version = new Version(version);
    }
    catch (ArgumentException e)
    {
        throw NewXmlException(Serialization.InvalidVersion, e);
    }
    catch (FormatException e)
    {
        throw NewXmlException(Serialization.InvalidVersion, e);
    }

    // Versioning note: a future 1.x serializer may introduce new known types.
    // This deserializer ignores the unknown base-object element of such types
    // and reconstructs the object from the base/adapter property bags, which
    // newer serializers are expected to keep emitting.  For example, if v1.2
    // serialized an IPAddress as
    //   <PSObject Version=1.2>
    //     <TypeNames>...</TypeNames>
    //     <BaseObject><IPAddress>120.23.35.53</IPAddress></BaseObject>
    //     <Properties>...</Properties>
    //   </PSObject>
    // a v1.x reader skips <IPAddress> and reads <Properties> instead.
    // Only a different MAJOR version is a hard incompatibility.
    if (_version.Major != 1)
    {
        throw NewXmlException(Serialization.UnexpectedVersion, null, _version.Major);
    }
}
/// <summary>
/// Reads one top-level node, which must be one of: a Nil element, a reference
/// to a previously deserialized object, a primitive known type, or a PSObject.
/// The reader must be positioned on the node's start element.
/// </summary>
/// <param name="streamName">receives the value of the stream attribute, if present</param>
/// <param name="isKnownPrimitiveType">set to true only when the node was a primitive known type</param>
/// <returns>the deserialized object; null for a Nil element</returns>
private object ReadOneDeserializedObject(out string streamName, out bool isKnownPrimitiveType)
{
    if (_reader.NodeType != XmlNodeType.Element)
    {
        throw NewXmlException(Serialization.InvalidNodeType, null,
            _reader.NodeType.ToString(), XmlNodeType.Element.ToString());
    }
    s_trace.WriteLine("Processing start node {0}", _reader.LocalName);
    streamName = _reader.GetAttribute(SerializationStrings.StreamNameAttribute);
    isKnownPrimitiveType = false;
    // Handle nil node: represents a null value.
    if (IsNextElement(SerializationStrings.NilTag))
    {
        Skip();
        return null;
    }
    // Handle reference to a previously deserialized object (Ref element);
    // the RefId attribute must name an object we have already seen.
    if (IsNextElement(SerializationStrings.ReferenceTag))
    {
        string refId = _reader.GetAttribute(SerializationStrings.ReferenceIdAttribute);
        if (refId == null)
        {
            throw NewXmlException(Serialization.AttributeExpected, null, SerializationStrings.ReferenceIdAttribute);
        }
        object duplicate = _objectRefIdHandler.GetReferencedObject(refId);
        if (duplicate == null)
        {
            throw NewXmlException(Serialization.InvalidReferenceId, null, refId);
        }
        Skip();
        return duplicate;
    }
    // Handle primitive known types (string, int, datetime, ...).
    TypeSerializationInfo pktInfo = KnownTypes.GetTypeSerializationInfoFromItemTag(_reader.LocalName);
    if (pktInfo != null)
    {
        s_trace.WriteLine("Primitive Knowntype Element {0}", pktInfo.ItemTag);
        isKnownPrimitiveType = true;
        return ReadPrimaryKnownType(pktInfo);
    }
    // Handle PSObject.
    if (IsNextElement(SerializationStrings.PSObjectTag))
    {
        s_trace.WriteLine("PSObject Element");
        return ReadPSObject();
    }
    // Unknown node: unknown tags may be tolerated inside a PSObject, but never
    // at the top level.
    s_trace.TraceError("Invalid element {0} tag found", _reader.LocalName);
    throw NewXmlException(Serialization.InvalidElementTag, null, _reader.LocalName);
}
// Set (from another thread) to request that deserialization stop at the next checkpoint.
private bool _isStopping = false;
/// <summary>
/// Called from a separate thread; will stop the deserialization process.
/// </summary>
internal void Stop()
{
    _isStopping = true;
}
/// <summary>
/// Throws an InvalidOperationException if Stop() has been called.
/// Checked at the start of each object read so a stop request surfaces promptly.
/// </summary>
private void CheckIfStopping()
{
    if (_isStopping)
    {
        throw PSTraceSource.NewInvalidOperationException(Serialization.Stopping);
    }
}
// Well-known property names used when a CimInstance (and its class metadata)
// is represented as a clixml property bag.
internal const string CimInstanceMetadataProperty = "__InstanceMetadata";
internal const string CimModifiedProperties = "Modified";
internal const string CimClassMetadataProperty = "__ClassMetadata";
internal const string CimClassNameProperty = "ClassName";
internal const string CimNamespaceProperty = "Namespace";
internal const string CimServerNameProperty = "ServerName";
internal const string CimHashCodeProperty = "Hash";
internal const string CimMiXmlProperty = "MiXml";
/// <summary>
/// Copies one deserialized property back onto a rehydrated CimInstance.
/// Handles the PSComputerName note specially, converts ArrayList values back
/// to their original array type when possible, and preserves the property's
/// "modified" flag based on the deserialized instance metadata.
/// </summary>
/// <param name="cimInstance">target instance; must not be null</param>
/// <param name="deserializedProperty">property read from clixml; must not be null</param>
/// <param name="namesOfModifiedProperties">names that should keep IsValueModified == true</param>
/// <returns>true on success; false means the caller should fall back to the raw deserialized object</returns>
private bool RehydrateCimInstanceProperty(
    CimInstance cimInstance,
    PSPropertyInfo deserializedProperty,
    HashSet<string> namesOfModifiedProperties)
{
    Dbg.Assert(cimInstance != null, "Caller should make sure cimInstance != null");
    Dbg.Assert(deserializedProperty != null, "Caller should make sure deserializedProperty != null");
    // PSComputerName is not a CIM property - it maps to the session computer name.
    if (deserializedProperty.Name.Equals(RemotingConstants.ComputerNameNoteProperty, StringComparison.OrdinalIgnoreCase))
    {
        string psComputerNameValue = deserializedProperty.Value as string;
        if (psComputerNameValue != null)
        {
            cimInstance.SetCimSessionComputerName(psComputerNameValue);
        }
        return true;
    }
    CimProperty cimProperty = cimInstance.CimInstanceProperties[deserializedProperty.Name];
    if (cimProperty == null)
    {
        return false;
    }
    // TODO/FIXME - think if it is possible to do the array handling in a more efficient way
    object propertyValue = deserializedProperty.Value;
    if (propertyValue != null)
    {
        PSObject psoPropertyValue = PSObject.AsPSObject(propertyValue);
        // Collections deserialize as ArrayList; convert back to the original
        // array type recorded in the type names, bailing out on any mismatch.
        if (psoPropertyValue.BaseObject is ArrayList)
        {
            if ((psoPropertyValue.InternalTypeNames == null) || (psoPropertyValue.InternalTypeNames.Count == 0))
            {
                return false;
            }
            string originalArrayTypeName = Deserializer.MaskDeserializationPrefix(psoPropertyValue.InternalTypeNames[0]);
            if (originalArrayTypeName == null)
            {
                return false;
            }
            Type originalArrayType;
            if (!LanguagePrimitives.TryConvertTo(originalArrayTypeName, CultureInfo.InvariantCulture, out originalArrayType))
            {
                return false;
            }
            if (!originalArrayType.IsArray)
            {
                return false;
            }
            object newPropertyValue;
            if (!LanguagePrimitives.TryConvertTo(propertyValue, originalArrayType, CultureInfo.InvariantCulture, out newPropertyValue))
            {
                return false;
            }
            psoPropertyValue = PSObject.AsPSObject(newPropertyValue);
        }
        propertyValue = psoPropertyValue.BaseObject;
    }
    try
    {
        cimProperty.Value = propertyValue;
        // Newly assigned CIM properties are flagged as modified; clear the flag
        // unless the instance metadata listed this property as modified.
        if (!namesOfModifiedProperties.Contains(deserializedProperty.Name))
        {
            cimProperty.IsValueModified = false;
        }
#if DEBUG
        else
        {
            Dbg.Assert(cimProperty.IsValueModified, "Deserialized CIM properties should by default be marked as 'modified' ");
        }
#endif
    }
    catch (FormatException)
    {
        return false;
    }
    catch (InvalidCastException)
    {
        return false;
    }
    catch (ArgumentException)
    {
        return false;
    }
    catch (CimException)
    {
        return false;
    }
    return true;
}
// Shared, lazily created CIM deserializer used to rehydrate class metadata.
// readonly: the field is assigned exactly once and must never be replaced
// (callers rely on a single Lazy instance for the one-time initialization).
private static readonly Lazy<CimDeserializer> s_cimDeserializer = new Lazy<CimDeserializer>(CimDeserializer.Create);
/// <summary>
/// Rebuilds the CimClass hierarchy from the __ClassMetadata property bag.
/// Walks the serialized class list root-first, reusing cached classes by
/// (name, namespace, server, hash) and deserializing MI XML for the rest.
/// Newly deserialized classes are added to the context cache only after the
/// whole chain succeeds.
/// </summary>
/// <param name="classMetadataProperty">the __ClassMetadata property, or null</param>
/// <returns>the leaf CimClass, or null when any part of the metadata is missing or malformed</returns>
private CimClass RehydrateCimClass(PSPropertyInfo classMetadataProperty)
{
    if ((classMetadataProperty == null) || (classMetadataProperty.Value == null))
    {
        return null;
    }
    IEnumerable deserializedClasses = LanguagePrimitives.GetEnumerable(classMetadataProperty.Value);
    if (deserializedClasses == null)
    {
        return null;
    }
    Stack<KeyValuePair<CimClassSerializationId, CimClass>> cimClassesToAddToCache = new Stack<KeyValuePair<CimClassSerializationId, CimClass>>();
    //
    // REHYDRATE CLASS METADATA
    //
    CimClass parentClass = null;
    CimClass currentClass = null;
    foreach (var deserializedClass in deserializedClasses)
    {
        // Each entry's parent is the class deserialized on the previous iteration.
        parentClass = currentClass;
        if (deserializedClass == null)
        {
            return null;
        }
        PSObject psoDeserializedClass = PSObject.AsPSObject(deserializedClass);
        PSPropertyInfo namespaceProperty = psoDeserializedClass.InstanceMembers[InternalDeserializer.CimNamespaceProperty] as PSPropertyInfo;
        if (namespaceProperty == null)
        {
            return null;
        }
        string cimNamespace = namespaceProperty.Value as string;
        PSPropertyInfo classNameProperty = psoDeserializedClass.InstanceMembers[InternalDeserializer.CimClassNameProperty] as PSPropertyInfo;
        if (classNameProperty == null)
        {
            return null;
        }
        string cimClassName = classNameProperty.Value as string;
        PSPropertyInfo computerNameProperty = psoDeserializedClass.InstanceMembers[InternalDeserializer.CimServerNameProperty] as PSPropertyInfo;
        if (computerNameProperty == null)
        {
            return null;
        }
        string computerName = computerNameProperty.Value as string;
        PSPropertyInfo hashCodeProperty = psoDeserializedClass.InstanceMembers[InternalDeserializer.CimHashCodeProperty] as PSPropertyInfo;
        if (hashCodeProperty == null)
        {
            return null;
        }
        var hashCodeObject = hashCodeProperty.Value;
        if (hashCodeObject == null)
        {
            return null;
        }
        // The hash may arrive wrapped in a PSObject - unwrap before the type test.
        if (hashCodeObject is PSObject)
        {
            hashCodeObject = ((PSObject)hashCodeObject).BaseObject;
        }
        if (!(hashCodeObject is int))
        {
            return null;
        }
        int hashCode = (int)hashCodeObject;
        CimClassSerializationId cimClassSerializationId = new CimClassSerializationId(cimClassName, cimNamespace, computerName, hashCode);
        // Cache hit: skip the expensive MI XML deserialization for this level.
        currentClass = _context.cimClassSerializationIdCache.GetCimClassFromCache(cimClassSerializationId);
        if (currentClass != null)
        {
            continue;
        }
        PSPropertyInfo miXmlProperty = psoDeserializedClass.InstanceMembers[InternalDeserializer.CimMiXmlProperty] as PSPropertyInfo;
        if ((miXmlProperty == null) || (miXmlProperty.Value == null))
        {
            return null;
        }
        string miXmlString = miXmlProperty.Value.ToString();
        byte[] miXmlBytes = Encoding.Unicode.GetBytes(miXmlString);
        uint offset = 0;
        try
        {
            currentClass = s_cimDeserializer.Value.DeserializeClass(
                miXmlBytes,
                ref offset,
                parentClass,
                computerName: computerName,
                namespaceName: cimNamespace);
            // Defer cache insertion until the whole hierarchy deserializes cleanly.
            cimClassesToAddToCache.Push(new KeyValuePair<CimClassSerializationId, CimClass>(cimClassSerializationId, currentClass));
        }
        catch (CimException)
        {
            return null;
        }
    }
    //
    // UPDATE CLASSDECL CACHE
    //
    foreach (var cacheEntry in cimClassesToAddToCache)
    {
        _context.cimClassSerializationIdCache.AddCimClassToCache(cacheEntry.Key, cacheEntry.Value);
    }
    return currentClass;
}
// NOTE: Win7 change for refid-s that span multiple xml documents: ADMIN: changelist #226414
/// <summary>
/// Attempts to turn a deserialized property-bag PSObject back into a live
/// CimInstance: rehydrates the class metadata, replays adapted properties,
/// and re-attaches extended properties as notes.  On any failure the original
/// deserialized object is returned unchanged.
/// </summary>
/// <param name="deserializedObject">property-bag form of a CimInstance</param>
/// <returns>PSObject wrapping the rehydrated CimInstance, or the input on failure</returns>
private PSObject RehydrateCimInstance(PSObject deserializedObject)
{
    // Only a pure property bag (PSCustomObject base) can be rehydrated.
    if (!(deserializedObject.BaseObject is PSCustomObject))
    {
        return deserializedObject;
    }
    PSPropertyInfo classMetadataProperty = deserializedObject.InstanceMembers[CimClassMetadataProperty] as PSPropertyInfo;
    CimClass cimClass = RehydrateCimClass(classMetadataProperty);
    if (cimClass == null)
    {
        return deserializedObject;
    }
    CimInstance cimInstance;
    try
    {
        cimInstance = new CimInstance(cimClass);
    }
    catch (CimException)
    {
        return deserializedObject;
    }
    PSObject psoCimInstance = PSObject.AsPSObject(cimInstance);
    // process __InstanceMetadata: collect the names of properties that were
    // marked modified at serialization time (space-separated list).
    HashSet<string> namesOfModifiedProperties = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    PSPropertyInfo instanceMetadataProperty = deserializedObject.InstanceMembers[CimInstanceMetadataProperty] as PSPropertyInfo;
    if ((instanceMetadataProperty != null) && (instanceMetadataProperty.Value != null))
    {
        PSObject instanceMetadata = PSObject.AsPSObject(instanceMetadataProperty.Value);
        PSPropertyInfo modifiedPropertiesProperty = instanceMetadata.InstanceMembers[CimModifiedProperties] as PSPropertyInfo;
        if ((modifiedPropertiesProperty != null) && (modifiedPropertiesProperty.Value != null))
        {
            string modifiedPropertiesString = modifiedPropertiesProperty.Value.ToString();
            foreach (string nameOfModifiedProperty in modifiedPropertiesString.Split(Utils.Separators.Space))
            {
                namesOfModifiedProperties.Add(nameOfModifiedProperty);
            }
        }
    }
    // process properties that were originally "adapted" properties - these map
    // directly onto CIM properties; any failure aborts rehydration entirely.
    if (deserializedObject.adaptedMembers != null)
    {
        foreach (PSMemberInfo deserializedMemberInfo in deserializedObject.adaptedMembers)
        {
            PSPropertyInfo deserializedProperty = deserializedMemberInfo as PSPropertyInfo;
            if (deserializedProperty == null)
            {
                continue;
            }
            bool propertyHandledSuccessfully = RehydrateCimInstanceProperty(
                cimInstance,
                deserializedProperty,
                namesOfModifiedProperties);
            if (!propertyHandledSuccessfully)
            {
                return deserializedObject;
            }
        }
    }
    // process properties that were originally "extended" properties - these are
    // re-attached as note properties on the rehydrated instance.
    foreach (PSMemberInfo deserializedMemberInfo in deserializedObject.InstanceMembers)
    {
        PSPropertyInfo deserializedProperty = deserializedMemberInfo as PSPropertyInfo;
        if (deserializedProperty == null)
        {
            continue;
        }
        // skip adapted properties (already handled above)
        if ((deserializedObject.adaptedMembers != null) && (deserializedObject.adaptedMembers[deserializedProperty.Name] != null))
        {
            continue;
        }
        // skip metadata introduced by CliXml/CimInstance serialization
        if (deserializedProperty.Name.Equals(CimClassMetadataProperty, StringComparison.OrdinalIgnoreCase))
        {
            continue;
        }
        // skip properties re-added by the client (i.e. through types.ps1xml)
        if (psoCimInstance.Properties[deserializedProperty.Name] != null)
        {
            continue;
        }
        PSNoteProperty noteProperty = new PSNoteProperty(deserializedProperty.Name, deserializedProperty.Value);
        psoCimInstance.Properties.Add(noteProperty);
    }
    return psoCimInstance;
}
/// <summary>
/// Reads one object. At this point the reader should be positioned
/// at the start tag of the object.  Enforces the recursion depth limit,
/// rehydrates CimInstances, and attempts conversion to a user-defined target
/// type (types.ps1xml) when one is registered.
/// </summary>
/// <param name="streamName">receives the stream attribute value, if present</param>
/// <returns>
/// Deserialized Object.
/// </returns>
internal object ReadOneObject(out string streamName)
{
    this.CheckIfStopping();
    try
    {
        // Guard against deeply nested clixml blowing the call stack.
        _depthBelowTopLevel++;
        Dbg.Assert(_depthBelowTopLevel <= MaxDepthBelowTopLevel, "depthBelowTopLevel should be <= MaxDepthBelowTopLevel");
        if (_depthBelowTopLevel == MaxDepthBelowTopLevel)
        {
            throw NewXmlException(Serialization.DeserializationTooDeep, null);
        }
        bool isKnownPrimitiveType;
        object result = ReadOneDeserializedObject(out streamName, out isKnownPrimitiveType);
        if (null == result)
        {
            return null;
        }
        if (!isKnownPrimitiveType)
        {
            PSObject mshSource = PSObject.AsPSObject(result);
            if (Deserializer.IsDeserializedInstanceOfType(mshSource, typeof(CimInstance)))
            {
                return RehydrateCimInstance(mshSource);
            }
            // Convert deserialized object to a user-defined type (specified in a types.ps1xml file)
            Type targetType = mshSource.GetTargetTypeForDeserialization(_typeTable);
            if (null != targetType)
            {
                Exception rehydrationException = null;
                try
                {
                    object rehydratedResult = LanguagePrimitives.ConvertTo(
                        result, targetType, true /* recurse */, CultureInfo.InvariantCulture, _typeTable);
                    PSEtwLog.LogAnalyticVerbose(PSEventId.Serializer_RehydrationSuccess,
                        PSOpcode.Rehydration, PSTask.Serialization, PSKeyword.Serializer,
                        mshSource.InternalTypeNames.Key, targetType.FullName,
                        rehydratedResult.GetType().FullName);
                    return rehydratedResult;
                }
                catch (InvalidCastException e)
                {
                    rehydrationException = e;
                }
                catch (ArgumentException e)
                {
                    rehydrationException = e;
                }
                Dbg.Assert(rehydrationException != null,
                    "The only way to get here is with rehydrationException != null");
                // Conversion failed: log and fall back to the property-bag object.
                PSEtwLog.LogAnalyticError(PSEventId.Serializer_RehydrationFailure,
                    PSOpcode.Rehydration, PSTask.Serialization, PSKeyword.Serializer,
                    mshSource.InternalTypeNames.Key,
                    targetType.FullName,
                    rehydrationException.ToString(),
                    rehydrationException.InnerException == null
                        ? string.Empty
                        : rehydrationException.InnerException.ToString());
            }
        }
        return result;
    }
    finally
    {
        _depthBelowTopLevel--;
        Dbg.Assert(_depthBelowTopLevel >= 0, "depthBelowTopLevel should be >= 0");
    }
}
/// <summary>
/// Reads one object, discarding the stream name (irrelevant for nested reads).
/// </summary>
private object ReadOneObject()
{
    string discardedStreamName;
    return ReadOneObject(out discardedStreamName);
}
/// <summary>
/// Reads one PSObject element: its optional RefId, type names, adapted
/// property bag, member sets, ToString override, and base object (primitive,
/// container, or nested PSObject).  Unknown child tags are skipped only when
/// the clixml version allows it.
/// </summary>
private PSObject ReadPSObject()
{
    PSObject dso = ReadAttributeAndCreatePSObject();
    // Read start element tag
    if (ReadStartElementAndHandleEmpty(SerializationStrings.PSObjectTag) == false)
    {
        // Empty element - nothing further to read.
        return dso;
    }
    // Type info from a base object only overrides when no explicit TypeNames
    // element was present.
    bool overrideTypeInfo = true;
    // Process all the child nodes
    while (_reader.NodeType == XmlNodeType.Element)
    {
        if (IsNextElement(SerializationStrings.TypeNamesTag) ||
            IsNextElement(SerializationStrings.TypeNamesReferenceTag))
        {
            ReadTypeNames(dso);
            overrideTypeInfo = false;
        }
        else if (IsNextElement(SerializationStrings.AdapterProperties))
        {
            ReadProperties(dso);
        }
        else if (IsNextElement(SerializationStrings.MemberSet))
        {
            ReadMemberSet(dso.InstanceMembers);
        }
        else if (IsNextElement(SerializationStrings.ToStringElementTag))
        {
            dso.ToStringFromDeserialization = ReadDecodedElementString(SerializationStrings.ToStringElementTag);
            dso.InstanceMembers.Add(PSObject.dotNetInstanceAdapter.GetDotNetMethod<PSMemberInfo>(dso, "ToString"));
            PSGetMemberBinder.SetHasInstanceMember("ToString");
            // Fix for Win8:75437
            // The TokenText property is used in type conversion and it is not being populated during deserialization
            // As a result, parameter binding fails in the following case on a remote session
            // register-psssessionconfiguration -Name foo -psversion 3.0
            // The value "3.0" is treated as a double and since the TokenText property holds null, the type converter tries to convert
            // from System.Double to System.Version using Parse method of System.Version and fails
            dso.TokenText = dso.ToStringFromDeserialization;
        }
        else
        {
            // Handle the base object.
            object baseObject = null;
            ContainerType ct = ContainerType.None;
            // Check if tag is a primitive known type.
            TypeSerializationInfo pktInfo = KnownTypes.GetTypeSerializationInfoFromItemTag(_reader.LocalName);
            if (pktInfo != null)
            {
                s_trace.WriteLine("Primitive Knowntype Element {0}", pktInfo.ItemTag);
                baseObject = ReadPrimaryKnownType(pktInfo);
            }
            else if (IsKnownContainerTag(out ct))
            {
                s_trace.WriteLine("Found container node {0}", ct);
                baseObject = ReadKnownContainer(ct);
            }
            else if (IsNextElement(SerializationStrings.PSObjectTag))
            {
                s_trace.WriteLine("Found PSObject node");
                baseObject = ReadOneObject();
            }
            else
            {
                // We have an unknown tag - skip it when the version permits,
                // otherwise fail.
                s_trace.WriteLine("Unknown tag {0} encountered", _reader.LocalName);
                if (UnknownTagsAllowed)
                {
                    Skip();
                }
                else
                {
                    throw NewXmlException(Serialization.InvalidElementTag, null, _reader.LocalName);
                }
            }
            if (baseObject != null)
            {
                dso.SetCoreOnDeserialization(baseObject, overrideTypeInfo);
            }
        }
    }
    ReadEndElement();
    // When the base object is itself a PSObject, lift its deserializer fields
    // up onto this wrapper.
    PSObject immediateBasePso = dso.ImmediateBaseObject as PSObject;
    if (immediateBasePso != null)
    {
        PSObject.CopyDeserializerFields(source: immediateBasePso, target: dso);
    }
    return dso;
}
/// <summary>
/// Reads the optional RefId attribute at the current element and creates the
/// empty PSObject into which the element's content will be deserialized.
/// </summary>
/// <returns>new PSObject, registered under its RefId when one was present</returns>
private PSObject ReadAttributeAndCreatePSObject()
{
    PSObject shellObject = new PSObject();

    // RefId is not a mandatory attribute; register the object only when present
    // so later Ref elements can resolve to it.
    string refId = _reader.GetAttribute(SerializationStrings.ReferenceIdAttribute);
    if (refId != null)
    {
        s_trace.WriteLine("Read PSObject with refId: {0}", refId);
        _objectRefIdHandler.SetRefId(shellObject, refId, this.DuplicateRefIdsAllowed);
    }

    return shellObject;
}
/// <summary>
/// Reads the type-name hierarchy for a PSObject: either an inline TypeNames
/// list (optionally registered under a RefId) or a TypeNames reference that
/// resolves to a previously read list.
/// </summary>
/// <param name="dso">
/// PSObject to which TypeNames are added
/// </param>
private void ReadTypeNames(PSObject dso)
{
    Dbg.Assert(dso != null, "caller should validate the parameter");
    Dbg.Assert(_reader.NodeType == XmlNodeType.Element, "NodeType should be Element");
    if (IsNextElement(SerializationStrings.TypeNamesTag))
    {
        Collection<string> typeNames = new Collection<string>();
        // Read RefId attribute if available so later TNRef elements can reuse this list.
        string refId = _reader.GetAttribute(SerializationStrings.ReferenceIdAttribute);
        s_trace.WriteLine("Processing TypeNamesTag with refId {0}", refId);
        if (ReadStartElementAndHandleEmpty(SerializationStrings.TypeNamesTag))
        {
            while (_reader.NodeType == XmlNodeType.Element)
            {
                if (IsNextElement(SerializationStrings.TypeNamesItemTag))
                {
                    string item = ReadDecodedElementString(SerializationStrings.TypeNamesItemTag);
                    if (!string.IsNullOrEmpty(item))
                    {
                        // Mark the type name as coming from deserialization.
                        Deserializer.AddDeserializationPrefix(ref item);
                        typeNames.Add(item);
                    }
                }
                else
                {
                    throw NewXmlException(Serialization.InvalidElementTag, null, _reader.LocalName);
                }
            }
            ReadEndElement();
        }
        dso.InternalTypeNames = new ConsolidatedString(typeNames);
        if (refId != null)
        {
            _typeRefIdHandler.SetRefId(dso.InternalTypeNames, refId, this.DuplicateRefIdsAllowed);
        }
    }
    else if (IsNextElement(SerializationStrings.TypeNamesReferenceTag))
    {
        string refId = _reader.GetAttribute(SerializationStrings.ReferenceIdAttribute);
        s_trace.WriteLine("Processing TypeNamesReferenceTag with refId {0}", refId);
        if (refId == null)
        {
            throw NewXmlException(Serialization.AttributeExpected, null, SerializationStrings.ReferenceIdAttribute);
        }
        ConsolidatedString typeNames = _typeRefIdHandler.GetReferencedObject(refId);
        if (typeNames == null)
        {
            throw NewXmlException(Serialization.InvalidTypeHierarchyReferenceId, null, refId);
        }
        // At this point we know that we will clone the ConsolidatedString object, so we might end up
        // allocating much more memory than the length of the xml string.
        // We have to account for that to limit that to the remoting quota and protect against OOM.
        _context.LogExtraMemoryUsage(
            typeNames.Key.Length * sizeof(char) // Key is shared among the cloned and original object
                                                // but the list of strings isn't.  The expression to the left
                                                // is roughly the size of memory the list of strings occupies
            - 29 // size of <Obj><TNRef RefId="0"/></Obj> in UTF8 encoding
            );
        dso.InternalTypeNames = new ConsolidatedString(typeNames);
        // Skip the node
        Skip();
    }
    else
    {
        Dbg.Assert(false, "caller should validate that we do no reach here");
    }
}
/// <summary>
/// Reads the adapted-property bag ("Props" element) into the PSObject, marking
/// the object as deserialized and seeding the member collections PSObject
/// needs in that state.
/// </summary>
/// <param name="dso">target PSObject; must not be null</param>
private void ReadProperties(PSObject dso)
{
    Dbg.Assert(dso != null, "caller should validate the parameter");
    Dbg.Assert(_reader.NodeType == XmlNodeType.Element, "NodeType should be Element");
    // Since we are adding base-object properties as a property bag,
    // mark the object as deserialized.
    dso.isDeserialized = true;
    dso.adaptedMembers = new PSMemberInfoInternalCollection<PSPropertyInfo>();
    // Add the GetType method to the instance members, so that it works on deserialized psobjects
    dso.InstanceMembers.Add(PSObject.dotNetInstanceAdapter.GetDotNetMethod<PSMemberInfo>(dso, "GetType"));
    PSGetMemberBinder.SetHasInstanceMember("GetType");
    // Set CLR members to an empty collection - a deserialized object has no live CLR view.
    dso.clrMembers = new PSMemberInfoInternalCollection<PSPropertyInfo>();
    if (ReadStartElementAndHandleEmpty(SerializationStrings.AdapterProperties))
    {
        // Read one or more property elements: name from the N attribute, then the value.
        while (_reader.NodeType == XmlNodeType.Element)
        {
            string property = ReadNameAttribute();
            object value = ReadOneObject();
            PSProperty prop = new PSProperty(property, value);
            dso.adaptedMembers.Add(prop);
        }
        ReadEndElement();
    }
}
#region memberset
/// <summary>
/// Reads a member set: nested member sets recurse, everything else is read as
/// a note property.  Each member read is also registered with the member
/// binder so dynamic lookup sees instance members.
/// </summary>
/// <param name="collection">
/// collection to which members are added
/// </param>
private void ReadMemberSet(PSMemberInfoCollection<PSMemberInfo> collection)
{
    Dbg.Assert(collection != null, "caller should validate the value");
    if (ReadStartElementAndHandleEmpty(SerializationStrings.MemberSet))
    {
        while (_reader.NodeType == XmlNodeType.Element)
        {
            if (IsNextElement(SerializationStrings.MemberSet))
            {
                // Nested member set - recurse.
                string name = ReadNameAttribute();
                PSMemberSet set = new PSMemberSet(name);
                collection.Add(set);
                ReadMemberSet(set.Members);
                PSGetMemberBinder.SetHasInstanceMember(name);
            }
            else
            {
                PSNoteProperty note = ReadNoteProperty();
                collection.Add(note);
                PSGetMemberBinder.SetHasInstanceMember(note.Name);
            }
        }
        ReadEndElement();
    }
}
/// <summary>
/// Reads a single note property: the name comes from the N attribute, the
/// value from the following element.
/// </summary>
/// <returns>deserialized PSNoteProperty</returns>
private PSNoteProperty ReadNoteProperty()
{
    string propertyName = ReadNameAttribute();
    return new PSNoteProperty(propertyName, ReadOneObject());
}
#endregion memberset
#region known container
/// <summary>
/// Determines whether the current element is one of the known container tags
/// (dictionary, queue, stack, list, generic collection).
/// </summary>
/// <param name="ct">receives the detected container type, or ContainerType.None</param>
/// <returns>true when the current element is a known container tag</returns>
private bool IsKnownContainerTag(out ContainerType ct)
{
    Dbg.Assert(_reader.NodeType == XmlNodeType.Element, "Expected node type is element");

    ct =
        IsNextElement(SerializationStrings.DictionaryTag) ? ContainerType.Dictionary :
        IsNextElement(SerializationStrings.QueueTag) ? ContainerType.Queue :
        IsNextElement(SerializationStrings.StackTag) ? ContainerType.Stack :
        IsNextElement(SerializationStrings.ListTag) ? ContainerType.List :
        IsNextElement(SerializationStrings.CollectionTag) ? ContainerType.Enumerable :
        ContainerType.None;

    return ct != ContainerType.None;
}
/// <summary>
/// Dispatches deserialization of a known container to the matching reader.
/// </summary>
/// <param name="ct">container type detected by IsKnownContainerTag</param>
/// <returns>the deserialized container; null only on an unexpected enum value (debug assert)</returns>
private object ReadKnownContainer(ContainerType ct)
{
    if (ct == ContainerType.Dictionary)
    {
        return ReadDictionary(ct);
    }

    if (ct == ContainerType.Enumerable ||
        ct == ContainerType.List ||
        ct == ContainerType.Queue ||
        ct == ContainerType.Stack)
    {
        return ReadListContainer(ct);
    }

    Dbg.Assert(false, "Unrecognized ContainerType enum");
    return null;
}
/// <summary>
/// Reads a list-shaped container (enumerable, list, queue, or stack) into an
/// ArrayList, then converts to the requested container type.
/// </summary>
/// <param name="ct">one of Enumerable, List, Queue, Stack</param>
/// <returns>Stack, Queue, or ArrayList depending on the container type</returns>
private object ReadListContainer(ContainerType ct)
{
    Dbg.Assert(ct == ContainerType.Enumerable ||
               ct == ContainerType.List ||
               ct == ContainerType.Queue ||
               ct == ContainerType.Stack, "ct should be queue, stack, enumerable or list");

    ArrayList items = new ArrayList();
    if (ReadStartElementAndHandleEmpty(_reader.LocalName))
    {
        while (_reader.NodeType == XmlNodeType.Element)
        {
            items.Add(ReadOneObject());
        }
        ReadEndElement();
    }

    switch (ct)
    {
        case ContainerType.Stack:
            // Elements were serialized in pop order; reverse so pushing them
            // back rebuilds the original stack order.
            items.Reverse();
            return new Stack(items);
        case ContainerType.Queue:
            return new Queue(items);
        default:
            return items;
    }
}
/// <summary>
/// Deserializes a dictionary element into a <see cref="Hashtable"/>.
/// Each child is a dictionary-entry element containing a key object
/// followed by a value object, identified by well-known name attributes.
/// </summary>
/// <returns>The deserialized Hashtable.</returns>
private object ReadDictionary(ContainerType ct)
{
    Dbg.Assert(ct == ContainerType.Dictionary, "Unrecognized ContainerType enum");

    // We assume the hash table is a PowerShell hash table and hence uses
    // a case insensitive string comparer. If we discover a key collision,
    // we'll revert back to the default comparer.
    Hashtable table = new Hashtable(StringComparer.CurrentCultureIgnoreCase);
    int keyClashFoundIteration = 0;
    if (ReadStartElementAndHandleEmpty(SerializationStrings.DictionaryTag))
    {
        while (_reader.NodeType == XmlNodeType.Element)
        {
            ReadStartElement(SerializationStrings.DictionaryEntryTag);

            // Read Key: must be an element whose name attribute is the
            // well-known dictionary-key name, and the key itself must be non-null.
            if (_reader.NodeType != XmlNodeType.Element)
            {
                throw NewXmlException(Serialization.DictionaryKeyNotSpecified, null);
            }

            string name = ReadNameAttribute();
            if (string.Compare(name, SerializationStrings.DictionaryKey, StringComparison.OrdinalIgnoreCase) != 0)
            {
                throw NewXmlException(Serialization.InvalidDictionaryKeyName, null);
            }

            object key = ReadOneObject();
            if (key == null)
            {
                throw NewXmlException(Serialization.NullAsDictionaryKey, null);
            }

            // Read Value: must be an element whose name attribute is the
            // well-known dictionary-value name.
            if (_reader.NodeType != XmlNodeType.Element)
            {
                throw NewXmlException(Serialization.DictionaryValueNotSpecified, null);
            }

            name = ReadNameAttribute();
            if (string.Compare(name, SerializationStrings.DictionaryValue, StringComparison.OrdinalIgnoreCase) != 0)
            {
                throw NewXmlException(Serialization.InvalidDictionaryValueName, null);
            }

            object value = ReadOneObject();

            // On the first collision, copy the hash table to one that uses the default comparer.
            if (table.ContainsKey(key) && (keyClashFoundIteration == 0))
            {
                keyClashFoundIteration++;
                Hashtable newHashTable = new Hashtable();
                foreach (DictionaryEntry entry in table)
                {
                    newHashTable.Add(entry.Key, entry.Value);
                }
                table = newHashTable;
            }

            // win8: 389060. If there are still collisions even with case-sensitive default comparer,
            // use an IEqualityComparer that does object ref equality.
            if (table.ContainsKey(key) && (keyClashFoundIteration == 1))
            {
                keyClashFoundIteration++;
                IEqualityComparer equalityComparer = new ReferenceEqualityComparer();
                Hashtable newHashTable = new Hashtable(equalityComparer);
                foreach (DictionaryEntry entry in table)
                {
                    newHashTable.Add(entry.Key, entry.Value);
                }
                table = newHashTable;
            }

            try
            {
                // Add entry to hashtable; keys colliding even under reference
                // equality (same object twice) surface here as ArgumentException.
                table.Add(key, value);
            }
            catch (ArgumentException e)
            {
                throw this.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(Hashtable));
            }

            ReadEndElement();
        }
        ReadEndElement();
    }
    return table;
}
#endregion known containers
#endregion deserialization
#region Getting XmlReaderSettings
// Cached reader settings used whenever CliXml payloads are deserialized.
internal static XmlReaderSettings XmlReaderSettingsForCliXml { get; } = GetXmlReaderSettingsForCliXml();

/// <summary>
/// Builds the <see cref="XmlReaderSettings"/> used for CliXml deserialization.
/// </summary>
private static XmlReaderSettings GetXmlReaderSettingsForCliXml()
{
    XmlReaderSettings xrs = new XmlReaderSettings();

    // NOTE(review): character checking is disabled — presumably because the
    // serializer escapes otherwise-illegal XML characters; confirm against
    // the matching serializer settings.
    xrs.CheckCharacters = false;
    xrs.CloseInput = false;

    //The XML data needs to be in conformance to the rules for a well-formed XML 1.0 document.
    xrs.ConformanceLevel = ConformanceLevel.Document;
    xrs.IgnoreComments = true;
    xrs.IgnoreProcessingInstructions = true;
    xrs.IgnoreWhitespace = false;

    // Cap entity expansion to limit entity-expansion (billion laughs) attacks.
    xrs.MaxCharactersFromEntities = 1024;

    //xrs.DtdProcessing = DtdProcessing.Prohibit; //because system.management.automation needs to build as 2.0
    //xrs.ProhibitDtd = true;
#if !CORECLR
    // XmlReaderSettings.Schemas/ValidationFlags/ValidationType/XmlResolver Not In CoreCLR
    xrs.Schemas = null;
    xrs.ValidationFlags = System.Xml.Schema.XmlSchemaValidationFlags.None;
    xrs.ValidationType = ValidationType.None;
    xrs.XmlResolver = null;
#endif
    return xrs;
}
// Cached reader settings for loading XML documents from untrusted sources
// (see LoadUnsafeXmlDocument).
internal static XmlReaderSettings XmlReaderSettingsForUntrustedXmlDocument { get; } = GetXmlReaderSettingsForUntrustedXmlDocument();

/// <summary>
/// Builds hardened <see cref="XmlReaderSettings"/> for untrusted XML:
/// entity expansion and total document size are capped.
/// </summary>
private static XmlReaderSettings GetXmlReaderSettingsForUntrustedXmlDocument()
{
    XmlReaderSettings settings = new XmlReaderSettings();
    settings.CheckCharacters = false;
    settings.ConformanceLevel = ConformanceLevel.Auto;
    settings.IgnoreComments = true;
    settings.IgnoreProcessingInstructions = true;
    settings.IgnoreWhitespace = true;

    // Limits guard against entity-expansion and oversized-document attacks.
    settings.MaxCharactersFromEntities = 1024;
    settings.MaxCharactersInDocument = 512 * 1024 * 1024; // 512M characters = 1GB
#if CORECLR // DtdProcessing.Parse Not In CoreCLR
    settings.DtdProcessing = DtdProcessing.Ignore;
#else // XmlReaderSettings.ValidationFlags/ValidationType/XmlResolver Not In CoreCLR
    settings.DtdProcessing = DtdProcessing.Parse; // Allowing DTD parsing with limits of MaxCharactersFromEntities/MaxCharactersInDocument
    settings.ValidationFlags = System.Xml.Schema.XmlSchemaValidationFlags.None;
    settings.ValidationType = ValidationType.None;
    settings.XmlResolver = null;
#endif
    return settings;
}
#endregion
#region known type deserialization
/// <summary>Deserializes a boolean element.</summary>
/// <exception cref="XmlException">The element content is not a valid boolean.</exception>
internal static object DeserializeBoolean(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToBoolean(deserializer._reader.ReadElementContentAsString());
    }
    catch (FormatException e)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(bool).FullName);
    }
}

/// <summary>Deserializes an unsigned byte element.</summary>
internal static object DeserializeByte(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToByte(deserializer._reader.ReadElementContentAsString());
    }
    // Exception filter keeps the original set of recognized failures
    // (format/overflow) while removing the duplicated catch bodies.
    catch (Exception e) when (e is FormatException || e is OverflowException)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(byte).FullName);
    }
}

/// <summary>Deserializes a character element (wire format is its UInt16 code point).</summary>
internal static object DeserializeChar(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return (Char)XmlConvert.ToUInt16(deserializer._reader.ReadElementContentAsString());
    }
    catch (Exception e) when (e is FormatException || e is OverflowException)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(char).FullName);
    }
}

/// <summary>Deserializes a DateTime element, round-tripping the original Kind.</summary>
internal static object DeserializeDateTime(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToDateTime(deserializer._reader.ReadElementContentAsString(), XmlDateTimeSerializationMode.RoundtripKind);
    }
    catch (FormatException e)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(DateTime).FullName);
    }
}

/// <summary>Deserializes a decimal element.</summary>
internal static object DeserializeDecimal(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToDecimal(deserializer._reader.ReadElementContentAsString());
    }
    catch (Exception e) when (e is FormatException || e is OverflowException)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(decimal).FullName);
    }
}

/// <summary>Deserializes a double element.</summary>
internal static object DeserializeDouble(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToDouble(deserializer._reader.ReadElementContentAsString());
    }
    catch (Exception e) when (e is FormatException || e is OverflowException)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(double).FullName);
    }
}

/// <summary>Deserializes a Guid element.</summary>
internal static object DeserializeGuid(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToGuid(deserializer._reader.ReadElementContentAsString());
    }
    // MSDN for XmlConvert.ToGuid doesn't list any exceptions, but it calls
    // new Guid(string), which is documented to throw Format/OverflowException.
    catch (Exception e) when (e is FormatException || e is OverflowException)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(Guid).FullName);
    }
}

/// <summary>Deserializes a Version element.</summary>
internal static object DeserializeVersion(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return new Version(deserializer._reader.ReadElementContentAsString());
    }
    catch (Exception e) when (e is ArgumentException || e is FormatException || e is OverflowException)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(Version).FullName);
    }
}

/// <summary>Deserializes a SemanticVersion element.</summary>
internal static object DeserializeSemanticVersion(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return new SemanticVersion(deserializer._reader.ReadElementContentAsString());
    }
    catch (Exception e) when (e is ArgumentException || e is FormatException || e is OverflowException)
    {
        // FIX: previously reported typeof(Version) — a copy/paste slip from
        // DeserializeVersion; the failing type is SemanticVersion.
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(SemanticVersion).FullName);
    }
}

/// <summary>Deserializes a signed 16-bit integer element.</summary>
internal static object DeserializeInt16(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToInt16(deserializer._reader.ReadElementContentAsString());
    }
    catch (Exception e) when (e is FormatException || e is OverflowException)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(Int16).FullName);
    }
}

/// <summary>Deserializes a signed 32-bit integer element.</summary>
internal static object DeserializeInt32(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToInt32(deserializer._reader.ReadElementContentAsString());
    }
    catch (Exception e) when (e is FormatException || e is OverflowException)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(Int32).FullName);
    }
}

/// <summary>Deserializes a signed 64-bit integer element.</summary>
internal static object DeserializeInt64(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToInt64(deserializer._reader.ReadElementContentAsString());
    }
    catch (Exception e) when (e is FormatException || e is OverflowException)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(Int64).FullName);
    }
}

/// <summary>Deserializes a signed byte element.</summary>
internal static object DeserializeSByte(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToSByte(deserializer._reader.ReadElementContentAsString());
    }
    catch (Exception e) when (e is FormatException || e is OverflowException)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(sbyte).FullName);
    }
}

/// <summary>Deserializes a single-precision float element.</summary>
internal static object DeserializeSingle(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToSingle(deserializer._reader.ReadElementContentAsString());
    }
    catch (Exception e) when (e is FormatException || e is OverflowException)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(float).FullName);
    }
}
/// <summary>
/// Deserializes a script block element. Unless the DeserializeScriptBlocks
/// option is set, the script block is rehydrated as a plain string.
/// </summary>
internal static object DeserializeScriptBlock(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");

    string scriptBlockBody = deserializer.ReadDecodedElementString(SerializationStrings.ScriptBlockTag);

    bool rehydrateAsScriptBlock =
        (deserializer._context.options & DeserializationOptions.DeserializeScriptBlocks) == DeserializationOptions.DeserializeScriptBlocks;

    return rehydrateAsScriptBlock
        ? (object)ScriptBlock.Create(scriptBlockBody)
        : scriptBlockBody;
}

/// <summary>Deserializes a string element, undoing the serializer's encoding.</summary>
internal static object DeserializeString(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    return deserializer.ReadDecodedElementString(SerializationStrings.StringTag);
}
/// <summary>Deserializes a TimeSpan element (ISO 8601 duration format).</summary>
internal static object DeserializeTimeSpan(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToTimeSpan(deserializer._reader.ReadElementContentAsString());
    }
    catch (FormatException e)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(TimeSpan).FullName);
    }
}

/// <summary>Deserializes an unsigned 16-bit integer element.</summary>
internal static object DeserializeUInt16(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToUInt16(deserializer._reader.ReadElementContentAsString());
    }
    catch (Exception e) when (e is FormatException || e is OverflowException)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(UInt16).FullName);
    }
}

/// <summary>Deserializes an unsigned 32-bit integer element.</summary>
internal static object DeserializeUInt32(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToUInt32(deserializer._reader.ReadElementContentAsString());
    }
    catch (Exception e) when (e is FormatException || e is OverflowException)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(UInt32).FullName);
    }
}

/// <summary>Deserializes an unsigned 64-bit integer element.</summary>
internal static object DeserializeUInt64(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return XmlConvert.ToUInt64(deserializer._reader.ReadElementContentAsString());
    }
    catch (Exception e) when (e is FormatException || e is OverflowException)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(UInt64).FullName);
    }
}

/// <summary>Deserializes a URI element; relative URIs are accepted.</summary>
internal static object DeserializeUri(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        string uriString = deserializer.ReadDecodedElementString(SerializationStrings.AnyUriTag);
        return new Uri(uriString, UriKind.RelativeOrAbsolute);
    }
    catch (UriFormatException e)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(Uri).FullName);
    }
}

/// <summary>Deserializes a byte array element (base64-encoded content).</summary>
internal static object DeserializeByteArray(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    try
    {
        return Convert.FromBase64String(deserializer._reader.ReadElementContentAsString());
    }
    catch (FormatException e)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(byte[]).FullName);
    }
}
/// <summary>Loads an XML document from a file using hardened reader settings.</summary>
/// <exception cref="System.Xml.XmlException"></exception>
internal static XmlDocument LoadUnsafeXmlDocument(FileInfo xmlPath, bool preserveNonElements, int? maxCharactersInDocument)
{
    // same FileStream options as Reflector shows for XmlDocument.Load(path) / XmlDownloadManager.GetStream:
    using (Stream fileStream = new FileStream(xmlPath.FullName, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        return LoadUnsafeXmlDocument(fileStream, preserveNonElements, maxCharactersInDocument);
    }
}

/// <summary>Loads an XML document from a string using hardened reader settings.</summary>
/// <exception cref="System.Xml.XmlException"></exception>
internal static XmlDocument LoadUnsafeXmlDocument(string xmlContents, bool preserveNonElements, int? maxCharactersInDocument)
{
    using (TextReader contentReader = new StringReader(xmlContents))
    {
        return LoadUnsafeXmlDocument(contentReader, preserveNonElements, maxCharactersInDocument);
    }
}

/// <summary>Loads an XML document from a stream using hardened reader settings.</summary>
/// <exception cref="System.Xml.XmlException"></exception>
internal static XmlDocument LoadUnsafeXmlDocument(Stream stream, bool preserveNonElements, int? maxCharactersInDocument)
{
    using (TextReader streamReader = new StreamReader(stream))
    {
        return LoadUnsafeXmlDocument(streamReader, preserveNonElements, maxCharactersInDocument);
    }
}
/// <summary>
/// Loads an XML document from a TextReader using hardened reader settings
/// for untrusted input; optionally preserves whitespace, comments, and
/// processing instructions, and optionally caps the document size.
/// </summary>
/// <exception cref="System.Xml.XmlException"></exception>
internal static XmlDocument LoadUnsafeXmlDocument(TextReader textReader, bool preserveNonElements, int? maxCharactersInDocument)
{
    XmlReaderSettings settings;
    if (maxCharactersInDocument.HasValue || preserveNonElements)
    {
        // Clone so the shared cached settings are never mutated.
        settings = InternalDeserializer.XmlReaderSettingsForUntrustedXmlDocument.Clone();
        if (maxCharactersInDocument.HasValue)
        {
            settings.MaxCharactersInDocument = maxCharactersInDocument.Value;
        }
        if (preserveNonElements)
        {
            settings.IgnoreWhitespace = false;
            settings.IgnoreProcessingInstructions = false;
            settings.IgnoreComments = false;
        }
    }
    else
    {
        settings = InternalDeserializer.XmlReaderSettingsForUntrustedXmlDocument;
    }
    try
    {
        XmlReader xmlReader = XmlReader.Create(textReader, settings);
        XmlDocument xmlDocument = new XmlDocument();
        xmlDocument.PreserveWhitespace = preserveNonElements;
        xmlDocument.Load(xmlReader);
        return xmlDocument;
    }
    catch (InvalidOperationException invalidOperationException)
    {
        // Normalize reader-state failures so callers only need to handle XmlException.
        throw new XmlException(invalidOperationException.Message, invalidOperationException);
    }
}
/// <summary>
/// Deserializes an XmlDocument element. The document text is parsed with
/// hardened settings, capped by the context's memory quota when present.
/// </summary>
internal static object DeserializeXmlDocument(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    string docAsString = deserializer.ReadDecodedElementString(SerializationStrings.XmlDocumentTag);
    try
    {
        // Translate the byte quota into a character quota for the XML reader.
        int? maxCharactersInDocument = null;
        if (deserializer._context.MaximumAllowedMemory.HasValue)
        {
            maxCharactersInDocument = deserializer._context.MaximumAllowedMemory.Value / sizeof(char);
        }

        XmlDocument doc = InternalDeserializer.LoadUnsafeXmlDocument(
            docAsString,
            true, /* preserve whitespace, comments, etc. */
            maxCharactersInDocument);

        // Account for any size difference between the raw text and the parsed
        // document against the memory quota.
        deserializer._context.LogExtraMemoryUsage((docAsString.Length - doc.OuterXml.Length) * sizeof(char));
        return doc;
    }
    catch (XmlException e)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(XmlDocument).FullName);
    }
}
/// <summary>
/// Deserializes a ProgressRecord element: reads its child elements in the
/// fixed serialization order, then reconstructs the record.
/// </summary>
/// <param name="deserializer">Deserializer positioned at the progress-record element.</param>
/// <returns>The reconstructed <see cref="ProgressRecord"/>.</returns>
internal static object DeserializeProgressRecord(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");

    //
    // read deserialized elements of a progress record
    //
    deserializer.ReadStartElement(SerializationStrings.ProgressRecord);

    string activity = null, currentOperation = null, prt = null, statusDescription = null;
    int activityId = 0, parentActivityId = 0, percentComplete = 0, secondsRemaining = 0;

    try
    {
        activity = deserializer.ReadDecodedElementString(SerializationStrings.ProgressRecordActivity);
        activityId = int.Parse(deserializer.ReadDecodedElementString(SerializationStrings.ProgressRecordActivityId), CultureInfo.InvariantCulture);

        // CurrentOperation is serialized as a full object; flatten any
        // non-null value to its string form.
        object tmp = deserializer.ReadOneObject();
        currentOperation = (tmp == null) ? null : tmp.ToString();

        parentActivityId = int.Parse(deserializer.ReadDecodedElementString(SerializationStrings.ProgressRecordParentActivityId), CultureInfo.InvariantCulture);
        percentComplete = int.Parse(deserializer.ReadDecodedElementString(SerializationStrings.ProgressRecordPercentComplete), CultureInfo.InvariantCulture);
        prt = deserializer.ReadDecodedElementString(SerializationStrings.ProgressRecordType);
        secondsRemaining = int.Parse(deserializer.ReadDecodedElementString(SerializationStrings.ProgressRecordSecondsRemaining), CultureInfo.InvariantCulture);
        statusDescription = deserializer.ReadDecodedElementString(SerializationStrings.ProgressRecordStatusDescription);
    }
    catch (Exception e) when (e is FormatException || e is OverflowException)
    {
        // FIX: previously reported typeof(UInt64) — a copy/paste slip from the
        // UInt64 deserializer; the type that failed to parse is ProgressRecord.
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(ProgressRecord).FullName);
    }

    deserializer.ReadEndElement();

    //
    // Build the progress record
    //
    ProgressRecordType type;
    try
    {
        type = (ProgressRecordType)Enum.Parse(typeof(ProgressRecordType), prt, true);
    }
    catch (ArgumentException e)
    {
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(ProgressRecord).FullName);
    }

    try
    {
        ProgressRecord record = new ProgressRecord(activityId, activity, statusDescription);

        // ProgressRecord rejects empty CurrentOperation; only set when non-empty.
        if (!string.IsNullOrEmpty(currentOperation))
        {
            record.CurrentOperation = currentOperation;
        }

        record.ParentActivityId = parentActivityId;
        record.PercentComplete = percentComplete;
        record.RecordType = type;
        record.SecondsRemaining = secondsRemaining;
        return record;
    }
    catch (ArgumentException e)
    {
        // Any out-of-range value (e.g. negative ids or percent) surfaces here.
        throw deserializer.NewXmlException(Serialization.InvalidPrimitiveType, e, typeof(ProgressRecord).FullName);
    }
}
/// <summary>
/// Deserializes a SecureString element by delegating to the instance
/// helper, which decrypts using the session key when one is available.
/// </summary>
internal static object DeserializeSecureString(InternalDeserializer deserializer)
{
    Dbg.Assert(deserializer != null, "Caller should validate the parameter");
    return deserializer.ReadSecureString();
}
#endregion known type deserialization
#region misc
/// <summary>
/// Checks whether the current element's LocalName matches <paramref name="tag"/>,
/// also verifying the Monad namespace unless the NoNamespace option is set.
/// </summary>
/// <param name="tag">Tag name to compare against.</param>
/// <returns>True when the current element matches.</returns>
private bool IsNextElement(string tag)
{
    Dbg.Assert(!string.IsNullOrEmpty(tag), "Caller should validate the parameter");

    if (_reader.LocalName != tag)
    {
        return false;
    }

    bool namespaceIgnored = (_context.options & DeserializationOptions.NoNamespace) != 0;
    return namespaceIgnored || (_reader.NamespaceURI == SerializationStrings.MonadNamespace);
}
/// <summary>
/// Read start element in monad namespace
/// </summary>
/// <param name="element">element tag to read</param>
/// <returns>true if not an empty element else false</returns>
internal bool ReadStartElementAndHandleEmpty(string element)
{
    Dbg.Assert(!string.IsNullOrEmpty(element), "Caller should validate the parameter");

    //IsEmpty is set to true when element is of the form <tag/>
    // (must be captured BEFORE ReadStartElement advances the reader).
    bool isEmpty = _reader.IsEmptyElement;

    this.ReadStartElement(element);

    //This takes care of the case: <tag></tag> or <tag> </tag>. In
    //this case isEmpty is false, but the element has no content either,
    //so consume the end element and treat it as empty.
    if (isEmpty == false && _reader.NodeType == XmlNodeType.EndElement)
    {
        ReadEndElement();
        isEmpty = true;
    }

    return !isEmpty;
}

// Reads a start element (namespace-checked unless NoNamespace is set)
// and positions the reader on the next content node.
private void ReadStartElement(string element)
{
    Dbg.Assert(!string.IsNullOrEmpty(element), "Caller should validate the parameter");
    if (DeserializationOptions.NoNamespace == (_context.options & DeserializationOptions.NoNamespace))
    {
        _reader.ReadStartElement(element);
    }
    else
    {
        _reader.ReadStartElement(element, SerializationStrings.MonadNamespace);
    }
    _reader.MoveToContent();
}

// Reads an end element and positions the reader on the next content node.
private void ReadEndElement()
{
    _reader.ReadEndElement();
    _reader.MoveToContent();
}
/// <summary>
/// Reads the string content of the named element and undoes the
/// XmlConvert name-encoding applied at serialization time.
/// </summary>
/// <param name="element">Element tag whose content is read.</param>
/// <returns>The decoded element content.</returns>
private string ReadDecodedElementString(string element)
{
    Dbg.Assert(!string.IsNullOrEmpty(element), "Caller should validate the parameter");
    this.CheckIfStopping();

    bool noNamespace = DeserializationOptions.NoNamespace == (_context.options & DeserializationOptions.NoNamespace);
    string rawContent = noNamespace
        ? _reader.ReadElementContentAsString(element, string.Empty)
        : _reader.ReadElementContentAsString(element, SerializationStrings.MonadNamespace);

    _reader.MoveToContent();
    return DecodeString(rawContent);
}

/// <summary>
/// Skips an element and all its child elements.
/// Moves cursor to next content Node.
/// </summary>
private void Skip()
{
    _reader.Skip();
    _reader.MoveToContent();
}
/// <summary>
/// Reads a primary known type using the deserializer delegate registered
/// for it, then advances the reader to the next content node.
/// </summary>
/// <param name="pktInfo">Serialization info describing the known type.</param>
/// <returns>The deserialized value.</returns>
private object ReadPrimaryKnownType(TypeSerializationInfo pktInfo)
{
    Dbg.Assert(pktInfo != null, "Deserializer should be available");
    Dbg.Assert(pktInfo.Deserializer != null, "Deserializer should be available");

    object deserializedValue = pktInfo.Deserializer(this);
    _reader.MoveToContent();
    return deserializedValue;
}
/// <summary>
/// Reads an encrypted SecureString element and decrypts it — via the
/// session crypto helper when present, otherwise via DPAPI unprotect.
/// </summary>
private object ReadSecureString()
{
    string encryptedString = _reader.ReadElementContentAsString();

    try
    {
        object result = (_context.cryptoHelper != null)
            ? (object)_context.cryptoHelper.DecryptSecureString(encryptedString)
            : (object)Microsoft.PowerShell.SecureStringHelper.Unprotect(encryptedString);

        _reader.MoveToContent();
        return result;
    }
    catch (PSCryptoException)
    {
        throw NewXmlException(Serialization.DeserializeSecureStringFailed, null);
    }
}
/// <summary>
/// Builds an XmlException whose message is formatted from the given
/// resource string; line information is attached when the reader provides it.
/// </summary>
/// <param name="resourceString">Format string from the resource table.</param>
/// <param name="innerException">Optional causing exception.</param>
/// <param name="args">Arguments for the format string.</param>
private XmlException NewXmlException
(
    string resourceString,
    Exception innerException,
    params object[] args
)
{
    Dbg.Assert(!string.IsNullOrEmpty(resourceString), "Caller should validate the parameter");

    string message = StringUtil.Format(resourceString, args);

    // Attach line/position information when the underlying reader tracks it.
    IXmlLineInfo lineInfo = _reader as IXmlLineInfo;
    if (lineInfo != null && lineInfo.HasLineInfo())
    {
        return new XmlException(message, innerException, lineInfo.LineNumber, lineInfo.LinePosition);
    }

    return new XmlException(message, innerException);
}
/// <summary>
/// Reads and decodes the mandatory name attribute of the current element.
/// </summary>
/// <exception cref="XmlException">The attribute is missing.</exception>
private string ReadNameAttribute()
{
    string encodedName = _reader.GetAttribute(SerializationStrings.NameAttribute);
    if (encodedName == null)
    {
        throw NewXmlException(Serialization.AttributeExpected, null, SerializationStrings.NameAttribute);
    }

    return DecodeString(encodedName);
}

/// <summary>
/// Reverses the XmlConvert name-encoding applied during serialization.
/// </summary>
private static string DecodeString(string s)
{
    Dbg.Assert(s != null, "Caller should validate the parameter");
    return XmlConvert.DecodeName(s);
}
#endregion misc

[TraceSourceAttribute("InternalDeserializer", "InternalDeserializer class")]
private static readonly PSTraceSource s_trace = PSTraceSource.GetTracer("InternalDeserializer", "InternalDeserializer class");
}
/// <summary>
/// Assigns and tracks reference ids (RefIds) for objects during serialization.
/// </summary>
internal class ReferenceIdHandlerForSerializer<T> where T : class
{
    /// <summary>
    /// Seed is incremented by one after each reference generation
    /// </summary>
    private UInt64 _seed;

    // note:
    // any boxed UInt64 takes 16 bytes on the heap
    // one-character string (i.e. "7") takes 20 bytes on the heap
    private readonly IDictionary<T, UInt64> _object2refId;

    internal ReferenceIdHandlerForSerializer(IDictionary<T, UInt64> dictionary)
    {
        _object2refId = dictionary;
    }

    /// <summary>
    /// Get new reference id.
    /// </summary>
    /// <returns>New reference id</returns>
    private UInt64 GetNewReferenceId()
    {
        return _seed++;
    }

    /// <summary>
    /// Assigns a RefId to the given object
    /// </summary>
    /// <param name="t">object to assign a RefId to</param>
    /// <returns>RefId assigned to the object, or null when tracking is disabled</returns>
    internal string SetRefId(T t)
    {
        if (_object2refId == null)
        {
            return null;
        }

        Dbg.Assert(!_object2refId.ContainsKey(t), "SetRefId shouldn't be called when the object is already assigned a ref id");
        UInt64 refId = GetNewReferenceId();
        _object2refId.Add(t, refId);
        return refId.ToString(System.Globalization.CultureInfo.InvariantCulture);
    }

    /// <summary>
    /// Gets a RefId already assigned for the given object or <c>null</c> if there is no associated ref id
    /// </summary>
    internal string GetRefId(T t)
    {
        UInt64 refId;
        return ((_object2refId != null) && _object2refId.TryGetValue(t, out refId))
            ? refId.ToString(System.Globalization.CultureInfo.InvariantCulture)
            : null;
    }
}
/// <summary>
/// Maps RefId strings back to objects deserialized earlier in the stream.
/// </summary>
internal class ReferenceIdHandlerForDeserializer<T> where T : class
{
    private readonly Dictionary<string, T> _refId2object = new Dictionary<string, T>();

    /// <summary>
    /// Associates the given refId with an object; existing associations may
    /// only be overwritten when duplicate refIds are explicitly allowed.
    /// </summary>
    internal void SetRefId(T o, string refId, bool duplicateRefIdsAllowed)
    {
#if DEBUG
        if (!duplicateRefIdsAllowed)
        {
            Dbg.Assert(!_refId2object.ContainsKey(refId), "You can't change refId association");
        }
#endif
        _refId2object[refId] = o;
    }

    /// <summary>
    /// Looks up the object previously registered under the given refId.
    /// </summary>
    internal T GetReferencedObject(string refId)
    {
        Dbg.Assert(_refId2object.ContainsKey(refId), "Reference id wasn't seen earlier");
        T referencedObject;
        return _refId2object.TryGetValue(refId, out referencedObject) ? referencedObject : null;
    }
}
/// <summary>
/// A delegate for serializing known type
/// </summary>
/// <param name="serializer">Serializer performing the write.</param>
/// <param name="streamName">Name of the stream the object came from.</param>
/// <param name="property">Property name under which the value is written.</param>
/// <param name="source">The value to serialize.</param>
/// <param name="entry">Serialization info for the value's type.</param>
internal delegate void TypeSerializerDelegate(InternalSerializer serializer, string streamName, string property, object source, TypeSerializationInfo entry);
/// <summary>
/// A delegate for deserializing known type
/// </summary>
/// <param name="deserializer">Deserializer positioned at the element to read.</param>
internal delegate object TypeDeserializerDelegate(InternalDeserializer deserializer);
/// <summary>
/// This class contains serialization information about a type: the element
/// tags used on the wire and the delegates that read/write it.
/// </summary>
internal class TypeSerializationInfo
{
    /// <summary>
    /// Constructor
    /// </summary>
    /// <param name="type">Type for which this entry is created</param>
    /// <param name="itemTag">ItemTag for the type</param>
    /// <param name="propertyTag">PropertyTag for the type</param>
    /// <param name="serializer">TypeSerializerDelegate for serializing the type</param>
    /// <param name="deserializer">TypeDeserializerDelegate for deserializing the type</param>
    internal TypeSerializationInfo(Type type, string itemTag, string propertyTag, TypeSerializerDelegate serializer, TypeDeserializerDelegate deserializer)
    {
        Type = type;
        ItemTag = itemTag;
        PropertyTag = propertyTag;
        Serializer = serializer;
        Deserializer = deserializer;
    }

    /// <summary>
    /// Get the type for which this TypeSerializationInfo is created.
    /// </summary>
    internal Type Type { get; }

    /// <summary>
    /// Get the item tag for this type
    /// </summary>
    internal string ItemTag { get; }

    /// <summary>
    /// Get the Property tag for this type
    /// </summary>
    internal string PropertyTag { get; }

    /// <summary>
    /// Gets the delegate to serialize this type
    /// </summary>
    internal TypeSerializerDelegate Serializer { get; }

    /// <summary>
    /// Gets the delegate to deserialize this type
    /// </summary>
    internal TypeDeserializerDelegate Deserializer { get; }
}
/// <summary>
/// A class for identifying types which are treated as KnownType by Monad.
/// A KnownType is guranteed to be available on machine on which monad is
/// running.
/// </summary>
internal static class KnownTypes
{
/// <summary>
/// Static constructor: indexes the known-type table by both the type's
/// full name and its wire item tag.
/// </summary>
static KnownTypes()
{
    foreach (TypeSerializationInfo knownType in s_typeSerializationInfo)
    {
        s_knownTableKeyType.Add(knownType.Type.FullName, knownType);
        s_knownTableKeyItemTag.Add(knownType.ItemTag, knownType);
    }
}
/// <summary>
/// Gets the type serialization information about a type
/// </summary>
/// <param name="type">Type for which information is retrieved</param>
/// <returns>TypeSerializationInfo for the type, null if it doesn't exist</returns>
internal static TypeSerializationInfo GetTypeSerializationInfo(Type type)
{
    TypeSerializationInfo info;
    if (s_knownTableKeyType.TryGetValue(type.FullName, out info))
    {
        return info;
    }

    // Subclasses of XmlDocument are serialized the same way as XmlDocument.
    return typeof(XmlDocument).IsAssignableFrom(type) ? s_xdInfo : null;
}
/// <summary>
/// Get TypeSerializationInfo using ItemTag as key
/// </summary>
/// <param name="itemTag">ItemTag for which TypeSerializationInfo is to be fetched</param>
/// <returns>TypeSerializationInfo entry, null if no entry exist for the tag</returns>
internal static TypeSerializationInfo GetTypeSerializationInfoFromItemTag(string itemTag)
{
    TypeSerializationInfo info;
    return s_knownTableKeyItemTag.TryGetValue(itemTag, out info) ? info : null;
}
#region private_fields
// TypeSerializationInfo used for XmlDocument and, via GetTypeSerializationInfo,
// for any of its subclasses.
private static readonly TypeSerializationInfo s_xdInfo =
    new TypeSerializationInfo(typeof(XmlDocument),
        SerializationStrings.XmlDocumentTag,
        SerializationStrings.XmlDocumentTag,
        InternalSerializer.WriteXmlDocument,
        InternalDeserializer.DeserializeXmlDocument);
/// <summary>
/// Array of known types.
/// </summary>
private static readonly TypeSerializationInfo[] s_typeSerializationInfo = new TypeSerializationInfo[]
{
new TypeSerializationInfo(typeof(Boolean),
SerializationStrings.BooleanTag,
SerializationStrings.BooleanTag,
InternalSerializer.WriteBoolean,
InternalDeserializer.DeserializeBoolean),
new TypeSerializationInfo(typeof(Byte),
SerializationStrings.UnsignedByteTag,
SerializationStrings.UnsignedByteTag,
null,
InternalDeserializer.DeserializeByte),
new TypeSerializationInfo(typeof(Char),
SerializationStrings.CharTag,
SerializationStrings.CharTag,
InternalSerializer.WriteChar,
InternalDeserializer.DeserializeChar),
new TypeSerializationInfo(typeof(DateTime),
SerializationStrings.DateTimeTag,
SerializationStrings.DateTimeTag,
InternalSerializer.WriteDateTime,
InternalDeserializer.DeserializeDateTime),
new TypeSerializationInfo(typeof(Decimal),
SerializationStrings.DecimalTag,
SerializationStrings.DecimalTag,
null,
InternalDeserializer.DeserializeDecimal),
new TypeSerializationInfo(typeof(Double),
SerializationStrings.DoubleTag,
SerializationStrings.DoubleTag,
InternalSerializer.WriteDouble,
InternalDeserializer.DeserializeDouble),
new TypeSerializationInfo(typeof(Guid),
SerializationStrings.GuidTag,
SerializationStrings.GuidTag,
null,
InternalDeserializer.DeserializeGuid),
new TypeSerializationInfo(typeof(Int16),
SerializationStrings.ShortTag,
SerializationStrings.ShortTag,
null,
InternalDeserializer.DeserializeInt16),
new TypeSerializationInfo(typeof(Int32),
SerializationStrings.IntTag,
SerializationStrings.IntTag,
null,
InternalDeserializer.DeserializeInt32),
new TypeSerializationInfo(typeof(Int64),
SerializationStrings.LongTag,
SerializationStrings.LongTag,
null,
InternalDeserializer.DeserializeInt64),
new TypeSerializationInfo(typeof(SByte),
SerializationStrings.ByteTag,
SerializationStrings.ByteTag,
null,
InternalDeserializer.DeserializeSByte),
new TypeSerializationInfo(typeof(Single),
SerializationStrings.FloatTag,
SerializationStrings.FloatTag,
InternalSerializer.WriteSingle,
InternalDeserializer.DeserializeSingle),
new TypeSerializationInfo(typeof(ScriptBlock),
SerializationStrings.ScriptBlockTag,
SerializationStrings.ScriptBlockTag,
InternalSerializer.WriteScriptBlock,
InternalDeserializer.DeserializeScriptBlock),
new TypeSerializationInfo(typeof(String),
SerializationStrings.StringTag,
SerializationStrings.StringTag,
InternalSerializer.WriteEncodedString,
InternalDeserializer.DeserializeString),
new TypeSerializationInfo(typeof(TimeSpan),
SerializationStrings.DurationTag,
SerializationStrings.DurationTag,
InternalSerializer.WriteTimeSpan,
InternalDeserializer.DeserializeTimeSpan),
new TypeSerializationInfo(typeof(UInt16),
SerializationStrings.UnsignedShortTag,
SerializationStrings.UnsignedShortTag,
null,
InternalDeserializer.DeserializeUInt16),
new TypeSerializationInfo(typeof(UInt32),
SerializationStrings.UnsignedIntTag,
SerializationStrings.UnsignedIntTag,
null,
InternalDeserializer.DeserializeUInt32),
new TypeSerializationInfo(typeof(UInt64),
SerializationStrings.UnsignedLongTag,
SerializationStrings.UnsignedLongTag,
null,
InternalDeserializer.DeserializeUInt64),
new TypeSerializationInfo(typeof(Uri),
SerializationStrings.AnyUriTag,
SerializationStrings.AnyUriTag,
InternalSerializer.WriteUri,
InternalDeserializer.DeserializeUri),
new TypeSerializationInfo(typeof(byte[]),
SerializationStrings.Base64BinaryTag,
SerializationStrings.Base64BinaryTag,
InternalSerializer.WriteByteArray,
InternalDeserializer.DeserializeByteArray),
new TypeSerializationInfo(typeof(System.Version),
SerializationStrings.VersionTag,
SerializationStrings.VersionTag,
InternalSerializer.WriteVersion,
InternalDeserializer.DeserializeVersion),
new TypeSerializationInfo(typeof(SemanticVersion),
SerializationStrings.SemanticVersionTag,
SerializationStrings.SemanticVersionTag,
InternalSerializer.WriteSemanticVersion,
InternalDeserializer.DeserializeSemanticVersion),
s_xdInfo,
new TypeSerializationInfo(typeof(ProgressRecord),
SerializationStrings.ProgressRecord,
SerializationStrings.ProgressRecord,
InternalSerializer.WriteProgressRecord,
InternalDeserializer.DeserializeProgressRecord),
new TypeSerializationInfo(typeof(SecureString),
SerializationStrings.SecureStringTag,
SerializationStrings.SecureStringTag,
InternalSerializer.WriteSecureString,
InternalDeserializer.DeserializeSecureString),
};
/// <summary>
/// Hashtable of knowntypes.
/// Key is Type.FullName and value is Type object.
/// </summary>
private static readonly Dictionary<string, TypeSerializationInfo> s_knownTableKeyType = new Dictionary<string, TypeSerializationInfo>();
/// <summary>
/// Hashtable of knowntypes. Key is ItemTag
/// </summary>
private static readonly Dictionary<string, TypeSerializationInfo> s_knownTableKeyItemTag = new Dictionary<string, TypeSerializationInfo>();
#endregion private_fields
}
/// <summary>
/// This class contains helper routined for serialization/deserialization
/// </summary>
internal static class SerializationUtilities
{
/// <summary>
/// Extracts the value of a note property from a PSObject; returns null if the property does not exist
/// </summary>
internal static object GetPropertyValue(PSObject psObject, string propertyName)
{
PSNoteProperty property = (PSNoteProperty)psObject.Properties[propertyName];
if (property == null)
{
return null;
}
return property.Value;
}
/// <summary>
/// Returns the BaseObject of a note property encoded as a PSObject; returns null if the property does not exist
/// </summary>
internal static object GetPsObjectPropertyBaseObject(PSObject psObject, string propertyName)
{
PSObject propertyPsObject = (PSObject)GetPropertyValue(psObject, propertyName);
if (propertyPsObject == null)
{
return null;
}
return propertyPsObject.BaseObject;
}
/// <summary>
/// Checks if source is known container type and returns appropriate
/// information
/// </summary>
/// <param name="source"></param>
/// <param name="ct"></param>
/// <param name="dictionary"></param>
/// <param name="enumerable"></param>
internal static void GetKnownContainerTypeInfo(
object source,
out ContainerType ct,
out IDictionary dictionary,
out IEnumerable enumerable)
{
Dbg.Assert(source != null, "caller should validate the parameter");
ct = ContainerType.None;
dictionary = null;
enumerable = null;
dictionary = source as IDictionary;
if (dictionary != null)
{
ct = ContainerType.Dictionary;
}
else if (source is Stack)
{
ct = ContainerType.Stack;
enumerable = LanguagePrimitives.GetEnumerable(source);
Dbg.Assert(enumerable != null, "Stack is enumerable");
}
else if (source is Queue)
{
ct = ContainerType.Queue;
enumerable = LanguagePrimitives.GetEnumerable(source);
Dbg.Assert(enumerable != null, "Queue is enumerable");
}
else if (source is IList)
{
ct = ContainerType.List;
enumerable = LanguagePrimitives.GetEnumerable(source);
Dbg.Assert(enumerable != null, "IList is enumerable");
}
else
{
Type gt = source.GetType();
if (gt.GetTypeInfo().IsGenericType)
{
if (DerivesFromGenericType(gt, typeof(Stack<>)))
{
ct = ContainerType.Stack;
enumerable = LanguagePrimitives.GetEnumerable(source);
Dbg.Assert(enumerable != null, "Stack is enumerable");
}
else if (DerivesFromGenericType(gt, typeof(Queue<>)))
{
ct = ContainerType.Queue;
enumerable = LanguagePrimitives.GetEnumerable(source);
Dbg.Assert(enumerable != null, "Queue is enumerable");
}
else if (DerivesFromGenericType(gt, typeof(List<>)))
{
ct = ContainerType.List;
enumerable = LanguagePrimitives.GetEnumerable(source);
Dbg.Assert(enumerable != null, "Queue is enumerable");
}
}
}
// Check if LanguagePrimitive.GetEnumerable can do some magic to get IEnumerable instance
if (ct == ContainerType.None)
{
try
{
enumerable = LanguagePrimitives.GetEnumerable(source);
if (enumerable != null)
{
ct = ContainerType.Enumerable;
}
}
catch (Exception exception)
{
// Catch-all OK. This is a third-party call-out.
CommandProcessorBase.CheckForSevereException(exception);
PSEtwLog.LogAnalyticWarning(PSEventId.Serializer_EnumerationFailed, PSOpcode.Exception,
PSTask.Serialization, PSKeyword.Serializer, source.GetType().AssemblyQualifiedName,
exception.ToString());
}
}
//Check if type is IEnumerable
//(LanguagePrimitives.GetEnumerable above should be enough - the check below is to preserve
// backcompatibility in some corner-cases (see bugs in Windows7 - #372562 and #372563))
if (ct == ContainerType.None)
{
enumerable = source as IEnumerable;
if (enumerable != null)
{
//WinBlue: 206515 - There are no elements in the source. The source is of type XmlLinkedNode (which derives from XmlNode which implements IEnumerable).
// So, adding an additional check to see if this contains any elements
IEnumerator enumerator = enumerable.GetEnumerator();
if (enumerator != null && enumerator.MoveNext())
{
ct = ContainerType.Enumerable;
}
}
}
}
/// <summary>
/// Checks if derived is of type baseType or a type derived from baseType
/// </summary>
/// <param name="derived"></param>
/// <param name="baseType"></param>
/// <returns></returns>
private static bool DerivesFromGenericType(Type derived, Type baseType)
{
Dbg.Assert(derived != null, "caller should validate the parameter");
Dbg.Assert(baseType != null, "caller should validate the parameter");
while (derived != null)
{
if (derived.GetTypeInfo().IsGenericType)
derived = derived.GetGenericTypeDefinition();
if (derived == baseType)
{
return true;
}
derived = derived.GetTypeInfo().BaseType;
}
return false;
}
/// <summary>
/// Gets the "ToString" from PSObject.
/// </summary>
///
/// <param name="source">
/// PSObject to be converted to string
/// </param>
///
/// <returns>
/// "ToString" value
/// </returns>
internal static string GetToString(object source)
{
Dbg.Assert(source != null, "caller should have validated the information");
// fall back value
string result = null;
try
{
result = Convert.ToString(source, CultureInfo.InvariantCulture);
}
catch (Exception e)
{
// Catch-all OK. This is a third-party call-out.
CommandProcessorBase.CheckForSevereException(e);
PSEtwLog.LogAnalyticWarning(
PSEventId.Serializer_ToStringFailed, PSOpcode.Exception, PSTask.Serialization,
PSKeyword.Serializer | PSKeyword.UseAlwaysAnalytic,
source.GetType().AssemblyQualifiedName,
e.ToString());
}
return result;
}
internal static string GetToStringForPrimitiveObject(PSObject pso)
{
// if object is not wrapped in a PSObject, then nothing modifies the ToString value of the primitive object
if (pso == null)
{
return null;
}
// preserve ToString throughout deserialization/*re*serialization
if (pso.ToStringFromDeserialization != null)
{
return pso.ToStringFromDeserialization;
}
// preserve token text (i.e. double: 0E1517567410; see Windows 7 bug #694057 for more details)
string token = pso.TokenText;
if (token != null)
{
string originalToString = GetToString(pso.BaseObject);
if (originalToString == null || !string.Equals(token, originalToString, StringComparison.Ordinal))
{
return token;
}
}
// no need to write <ToString> element otherwise - the ToString method of a deserialized, live primitive object will return the right value
return null;
}
internal static PSMemberInfoInternalCollection<PSPropertyInfo> GetSpecificPropertiesToSerialize(PSObject source, Collection<CollectionEntry<PSPropertyInfo>> allPropertiesCollection, TypeTable typeTable)
{
if (source == null)
{
return null;
}
if (source.GetSerializationMethod(typeTable) == SerializationMethod.SpecificProperties)
{
PSEtwLog.LogAnalyticVerbose(
PSEventId.Serializer_ModeOverride, PSOpcode.SerializationSettings, PSTask.Serialization,
PSKeyword.Serializer | PSKeyword.UseAlwaysAnalytic,
source.InternalTypeNames.Key,
(UInt32)(SerializationMethod.SpecificProperties));
PSMemberInfoInternalCollection<PSPropertyInfo> specificProperties =
new PSMemberInfoInternalCollection<PSPropertyInfo>();
PSMemberInfoIntegratingCollection<PSPropertyInfo> allProperties =
new PSMemberInfoIntegratingCollection<PSPropertyInfo>(
source,
allPropertiesCollection);
Collection<string> namesOfPropertiesToSerialize = source.GetSpecificPropertiesToSerialize(typeTable);
foreach (string propertyName in namesOfPropertiesToSerialize)
{
PSPropertyInfo property = allProperties[propertyName];
if (property == null)
{
PSEtwLog.LogAnalyticWarning(
PSEventId.Serializer_SpecificPropertyMissing, PSOpcode.Exception, PSTask.Serialization,
PSKeyword.Serializer | PSKeyword.UseAlwaysAnalytic,
source.InternalTypeNames.Key,
propertyName);
}
else
{
specificProperties.Add(property);
}
}
return specificProperties;
}
return null;
}
internal static object GetPropertyValueInThreadSafeManner(PSPropertyInfo property, bool canUseDefaultRunspaceInThreadSafeManner, out bool success)
{
Dbg.Assert(property != null, "Caller should validate the parameter");
if (!property.IsGettable)
{
success = false;
return null;
}
PSAliasProperty alias = property as PSAliasProperty;
if (alias != null)
{
property = alias.ReferencedMember as PSPropertyInfo;
}
PSScriptProperty script = property as PSScriptProperty;
Dbg.Assert(script == null || script.GetterScript != null, "scriptProperty.IsGettable => (scriptProperty.GetterScript != null)");
if ((script != null) && (!canUseDefaultRunspaceInThreadSafeManner))
{
PSEtwLog.LogAnalyticWarning(
PSEventId.Serializer_ScriptPropertyWithoutRunspace, PSOpcode.Exception, PSTask.Serialization,
PSKeyword.Serializer | PSKeyword.UseAlwaysAnalytic,
property.Name,
property.instance == null ? string.Empty : PSObject.GetTypeNames(property.instance).Key,
script.GetterScript.ToString());
success = false;
return null;
}
try
{
object value = property.Value;
success = true;
return value;
}
catch (ExtendedTypeSystemException e)
{
PSEtwLog.LogAnalyticWarning(
PSEventId.Serializer_PropertyGetterFailed, PSOpcode.Exception, PSTask.Serialization,
PSKeyword.Serializer | PSKeyword.UseAlwaysAnalytic,
property.Name,
property.instance == null ? string.Empty : PSObject.GetTypeNames(property.instance).Key,
e.ToString(),
e.InnerException == null ? string.Empty : e.InnerException.ToString());
success = false;
return null;
}
}
}
/// <summary>
/// A dictionary from object to T where
/// 1) keys are objects,
/// 2) keys use reference equality,
/// 3) dictionary keeps only weak references to keys
/// </summary>
/// <typeparam name="T">type of dictionary values</typeparam>
internal class WeakReferenceDictionary<T> : IDictionary<object, T>
{
private class WeakReferenceEqualityComparer : IEqualityComparer<WeakReference>
{
public bool Equals(WeakReference x, WeakReference y)
{
object tx = x.Target;
if (tx == null)
{
return false; // collected object is not equal to anything (object.ReferenceEquals(null, null) == true)
}
object ty = y.Target;
if (ty == null)
{
return false; // collected object is not equal to anything (object.ReferenceEquals(null, null) == true)
}
return object.ReferenceEquals(tx, ty);
}
public int GetHashCode(WeakReference obj)
{
object t = obj.Target;
if (t == null)
{
// collected object doesn't have a hash code
// return an arbitrary hashcode here and fall back on Equal method for comparison
return RuntimeHelpers.GetHashCode(obj); // RuntimeHelpers.GetHashCode(null) returns 0 - this would cause many hashtable collisions for WeakReferences to dead objects
}
else
{
return RuntimeHelpers.GetHashCode(t);
}
}
}
private readonly IEqualityComparer<WeakReference> _weakEqualityComparer;
private Dictionary<WeakReference, T> _dictionary;
public WeakReferenceDictionary()
{
_weakEqualityComparer = new WeakReferenceEqualityComparer();
_dictionary = new Dictionary<WeakReference, T>(_weakEqualityComparer);
}
#if DEBUG
private const int initialCleanupTriggerSize = 2; // 2 will stress this code more
#else
private const int initialCleanupTriggerSize = 1000;
#endif
private int _cleanupTriggerSize = initialCleanupTriggerSize;
private void CleanUp()
{
if (this.Count > _cleanupTriggerSize)
{
Dictionary<WeakReference, T> alive = new Dictionary<WeakReference, T>(_weakEqualityComparer);
foreach (KeyValuePair<WeakReference, T> weakKeyValuePair in _dictionary)
{
object key = weakKeyValuePair.Key.Target;
if (key != null)
{
alive.Add(weakKeyValuePair.Key, weakKeyValuePair.Value);
}
}
_dictionary = alive;
_cleanupTriggerSize = initialCleanupTriggerSize + this.Count * 2;
}
}
#region IDictionary<object,T> Members
public void Add(object key, T value)
{
_dictionary.Add(new WeakReference(key), value);
this.CleanUp();
}
public bool ContainsKey(object key)
{
return _dictionary.ContainsKey(new WeakReference(key));
}
public ICollection<object> Keys
{
get
{
List<object> keys = new List<object>(_dictionary.Keys.Count);
foreach (WeakReference weakKey in _dictionary.Keys)
{
object key = weakKey.Target;
if (key != null)
{
keys.Add(key);
}
}
return keys;
}
}
public bool Remove(object key)
{
return _dictionary.Remove(new WeakReference(key));
}
public bool TryGetValue(object key, out T value)
{
WeakReference weakKey = new WeakReference(key);
return _dictionary.TryGetValue(weakKey, out value);
}
public ICollection<T> Values
{
get
{
return _dictionary.Values;
}
}
public T this[object key]
{
get
{
return _dictionary[new WeakReference(key)];
}
set
{
_dictionary[new WeakReference(key)] = value;
this.CleanUp();
}
}
#endregion
#region ICollection<KeyValuePair<object,T>> Members
private ICollection<KeyValuePair<WeakReference, T>> WeakCollection
{
get
{
return _dictionary;
}
}
private static KeyValuePair<WeakReference, T> WeakKeyValuePair(KeyValuePair<object, T> publicKeyValuePair)
{
return new KeyValuePair<WeakReference, T>(new WeakReference(publicKeyValuePair.Key), publicKeyValuePair.Value);
}
public void Add(KeyValuePair<object, T> item)
{
this.WeakCollection.Add(WeakKeyValuePair(item));
this.CleanUp();
}
public void Clear()
{
this.WeakCollection.Clear();
}
public bool Contains(KeyValuePair<object, T> item)
{
return this.WeakCollection.Contains(WeakKeyValuePair(item));
}
public void CopyTo(KeyValuePair<object, T>[] array, int arrayIndex)
{
List<KeyValuePair<object, T>> rawList = new List<KeyValuePair<object, T>>(this.WeakCollection.Count);
foreach (KeyValuePair<object, T> keyValuePair in this)
{
rawList.Add(keyValuePair);
}
rawList.CopyTo(array, arrayIndex);
}
public int Count
{
get
{
return this.WeakCollection.Count;
}
}
public bool IsReadOnly
{
get
{
return this.WeakCollection.IsReadOnly;
}
}
public bool Remove(KeyValuePair<object, T> item)
{
return this.WeakCollection.Remove(WeakKeyValuePair(item));
}
#endregion
#region IEnumerable<KeyValuePair<object,T>> Members
public IEnumerator<KeyValuePair<object, T>> GetEnumerator()
{
foreach (KeyValuePair<WeakReference, T> weakKeyValuePair in this.WeakCollection)
{
object key = weakKeyValuePair.Key.Target;
if (key != null)
{
yield return new KeyValuePair<object, T>(key, weakKeyValuePair.Value);
}
}
}
#endregion
#region IEnumerable Members
IEnumerator IEnumerable.GetEnumerator()
{
IEnumerable<KeyValuePair<object, T>> enumerable = this;
IEnumerator<KeyValuePair<object, T>> enumerator = enumerable.GetEnumerator();
return enumerator;
}
#endregion
}
/// <summary>
/// <see cref="PSPrimitiveDictionary"/> is a <see cref="Hashtable"/> that is limited to
/// 1) case-insensitive strings as keys and
/// 2) values that can be serialized and deserialized during PowerShell remoting handshake
/// (in major-version compatible versions of PowerShell remoting)
/// </summary>
[Serializable]
public sealed class PSPrimitiveDictionary : Hashtable
{
#region Constructors
        /// <summary>
        /// Initializes a new empty instance of the <see cref="PSPrimitiveDictionary"/> class
        /// </summary>
        public PSPrimitiveDictionary()
            : base(StringComparer.OrdinalIgnoreCase) // keys are case-insensitive strings
        {
        }
        /// <summary>
        /// Initializes a new instance of the <see cref="PSPrimitiveDictionary"/> class with contents
        /// copied from the <paramref name="other"/> hashtable.
        /// </summary>
        /// <param name="other">hashtable to copy into the new instance of <see cref="PSPrimitiveDictionary"/></param>
        /// <exception cref="ArgumentException">
        /// This constructor will throw if the <paramref name="other"/> hashtable contains keys that are not a strings
        /// or values that are not one of primitive types that will work during PowerShell remoting handshake.
        /// </exception>
        [SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors", Justification = "The class is sealed")]
        public PSPrimitiveDictionary(Hashtable other)
            : base(StringComparer.OrdinalIgnoreCase)
        {
            if (other == null)
            {
                throw new ArgumentNullException("other");
            }
            foreach (DictionaryEntry entry in other)
            {
                // Nested hashtables are rewrapped as PSPrimitiveDictionary, so the
                // whole tree is validated recursively; Add() performs the key/value
                // verification for each entry.
                Hashtable valueAsHashtable = PSObject.Base(entry.Value) as Hashtable;
                if (valueAsHashtable != null)
                {
                    this.Add(entry.Key, new PSPrimitiveDictionary(valueAsHashtable));
                }
                else
                {
                    this.Add(entry.Key, entry.Value);
                }
            }
        }
#if !CORECLR // No .NET Serialization In CoreCLR
        /// <summary>
        /// Support for .NET serialization
        /// </summary>
        private PSPrimitiveDictionary(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
            : base(info, context)
        {
        }
#endif
#endregion
#region Plumbing to make Hashtable reject all non-primitive types
private string VerifyKey(object key)
{
key = PSObject.Base(key);
string keyAsString = key as string;
if (keyAsString == null)
{
string message = StringUtil.Format(Serialization.PrimitiveHashtableInvalidKey,
key.GetType().FullName);
throw new ArgumentException(message);
}
else
{
return keyAsString;
}
}
private static readonly Type[] s_handshakeFriendlyTypes = new Type[] {
typeof(Boolean),
typeof(Byte),
typeof(Char),
typeof(DateTime),
typeof(Decimal),
typeof(Double),
typeof(Guid),
typeof(Int32),
typeof(Int64),
typeof(SByte),
typeof(Single),
// typeof(ScriptBlock) - don't want ScriptBlocks, because they are deserialized into strings
typeof(String),
typeof(TimeSpan),
typeof(UInt16),
typeof(UInt32),
typeof(UInt64),
typeof(Uri),
typeof(byte[]),
typeof(Version),
typeof(ProgressRecord),
typeof(XmlDocument),
typeof(PSPrimitiveDictionary)
};
private void VerifyValue(object value)
{
// null is a primitive type
if (value == null)
{
return;
}
value = PSObject.Base(value);
// this list is based on the list inside KnownTypes
// it is copied here to make sure that a list of "handshake friendly types"
// won't change even if we add new primitive types in v.next
Type valueType = value.GetType();
// if "value" is a "primitiveType" then we are good
foreach (Type goodType in s_handshakeFriendlyTypes)
{
if (valueType == goodType)
{
return;
}
}
// if "value" is an array of "primitiveType" items then we are good
// (note: we could have used IEnumerable<> or ICollection<> (covariance/contravariance concerns),
// but it is safer to limit the types to arrays.
// (one concern is that in v.next we might allow overriding SerializationMethod for
// types [i.e. check SerializationMethod *before* HandleKnownContainerTypes)
if ((valueType.IsArray) || valueType == typeof(ArrayList))
{
foreach (object o in (IEnumerable)value)
{
VerifyValue(o);
}
return;
}
string message = StringUtil.Format(Serialization.PrimitiveHashtableInvalidValue,
value.GetType().FullName);
throw new ArgumentException(message);
}
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        /// <exception cref="ArgumentException">
        /// This method will throw if the <paramref name="key"/> is not a string and the <paramref name="value"/>
        /// is not one of primitive types that will work during PowerShell remoting handshake.
        /// Use of strongly-typed overloads of this method is suggested if throwing an exception is not acceptable.
        /// </exception>
        public override void Add(object key, object value)
        {
            // VerifyKey unwraps a PSObject-wrapped key and enforces that it is a
            // string; VerifyValue enforces a handshake-friendly value.
            string keyAsString = this.VerifyKey(key);
            this.VerifyValue(value);
            base.Add(keyAsString, value);
        }
        /// <summary>
        /// Gets or sets the value associated with the specified key.
        /// </summary>
        /// <param name="key">The key whose value to get or set</param>
        /// <returns>The value associated with the specified key.</returns>
        /// <remarks>
        /// If the specified key is not found, attempting to get it returns <c>null</c>
        /// and attempting to set it creates a new element using the specified key.
        /// </remarks>
        /// <exception cref="ArgumentException">
        /// The setter will throw if the <paramref name="key"/> is not a string and the value
        /// is not one of primitive types that will work during PowerShell remoting handshake.
        /// Use of strongly-typed overloads of Add method is suggested if throwing an exception is not acceptable.
        /// </exception>
        public override object this[object key]
        {
            get
            {
                // Reads are not verified; only writes enforce the key/value contract.
                return base[key];
            }
            set
            {
                string keyAsString = this.VerifyKey(key);
                this.VerifyValue(value);
                base[keyAsString] = value;
            }
        }
        /// <summary>
        /// Gets or sets the value associated with the specified key.
        /// </summary>
        /// <param name="key">The key whose value to get or set</param>
        /// <returns>The value associated with the specified key.</returns>
        /// <remarks>
        /// If the specified key is not found, attempting to get it returns <c>null</c>
        /// and attempting to set it creates a new element using the specified key.
        /// </remarks>
        /// <exception cref="ArgumentException">
        /// The setter will throw if the value
        /// is not one of primitive types that will work during PowerShell remoting handshake.
        /// Use of strongly-typed overloads of Add method is suggested if throwing an exception is not acceptable.
        /// </exception>
        public object this[string key]
        {
            get
            {
                return base[key];
            }
            set
            {
                // Key is already a string, so only the value needs verification here.
                this.VerifyValue(value);
                base[key] = value;
            }
        }
#endregion
#region Helper methods
        /// <summary>
        /// Creates a new instance by doing a shallow copy of the current instance.
        /// Note: the copy constructor rewraps any hashtable values as new
        /// <see cref="PSPrimitiveDictionary"/> instances, so nested dictionaries are
        /// copied (and re-verified) recursively; all other values are shared by reference.
        /// </summary>
        /// <returns>a copy of this dictionary</returns>
        public override object Clone()
        {
            return new PSPrimitiveDictionary(this);
        }
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
public void Add(string key, Boolean value)
{
this.Add((object)key, (object)value);
}
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
public void Add(string key, Boolean[] value)
{
this.Add((object)key, (object)value);
}
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
public void Add(string key, Byte value)
{
this.Add((object)key, (object)value);
}
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
public void Add(string key, byte[] value)
{
this.Add((object)key, (object)value);
}
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
public void Add(string key, Char value)
{
this.Add((object)key, (object)value);
}
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
public void Add(string key, Char[] value)
{
this.Add((object)key, (object)value);
}
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
public void Add(string key, DateTime value)
{
this.Add((object)key, (object)value);
}
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
public void Add(string key, DateTime[] value)
{
this.Add((object)key, (object)value);
}
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
public void Add(string key, Decimal value)
{
this.Add((object)key, (object)value);
}
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
public void Add(string key, Decimal[] value)
{
this.Add((object)key, (object)value);
}
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
public void Add(string key, Double value)
{
this.Add((object)key, (object)value);
}
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
public void Add(string key, Double[] value)
{
this.Add((object)key, (object)value);
}
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
public void Add(string key, Guid value)
{
this.Add((object)key, (object)value);
}
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
public void Add(string key, Guid[] value)
{
this.Add((object)key, (object)value);
}
/// <summary>
/// Adds an element with the specified key and value into the Hashtable
/// </summary>
/// <param name="key">The key of the element to add</param>
/// <param name="value">The value of the element to add</param>
        // Typed convenience overloads of Add: each simply forwards to the
        // object-based Add(object, object). The overload set covers the
        // primitive types this dictionary is intended to hold.
        public void Add(string key, Int32 value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, Int32[] value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, Int64 value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, Int64[] value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, SByte value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, SByte[] value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, Single value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, Single[] value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, String value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, String[] value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, TimeSpan value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, TimeSpan[] value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, UInt16 value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, UInt16[] value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, UInt32 value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, UInt32[] value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, UInt64 value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, UInt64[] value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, Uri value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, Uri[] value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, Version value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, Version[] value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, PSPrimitiveDictionary value)
        {
            this.Add((object)key, (object)value);
        }
        /// <summary>
        /// Adds an element with the specified key and value into the Hashtable
        /// </summary>
        /// <param name="key">The key of the element to add</param>
        /// <param name="value">The value of the element to add</param>
        public void Add(string key, PSPrimitiveDictionary[] value)
        {
            this.Add((object)key, (object)value);
        }
#endregion
#region Internal Methods
/// <summary>
/// If originalHash contains PSVersionTable, then just returns the Cloned copy of
/// the original hash. Othewise, creates a clone copy and add PSVersionInfo.GetPSVersionTable
/// to the clone and returns.
/// </summary>
/// <param name="originalHash"></param>
/// <returns></returns>
internal static PSPrimitiveDictionary CloneAndAddPSVersionTable(PSPrimitiveDictionary originalHash)
{
if ((null != originalHash) &&
(originalHash.ContainsKey(PSVersionInfo.PSVersionTableName)))
{
return (PSPrimitiveDictionary)originalHash.Clone();
}
PSPrimitiveDictionary result = originalHash;
if (null != originalHash)
{
result = (PSPrimitiveDictionary)originalHash.Clone();
}
else
{
result = new PSPrimitiveDictionary();
}
PSPrimitiveDictionary versionTable = new PSPrimitiveDictionary(PSVersionInfo.GetPSVersionTableForDownLevel())
{
{"PSSemanticVersion", PSVersionInfo.PSVersion.ToString()}
};
result.Add(PSVersionInfo.PSVersionTableName, versionTable);
return result;
}
/// <summary>
/// Tries to get a value that might be present in a chain of nested PSPrimitiveDictionaries.
/// For example to get $sessionInfo.ApplicationPrivateData.ImplicitRemoting.Hash you could call
/// TryPathGet<string>($sessionInfo.ApplicationPrivateData, out myHash, "ImplicitRemoting", "Hash").
/// </summary>
/// <typeparam name="T">Expected type of the value</typeparam>
/// <param name="data">The root dictionary</param>
/// <param name="result"></param>
/// <param name="keys">A chain of keys leading from the root dictionary (<paramref name="data"/>) to the value</param>
/// <returns><c>true</c> if the value was found and was of the correct type; <c>false</c> otherwise</returns>
internal static bool TryPathGet<T>(IDictionary data, out T result, params string[] keys)
{
Dbg.Assert(keys != null, "Caller should verify that keys != null");
Dbg.Assert(keys.Length >= 1, "Caller should verify that keys.Length >= 1");
Dbg.Assert(keys[0] != null, "Caller should verify that keys[i] != null");
if (data == null || !data.Contains(keys[0]))
{
result = default(T);
return false;
}
if (keys.Length == 1)
{
return LanguagePrimitives.TryConvertTo<T>(data[keys[0]], out result);
}
else
{
IDictionary subData;
if (LanguagePrimitives.TryConvertTo<IDictionary>(data[keys[0]], out subData)
&& subData != null)
{
string[] subKeys = new string[keys.Length - 1];
Array.Copy(keys, 1, subKeys, 0, subKeys.Length);
return TryPathGet<T>(subData, out result, subKeys);
}
else
{
result = default(T);
return false;
}
}
}
#endregion
}
}
namespace Microsoft.PowerShell
{
using System.Management.Automation;
using System.Security.Principal;
/// <summary>
/// Rehydrating type converter used during deserialization.
/// It takes results of serializing some common types
/// and rehydrates them back from property bags into live objects.
/// </summary>
/// <!--
/// To add a new type for rehydration:
/// - Add a new T RehydrateT(PSObject pso) method below
/// - Add this method to converters dictionary in the static constructor below
/// - If implicit rehydration is required then
/// - Add appropriate types.ps1 xml entries for
/// - SerializationDepth=X
/// - For types depending only on ToString for rehydration set
/// - SerializationMethod=SpecificProperties
/// - PropertySerializationSet=<empty>
/// - TargetTypeForDeserialization=DeserializingTypeConverter
/// - Add a field of that type in unit tests / S.M.A.Test.SerializationTest+RehydratedType
/// (testsrc\admintest\monad\DRT\engine\UnitTests\SerializationTest.cs)
/// -->
public sealed class DeserializingTypeConverter : PSTypeConverter
{
#region Infrastructure
private static readonly Dictionary<Type, Func<PSObject, object>> s_converter;
static DeserializingTypeConverter()
{
s_converter = new Dictionary<Type, Func<PSObject, object>>();
s_converter.Add(typeof(PSPrimitiveDictionary), RehydratePrimitiveHashtable);
s_converter.Add(typeof(SwitchParameter), RehydrateSwitchParameter);
s_converter.Add(typeof(PSListModifier), RehydratePSListModifier);
s_converter.Add(typeof(PSCredential), RehydratePSCredential);
s_converter.Add(typeof(PSSenderInfo), RehydratePSSenderInfo);
s_converter.Add(typeof(CultureInfo), RehydrateCultureInfo);
s_converter.Add(typeof(ParameterSetMetadata), RehydrateParameterSetMetadata);
s_converter.Add(typeof(System.Security.Cryptography.X509Certificates.X509Certificate2), RehydrateX509Certificate2);
s_converter.Add(typeof(System.Security.Cryptography.X509Certificates.X500DistinguishedName), RehydrateX500DistinguishedName);
s_converter.Add(typeof(System.Net.IPAddress), RehydrateIPAddress);
s_converter.Add(typeof(MailAddress), RehydrateMailAddress);
s_converter.Add(typeof(System.Security.AccessControl.DirectorySecurity), RehydrateObjectSecurity<System.Security.AccessControl.DirectorySecurity>);
s_converter.Add(typeof(System.Security.AccessControl.FileSecurity), RehydrateObjectSecurity<System.Security.AccessControl.FileSecurity>);
s_converter.Add(typeof(System.Security.AccessControl.RegistrySecurity), RehydrateObjectSecurity<System.Security.AccessControl.RegistrySecurity>);
s_converter.Add(typeof(ExtendedTypeDefinition), RehydrateExtendedTypeDefinition);
s_converter.Add(typeof(FormatViewDefinition), RehydrateFormatViewDefinition);
s_converter.Add(typeof(PSControl), RehydratePSControl);
s_converter.Add(typeof(PSControlGroupBy), RehydrateGroupBy);
s_converter.Add(typeof(DisplayEntry), RehydrateDisplayEntry);
s_converter.Add(typeof(EntrySelectedBy), RehydrateEntrySelectedBy);
s_converter.Add(typeof(TableControlColumnHeader), RehydrateTableControlColumnHeader);
s_converter.Add(typeof(TableControlRow), RehydrateTableControlRow);
s_converter.Add(typeof(TableControlColumn), RehydrateTableControlColumn);
s_converter.Add(typeof(ListControlEntry), RehydrateListControlEntry);
s_converter.Add(typeof(ListControlEntryItem), RehydrateListControlEntryItem);
s_converter.Add(typeof(WideControlEntryItem), RehydrateWideControlEntryItem);
s_converter.Add(typeof(CustomControlEntry), RehydrateCustomControlEntry);
s_converter.Add(typeof(CustomItemBase), RehydrateCustomItemBase);
s_converter.Add(typeof(CompletionResult), RehydrateCompletionResult);
s_converter.Add(typeof(ModuleSpecification), RehydrateModuleSpecification);
s_converter.Add(typeof(CommandCompletion), RehydrateCommandCompletion);
s_converter.Add(typeof(JobStateInfo), RehydrateJobStateInfo);
s_converter.Add(typeof(JobStateEventArgs), RehydrateJobStateEventArgs);
s_converter.Add(typeof(PSSessionOption), RehydratePSSessionOption);
s_converter.Add(typeof(LineBreakpoint), RehydrateLineBreakpoint);
s_converter.Add(typeof(CommandBreakpoint), RehydrateCommandBreakpoint);
s_converter.Add(typeof(VariableBreakpoint), RehydrateVariableBreakpoint);
s_converter.Add(typeof(BreakpointUpdatedEventArgs), RehydrateBreakpointUpdatedEventArgs);
s_converter.Add(typeof(DebuggerCommand), RehydrateDebuggerCommand);
s_converter.Add(typeof(DebuggerCommandResults), RehydrateDebuggerCommandResults);
s_converter.Add(typeof(DebuggerStopEventArgs), RehydrateDebuggerStopEventArgs);
}
/// <summary>
/// Determines if the converter can convert the <paramref name="sourceValue"/> parameter to the <paramref name="destinationType"/> parameter.
/// </summary>
/// <param name="sourceValue">The value to convert from</param>
/// <param name="destinationType">The type to convert to</param>
/// <returns>True if the converter can convert the <paramref name="sourceValue"/> parameter to the <paramref name="destinationType"/> parameter, otherwise false.</returns>
public override bool CanConvertFrom(PSObject sourceValue, Type destinationType)
{
foreach (Type type in s_converter.Keys)
{
if (Deserializer.IsDeserializedInstanceOfType(sourceValue, type))
{
return true;
}
}
return false;
}
/// <summary>
/// Converts the <paramref name="sourceValue"/> parameter to the <paramref name="destinationType"/> parameter using formatProvider and ignoreCase
/// </summary>
/// <param name="sourceValue">The value to convert from</param>
/// <param name="destinationType">The type to convert to</param>
/// <param name="formatProvider">The format provider to use like in IFormattable's ToString</param>
/// <param name="ignoreCase">true if case should be ignored</param>
/// <returns>the <paramref name="sourceValue"/> parameter converted to the <paramref name="destinationType"/> parameter using formatProvider and ignoreCase</returns>
/// <exception cref="InvalidCastException">if no conversion was possible</exception>
public override object ConvertFrom(PSObject sourceValue, Type destinationType, IFormatProvider formatProvider, bool ignoreCase)
{
if (destinationType == null)
{
throw PSTraceSource.NewArgumentNullException("destinationType");
}
if (sourceValue == null)
{
throw new PSInvalidCastException(
"InvalidCastWhenRehydratingFromNull",
PSTraceSource.NewArgumentNullException("sourceValue"),
ExtendedTypeSystem.InvalidCastFromNull,
destinationType.ToString());
}
foreach (KeyValuePair<Type, Func<PSObject, object>> item in s_converter)
{
Type type = item.Key;
Func<PSObject, object> typeConverter = item.Value;
if (Deserializer.IsDeserializedInstanceOfType(sourceValue, type))
{
return ConvertFrom(sourceValue, typeConverter);
}
}
throw new PSInvalidCastException(
"InvalidCastEnumFromTypeNotAString",
null,
ExtendedTypeSystem.InvalidCastException,
sourceValue,
destinationType);
}
private static object ConvertFrom(PSObject o, Func<PSObject, object> converter)
{
// rehydrate
PSObject dso = o;
object rehydratedObject = converter(dso);
// re-add instance properties
// (dso.InstanceMembers includes both instance and *type table* properties;
// therefore this will also re-add type table properties if they are not present when the deserializer runs;
// this is ok)
bool returnPSObject = false;
PSObject rehydratedPSObject = PSObject.AsPSObject(rehydratedObject);
foreach (PSMemberInfo member in dso.InstanceMembers)
{
if (member.MemberType == (member.MemberType & (PSMemberTypes.Properties | PSMemberTypes.MemberSet | PSMemberTypes.PropertySet)))
{
if (rehydratedPSObject.Members[member.Name] == null)
{
rehydratedPSObject.InstanceMembers.Add(member);
returnPSObject = true;
}
}
}
if (returnPSObject)
{
return rehydratedPSObject;
}
else
{
return rehydratedObject;
}
}
        /// <summary>
        /// Returns true if the converter can convert the <paramref name="sourceValue"/> parameter to the <paramref name="destinationType"/> parameter
        /// </summary>
        /// <param name="sourceValue">The value to convert from</param>
        /// <param name="destinationType">The type to convert to</param>
        /// <returns>True if the converter can convert the <paramref name="sourceValue"/> parameter to the <paramref name="destinationType"/> parameter, otherwise false.</returns>
        public override bool CanConvertTo(object sourceValue, Type destinationType)
        {
            // This converter only rehydrates (converts FROM deserialized PSObjects);
            // converting live objects TO other types is never supported.
            return false;
        }
        /// <summary>
        /// Converts the <paramref name="sourceValue"/> parameter to the <paramref name="destinationType"/> parameter using formatProvider and ignoreCase
        /// </summary>
        /// <param name="sourceValue">The value to convert from</param>
        /// <param name="destinationType">The type to convert to</param>
        /// <param name="formatProvider">The format provider to use like in IFormattable's ToString</param>
        /// <param name="ignoreCase">true if case should be ignored</param>
        /// <returns>sourceValue converted to the <paramref name="destinationType"/> parameter using formatProvider and ignoreCase</returns>
        /// <exception cref="InvalidCastException">if no conversion was possible</exception>
        public override object ConvertTo(object sourceValue, Type destinationType, IFormatProvider formatProvider, bool ignoreCase)
        {
            // Consistent with CanConvertTo returning false: this direction is unsupported.
            throw PSTraceSource.NewNotSupportedException();
        }
/// <summary>
/// This method is not implemented - an overload taking a PSObject is implemented instead
/// </summary>
public override bool CanConvertFrom(object sourceValue, Type destinationType)
{
throw new NotImplementedException();
}
/// <summary>
/// This method is not implemented - an overload taking a PSObject is implemented instead
/// </summary>
public override bool CanConvertTo(PSObject sourceValue, Type destinationType)
{
throw new NotImplementedException();
}
/// <summary>
/// This method is not implemented - an overload taking a PSObject is implemented instead
/// </summary>
public override object ConvertFrom(object sourceValue, Type destinationType, IFormatProvider formatProvider, bool ignoreCase)
{
throw new NotImplementedException();
}
/// <summary>
/// This method is not implemented - an overload taking a PSObject is implemented instead
/// </summary>
public override object ConvertTo(PSObject sourceValue, Type destinationType, IFormatProvider formatProvider, bool ignoreCase)
{
throw new NotImplementedException();
}
#endregion
#region Rehydration helpers
        [Flags]
        internal enum RehydrationFlags
        {
            // Default (value 0): a null property value is not acceptable.
            NullValueBad = 0,
            // A null property value is acceptable; GetPropertyValue returns default(T).
            NullValueOk = 0x1,
            // Note: 0x3 includes the NullValueOk bit (0x1), so checking NullValueOk
            // also succeeds when this flag is set. For list rehydration a null/absent
            // value produces an empty list rather than default.
            NullValueMeansEmptyList = 0x3,
            // Default (value 0): the property must be present on the deserialized object.
            MissingPropertyBad = 0,
            // A missing property is acceptable; GetPropertyValue returns default(T).
            MissingPropertyOk = 0x4,
        }
        /// <summary>
        /// Gets value of a property (has to be present, value has to be non-null).
        /// Can throw any exception (which is ok - LanguagePrimitives.ConvertTo will catch that).
        /// </summary>
        /// <typeparam name="T">Expected type of the property</typeparam>
        /// <param name="pso">Deserialized object</param>
        /// <param name="propertyName">Property name</param>
        /// <returns>The property value converted to <typeparamref name="T"/></returns>
        private static T GetPropertyValue<T>(PSObject pso, string propertyName)
        {
            // NullValueBad and MissingPropertyBad both have the value 0, so this is
            // the strict mode of the three-argument overload: no leniency flags set.
            return GetPropertyValue<T>(pso, propertyName, RehydrationFlags.NullValueBad | RehydrationFlags.MissingPropertyBad);
        }
/// <summary>
/// Gets value of a property. Can throw any exception (which is ok - LanguagePrimitives.ConvertTo will catch that).
/// </summary>
/// <typeparam name="T">Expected type of the property</typeparam>
/// <param name="pso">Deserialized object</param>
/// <param name="propertyName">Property name</param>
/// <param name="flags"></param>
/// <returns></returns>
internal static T GetPropertyValue<T>(PSObject pso, string propertyName, RehydrationFlags flags)
{
Dbg.Assert(pso != null, "Caller should verify pso != null");
Dbg.Assert(!string.IsNullOrEmpty(propertyName), "Caller should verify propertyName != null");
PSPropertyInfo property = pso.Properties[propertyName];
if ((property == null) && (RehydrationFlags.MissingPropertyOk == (flags & RehydrationFlags.MissingPropertyOk)))
{
return default(T);
}
else
{
object propertyValue = property.Value;
if ((propertyValue == null) && (RehydrationFlags.NullValueOk == (flags & RehydrationFlags.NullValueOk)))
{
return default(T);
}
else
{
T t = (T)LanguagePrimitives.ConvertTo(propertyValue, typeof(T), CultureInfo.InvariantCulture);
return t;
}
}
}
private static ListType RehydrateList<ListType, ItemType>(PSObject pso, string propertyName, RehydrationFlags flags)
where ListType : IList, new()
{
ArrayList deserializedList = GetPropertyValue<ArrayList>(pso, propertyName, flags);
if (deserializedList == null)
{
if (RehydrationFlags.NullValueMeansEmptyList == (flags & RehydrationFlags.NullValueMeansEmptyList))
{
return new ListType();
}
else
{
return default(ListType);
}
}
else
{
ListType newList = new ListType();
foreach (object deserializedItem in deserializedList)
{
ItemType item = (ItemType)LanguagePrimitives.ConvertTo(deserializedItem, typeof(ItemType), CultureInfo.InvariantCulture);
newList.Add(item);
}
return newList;
}
}
#endregion
#region Rehydration of miscellaneous types
private static object RehydratePrimitiveHashtable(PSObject pso)
{
Hashtable hashtable = (Hashtable)LanguagePrimitives.ConvertTo(pso, typeof(Hashtable), CultureInfo.InvariantCulture);
return new PSPrimitiveDictionary(hashtable);
}
private static object RehydrateSwitchParameter(PSObject pso)
{
return GetPropertyValue<SwitchParameter>(pso, "IsPresent");
}
private static CultureInfo RehydrateCultureInfo(PSObject pso)
{
string s = pso.ToString();
return new CultureInfo(s);
}
private static PSListModifier RehydratePSListModifier(PSObject pso)
{
Hashtable h = new Hashtable();
PSPropertyInfo addProperty = pso.Properties[PSListModifier.AddKey];
if ((addProperty != null) && (addProperty.Value != null))
{
h.Add(PSListModifier.AddKey, addProperty.Value);
}
PSPropertyInfo removeProperty = pso.Properties[PSListModifier.RemoveKey];
if ((removeProperty != null) && (removeProperty.Value != null))
{
h.Add(PSListModifier.RemoveKey, removeProperty.Value);
}
PSPropertyInfo replaceProperty = pso.Properties[PSListModifier.ReplaceKey];
if ((replaceProperty != null) && (replaceProperty.Value != null))
{
h.Add(PSListModifier.ReplaceKey, replaceProperty.Value);
}
return new PSListModifier(h);
}
private static CompletionResult RehydrateCompletionResult(PSObject pso)
{
string completionText = GetPropertyValue<string>(pso, "CompletionText");
string listItemText = GetPropertyValue<string>(pso, "ListItemText");
string toolTip = GetPropertyValue<string>(pso, "ToolTip");
CompletionResultType resultType = GetPropertyValue<CompletionResultType>(pso, "ResultType");
return new CompletionResult(completionText, listItemText, resultType, toolTip);
}
private static ModuleSpecification RehydrateModuleSpecification(PSObject pso)
{
return new ModuleSpecification
{
Name = GetPropertyValue<string>(pso, "Name"),
Guid = GetPropertyValue<Guid?>(pso, "Guid"),
Version = GetPropertyValue<Version>(pso, "Version"),
MaximumVersion = GetPropertyValue<string>(pso, "MaximumVersion"),
RequiredVersion =
GetPropertyValue<Version>(pso, "RequiredVersion")
};
}
private static CommandCompletion RehydrateCommandCompletion(PSObject pso)
{
var completions = new Collection<CompletionResult>();
foreach (var match in GetPropertyValue<ArrayList>(pso, "CompletionMatches"))
{
completions.Add((CompletionResult)match);
}
var currentMatchIndex = GetPropertyValue<int>(pso, "CurrentMatchIndex");
var replacementIndex = GetPropertyValue<int>(pso, "ReplacementIndex");
var replacementLength = GetPropertyValue<int>(pso, "ReplacementLength");
return new CommandCompletion(completions, currentMatchIndex, replacementIndex, replacementLength);
}
        /// <summary>
        /// Rehydrates a JobStateInfo from its State and (optional) Reason properties.
        /// The Reason may arrive as a deserialized exception, a live exception, or
        /// any other value; all non-exception forms fall back to message-based
        /// reconstruction, and failures there are swallowed deliberately.
        /// </summary>
        private static JobStateInfo RehydrateJobStateInfo(PSObject pso)
        {
            var jobState = GetPropertyValue<JobState>(pso, "State");
            Exception reason = null;
            object propertyValue = null;
            PSPropertyInfo property = pso.Properties["Reason"];
            string message = string.Empty;
            if (property != null)
            {
                propertyValue = property.Value;
            }
            if (propertyValue != null)
            {
                if (Deserializer.IsDeserializedInstanceOfType(propertyValue, typeof(Exception)))
                {
                    // if we have a deserialized remote or any other exception, use its message to construct
                    // an exception
                    message = PSObject.AsPSObject(propertyValue).Properties["Message"].Value as string;
                }
                else if (propertyValue is Exception)
                {
                    // A live Exception instance can be used directly.
                    reason = (Exception)propertyValue;
                }
                else
                {
                    // Anything else: fall back to its string form as the message.
                    message = propertyValue.ToString();
                }
                if (!string.IsNullOrEmpty(message))
                {
                    try
                    {
                        reason = (Exception)LanguagePrimitives.ConvertTo(message, typeof(Exception), CultureInfo.InvariantCulture);
                    }
                    catch (Exception)
                    {
                        // it is ok to eat this exception since we do not want
                        // rehydration to fail
                        reason = null;
                    }
                }
            }
            return new JobStateInfo(jobState, reason);
        }
internal static JobStateEventArgs RehydrateJobStateEventArgs(PSObject pso)
{
var jobStateInfo = RehydrateJobStateInfo(PSObject.AsPSObject(pso.Properties["JobStateInfo"].Value));
JobStateInfo previousJobStateInfo = null;
var previousJobStateInfoProperty = pso.Properties["PreviousJobStateInfo"];
if (previousJobStateInfoProperty != null && previousJobStateInfoProperty.Value != null)
{
previousJobStateInfo = RehydrateJobStateInfo(PSObject.AsPSObject(previousJobStateInfoProperty.Value));
}
return new JobStateEventArgs(jobStateInfo, previousJobStateInfo);
}
        /// <summary>
        /// Rehydrates a PSSessionOption by copying each serialized property onto a
        /// freshly constructed instance, one property at a time.
        /// </summary>
        internal static PSSessionOption RehydratePSSessionOption(PSObject pso)
        {
            PSSessionOption option = new PSSessionOption();
            option.ApplicationArguments = GetPropertyValue<PSPrimitiveDictionary>(pso, "ApplicationArguments");
            option.CancelTimeout = GetPropertyValue<TimeSpan>(pso, "CancelTimeout");
            option.Culture = GetPropertyValue<CultureInfo>(pso, "Culture");
            option.IdleTimeout = GetPropertyValue<TimeSpan>(pso, "IdleTimeout");
            option.MaximumConnectionRedirectionCount = GetPropertyValue<int>(pso, "MaximumConnectionRedirectionCount");
            option.MaximumReceivedDataSizePerCommand = GetPropertyValue<Nullable<int>>(pso, "MaximumReceivedDataSizePerCommand");
            option.MaximumReceivedObjectSize = GetPropertyValue<Nullable<int>>(pso, "MaximumReceivedObjectSize");
            option.NoCompression = GetPropertyValue<bool>(pso, "NoCompression");
            option.NoEncryption = GetPropertyValue<bool>(pso, "NoEncryption");
            option.NoMachineProfile = GetPropertyValue<bool>(pso, "NoMachineProfile");
            option.OpenTimeout = GetPropertyValue<TimeSpan>(pso, "OpenTimeout");
            option.OperationTimeout = GetPropertyValue<TimeSpan>(pso, "OperationTimeout");
            option.OutputBufferingMode = GetPropertyValue<OutputBufferingMode>(pso, "OutputBufferingMode");
            option.MaxConnectionRetryCount = GetPropertyValue<int>(pso, "MaxConnectionRetryCount");
            option.ProxyAccessType = GetPropertyValue<ProxyAccessType>(pso, "ProxyAccessType");
            option.ProxyAuthentication = GetPropertyValue<AuthenticationMechanism>(pso, "ProxyAuthentication");
            option.ProxyCredential = GetPropertyValue<PSCredential>(pso, "ProxyCredential");
            option.SkipCACheck = GetPropertyValue<bool>(pso, "SkipCACheck");
            option.SkipCNCheck = GetPropertyValue<bool>(pso, "SkipCNCheck");
            option.SkipRevocationCheck = GetPropertyValue<bool>(pso, "SkipRevocationCheck");
            option.UICulture = GetPropertyValue<CultureInfo>(pso, "UICulture");
            option.UseUTF16 = GetPropertyValue<bool>(pso, "UseUTF16");
            option.IncludePortInSPN = GetPropertyValue<bool>(pso, "IncludePortInSPN");
            return option;
        }
internal static LineBreakpoint RehydrateLineBreakpoint(PSObject pso)
{
string script = GetPropertyValue<string>(pso, "Script");
int line = GetPropertyValue<int>(pso, "Line");
int column = GetPropertyValue<int>(pso, "Column");
int id = GetPropertyValue<int>(pso, "Id");
bool enabled = GetPropertyValue<bool>(pso, "Enabled");
ScriptBlock action = RehydrateScriptBlock(
GetPropertyValue<string>(pso, "Action", RehydrationFlags.MissingPropertyOk));
var bp = new LineBreakpoint(script, line, column, action, id);
bp.SetEnabled(enabled);
return bp;
}
internal static CommandBreakpoint RehydrateCommandBreakpoint(PSObject pso)
{
string script = GetPropertyValue<string>(pso, "Script", RehydrationFlags.MissingPropertyOk);
string command = GetPropertyValue<string>(pso, "Command");
int id = GetPropertyValue<int>(pso, "Id");
bool enabled = GetPropertyValue<bool>(pso, "Enabled");
WildcardPattern pattern = WildcardPattern.Get(command, WildcardOptions.Compiled | WildcardOptions.IgnoreCase);
ScriptBlock action = RehydrateScriptBlock(
GetPropertyValue<string>(pso, "Action", RehydrationFlags.MissingPropertyOk));
var bp = new CommandBreakpoint(script, pattern, command, action, id);
bp.SetEnabled(enabled);
return bp;
}
internal static VariableBreakpoint RehydrateVariableBreakpoint(PSObject pso)
{
string script = GetPropertyValue<string>(pso, "Script", RehydrationFlags.MissingPropertyOk);
string variableName = GetPropertyValue<string>(pso, "Variable");
int id = GetPropertyValue<int>(pso, "Id");
bool enabled = GetPropertyValue<bool>(pso, "Enabled");
VariableAccessMode access = GetPropertyValue<VariableAccessMode>(pso, "AccessMode");
ScriptBlock action = RehydrateScriptBlock(
GetPropertyValue<string>(pso, "Action", RehydrationFlags.MissingPropertyOk));
var bp = new VariableBreakpoint(script, variableName, access, action, id);
bp.SetEnabled(enabled);
return bp;
}
internal static BreakpointUpdatedEventArgs RehydrateBreakpointUpdatedEventArgs(PSObject pso)
{
Breakpoint bp = GetPropertyValue<Breakpoint>(pso, "Breakpoint");
BreakpointUpdateType bpUpdateType = GetPropertyValue<BreakpointUpdateType>(pso, "UpdateType");
int bpCount = GetPropertyValue<int>(pso, "BreakpointCount");
return new BreakpointUpdatedEventArgs(bp, bpUpdateType, bpCount);
}
internal static DebuggerCommand RehydrateDebuggerCommand(PSObject pso)
{
string command = GetPropertyValue<string>(pso, "Command");
bool repeatOnEnter = GetPropertyValue<bool>(pso, "RepeatOnEnter");
bool executedByDebugger = GetPropertyValue<bool>(pso, "ExecutedByDebugger");
DebuggerResumeAction? resumeAction = GetPropertyValue<DebuggerResumeAction?>(pso, "ResumeAction", RehydrationFlags.NullValueOk);
return new DebuggerCommand(command, resumeAction, repeatOnEnter, executedByDebugger);
}
internal static DebuggerCommandResults RehydrateDebuggerCommandResults(PSObject pso)
{
DebuggerResumeAction? resumeAction = GetPropertyValue<DebuggerResumeAction?>(pso, "ResumeAction", RehydrationFlags.NullValueOk);
bool evaluatedByDebugger = GetPropertyValue<bool>(pso, "EvaluatedByDebugger");
return new DebuggerCommandResults(resumeAction, evaluatedByDebugger);
}
internal static DebuggerStopEventArgs RehydrateDebuggerStopEventArgs(PSObject pso)
{
PSObject psoInvocationInfo = GetPropertyValue<PSObject>(pso, "SerializedInvocationInfo", RehydrationFlags.NullValueOk | RehydrationFlags.MissingPropertyOk);
InvocationInfo invocationInfo = (psoInvocationInfo != null) ? new InvocationInfo(psoInvocationInfo) : null;
DebuggerResumeAction resumeAction = GetPropertyValue<DebuggerResumeAction>(pso, "ResumeAction");
Collection<Breakpoint> breakpoints = new Collection<Breakpoint>();
foreach (var item in GetPropertyValue<ArrayList>(pso, "Breakpoints"))
{
Breakpoint bp = item as Breakpoint;
if (bp != null)
{
breakpoints.Add(bp);
}
}
return new DebuggerStopEventArgs(invocationInfo, breakpoints, resumeAction);
}
private static ScriptBlock RehydrateScriptBlock(string script)
{
if (!string.IsNullOrEmpty(script))
{
return ScriptBlock.Create(script);
}
return null;
}
#endregion
#region Rehydration of security-related types
private static PSCredential RehydratePSCredential(PSObject pso)
{
string userName = GetPropertyValue<string>(pso, "UserName");
System.Security.SecureString password = GetPropertyValue<System.Security.SecureString>(pso, "Password");
if (String.IsNullOrEmpty(userName))
{
return PSCredential.Empty;
}
else
{
return new PSCredential(userName, password);
}
}
        /// <summary>
        /// Rehydrates a PSSenderInfo from its nested UserInfo/Identity/CertificateDetails
        /// property bags. Note the principal is rebuilt around the CURRENT process
        /// identity (WindowsIdentity.GetCurrent()), not a deserialized one.
        /// </summary>
        internal static PSSenderInfo RehydratePSSenderInfo(PSObject pso)
        {
            PSObject userInfo = GetPropertyValue<PSObject>(pso, "UserInfo");
            PSObject userIdentity = GetPropertyValue<PSObject>(userInfo, "Identity");
            PSObject certDetails = GetPropertyValue<PSObject>(userIdentity, "CertificateDetails");
            // Certificate details are optional; absent details yield a null field.
            PSCertificateDetails psCertDetails = certDetails == null ? null : new PSCertificateDetails(
                GetPropertyValue<string>(certDetails, "Subject"),
                GetPropertyValue<string>(certDetails, "IssuerName"),
                GetPropertyValue<string>(certDetails, "IssuerThumbprint"));
            PSIdentity psIdentity = new PSIdentity(
                GetPropertyValue<string>(userIdentity, "AuthenticationType"),
                GetPropertyValue<bool>(userIdentity, "IsAuthenticated"),
                GetPropertyValue<string>(userIdentity, "Name"),
                psCertDetails);
            PSPrincipal psPrincipal = new PSPrincipal(psIdentity, WindowsIdentity.GetCurrent());
            PSSenderInfo senderInfo = new PSSenderInfo(psPrincipal, GetPropertyValue<string>(pso, "ConnectionString"));
#if !CORECLR // TimeZone Not In CoreCLR
            senderInfo.ClientTimeZone = TimeZone.CurrentTimeZone;
#endif
            senderInfo.ApplicationArguments = GetPropertyValue<PSPrimitiveDictionary>(pso, "ApplicationArguments");
            return senderInfo;
        }
/// <summary>
/// Rebuilds an X509Certificate2 from its deserialized raw byte data.
/// </summary>
private static System.Security.Cryptography.X509Certificates.X509Certificate2 RehydrateX509Certificate2(PSObject pso)
{
    return new System.Security.Cryptography.X509Certificates.X509Certificate2(
        GetPropertyValue<byte[]>(pso, "RawData"));
}
/// <summary>
/// Rebuilds an X500DistinguishedName from its deserialized raw byte data.
/// </summary>
private static System.Security.Cryptography.X509Certificates.X500DistinguishedName RehydrateX500DistinguishedName(PSObject pso)
{
    return new System.Security.Cryptography.X509Certificates.X500DistinguishedName(
        GetPropertyValue<byte[]>(pso, "RawData"));
}
/// <summary>
/// Rebuilds an IPAddress by parsing the string form of the deserialized object.
/// </summary>
private static System.Net.IPAddress RehydrateIPAddress(PSObject pso)
{
    return System.Net.IPAddress.Parse(pso.ToString());
}
/// <summary>
/// Rebuilds a MailAddress by parsing the string form of the deserialized object.
/// </summary>
private static MailAddress RehydrateMailAddress(PSObject pso)
{
    return new MailAddress(pso.ToString());
}
/// <summary>
/// Rebuilds an ObjectSecurity-derived instance from the deserialized "SDDL"
/// string property.
/// </summary>
private static T RehydrateObjectSecurity<T>(PSObject pso)
    where T : System.Security.AccessControl.ObjectSecurity, new()
{
    var security = new T();
    security.SetSecurityDescriptorSddlForm(GetPropertyValue<string>(pso, "SDDL"));
    return security;
}
#endregion
#region Rehydration of types needed by implicit remoting
/// <summary>
/// Gets the boolean properties of a ParameterSetMetadata object encoded as an integer.
/// </summary>
/// <param name="instance">
/// PSObject wrapping the ParameterSetMetadata for which to obtain the flags.
/// </param>
/// <returns>
/// The boolean properties of the ParameterSetMetadata object encoded as an integer.
/// </returns>
public static UInt32 GetParameterSetMetadataFlags(PSObject instance)
{
    if (instance == null)
    {
        throw PSTraceSource.NewArgumentNullException("instance");
    }

    var metadata = instance.BaseObject as ParameterSetMetadata;
    if (metadata == null)
    {
        // Same exception as the null case, for consistency with the original contract.
        throw PSTraceSource.NewArgumentNullException("instance");
    }

    return (UInt32)metadata.Flags;
}
/// <summary>
/// Gets the full remoting serialized PSObject for the InvocationInfo property
/// of a DebuggerStopEventArgs instance.
/// </summary>
/// <param name="instance">PSObject wrapping a DebuggerStopEventArgs.</param>
/// <returns>PSObject containing the serialized InvocationInfo, or null when absent.</returns>
public static PSObject GetInvocationInfo(PSObject instance)
{
    if (instance == null)
    {
        throw PSTraceSource.NewArgumentNullException("instance");
    }

    var eventArgs = instance.BaseObject as DebuggerStopEventArgs;
    if (eventArgs == null)
    {
        throw PSTraceSource.NewArgumentNullException("instance");
    }

    InvocationInfo invocationInfo = eventArgs.InvocationInfo;
    if (invocationInfo == null)
    {
        return null;
    }

    var serialized = new PSObject();
    invocationInfo.ToPSObjectForRemoting(serialized);
    return serialized;
}
/// <summary>
/// Rebuilds a ParameterSetMetadata from its deserialized Position, Flags and
/// HelpMessage properties.
/// </summary>
private static ParameterSetMetadata RehydrateParameterSetMetadata(PSObject pso)
{
    return new ParameterSetMetadata(
        GetPropertyValue<int>(pso, "Position"),
        (ParameterSetMetadata.ParameterFlags)GetPropertyValue<UInt32>(pso, "Flags"),
        GetPropertyValue<string>(pso, "HelpMessage"));
}
/// <summary>
/// Rebuilds a DisplayEntry from its deserialized Value and ValueType properties.
/// </summary>
private static DisplayEntry RehydrateDisplayEntry(PSObject deserializedDisplayEntry)
{
    var entry = new DisplayEntry();
    entry.Value = GetPropertyValue<string>(deserializedDisplayEntry, "Value");
    entry.ValueType = GetPropertyValue<DisplayEntryValueType>(deserializedDisplayEntry, "ValueType");
    return entry;
}
/// <summary>
/// Rebuilds an EntrySelectedBy from its deserialized TypeNames and
/// SelectionCondition lists; both properties are optional on the wire.
/// </summary>
private static EntrySelectedBy RehydrateEntrySelectedBy(PSObject deserializedEsb)
{
    var selectedBy = new EntrySelectedBy();
    selectedBy.TypeNames = RehydrateList<List<string>, string>(deserializedEsb, "TypeNames", RehydrationFlags.MissingPropertyOk);
    selectedBy.SelectionCondition = RehydrateList<List<DisplayEntry>, DisplayEntry>(deserializedEsb, "SelectionCondition", RehydrationFlags.MissingPropertyOk);
    return selectedBy;
}
/// <summary>
/// Rebuilds a WideControlEntryItem. When the structured "EntrySelectedBy"
/// property is absent, falls back to the flat "SelectedBy" name list.
/// </summary>
private static WideControlEntryItem RehydrateWideControlEntryItem(PSObject deserializedEntryItem)
{
    var entrySelectedBy = GetPropertyValue<EntrySelectedBy>(deserializedEntryItem, "EntrySelectedBy", RehydrationFlags.MissingPropertyOk);
    if (entrySelectedBy == null)
    {
        entrySelectedBy = EntrySelectedBy.Get(
            RehydrateList<List<string>, string>(deserializedEntryItem, "SelectedBy", RehydrationFlags.MissingPropertyOk),
            null);
    }

    var item = new WideControlEntryItem();
    item.DisplayEntry = GetPropertyValue<DisplayEntry>(deserializedEntryItem, "DisplayEntry");
    item.EntrySelectedBy = entrySelectedBy;
    item.FormatString = GetPropertyValue<string>(deserializedEntryItem, "FormatString", RehydrationFlags.MissingPropertyOk);
    return item;
}
/// <summary>
/// Rebuilds a ListControlEntryItem from its deserialized properties.
/// ItemSelectionCondition and FormatString are optional; Label may be null.
/// </summary>
private static ListControlEntryItem RehydrateListControlEntryItem(PSObject deserializedEntryItem)
{
    var item = new ListControlEntryItem();
    item.DisplayEntry = GetPropertyValue<DisplayEntry>(deserializedEntryItem, "DisplayEntry");
    item.ItemSelectionCondition = GetPropertyValue<DisplayEntry>(deserializedEntryItem, "ItemSelectionCondition", RehydrationFlags.MissingPropertyOk);
    item.FormatString = GetPropertyValue<string>(deserializedEntryItem, "FormatString", RehydrationFlags.MissingPropertyOk);
    item.Label = GetPropertyValue<string>(deserializedEntryItem, "Label", RehydrationFlags.NullValueOk);
    return item;
}
/// <summary>
/// Rebuilds a ListControlEntry. When the structured "EntrySelectedBy"
/// property is absent, falls back to the flat "SelectedBy" name list.
/// </summary>
private static ListControlEntry RehydrateListControlEntry(PSObject deserializedEntry)
{
    var entrySelectedBy = GetPropertyValue<EntrySelectedBy>(deserializedEntry, "EntrySelectedBy", RehydrationFlags.MissingPropertyOk);
    if (entrySelectedBy == null)
    {
        entrySelectedBy = EntrySelectedBy.Get(
            RehydrateList<List<string>, string>(deserializedEntry, "SelectedBy", RehydrationFlags.MissingPropertyOk),
            null);
    }

    var entry = new ListControlEntry();
    entry.Items = RehydrateList<List<ListControlEntryItem>, ListControlEntryItem>(deserializedEntry, "Items", RehydrationFlags.NullValueBad);
    entry.EntrySelectedBy = entrySelectedBy;
    return entry;
}
/// <summary>
/// Rebuilds a TableControlColumnHeader from its deserialized Alignment,
/// Label (nullable) and Width properties.
/// </summary>
private static TableControlColumnHeader RehydrateTableControlColumnHeader(PSObject deserializedHeader)
{
    var header = new TableControlColumnHeader();
    header.Alignment = GetPropertyValue<Alignment>(deserializedHeader, "Alignment");
    header.Label = GetPropertyValue<string>(deserializedHeader, "Label", RehydrationFlags.NullValueOk);
    header.Width = GetPropertyValue<int>(deserializedHeader, "Width");
    return header;
}
/// <summary>
/// Rebuilds a TableControlColumn from its deserialized Alignment,
/// DisplayEntry and optional FormatString properties.
/// </summary>
private static TableControlColumn RehydrateTableControlColumn(PSObject deserializedColumn)
{
    var column = new TableControlColumn();
    column.Alignment = GetPropertyValue<Alignment>(deserializedColumn, "Alignment");
    column.DisplayEntry = GetPropertyValue<DisplayEntry>(deserializedColumn, "DisplayEntry");
    column.FormatString = GetPropertyValue<string>(deserializedColumn, "FormatString", RehydrationFlags.MissingPropertyOk);
    return column;
}
/// <summary>
/// Rebuilds a TableControlRow; Wrap and the selection info are optional,
/// the column list is mandatory.
/// </summary>
private static TableControlRow RehydrateTableControlRow(PSObject deserializedRow)
{
    var row = new TableControlRow();
    row.Wrap = GetPropertyValue<bool>(deserializedRow, "Wrap", RehydrationFlags.MissingPropertyOk);
    row.SelectedBy = GetPropertyValue<EntrySelectedBy>(deserializedRow, "EntrySelectedBy", RehydrationFlags.MissingPropertyOk);
    row.Columns = RehydrateList<List<TableControlColumn>, TableControlColumn>(deserializedRow, "Columns", RehydrationFlags.NullValueBad);
    return row;
}
/// <summary>
/// Rebuilds a CustomControlEntry; the item list is mandatory, the selection
/// info is optional.
/// </summary>
private static CustomControlEntry RehydrateCustomControlEntry(PSObject deserializedEntry)
{
    var entry = new CustomControlEntry();
    entry.CustomItems = RehydrateList<List<CustomItemBase>, CustomItemBase>(deserializedEntry, "CustomItems", RehydrationFlags.MissingPropertyBad);
    entry.SelectedBy = GetPropertyValue<EntrySelectedBy>(deserializedEntry, "SelectedBy", RehydrationFlags.MissingPropertyOk);
    return entry;
}
/// <summary>
/// Rebuilds the appropriate CustomItemBase subtype (newline, text, frame or
/// expression) by inspecting the deserialized type name. Throws for any
/// unrecognized item type.
/// </summary>
private static CustomItemBase RehydrateCustomItemBase(PSObject deserializedItem)
{
    if (Deserializer.IsDeserializedInstanceOfType(deserializedItem, typeof(CustomItemNewline)))
    {
        var newline = new CustomItemNewline();
        newline.Count = GetPropertyValue<int>(deserializedItem, "Count", RehydrationFlags.MissingPropertyBad);
        return newline;
    }

    if (Deserializer.IsDeserializedInstanceOfType(deserializedItem, typeof(CustomItemText)))
    {
        var text = new CustomItemText();
        text.Text = GetPropertyValue<string>(deserializedItem, "Text", RehydrationFlags.MissingPropertyBad);
        return text;
    }

    if (Deserializer.IsDeserializedInstanceOfType(deserializedItem, typeof(CustomItemFrame)))
    {
        var frame = new CustomItemFrame();
        frame.FirstLineHanging = GetPropertyValue<uint>(deserializedItem, "FirstLineHanging");
        frame.FirstLineIndent = GetPropertyValue<uint>(deserializedItem, "FirstLineIndent");
        frame.RightIndent = GetPropertyValue<uint>(deserializedItem, "RightIndent");
        frame.LeftIndent = GetPropertyValue<uint>(deserializedItem, "LeftIndent");
        frame.CustomItems = RehydrateList<List<CustomItemBase>, CustomItemBase>(deserializedItem, "CustomItems", RehydrationFlags.MissingPropertyBad);
        return frame;
    }

    if (Deserializer.IsDeserializedInstanceOfType(deserializedItem, typeof(CustomItemExpression)))
    {
        var expression = new CustomItemExpression();
        expression.EnumerateCollection = GetPropertyValue<bool>(deserializedItem, "EnumerateCollection");
        expression.CustomControl = GetPropertyValue<CustomControl>(deserializedItem, "CustomControl", RehydrationFlags.MissingPropertyOk);
        expression.Expression = GetPropertyValue<DisplayEntry>(deserializedItem, "Expression", RehydrationFlags.MissingPropertyOk);
        expression.ItemSelectionCondition = GetPropertyValue<DisplayEntry>(deserializedItem, "ItemSelectionCondition", RehydrationFlags.MissingPropertyOk);
        return expression;
    }

    throw PSTraceSource.NewArgumentException("deserializedItem");
}
/// <summary>
/// Rehydrates a PSControl (table, list, wide or custom control) from its
/// deserialized representation, then restores the common GroupBy/OutOfBand
/// settings. Throws when the object matches no known control type.
/// </summary>
private static PSControl RehydratePSControl(PSObject deserializedControl)
{
    // Earlier versions of PowerShell did not have all of the possible properties in a control, so we must
    // use MissingPropertyOk to allow for connections to those older endpoints.
    PSControl result;
    if (Deserializer.IsDeserializedInstanceOfType(deserializedControl, typeof(TableControl)))
    {
        var tableControl = new TableControl
        {
            AutoSize = GetPropertyValue<bool>(deserializedControl, "AutoSize", RehydrationFlags.MissingPropertyOk),
            HideTableHeaders = GetPropertyValue<bool>(deserializedControl, "HideTableHeaders", RehydrationFlags.MissingPropertyOk),
            Headers = RehydrateList<List<TableControlColumnHeader>, TableControlColumnHeader>(deserializedControl, "Headers", RehydrationFlags.NullValueBad),
            Rows = RehydrateList<List<TableControlRow>, TableControlRow>(deserializedControl, "Rows", RehydrationFlags.NullValueBad)
        };
        result = tableControl;
    }
    else if (Deserializer.IsDeserializedInstanceOfType(deserializedControl, typeof(ListControl)))
    {
        var listControl = new ListControl
        {
            Entries = RehydrateList<List<ListControlEntry>, ListControlEntry>(deserializedControl, "Entries", RehydrationFlags.NullValueBad)
        };
        result = listControl;
    }
    else if (Deserializer.IsDeserializedInstanceOfType(deserializedControl, typeof(WideControl)))
    {
        var wideControl = new WideControl
        {
            // NOTE(review): AutoSize is populated from the "Alignment" property here,
            // not "AutoSize" — looks like a copy/paste slip, but it may match what the
            // serialization side actually emits for WideControl; confirm against the
            // serializer before changing, since this is wire-format-sensitive.
            AutoSize = GetPropertyValue<bool>(deserializedControl, "Alignment", RehydrationFlags.MissingPropertyOk),
            Columns = GetPropertyValue<uint>(deserializedControl, "Columns"),
            Entries = RehydrateList<List<WideControlEntryItem>, WideControlEntryItem>(deserializedControl, "Entries", RehydrationFlags.NullValueBad)
        };
        result = wideControl;
    }
    else if (Deserializer.IsDeserializedInstanceOfType(deserializedControl, typeof(CustomControl)))
    {
        var customControl = new CustomControl
        {
            Entries = RehydrateList<List<CustomControlEntry>, CustomControlEntry>(deserializedControl, "Entries", RehydrationFlags.NullValueBad)
        };
        result = customControl;
    }
    else
    {
        throw PSTraceSource.NewArgumentException("pso");
    }

    // Properties shared by all control types.
    result.GroupBy = GetPropertyValue<PSControlGroupBy>(deserializedControl, "GroupBy", RehydrationFlags.MissingPropertyOk);
    result.OutOfBand = GetPropertyValue<bool>(deserializedControl, "OutOfBand", RehydrationFlags.MissingPropertyOk);
    return result;
}
/// <summary>
/// Rebuilds a PSControlGroupBy from its deserialized CustomControl,
/// Expression (both optional) and Label (nullable) properties.
/// </summary>
private static PSControlGroupBy RehydrateGroupBy(PSObject deserializedGroupBy)
{
    var groupBy = new PSControlGroupBy();
    groupBy.CustomControl = GetPropertyValue<CustomControl>(deserializedGroupBy, "CustomControl", RehydrationFlags.MissingPropertyOk);
    groupBy.Expression = GetPropertyValue<DisplayEntry>(deserializedGroupBy, "Expression", RehydrationFlags.MissingPropertyOk);
    groupBy.Label = GetPropertyValue<string>(deserializedGroupBy, "Label", RehydrationFlags.NullValueOk);
    return groupBy;
}
/// <summary>
/// Gets the InstanceId of a FormatViewDefinition object.
/// </summary>
/// <param name="instance">
/// PSObject wrapping the FormatViewDefinition for which to obtain the instance id
/// </param>
/// <returns>
/// The InstanceId of the wrapped FormatViewDefinition
/// </returns>
public static Guid GetFormatViewDefinitionInstanceId(PSObject instance)
{
    if (instance == null)
    {
        throw PSTraceSource.NewArgumentNullException("instance");
    }

    FormatViewDefinition formatViewDefinition = instance.BaseObject as FormatViewDefinition;
    if (formatViewDefinition == null)
    {
        throw PSTraceSource.NewArgumentNullException("instance");
    }

    return formatViewDefinition.InstanceId;
}
/// <summary>
/// Rebuilds a FormatViewDefinition from its deserialized Name, InstanceId and
/// Control properties.
/// </summary>
private static FormatViewDefinition RehydrateFormatViewDefinition(PSObject deserializedViewDefinition)
{
    var viewName = GetPropertyValue<string>(deserializedViewDefinition, "Name");
    var viewInstanceId = GetPropertyValue<Guid>(deserializedViewDefinition, "InstanceId");
    var viewControl = GetPropertyValue<PSControl>(deserializedViewDefinition, "Control");
    return new FormatViewDefinition(viewName, viewControl, viewInstanceId);
}
/// <summary>
/// Rebuilds an ExtendedTypeDefinition from its deserialized type names and
/// view definitions.
/// </summary>
private static ExtendedTypeDefinition RehydrateExtendedTypeDefinition(PSObject deserializedTypeDefinition)
{
    // Prefer TypeNames to TypeName - as it was incorrect to create multiple ExtendedTypeDefinitions for a group of types.
    // But if a new PowerShell connects to an old endpoint, TypeNames will be missing, so fall back to TypeName in that case.
    var typeNames = RehydrateList<List<string>, string>(deserializedTypeDefinition, "TypeNames", RehydrationFlags.MissingPropertyOk);
    bool haveTypeNames = typeNames != null && typeNames.Count > 0;
    string firstTypeName = haveTypeNames
        ? typeNames[0]
        : GetPropertyValue<string>(deserializedTypeDefinition, "TypeName");

    List<FormatViewDefinition> viewDefinitions = RehydrateList<List<FormatViewDefinition>, FormatViewDefinition>(
        deserializedTypeDefinition,
        "FormatViewDefinition",
        RehydrationFlags.NullValueBad);

    var definition = new ExtendedTypeDefinition(firstTypeName, viewDefinitions);

    // Register any additional type names beyond the first one.
    if (haveTypeNames)
    {
        for (var i = 1; i < typeNames.Count; i++)
        {
            definition.TypeNames.Add(typeNames[i]);
        }
    }

    return definition;
}
#endregion
}
}
| {
"content_hash": "44153828b4fb554450917893bcad0685",
"timestamp": "",
"source": "github",
"line_count": 7529,
"max_line_length": 210,
"avg_line_length": 40.6350112896799,
"alnum_prop": 0.566658277249535,
"repo_name": "jsoref/PowerShell",
"id": "8bc6fafe631b051842269a5691a51115654f3d7f",
"size": "306143",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/System.Management.Automation/engine/serialization.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5425"
},
{
"name": "C#",
"bytes": "37154150"
},
{
"name": "C++",
"bytes": "304638"
},
{
"name": "CMake",
"bytes": "23659"
},
{
"name": "PowerShell",
"bytes": "2535909"
},
{
"name": "Python",
"bytes": "492"
},
{
"name": "Shell",
"bytes": "3521"
},
{
"name": "XSLT",
"bytes": "14407"
}
],
"symlink_target": ""
} |
// Bundle entry point: load the stylesheets first, then the application scripts.
require('./styles/');
require('./js/');
| {
"content_hash": "80d874f8b6de3ebbb7f7b9278d1c209a",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 21,
"avg_line_length": 20,
"alnum_prop": 0.55,
"repo_name": "thegsi/map-react-task",
"id": "87a39a9118020d56f598846f00b42a4a447c3156",
"size": "40",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/index.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "11477"
},
{
"name": "HTML",
"bytes": "1289"
},
{
"name": "JavaScript",
"bytes": "14000"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "06567a5b3bafb7ef6c5ba27065141eb9",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.6917293233082706,
"repo_name": "mdoering/backbone",
"id": "47f80bf56e2b0c4411747934fcaa40b01c336f21",
"size": "184",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Bacteria/Cyanobacteria/Chroococcales/Synechococcaceae/Aphanothece/Aphanothece elabens/ Syn. Micraloa elabens/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
// forward declarations only -- keeps this header lightweight by avoiding the
// corresponding full includes
namespace arangodb {
namespace application_features {
class ApplicationServer;
}
namespace aql {
class QueryList;
}
namespace velocypack {
class Builder;
class Slice;
class StringRef;
}  // namespace velocypack

class CursorRepository;
struct DatabaseJavaScriptCache;
class DatabaseReplicationApplier;
class LogicalCollection;
class LogicalDataSource;
class LogicalView;
class ReplicationClientsProgressTracker;
class StorageEngine;
}  // namespace arangodb
// Handle separator constants; each is provided both as a char and as a string
// so call sites can pick whichever form is convenient.

/// @brief document handle separator as character
constexpr char TRI_DOCUMENT_HANDLE_SEPARATOR_CHR = '/';

/// @brief document handle separator as string
constexpr auto TRI_DOCUMENT_HANDLE_SEPARATOR_STR = "/";

/// @brief index handle separator as character
constexpr char TRI_INDEX_HANDLE_SEPARATOR_CHR = '/';

/// @brief index handle separator as string
constexpr auto TRI_INDEX_HANDLE_SEPARATOR_STR = "/";
/// @brief collection enum
/// note: the numeric values are explicit and value 1 is conspicuously unused;
/// presumably these values are persisted/sent over the wire, so do not
/// renumber -- TODO confirm
enum TRI_col_type_e : uint32_t {
  TRI_COL_TYPE_UNKNOWN = 0,  // only used to signal an invalid collection type
  TRI_COL_TYPE_DOCUMENT = 2,
  TRI_COL_TYPE_EDGE = 3
};
/// @brief database type: a normal database vs. the coordinator-side
/// representation of a database (see TRI_vocbase_t::_type)
enum TRI_vocbase_type_e {
  TRI_VOCBASE_TYPE_NORMAL = 0,
  TRI_VOCBASE_TYPE_COORDINATOR = 1
};
/// @brief status of a collection
/// note: the NEW_BORN status is not used in ArangoDB 1.3 anymore, but is left
/// in this enum for compatibility with earlier versions
/// note: values are explicit -- presumably persisted, so do not renumber
/// (TODO confirm)
enum TRI_vocbase_col_status_e : int {
  TRI_VOC_COL_STATUS_CORRUPTED = 0,
  TRI_VOC_COL_STATUS_NEW_BORN = 1,  // DEPRECATED, and shouldn't be used anymore
  TRI_VOC_COL_STATUS_UNLOADED = 2,
  TRI_VOC_COL_STATUS_LOADED = 3,
  TRI_VOC_COL_STATUS_UNLOADING = 4,
  TRI_VOC_COL_STATUS_DELETED = 5,
  TRI_VOC_COL_STATUS_LOADING = 6
};
/// @brief database
/// Represents a single database ("vocbase"): owns its collections, views and
/// other data-sources, plus the per-database query list, cursor repository
/// and replication state.
struct TRI_vocbase_t {
  friend class arangodb::StorageEngine;

  TRI_vocbase_t(TRI_vocbase_type_e type, arangodb::CreateDatabaseInfo&&);
  TEST_VIRTUAL ~TRI_vocbase_t();

 private:
  // explicitly document implicit behavior (due to presence of locks)
  TRI_vocbase_t(TRI_vocbase_t&&) = delete;
  TRI_vocbase_t(TRI_vocbase_t const&) = delete;
  TRI_vocbase_t& operator=(TRI_vocbase_t&&) = delete;
  TRI_vocbase_t& operator=(TRI_vocbase_t const&) = delete;

  /// @brief sleep interval used when polling for a loading collection's status
  /// (unit presumably microseconds -- confirm at the call site)
  static constexpr unsigned collectionStatusPollInterval() { return 10 * 1000; }

  /// @brief states for dropping
  enum DropState {
    DROP_EXIT,    // drop done, nothing else to do
    DROP_AGAIN,   // drop not done, must try again
    DROP_PERFORM  // drop done, must perform actual cleanup routine
  };

  arangodb::application_features::ApplicationServer& _server;
  arangodb::CreateDatabaseInfo _info;  // creation parameters (id, name, sharding, ...)
  TRI_vocbase_type_e _type;            // type (normal or coordinator)
  std::atomic<uint64_t> _refCount;     // usage counter, see use()/release()
  bool _isOwnAppsDirectory;

  std::vector<std::shared_ptr<arangodb::LogicalCollection>> _collections;      // ALL collections
  std::vector<std::shared_ptr<arangodb::LogicalCollection>> _deadCollections;  // collections dropped that can be removed later

  std::unordered_map<arangodb::DataSourceId,
                     std::shared_ptr<arangodb::LogicalDataSource>>
      _dataSourceById;  // data-source by id
  std::unordered_map<std::string, std::shared_ptr<arangodb::LogicalDataSource>> _dataSourceByName;  // data-source by name
  std::unordered_map<std::string, std::shared_ptr<arangodb::LogicalDataSource>> _dataSourceByUuid;  // data-source by uuid
  mutable arangodb::basics::ReadWriteLock _dataSourceLock;  // data-source iterator lock
  mutable std::atomic<std::thread::id> _dataSourceLockWriteOwner;  // current thread owning '_dataSourceLock'
                                                                  // write lock (workaround for non-recursive
                                                                  // ReadWriteLock)

  std::unique_ptr<arangodb::aql::QueryList> _queries;              // AQL query registry for this database
  std::unique_ptr<arangodb::CursorRepository> _cursorRepository;   // cursors owned by this database

  std::unique_ptr<arangodb::DatabaseReplicationApplier> _replicationApplier;
  std::unique_ptr<arangodb::ReplicationClientsProgressTracker> _replicationClients;

 public:
  arangodb::basics::DeadlockDetector<arangodb::TransactionId, arangodb::LogicalCollection> _deadlockDetector;
  arangodb::basics::ReadWriteLock _inventoryLock;  // object lock needed when
                                                   // replication is assessing
                                                   // the state of the vocbase
                                                   // structures

  // structures for volatile cache data (used from JavaScript)
  std::unique_ptr<arangodb::DatabaseJavaScriptCache> _cacheData;

 public:
  /// @brief checks if a database name is allowed
  /// returns true if the name is allowed and false otherwise
  static bool IsAllowedName(arangodb::velocypack::Slice slice) noexcept;
  static bool IsAllowedName(bool allowSystem,
                            arangodb::velocypack::StringRef const& name) noexcept;

  /// @brief determine whether a data-source name is a system data-source name
  static bool IsSystemName(std::string const& name) noexcept;

  // simple accessors; most delegate to the CreateDatabaseInfo held in _info
  arangodb::application_features::ApplicationServer& server() const {
    return _server;
  }

  TRI_voc_tick_t id() const { return _info.getId(); }
  std::string const& name() const { return _info.getName(); }
  std::string path() const;
  std::uint32_t replicationFactor() const;
  std::uint32_t writeConcern() const;
  std::string const& sharding() const;
  bool isOneShard() const;
  TRI_vocbase_type_e type() const { return _type; }

  void toVelocyPack(arangodb::velocypack::Builder& result) const;
  arangodb::ReplicationClientsProgressTracker& replicationClients() {
    return *_replicationClients;
  }

  arangodb::DatabaseReplicationApplier* replicationApplier() const {
    return _replicationApplier.get();
  }

  void addReplicationApplier();

  arangodb::aql::QueryList* queryList() const { return _queries.get(); }
  arangodb::CursorRepository* cursorRepository() const {
    return _cursorRepository.get();
  }

  bool isOwnAppsDirectory() const { return _isOwnAppsDirectory; }
  void setIsOwnAppsDirectory(bool value) { _isOwnAppsDirectory = value; }

  /// @brief increase the reference counter for a database.
  /// will return true if the reference counter was increased, false otherwise
  /// in case false is returned, the database must not be used
  bool use();

  void forceUse();

  /// @brief decrease the reference counter for a database
  void release() noexcept;

  /// @brief returns whether the database is dangling
  bool isDangling() const;

  /// @brief whether or not the vocbase has been marked as deleted
  bool isDropped() const;

  /// @brief marks a database as deleted
  bool markAsDropped();

  /// @brief returns whether the database is the system database
  bool isSystem() const;

  /// @brief stop operations in this vocbase. must be called prior to
  /// shutdown to clean things up
  void stop();

  /// @brief closes a database and all collections
  void shutdown();

  /// @brief sets prototype collection for sharding (_users or _graphs)
  void setShardingPrototype(ShardingPrototype type);

  /// @brief gets prototype collection for sharding (_users or _graphs)
  ShardingPrototype shardingPrototype() const;

  /// @brief gets name of prototype collection for sharding (_users or _graphs)
  std::string const& shardingPrototypeName() const;

  /// @brief returns all known views
  std::vector<std::shared_ptr<arangodb::LogicalView>> views();

  /// @brief returns all known collections
  std::vector<std::shared_ptr<arangodb::LogicalCollection>> collections(bool includeDeleted);

  void processCollections(std::function<void(arangodb::LogicalCollection*)> const& cb,
                          bool includeDeleted);

  /// @brief returns names of all known collections
  std::vector<std::string> collectionNames();

  /// @brief creates a new view from parameter set
  std::shared_ptr<arangodb::LogicalView> createView(arangodb::velocypack::Slice parameters);

  /// @brief drops a view
  arangodb::Result dropView(arangodb::DataSourceId cid, bool allowDropSystem);

  /// @brief returns all known collections with their parameters
  /// and optionally indexes
  /// the result is sorted by type and name (vertices before edges)
  void inventory(arangodb::velocypack::Builder& result, TRI_voc_tick_t,
                 std::function<bool(arangodb::LogicalCollection const*)> const& nameFilter);

  /// @brief looks up a collection by identifier
  std::shared_ptr<arangodb::LogicalCollection> lookupCollection(arangodb::DataSourceId id) const
      noexcept;

  /// @brief looks up a collection by name or stringified cid or uuid
  std::shared_ptr<arangodb::LogicalCollection> lookupCollection(std::string const& nameOrId) const
      noexcept;

  /// @brief looks up a collection by uuid
  std::shared_ptr<arangodb::LogicalCollection> lookupCollectionByUuid(std::string const& uuid) const
      noexcept;

  /// @brief looks up a data-source by identifier
  std::shared_ptr<arangodb::LogicalDataSource> lookupDataSource(arangodb::DataSourceId id) const
      noexcept;

  /// @brief looks up a data-source by name or stringified cid or uuid
  std::shared_ptr<arangodb::LogicalDataSource> lookupDataSource(std::string const& nameOrId) const
      noexcept;

  /// @brief looks up a view by identifier
  std::shared_ptr<arangodb::LogicalView> lookupView(arangodb::DataSourceId id) const;

  /// @brief looks up a view by name or stringified cid or uuid
  std::shared_ptr<arangodb::LogicalView> lookupView(std::string const& nameOrId) const;

  /// @brief renames a collection
  arangodb::Result renameCollection(arangodb::DataSourceId cid, std::string const& newName);

  /// @brief renames a view
  /// NOTE(review): the parameter is named 'oldName' while renameCollection
  /// takes 'newName' -- confirm the intended semantics at the definition
  arangodb::Result renameView(arangodb::DataSourceId cid, std::string const& oldName);

  /// @brief creates a new collection from parameter set
  /// collection id ("cid") is normally passed with a value of 0
  /// this means that the system will assign a new collection id automatically
  /// using a cid of > 0 is supported to import dumps from other servers etc.
  /// but the functionality is not advertised
  std::shared_ptr<arangodb::LogicalCollection> createCollection(arangodb::velocypack::Slice parameters);

  /// @brief drops a collection, no timeout if timeout is < 0.0, otherwise
  /// timeout is in seconds. Essentially, the timeout counts to acquire the
  /// write lock for using the collection.
  arangodb::Result dropCollection(arangodb::DataSourceId cid,
                                  bool allowDropSystem, double timeout);

  /// @brief unloads a collection
  arangodb::Result unloadCollection(arangodb::LogicalCollection* collection, bool force);

  /// @brief locks a collection for usage by id
  /// Note that this will READ lock the collection you have to release the
  /// collection lock by yourself and call @ref TRI_ReleaseCollectionVocBase
  /// when you are done with the collection.
  std::shared_ptr<arangodb::LogicalCollection> useCollection(arangodb::DataSourceId cid,
                                                             bool checkPermissions);

  /// @brief locks a collection for usage by name
  /// Note that this will READ lock the collection you have to release the
  /// collection lock by yourself and call @ref TRI_ReleaseCollectionVocBase
  /// when you are done with the collection.
  std::shared_ptr<arangodb::LogicalCollection> useCollection(std::string const& name, bool checkPermissions);

  /// @brief releases a collection from usage
  void releaseCollection(arangodb::LogicalCollection* collection);

  /// @brief visit all DataSources registered with this vocbase
  /// @param visitor returns if visitation should continue
  /// @param lockWrite acquire write lock (if 'visitor' will modify vocbase)
  /// @return visitation completed successfully
  typedef std::function<bool(arangodb::LogicalDataSource& dataSource)> dataSourceVisitor;
  bool visitDataSources(dataSourceVisitor const& visitor, bool lockWrite = false);

 private:
  /// @brief callback for collection dropping
  static bool dropCollectionCallback(arangodb::LogicalCollection& collection);

  /// @brief check some invariants on the various lists of collections
  void checkCollectionInvariants() const;

  std::shared_ptr<arangodb::LogicalCollection> useCollectionInternal(
      std::shared_ptr<arangodb::LogicalCollection> const&, bool checkPermissions);

  arangodb::Result loadCollection(arangodb::LogicalCollection& collection,
                                  bool checkPermissions);

  /// @brief adds a new collection
  /// caller must hold _dataSourceLock in write mode or set doLock
  void registerCollection(bool doLock, std::shared_ptr<arangodb::LogicalCollection> const& collection);

  /// @brief removes a collection from the global list of collections
  /// This function is called when a collection is dropped.
  void unregisterCollection(arangodb::LogicalCollection& collection);

  /// @brief creates a new collection, worker function
  std::shared_ptr<arangodb::LogicalCollection> createCollectionWorker(arangodb::velocypack::Slice parameters);

  /// @brief drops a collection, worker function
  ErrorCode dropCollectionWorker(arangodb::LogicalCollection* collection,
                                 DropState& state, double timeout);

  /// @brief adds a new view
  /// caller must hold _dataSourceLock in write mode or set doLock
  void registerView(bool doLock, std::shared_ptr<arangodb::LogicalView> const& view);

  /// @brief removes a view from the global list of views
  /// This function is called when a view is dropped.
  bool unregisterView(arangodb::LogicalView const& view);
};
/// @brief sanitize an object, given as slice, builder must contain an
/// open object which will remain open; the sanitized attributes are appended
/// to the builder
void TRI_SanitizeObject(arangodb::velocypack::Slice const slice,
                        arangodb::velocypack::Builder& builder);

/// @brief same as TRI_SanitizeObject, for edge objects
void TRI_SanitizeObjectWithEdges(arangodb::velocypack::Slice const slice,
                                 arangodb::velocypack::Builder& builder);
| {
"content_hash": "31f1230ac2db4ee291ece9b3b4959aa3",
"timestamp": "",
"source": "github",
"line_count": 339,
"max_line_length": 127,
"avg_line_length": 41.36873156342183,
"alnum_prop": 0.7196948088990303,
"repo_name": "Simran-B/arangodb",
"id": "c28240f4aaa1776d838647afd9a010bec8f4bdbc",
"size": "15517",
"binary": false,
"copies": "1",
"ref": "refs/heads/devel",
"path": "arangod/VocBase/vocbase.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "61827"
},
{
"name": "Batchfile",
"bytes": "3282"
},
{
"name": "C",
"bytes": "275955"
},
{
"name": "C++",
"bytes": "29221660"
},
{
"name": "CMake",
"bytes": "375992"
},
{
"name": "CSS",
"bytes": "212174"
},
{
"name": "EJS",
"bytes": "218744"
},
{
"name": "HTML",
"bytes": "23114"
},
{
"name": "JavaScript",
"bytes": "30616196"
},
{
"name": "LLVM",
"bytes": "14753"
},
{
"name": "Makefile",
"bytes": "526"
},
{
"name": "NASL",
"bytes": "129286"
},
{
"name": "NSIS",
"bytes": "49153"
},
{
"name": "PHP",
"bytes": "46519"
},
{
"name": "Pascal",
"bytes": "75391"
},
{
"name": "Perl",
"bytes": "9811"
},
{
"name": "PowerShell",
"bytes": "7885"
},
{
"name": "Python",
"bytes": "181384"
},
{
"name": "Ruby",
"bytes": "1041531"
},
{
"name": "SCSS",
"bytes": "254419"
},
{
"name": "Shell",
"bytes": "128175"
},
{
"name": "TypeScript",
"bytes": "25245"
},
{
"name": "Yacc",
"bytes": "68516"
}
],
"symlink_target": ""
} |
module MnoEnterprise
  # Marketplace application resource.
  class App < BaseResource
    # Applications flagged as active
    scope :active, -> { where(active: true) }
    # Applications on the 'cloud' stack
    scope :cloud, -> { where(stack: 'cloud') }

    # NOTE: :tiny_description used to be listed twice; the duplicate entry was removed.
    attributes :id, :uid, :nid, :name, :description, :tiny_description, :created_at, :updated_at, :logo, :website, :slug,
               :categories, :key_benefits, :key_features, :testimonials, :worldwide_usage,
               :popup_description, :stack, :terms_url, :pictures, :tags, :api_key, :metadata_url, :metadata, :details, :rank,
               :multi_instantiable, :subcategories, :reviews, :average_rating, :running_instances_count, :pricing_text

    #================================
    # Associations
    #================================
    has_many :reviews, class_name: 'AppReview'
    has_many :feedbacks, class_name: 'AppFeedback'
    has_many :questions, class_name: 'AppQuestion'
    has_many :shared_entities

    # Return the sorted list of categories across the given apps
    # (defaults to all apps); de-duplicated case-insensitively.
    def self.categories(list = nil)
      app_list = list || self.all.to_a
      app_list.select { |a| a.categories.present? }.map(&:categories).flatten.uniq { |e| e.downcase }.sort
    end

    # Minimal attribute hash used when recording audit events.
    def to_audit_event
      {
        app_id: id,
        app_nid: nid,
        app_name: name
      }
    end

    # Sanitize the app description
    # E.g.: replace any mention of Maestrano by the tenant name
    # (the look-arounds keep 'cdn.maestrano', 'cdn-prd-maestrano' and
    # 'maestrano.com' untouched)
    def sanitized_description
      @sanitized_description ||= (self.description || '').gsub(/(?<!cdn\.)(?<!cdn-prd-)maestrano(?!\.com)/i,MnoEnterprise.app_name)
    end

    # Boolean accessors for appinfo flags: #responsive?, #coming_soon?,
    # #single_billing?, #add_on?
    %w(responsive coming_soon single_billing add_on).each do |method|
      define_method "#{method}?" do
        !!(appinfo.presence && appinfo[method])
      end
    end

    # Whether the appinfo 'starReady' flag is set
    def star_ready?
      !!(appinfo.presence && appinfo['starReady'])
    end

    # Whether the appinfo 'connecReady' flag is set
    def connec_ready?
      !!(appinfo.presence && !!appinfo['connecReady'])
    end

    # Request a fresh API key from the remote endpoint and store it locally.
    def regenerate_api_key!
      data = self.put(operation: 'regenerate_api_key')
      self.api_key = data[:data][:api_key]
    end

    # Trigger a remote refresh of the app metadata from the given URL.
    def refresh_metadata!(metadata_url)
      self.put(operation: 'refresh_metadata', metadata_url: metadata_url)
    end
  end
end
| {
"content_hash": "74b1a6cc0daaf23cd0ef38124259962d",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 131,
"avg_line_length": 33.6875,
"alnum_prop": 0.6076066790352505,
"repo_name": "maestrano/mno-enterprise",
"id": "1f699e32642196bc6013306375653580d2fce8f4",
"size": "3078",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/app/models/mno_enterprise/app.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "277763"
},
{
"name": "CoffeeScript",
"bytes": "101017"
},
{
"name": "HTML",
"bytes": "136566"
},
{
"name": "JavaScript",
"bytes": "280731"
},
{
"name": "Ruby",
"bytes": "790255"
},
{
"name": "Shell",
"bytes": "1569"
}
],
"symlink_target": ""
} |
package org.spockframework.compiler;
import java.util.List;
import java.util.ListIterator;
import org.codehaus.groovy.ast.ClassCodeVisitorSupport;
import org.codehaus.groovy.ast.stmt.*;
import org.codehaus.groovy.control.SourceUnit;
/**
 * Visitor base class that supports in-place replacement of statements.
 *
 * <p>Implementation note: only visit methods for AST nodes that directly
 * reference statements need to be overridden. For ClosureExpression we rely
 * on the assumption that it always references a BlockStatement, so our
 * visitBlockStatement() method gets invoked.
 *
 * @author Peter Niederwieser
 */
public abstract class StatementReplacingVisitorSupport extends ClassCodeVisitorSupport {
  // Holds the statement that should replace the one currently being visited,
  // or null if no replacement has been requested.
  private Statement replacement;

  /**
   * Visits the given statement and returns either the statement itself or,
   * if the visit requested a substitution via replaceVisitedStatementWith(),
   * the replacement statement.
   */
  public Statement replace(Statement statement) {
    replacement = null;
    statement.visit(this);
    Statement result;
    if (replacement == null) {
      result = statement;
    } else {
      result = replacement;
    }
    replacement = null;
    return result;
  }

  /**
   * Visits every statement in the given mutable list, substituting each one
   * for which the visit requested a replacement.
   */
  @SuppressWarnings("unchecked")
  protected <T extends Statement> void replaceAll(List<T> statements) {
    for (int i = 0; i < statements.size(); i++) {
      statements.set(i, (T) replace(statements.get(i)));
    }
  }

  /**
   * Requests that the currently visited statement be replaced by the given one.
   */
  protected void replaceVisitedStatementWith(Statement other) {
    replacement = other;
  }

  @SuppressWarnings("unchecked")
  @Override
  public void visitBlockStatement(BlockStatement block) {
    replaceAll(block.getStatements());
  }

  @Override
  public void visitForLoop(ForStatement loop) {
    loop.getCollectionExpression().visit(this);
    loop.setLoopBlock(replace(loop.getLoopBlock()));
  }

  @Override
  public void visitWhileLoop(WhileStatement loop) {
    loop.getBooleanExpression().visit(this);
    loop.setLoopBlock(replace(loop.getLoopBlock()));
  }

  @Override
  public void visitDoWhileLoop(DoWhileStatement loop) {
    loop.getBooleanExpression().visit(this);
    loop.setLoopBlock(replace(loop.getLoopBlock()));
  }

  @Override
  public void visitIfElse(IfStatement conditional) {
    conditional.getBooleanExpression().visit(this);
    conditional.setIfBlock(replace(conditional.getIfBlock()));
    conditional.setElseBlock(replace(conditional.getElseBlock()));
  }

  @SuppressWarnings("unchecked")
  @Override
  public void visitTryCatchFinally(TryCatchStatement tryCatch) {
    tryCatch.setTryStatement(replace(tryCatch.getTryStatement()));
    replaceAll(tryCatch.getCatchStatements());
    tryCatch.setFinallyStatement(replace(tryCatch.getFinallyStatement()));
  }

  @SuppressWarnings("unchecked")
  @Override
  public void visitSwitch(SwitchStatement switchStat) {
    switchStat.getExpression().visit(this);
    replaceAll(switchStat.getCaseStatements());
    switchStat.setDefaultStatement(replace(switchStat.getDefaultStatement()));
  }

  @Override
  public void visitCaseStatement(CaseStatement caseStat) {
    caseStat.getExpression().visit(this);
    caseStat.setCode(replace(caseStat.getCode()));
  }

  @Override
  public void visitSynchronizedStatement(SynchronizedStatement syncStat) {
    syncStat.getExpression().visit(this);
    syncStat.setCode(replace(syncStat.getCode()));
  }

  @Override
  public void visitCatchStatement(CatchStatement catchStat) {
    catchStat.setCode(replace(catchStat.getCode()));
  }

  // This visitor is not tied to a particular source unit.
  @Override
  protected SourceUnit getSourceUnit() {
    throw new UnsupportedOperationException("getSourceUnit");
  }
}
| {
"content_hash": "40f84c106fe0398ab2e0f42c7ddf93eb",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 88,
"avg_line_length": 29.235772357723576,
"alnum_prop": 0.7366518353726362,
"repo_name": "spockframework/spock",
"id": "eea8b355676101797d401a0872d7e379c049a4fc",
"size": "4211",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "spock-core/src/main/java/org/spockframework/compiler/StatementReplacingVisitorSupport.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "1124541"
},
{
"name": "Java",
"bytes": "1514097"
}
],
"symlink_target": ""
} |
Binary Downloads: https://github.com/AlliterativeAlice/simpleyui/releases
## Introduction
Simple YUI Compressor .NET is a .NET 2.0-compatible library for combining and minifying JavaScript and CSS files for display on websites. Usage of SimpleYUI is similar to [SquishIt](https://github.com/jetheredge/SquishIt). SimpleYUI is not intended to be a replacement to SquishIt, but rather is intended to provide a simpler, less feature-rich solution that's compatible with .NET 2.0 and up. "Simple" in this case means:
- Just one .dll file to include
- Only supports pure JS and CSS (no support for LESS, SASS, CofeeScript, etc. If you need those, use SquishIt)
- Only supports one compression engine (YUI)
- No configuration required
Simple YUI Compressor .NET is in no way related to [SimpleYUI](http://www.yuiblog.com/blog/2010/09/03/coming-inyui-3-2-0-simpleyui/)
## How To Use
First, you have to reference SimpleYUI.dll.
An example of using SimpleYUI is:
```aspx
<!DOCTYPE html>
<html>
<head>
<title>SimpleYUI Demo</title>
<%= SimpleYUI.Bundler.CSS()
.Add("~/css/style1.css")
.Add("~/css/style2.css")
.Render("~/css/combined.css") %>
<%= SimpleYUI.Bundler.JavaScript()
.Add("~/scripts/script1.js")
.Add("~/scripts/script2.js")
.Render("~/scripts/combined.js") %>
</head>
<body>
</body>
</html>
```
SimpleYUI injects a hash into the rendered filename based on the parameters passed and the last modified times of the included files. Therefore SimpleYUI will not regenerate the combined file on every request (which would be extremely inefficient) but only when one of the included files is changed, or the parameters passed to CSS() or JavaScript() change. When debugging, you can have SimpleYUI output individual `<script>`/`<link>` tags for each file instead of the combined file by including debug=true in the request query string (e.g. http://www.example.com/index.aspx?debug=true) or setting the "debug" parameter, which is the first parameter of the `CSS()` and `JavaScript()` methods to true (e.g. `SimpleYUI.Bundler.CSS(true)`)
Note that relative paths in CSS files will not be converted to work in the combined CSS file, so you may need to use absolute paths unless the combined CSS file is going to be outputted to the same directory as the CSS file(s) containing relative paths.
You can pass a number of parameters to `CSS()` and `JavaScript()` which will change how SimpleYUI is configured. By default SimpleYUI uses the most aggressive compression settings.
### CSS() Parameters
##### debug (bool)
If set to true, no combining/compression will take place and <link> tags to each individual CSS file will be output. If no parameter is passed to debug, it's possible to trigger debug mode by adding debug=true to the request query string.
##### useCompression (bool)
If set to true, SimpleYUI will compress the CSS by removing superfluous whitespace. If set to false, no compression will take place (but files will still be combined into one.) Defaults to true.
##### removeComments (bool)
If set to true, SimpleYUI will remove comments from the CSS. If set to false, it won't. Defaults to true.
##### lineBreakPosition (int)
Roughly how many characters should be included per line in the compressed CSS file. Defaults to -1 (no line breaks.) Note that a line break will always be added after the contents of each file regardless of this setting.
### JavaScript() Parameters
##### debug (bool)
If set to true, no combining/compression will take place and `<script>` tags to each individual JS file will be output. If no parameter is passed to debug, it is possible to trigger debug mode by adding debug=true to the request query string.
##### useCompression (bool)
If set to true, SimpleYUI will compress the JavaScript by removing superfluous whitespace. If set to false, no compression will take place (but files will still be combined into one.) Defaults to true.
##### obfuscate (bool)
If set to true, SimpleYUI will obfuscate the JavaScript (by shortening variable names, etc.) If set to false, no obfuscation will take place. This may need to be set to false for code that uses certain JavaScript libraries (e.g. AngularJS.) Defaults to true.
##### preserveSemicolons (bool)
If set to true, SimpleYUI will not remove any semicolons. Defaults to false.
##### disableOptimizations (bool)
If set to true, SimpleYUI will not make any micro-optimizations (e.g. conversion of `foo['bar']` to `foo.bar`.) Defaults to false.
##### ignoreEval (bool)
If set to true, SimpleYUI won't touch code in eval() statements. Defaults to false.
##### lineBreakPosition (int)
Roughly how many characters should be included per line in the compressed JS file. Defaults to -1 (no line breaks.) Note that a line break will always be added after the contents of each file regardless of this setting.
## Planned Features
- Total JavaScript comment removal.
- Conversion of relative paths in combined CSS files.
- Test cases.
| {
"content_hash": "3fca4ba77f1c094887b2563e07c385b6",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 736,
"avg_line_length": 52.06122448979592,
"alnum_prop": 0.7357898863190906,
"repo_name": "AlliterativeAlice/simpleyui",
"id": "98e389a671924316f2e5e324a9ce3664638c554b",
"size": "5132",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C#",
"bytes": "10116"
}
],
"symlink_target": ""
} |
Subsets and Splits