context
stringlengths 2.52k
185k
| gt
stringclasses 1
value |
---|---|
/* ****************************************************************************
*
* Copyright (c) Microsoft Corporation.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* [email protected]. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
*
*
* ***************************************************************************/
#if FEATURE_CORE_DLR
using System.Linq.Expressions;
#else
using Microsoft.Scripting.Ast;
#endif
using System;
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
using System.Diagnostics;
using System.Dynamic;
using System.Text;
using Microsoft.Scripting.Actions;
using Microsoft.Scripting.Generation;
using Microsoft.Scripting.Utils;
namespace Microsoft.Scripting.Runtime {
/// <summary>
/// Helper methods that calls are generated to from the default DLR binders.
/// </summary>
public static class BinderOps {
/// <summary>
/// Helper function to combine an object array with a sequence of additional parameters that has been splatted for a function call.
/// </summary>
/// <param name="initialArgs">The explicitly supplied arguments.</param>
/// <param name="additionalArgs">The splatted argument; must be an IList or some other IEnumerable.</param>
/// <returns>A single array containing initialArgs followed by the splatted values.</returns>
public static object[] GetCombinedParameters(object[] initialArgs, object additionalArgs) {
    // Fast path: the splatted value already exposes Count and indexed CopyTo.
    IList splatted = additionalArgs as IList;
    if (splatted == null) {
        // Slow path: drain any other enumerable into a fresh list.
        IEnumerable enumerable = additionalArgs as IEnumerable;
        if (enumerable == null) {
            throw new InvalidOperationException("args must be iterable");
        }

        List<object> collected = new List<object>();
        foreach (object item in enumerable) {
            collected.Add(item);
        }
        splatted = collected;
    }

    object[] combined = new object[initialArgs.Length + splatted.Count];
    Array.Copy(initialArgs, combined, initialArgs.Length);
    splatted.CopyTo(combined, initialArgs.Length);
    return combined;
}
/// <summary>
/// Builds a Dictionary from parallel name/value arrays. TKey is expected to be
/// string or object (asserted below) so the string keys can be stored via the
/// non-generic IDictionary interface without a compile-time conversion.
/// </summary>
public static Dictionary<TKey, TValue> MakeDictionary<TKey, TValue>(string[] names, object[] values) {
    Debug.Assert(typeof(TKey) == typeof(string) || typeof(TKey) == typeof(object));

    Dictionary<TKey, TValue> result = new Dictionary<TKey, TValue>();
    // Go through the untyped interface so string keys and object values can be
    // inserted regardless of the generic arguments.
    IDictionary untyped = (IDictionary)result;
    int index = 0;
    while (index < names.Length) {
        untyped[names[index]] = values[index];
        index++;
    }
    return result;
}
/// <summary>
/// Builds the error for an operator that has no overload accepting the runtime
/// types of the supplied operands, e.g.
/// "unsupported operand type(s) for operation Add: Int32, String".
/// </summary>
/// <param name="op">The operator that failed to bind.</param>
/// <param name="args">The operand values; only their runtime types appear in the message.</param>
/// <returns>Never returns normally; the exception is thrown from this method.</returns>
// NOTE(review): every other factory in this class *returns* its exception so the
// caller can throw it; this one throws directly — confirm no caller relies on the return value.
public static ArgumentTypeException BadArgumentsForOperation(ExpressionType op, params object[] args) {
    StringBuilder message = new StringBuilder("unsupported operand type(s) for operation ");
    message.Append(op.ToString());
    message.Append(": ");
    string comma = "";
    foreach (object o in args) {
        // Report each operand's type, comma-separated.
        message.Append(comma);
        message.Append(CompilerHelpers.GetType(o));
        comma = ", ";
    }
    throw new ArgumentTypeException(message.ToString());
}
// formalNormalArgumentCount - does not include FuncDefFlags.ArgList and FuncDefFlags.KwDict
// defaultArgumentCount - How many arguments in the method declaration have a default value?
// providedArgumentCount - How many arguments are passed in at the call site?
// hasArgList - Is the method declaration of the form "foo(*argList)"?
// keywordArgumentsProvided - Does the call site specify keyword arguments?
/// <summary>
/// Builds a TypeError for a call with the wrong number of arguments, for methods whose
/// minimum and maximum formal argument counts are identical. Delegates to the min/max
/// overload with both bounds set to <paramref name="formalNormalArgumentCount"/>.
/// </summary>
public static ArgumentTypeException TypeErrorForIncorrectArgumentCount(
    string methodName,
    int formalNormalArgumentCount,
    int defaultArgumentCount,
    int providedArgumentCount,
    bool hasArgList,
    bool keywordArgumentsProvided) {
    return TypeErrorForIncorrectArgumentCount(methodName, formalNormalArgumentCount, formalNormalArgumentCount, defaultArgumentCount, providedArgumentCount, hasArgList, keywordArgumentsProvided);
}
/// <summary>
/// Builds a TypeError describing a call made with an incorrect number of arguments,
/// e.g. "foo() takes at least 2 arguments (1 given)".
/// </summary>
/// <param name="methodName">Name used as the "{name}()" prefix of the message.</param>
/// <param name="minFormalNormalArgumentCount">Fewest formal (non-ArgList/KwDict) parameters accepted.</param>
/// <param name="maxFormalNormalArgumentCount">Most formal parameters accepted; Int32.MaxValue means unbounded.</param>
/// <param name="defaultArgumentCount">How many formal parameters carry default values.</param>
/// <param name="providedArgumentCount">How many arguments the call site supplied.</param>
/// <param name="hasArgList">True if the method declaration has a "*argList" parameter.</param>
/// <param name="keywordArgumentsProvided">True if the call site used keyword arguments.</param>
public static ArgumentTypeException TypeErrorForIncorrectArgumentCount(
    string methodName,
    int minFormalNormalArgumentCount,
    int maxFormalNormalArgumentCount,
    int defaultArgumentCount,
    int providedArgumentCount,
    bool hasArgList,
    bool keywordArgumentsProvided) {
    int formalCount;
    string formalCountQualifier;
    string nonKeyword = keywordArgumentsProvided ? "non-keyword " : "";

    // A variable-arity signature (defaults, *argList, or differing min/max bounds)
    // is reported as a bound ("at least"/"at most"); a fixed arity as "exactly".
    if (defaultArgumentCount > 0 || hasArgList || minFormalNormalArgumentCount != maxFormalNormalArgumentCount) {
        if (providedArgumentCount < minFormalNormalArgumentCount || maxFormalNormalArgumentCount == Int32.MaxValue) {
            // Too few arguments (or no upper bound exists): report the smallest
            // number of required arguments, excluding those with defaults.
            formalCountQualifier = "at least";
            formalCount = minFormalNormalArgumentCount - defaultArgumentCount;
        } else {
            formalCountQualifier = "at most";
            formalCount = maxFormalNormalArgumentCount;
        }
    } else if (minFormalNormalArgumentCount == 0) {
        // Zero-argument methods get a dedicated message shape.
        return ScriptingRuntimeHelpers.SimpleTypeError(string.Format("{0}() takes no arguments ({1} given)", methodName, providedArgumentCount));
    } else {
        formalCountQualifier = "exactly";
        formalCount = minFormalNormalArgumentCount;
    }

    return new ArgumentTypeException(string.Format(
        "{0}() takes {1} {2} {3}argument{4} ({5} given)",
        methodName, // 0
        formalCountQualifier, // 1
        formalCount, // 2
        nonKeyword, // 3
        formalCount == 1 ? "" : "s", // 4
        providedArgumentCount)); // 5
}
/// <summary>
/// Builds an argument-count TypeError for a method with no *argList parameter
/// and no keyword arguments at the call site.
/// </summary>
public static ArgumentTypeException TypeErrorForIncorrectArgumentCount(string name, int formalNormalArgumentCount, int defaultArgumentCount, int providedArgumentCount) {
    return TypeErrorForIncorrectArgumentCount(name, formalNormalArgumentCount, defaultArgumentCount, providedArgumentCount, false, false);
}

/// <summary>
/// Builds an argument-count TypeError for the simplest case: a fixed-arity
/// method with no default values.
/// </summary>
public static ArgumentTypeException TypeErrorForIncorrectArgumentCount(string name, int expected, int received) {
    return TypeErrorForIncorrectArgumentCount(name, expected, 0, received);
}
/// <summary>Builds a TypeError for a keyword argument the method does not declare.</summary>
public static ArgumentTypeException TypeErrorForExtraKeywordArgument(string name, string argumentName) {
    return new ArgumentTypeException(String.Format("{0}() got an unexpected keyword argument '{1}'", name, argumentName));
}

/// <summary>Builds a TypeError for a parameter bound both positionally and by keyword.</summary>
public static ArgumentTypeException TypeErrorForDuplicateKeywordArgument(string name, string argumentName) {
    return new ArgumentTypeException(String.Format("{0}() got multiple values for keyword argument '{1}'", name, argumentName));
}

/// <summary>Builds a TypeError for a generic method whose type arguments cannot be inferred from the call.</summary>
public static ArgumentTypeException TypeErrorForNonInferrableMethod(string name) {
    return new ArgumentTypeException(String.Format("The type arguments for method '{0}' cannot be inferred from the usage. Try specifying the type arguments explicitly.", name));
}

/// <summary>Wraps an arbitrary message in an ArgumentTypeException.</summary>
public static ArgumentTypeException SimpleTypeError(string message) {
    return new ArgumentTypeException(message);
}

/// <summary>Builds a TypeError for a splatted (*) argument that is not a sequence.</summary>
public static ArgumentTypeException InvalidSplatteeError(string name, string typeName) {
    return new ArgumentTypeException(String.Format("{0}() argument after * must be a sequence, not {1}", name, typeName));
}
/// <summary>
/// Invokes a method via reflection, unwrapping TargetInvocationException so the
/// caller sees the exception the target itself raised.
/// </summary>
public static object InvokeMethod(MethodBase mb, object obj, object[] args) {
    object result;
    try {
        result = mb.Invoke(obj, args);
    } catch (TargetInvocationException wrapped) {
        // Surface the real failure instead of reflection's wrapper.
        throw wrapped.InnerException;
    }
    return result;
}
/// <summary>
/// Invokes a constructor via reflection, unwrapping TargetInvocationException so
/// the caller sees the exception the constructor itself raised.
/// </summary>
public static object InvokeConstructor(ConstructorInfo ci, object[] args) {
    object instance;
    try {
        instance = ci.Invoke(args);
    } catch (TargetInvocationException wrapped) {
        // Surface the real failure instead of reflection's wrapper.
        throw wrapped.InnerException;
    }
    return instance;
}
// TODO: just emit this in the generated code
/// <summary>
/// Returns true when the dictionary contains exactly the given member names and,
/// when a type list is supplied, each member's runtime type matches as well.
/// </summary>
public static bool CheckDictionaryMembers(IDictionary dict, string[] names, Type[] types) {
    // Counts must match exactly — no missing and no extra members.
    if (dict.Count != names.Length) {
        return false;
    }

    for (int i = 0; i < names.Length; i++) {
        if (!dict.Contains(names[i])) {
            return false;
        }
        // Optional per-member runtime-type check.
        if (types != null && CompilerHelpers.GetType(dict[names[i]]) != types[i]) {
            return false;
        }
    }

    return true;
}
/// <summary>
/// Filters a member list down to the entries that are strings, preserving order.
/// </summary>
public static IList<string> GetStringMembers(IList<object> members) {
    List<string> strings = new List<string>();
    for (int i = 0; i < members.Count; i++) {
        string name = members[i] as string;
        if (name != null) {
            strings.Add(name);
        }
    }
    return strings;
}
/// <summary>
/// EventInfo.EventHandlerType getter is marked SecuritySafeCritical in CoreCLR
/// This method is to get to the property without using Reflection
/// </summary>
/// <param name="eventInfo">The event whose handler delegate type is requested; must not be null.</param>
/// <returns>The delegate type of the event's handler.</returns>
public static Type GetEventHandlerType(EventInfo eventInfo) {
    ContractUtils.RequiresNotNull(eventInfo, "eventInfo");
    return eventInfo.EventHandlerType;
}
/// <summary>
/// Validates a value being assigned to an event: it must be the same event,
/// either as the EventTracker itself or as a BoundMemberTracker bound to it.
/// Any other value raises an error; a matching value is accepted as a no-op.
/// </summary>
/// <param name="eventTracker">The event being assigned to.</param>
/// <param name="value">The value being assigned; must represent the same event.</param>
// NOTE(review): presumably supports "obj.Event = obj.Event + handler" style
// in-place assignment from dynamic languages — confirm against the binders that emit this call.
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters")] // TODO: fix
public static void SetEvent(EventTracker eventTracker, object value) {
    EventTracker et = value as EventTracker;
    if (et != null) {
        // Unbound tracker: it must be this exact event.
        if (et != eventTracker) {
            throw new ArgumentException(String.Format("expected event from {0}.{1}, got event from {2}.{3}",
                eventTracker.DeclaringType.Name,
                eventTracker.Name,
                et.DeclaringType.Name,
                et.Name));
        }
        return;
    }

    // Otherwise the value must be a member tracker bound to the same event.
    BoundMemberTracker bmt = value as BoundMemberTracker;
    if (bmt == null) throw new ArgumentTypeException("expected bound event, got " + CompilerHelpers.GetType(value).Name);
    if (bmt.BoundTo.MemberType != TrackerTypes.Event) throw new ArgumentTypeException("expected bound event, got " + bmt.BoundTo.MemberType.ToString());

    if (bmt.BoundTo != eventTracker) throw new ArgumentException(String.Format("expected event from {0}.{1}, got event from {2}.{3}",
        eventTracker.DeclaringType.Name,
        eventTracker.Name,
        bmt.BoundTo.DeclaringType.Name,
        bmt.BoundTo.Name));
}
}
}
| |
// <copyright file="FirefoxDriverService.cs" company="WebDriver Committers">
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
using System;
using System.Globalization;
using System.Net;
using System.Text;
using OpenQA.Selenium.Internal;
namespace OpenQA.Selenium.Firefox
{
/// <summary>
/// Exposes the service provided by the native FirefoxDriver executable.
/// </summary>
public sealed class FirefoxDriverService : DriverService
{
// Executable name without the platform suffix; ".exe" is appended on Windows.
private const string DefaultFirefoxDriverServiceFileName = "geckodriver";
private static readonly Uri FirefoxDriverDownloadUrl = new Uri("https://github.com/mozilla/geckodriver/releases");
private bool connectToRunningBrowser;
private bool openBrowserToolbox;
private int browserCommunicationPort = -1; // -1 = unset; only emitted on the command line when > 0
private string browserBinaryPath = string.Empty;
private string host = string.Empty;
private string browserCommunicationHost = string.Empty;
private FirefoxDriverLogLevel loggingLevel = FirefoxDriverLogLevel.Default;
/// <summary>
/// Initializes a new instance of the <see cref="FirefoxDriverService"/> class.
/// </summary>
/// <param name="executablePath">The full path to the Firefox driver executable.</param>
/// <param name="executableFileName">The file name of the Firefox driver executable.</param>
/// <param name="port">The port on which the Firefox driver executable should listen.</param>
private FirefoxDriverService(string executablePath, string executableFileName, int port)
    : base(executablePath, port, executableFileName, FirefoxDriverDownloadUrl)
{
}

/// <summary>
/// Gets or sets the location of the Firefox binary executable.
/// </summary>
/// <remarks>When set, this is passed to the driver via the --binary argument.</remarks>
public string FirefoxBinaryPath
{
    get { return this.browserBinaryPath; }
    set { this.browserBinaryPath = value; }
}

/// <summary>
/// Gets or sets the port used by the driver executable to communicate with the browser.
/// </summary>
/// <remarks>Only passed to the driver (--marionette-port) when greater than zero.</remarks>
public int BrowserCommunicationPort
{
    get { return this.browserCommunicationPort; }
    set { this.browserCommunicationPort = value; }
}

/// <summary>
/// Gets or sets the value of the IP address of the host adapter used by the driver
/// executable to communicate with the browser.
/// </summary>
public string BrowserCommunicationHost
{
    get { return this.browserCommunicationHost; }
    set { this.browserCommunicationHost = value; }
}

/// <summary>
/// Gets or sets the value of the IP address of the host adapter on which the
/// service should listen for connections.
/// </summary>
public string Host
{
    get { return this.host; }
    set { this.host = value; }
}

/// <summary>
/// Gets or sets a value indicating whether to connect to an already-running
/// instance of Firefox.
/// </summary>
public bool ConnectToRunningBrowser
{
    get { return this.connectToRunningBrowser; }
    set { this.connectToRunningBrowser = value; }
}

/// <summary>
/// Gets or sets a value indicating whether to open the Firefox Browser Toolbox
/// when Firefox is launched.
/// </summary>
public bool OpenBrowserToolbox
{
    get { return this.openBrowserToolbox; }
    set { this.openBrowserToolbox = value; }
}

/// <summary>
/// Gets or sets the level at which log output is displayed.
/// </summary>
/// <remarks>
/// This is largely equivalent to setting the <see cref="FirefoxOptions.LogLevel"/>
/// property, except the log level is set when the driver launches, instead of
/// when the browser is launched, meaning that initial driver logging before
/// initiation of a session can be controlled.
/// </remarks>
public FirefoxDriverLogLevel LogLevel
{
    get { return this.loggingLevel; }
    set { this.loggingLevel = value; }
}

/// <summary>
/// Gets a value indicating the time to wait for the service to terminate before forcing it to terminate.
/// </summary>
protected override TimeSpan TerminationTimeout
{
    // Use a very small timeout for terminating the Firefox driver,
    // because the executable does not have a clean shutdown command,
    // which means we have to kill the process. Using a short timeout
    // gets us to the termination point much faster.
    get { return TimeSpan.FromMilliseconds(100); }
}

/// <summary>
/// Gets a value indicating whether the service has a shutdown API that can be called to terminate
/// it gracefully before forcing a termination.
/// </summary>
protected override bool HasShutdown
{
    // The Firefox driver executable does not have a clean shutdown command,
    // which means we have to kill the process.
    get { return false; }
}
/// <summary>
/// Gets the command-line arguments for the driver service.
/// </summary>
protected override string CommandLineArguments
{
    get
    {
        StringBuilder argsBuilder = new StringBuilder();
        if (this.connectToRunningBrowser)
        {
            argsBuilder.Append(" --connect-existing");
        }
        else
        {
            // A fresh WebSocket port is chosen each time the arguments are built.
            argsBuilder.AppendFormat(CultureInfo.InvariantCulture, " --websocket-port {0}", PortUtilities.FindFreePort());
        }

        if (this.browserCommunicationPort > 0)
        {
            argsBuilder.AppendFormat(CultureInfo.InvariantCulture, " --marionette-port {0}", this.browserCommunicationPort);
        }

        if (!string.IsNullOrEmpty(this.browserCommunicationHost))
        {
            // BUGFIX: this previously formatted this.host, so the configured
            // BrowserCommunicationHost value was never passed to the driver.
            argsBuilder.AppendFormat(CultureInfo.InvariantCulture, " --marionette-host \"{0}\"", this.browserCommunicationHost);
        }

        if (this.Port > 0)
        {
            argsBuilder.AppendFormat(CultureInfo.InvariantCulture, " --port {0}", this.Port);
        }

        if (!string.IsNullOrEmpty(this.browserBinaryPath))
        {
            argsBuilder.AppendFormat(CultureInfo.InvariantCulture, " --binary \"{0}\"", this.browserBinaryPath);
        }

        if (!string.IsNullOrEmpty(this.host))
        {
            argsBuilder.AppendFormat(CultureInfo.InvariantCulture, " --host \"{0}\"", this.host);
        }

        if (this.loggingLevel != FirefoxDriverLogLevel.Default)
        {
            // The driver expects lowercase log level names (e.g. "trace").
            argsBuilder.AppendFormat(CultureInfo.InvariantCulture, " --log {0}", this.loggingLevel.ToString().ToLowerInvariant());
        }

        if (this.openBrowserToolbox)
        {
            argsBuilder.Append(" --jsdebugger");
        }

        return argsBuilder.ToString().Trim();
    }
}
/// <summary>
/// Creates a default instance of the FirefoxDriverService.
/// </summary>
/// <returns>A FirefoxDriverService that implements default settings.</returns>
public static FirefoxDriverService CreateDefaultService()
{
    // Locate the executable on disk (its name varies per platform) before construction.
    string serviceDirectory = DriverService.FindDriverServiceExecutable(FirefoxDriverServiceFileName(), FirefoxDriverDownloadUrl);
    return CreateDefaultService(serviceDirectory);
}

/// <summary>
/// Creates a default instance of the FirefoxDriverService using a specified path to the Firefox driver executable.
/// </summary>
/// <param name="driverPath">The directory containing the Firefox driver executable.</param>
/// <returns>A FirefoxDriverService using a random port.</returns>
public static FirefoxDriverService CreateDefaultService(string driverPath)
{
    return CreateDefaultService(driverPath, FirefoxDriverServiceFileName());
}

/// <summary>
/// Creates a default instance of the FirefoxDriverService using a specified path to the Firefox driver executable with the given name.
/// </summary>
/// <param name="driverPath">The directory containing the Firefox driver executable.</param>
/// <param name="driverExecutableFileName">The name of the Firefox driver executable file.</param>
/// <returns>A FirefoxDriverService using a random port.</returns>
public static FirefoxDriverService CreateDefaultService(string driverPath, string driverExecutableFileName)
{
    // Bind the service to a free ephemeral port chosen at creation time.
    return new FirefoxDriverService(driverPath, driverExecutableFileName, PortUtilities.FindFreePort());
}
/// <summary>
/// Returns the Firefox driver filename for the currently running platform
/// </summary>
/// <returns>The file name of the Firefox driver service executable.</returns>
private static string FirefoxDriverServiceFileName()
{
    // Unfortunately, detecting the currently running platform isn't as
    // straightforward as you might hope.
    // See: http://mono.wikia.com/wiki/Detecting_the_execution_platform
    // and https://msdn.microsoft.com/en-us/library/3a8hyw88(v=vs.110).aspx
    // Mono historically reported Unix with this nonstandard PlatformID value.
    const int PlatformMonoUnixValue = 128;

    PlatformID platform = Environment.OSVersion.Platform;
    switch (platform)
    {
        case PlatformID.Win32NT:
        case PlatformID.Win32S:
        case PlatformID.Win32Windows:
        case PlatformID.WinCE:
            // Windows variants need the .exe suffix.
            return DefaultFirefoxDriverServiceFileName + ".exe";

        case PlatformID.MacOSX:
        case PlatformID.Unix:
            return DefaultFirefoxDriverServiceFileName;

        // PlatformID.Xbox intentionally falls through to default and is rejected there.
        default:
            if ((int)platform == PlatformMonoUnixValue)
            {
                return DefaultFirefoxDriverServiceFileName;
            }

            throw new WebDriverException("Unsupported platform: " + platform);
    }
}
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="XmlEventCache.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <owner current="true" primary="true">[....]</owner>
//------------------------------------------------------------------------------
using System.Collections.Generic;
using System.Diagnostics;
using System.Text;
using System.Xml.Schema;
using System.Xml.Xsl.Runtime;
namespace System.Xml {
/// <summary>
/// Caches sequence of XmlEvents so that they can be replayed later.
/// </summary>
internal sealed class XmlEventCache : XmlRawWriter {
private List<XmlEvent[]> pages;     // All event pages (null until the first non-text event forces paging)
private XmlEvent[] pageCurr;        // Page that is currently being built
private int pageSize;               // Number of events in pageCurr (also the next free slot index)
private bool hasRootNode;           // True if the cached document has a root node, false if it's a fragment
private StringConcat singleText;    // If document consists of a single text node, cache it here rather than creating pages
private string baseUri;             // Base Uri of document
/// <summary>
/// Identifies which XmlWriter/XmlRawWriter call each cached XmlEvent records.
/// </summary>
private enum XmlEventType {
    /// <summary>
    /// Sentinel: appended by EndEvents to terminate the sequence; also the
    /// default value of never-initialized slots at the end of a page.
    /// </summary>
    Unknown = 0,
    DocType,
    StartElem,
    StartAttr,
    EndAttr,
    CData,
    Comment,
    PI,
    Whitespace,
    String,
    Raw,
    EntRef,
    CharEnt,
    SurrCharEnt,
    Base64,
    BinHex,
    XmlDecl1,       // WriteXmlDeclaration(XmlStandalone) — raw-writer only
    XmlDecl2,       // WriteXmlDeclaration(string) — raw-writer only
    StartContent,
    EndElem,
    FullEndElem,
    Nmsp,
    EndBase64,
    Close,
    Flush,
    Dispose,
}
#if DEBUG
// Keep pages tiny in debug builds so page-rollover logic is exercised.
private const int InitialPageSize = 4;
#else
private const int InitialPageSize = 32;
#endif

/// <summary>
/// Construct a cache for a document (or fragment) with the given base URI.
/// </summary>
public XmlEventCache(string baseUri, bool hasRootNode) {
    this.baseUri = baseUri;
    this.hasRootNode = hasRootNode;
}

/// <summary>
/// Mark the end of the cached sequence. A trailing Unknown event acts as the
/// end-of-stream sentinel; it is not needed when the whole cache is a single
/// top-level text node held in singleText.
/// </summary>
public void EndEvents() {
    if (this.singleText.Count == 0)
        AddEvent(XmlEventType.Unknown);
}
//-----------------------------------------------
// XmlEventCache methods
//-----------------------------------------------

/// <summary>
/// Return Base Uri of the document.
/// </summary>
public string BaseUri {
    get { return this.baseUri; }
}

/// <summary>
/// Return true if the cached document has a root node, false if it's a fragment.
/// </summary>
public bool HasRootNode {
    get { return this.hasRootNode; }
}
/// <summary>
/// Replay all cached events to an XmlWriter.
/// </summary>
/// <param name="writer">Destination writer; when it is an XmlRawWriter, the
/// raw-writer-only events (xml declarations, namespace declarations, raw
/// end-elements, EndBase64) are replayed as well.</param>
public void EventsToWriter(XmlWriter writer) {
    XmlEvent[] page;
    int idxPage, idxEvent;
    byte[] bytes;
    char[] chars;
    XmlRawWriter rawWriter;

    // Special-case single text node at the top-level
    if (this.singleText.Count != 0) {
        writer.WriteString(this.singleText.GetResult());
        return;
    }

    rawWriter = writer as XmlRawWriter;

    // Loop over set of pages
    for (idxPage = 0; idxPage < this.pages.Count; idxPage++) {
        page = this.pages[idxPage];

        // Loop over events in each page
        for (idxEvent = 0; idxEvent < page.Length; idxEvent++) {
            switch (page[idxEvent].EventType) {
                case XmlEventType.Unknown:
                    // No more events; the sentinel only ever appears on the last page.
                    Debug.Assert(idxPage + 1 == this.pages.Count);
                    return;

                case XmlEventType.DocType:
                    writer.WriteDocType(page[idxEvent].String1, page[idxEvent].String2, page[idxEvent].String3, (string) page[idxEvent].Object);
                    break;

                case XmlEventType.StartElem:
                    writer.WriteStartElement(page[idxEvent].String1, page[idxEvent].String2, page[idxEvent].String3);
                    break;

                case XmlEventType.StartAttr:
                    writer.WriteStartAttribute(page[idxEvent].String1, page[idxEvent].String2, page[idxEvent].String3);
                    break;

                case XmlEventType.EndAttr:
                    writer.WriteEndAttribute();
                    break;

                case XmlEventType.CData:
                    writer.WriteCData(page[idxEvent].String1);
                    break;

                case XmlEventType.Comment:
                    writer.WriteComment(page[idxEvent].String1);
                    break;

                case XmlEventType.PI:
                    writer.WriteProcessingInstruction(page[idxEvent].String1, page[idxEvent].String2);
                    break;

                case XmlEventType.Whitespace:
                    writer.WriteWhitespace(page[idxEvent].String1);
                    break;

                case XmlEventType.String:
                    writer.WriteString(page[idxEvent].String1);
                    break;

                case XmlEventType.Raw:
                    writer.WriteRaw(page[idxEvent].String1);
                    break;

                case XmlEventType.EntRef:
                    writer.WriteEntityRef(page[idxEvent].String1);
                    break;

                case XmlEventType.CharEnt:
                    writer.WriteCharEntity((char) page[idxEvent].Object);
                    break;

                case XmlEventType.SurrCharEnt:
                    // Object holds {lowChar, highChar} as stored by WriteSurrogateCharEntity.
                    chars = (char[]) page[idxEvent].Object;
                    writer.WriteSurrogateCharEntity(chars[0], chars[1]);
                    break;

                case XmlEventType.Base64:
                    bytes = (byte[]) page[idxEvent].Object;
                    writer.WriteBase64(bytes, 0, bytes.Length);
                    break;

                case XmlEventType.BinHex:
                    bytes = (byte[]) page[idxEvent].Object;
                    writer.WriteBinHex(bytes, 0, bytes.Length);
                    break;

                case XmlEventType.XmlDecl1:
                    // Raw-writer-only events are silently skipped for ordinary writers.
                    if (rawWriter != null)
                        rawWriter.WriteXmlDeclaration((XmlStandalone) page[idxEvent].Object);
                    break;

                case XmlEventType.XmlDecl2:
                    if (rawWriter != null)
                        rawWriter.WriteXmlDeclaration(page[idxEvent].String1);
                    break;

                case XmlEventType.StartContent:
                    if (rawWriter != null)
                        rawWriter.StartElementContent();
                    break;

                case XmlEventType.EndElem:
                    if (rawWriter != null)
                        rawWriter.WriteEndElement(page[idxEvent].String1, page[idxEvent].String2, page[idxEvent].String3);
                    else
                        writer.WriteEndElement();
                    break;

                case XmlEventType.FullEndElem:
                    if (rawWriter != null)
                        rawWriter.WriteFullEndElement(page[idxEvent].String1, page[idxEvent].String2, page[idxEvent].String3);
                    else
                        writer.WriteFullEndElement();
                    break;

                case XmlEventType.Nmsp:
                    if (rawWriter != null)
                        rawWriter.WriteNamespaceDeclaration(page[idxEvent].String1, page[idxEvent].String2);
                    else
                        writer.WriteAttributeString("xmlns", page[idxEvent].String1, XmlReservedNs.NsXmlNs, page[idxEvent].String2);
                    break;

                case XmlEventType.EndBase64:
                    if (rawWriter != null)
                        rawWriter.WriteEndBase64();
                    break;

                case XmlEventType.Close:
                    writer.Close();
                    break;

                case XmlEventType.Flush:
                    writer.Flush();
                    break;

                case XmlEventType.Dispose:
                    ((IDisposable)writer).Dispose();
                    break;

                default:
                    Debug.Assert(false, "Unknown event: " + page[idxEvent].EventType);
                    break;
            }
        }
    }

    Debug.Assert(false, "Unknown event should be added to end of event sequence.");
}
/// <summary>
/// Concatenate all element text and atomic value events and return the resulting string.
/// </summary>
/// <returns>The concatenation of all String/Whitespace/Raw/CData event text that
/// does not occur inside an attribute.</returns>
public string EventsToString() {
    StringBuilder bldr;
    XmlEvent[] page;
    int idxPage, idxEvent;
    bool inAttr;

    // Special-case single text node at the top-level
    if (this.singleText.Count != 0)
        return this.singleText.GetResult();

    bldr = new StringBuilder();

    // Loop over set of pages
    inAttr = false;
    for (idxPage = 0; idxPage < this.pages.Count; idxPage++) {
        page = this.pages[idxPage];

        // Loop over events in each page
        for (idxEvent = 0; idxEvent < page.Length; idxEvent++) {
            switch (page[idxEvent].EventType) {
                case XmlEventType.Unknown:
                    // No more events; the sentinel only ever appears on the last page.
                    Debug.Assert(idxPage + 1 == this.pages.Count);
                    return bldr.ToString();

                case XmlEventType.String:
                case XmlEventType.Whitespace:
                case XmlEventType.Raw:
                case XmlEventType.CData:
                    // Append text
                    if (!inAttr)
                        bldr.Append(page[idxEvent].String1);
                    break;

                case XmlEventType.StartAttr:
                    // Don't append text or atomic values if they appear within attributes
                    inAttr = true;
                    break;

                case XmlEventType.EndAttr:
                    // No longer in an attribute
                    inAttr = false;
                    break;
            }
        }
    }

    Debug.Assert(false, "Unknown event should be added to end of event sequence.");
    return string.Empty;
}
//-----------------------------------------------
// XmlWriter interface
//-----------------------------------------------
// Each override below records the call as an event so that it can be replayed
// later by EventsToWriter.

/// <summary>The cache writer exposes no settings.</summary>
public override XmlWriterSettings Settings {
    get { return null; }
}

public override void WriteDocType(string name, string pubid, string sysid, string subset) {
    AddEvent(XmlEventType.DocType, name, pubid, sysid, subset);
}

public override void WriteStartElement(string prefix, string localName, string ns) {
    AddEvent(XmlEventType.StartElem, prefix, localName, ns);
}

public override void WriteStartAttribute(string prefix, string localName, string ns) {
    AddEvent(XmlEventType.StartAttr, prefix, localName, ns);
}

public override void WriteEndAttribute() {
    AddEvent(XmlEventType.EndAttr);
}

public override void WriteCData(string text) {
    AddEvent(XmlEventType.CData, text);
}

public override void WriteComment(string text) {
    AddEvent(XmlEventType.Comment, text);
}

public override void WriteProcessingInstruction(string name, string text) {
    AddEvent(XmlEventType.PI, name, text);
}

public override void WriteWhitespace(string ws) {
    AddEvent(XmlEventType.Whitespace, ws);
}

public override void WriteString(string text) {
    // Special-case single text node at the top level: until some other event
    // forces paging, accumulate text in singleText instead of creating pages.
    if (this.pages == null) {
        this.singleText.ConcatNoDelimiter(text);
    }
    else {
        AddEvent(XmlEventType.String, text);
    }
}

public override void WriteChars(char[] buffer, int index, int count) {
    WriteString(new string(buffer, index, count));
}

public override void WriteRaw(char[] buffer, int index, int count) {
    WriteRaw(new string(buffer, index, count));
}

public override void WriteRaw(string data) {
    AddEvent(XmlEventType.Raw, data);
}

public override void WriteEntityRef(string name) {
    AddEvent(XmlEventType.EntRef, name);
}

public override void WriteCharEntity(char ch) {
    // Boxed so it fits the event's single object slot.
    AddEvent(XmlEventType.CharEnt, (object) ch);
}

public override void WriteSurrogateCharEntity(char lowChar, char highChar) {
    // Save high and low characters
    char[] chars = {lowChar, highChar};
    AddEvent(XmlEventType.SurrCharEnt, (object) chars);
}

public override void WriteBase64(byte[] buffer, int index, int count) {
    // Copy the requested window so the caller may reuse its buffer.
    AddEvent(XmlEventType.Base64, (object) ToBytes(buffer, index, count));
}

public override void WriteBinHex(byte[] buffer, int index, int count) {
    AddEvent(XmlEventType.BinHex, (object) ToBytes(buffer, index, count));
}

public override void Close() {
    AddEvent(XmlEventType.Close);
}

public override void Flush() {
    AddEvent(XmlEventType.Flush);
}
/// <summary>
/// All other WriteValue methods are implemented by XmlWriter to delegate to WriteValue(object) or WriteValue(string), so
/// only these two methods need to be implemented.
/// </summary>
public override void WriteValue(object value) {
    // Convert the value to its untyped string form before caching it as text.
    WriteString(XmlUntypedConverter.Untyped.ToString(value, this.resolver));
}

public override void WriteValue(string value) {
    WriteString(value);
}

protected override void Dispose(bool disposing) {
    try {
        if (disposing) {
            // Record the Dispose so that replay disposes the target writer too.
            AddEvent(XmlEventType.Dispose);
        }
    }
    finally {
        base.Dispose(disposing);
    }
}
//-----------------------------------------------
// XmlRawWriter interface
//-----------------------------------------------
// These raw-writer events are only replayed when the replay target is itself
// an XmlRawWriter (see EventsToWriter).

internal override void WriteXmlDeclaration(XmlStandalone standalone) {
    AddEvent(XmlEventType.XmlDecl1, (object) standalone);
}

internal override void WriteXmlDeclaration(string xmldecl) {
    AddEvent(XmlEventType.XmlDecl2, xmldecl);
}

internal override void StartElementContent() {
    AddEvent(XmlEventType.StartContent);
}

internal override void WriteEndElement(string prefix, string localName, string ns) {
    AddEvent(XmlEventType.EndElem, prefix, localName, ns);
}

internal override void WriteFullEndElement(string prefix, string localName, string ns) {
    AddEvent(XmlEventType.FullEndElem, prefix, localName, ns);
}

internal override void WriteNamespaceDeclaration(string prefix, string ns) {
    AddEvent(XmlEventType.Nmsp, prefix, ns);
}

internal override void WriteEndBase64() {
    AddEvent(XmlEventType.EndBase64);
}
//-----------------------------------------------
// Helper methods
//-----------------------------------------------
// AddEvent overloads reserve the next slot in the current page (via NewEvent)
// and initialize it with the event type plus up to three strings and one object.

private void AddEvent(XmlEventType eventType) {
    int idx = NewEvent();
    this.pageCurr[idx].InitEvent(eventType);
}

private void AddEvent(XmlEventType eventType, string s1) {
    int idx = NewEvent();
    this.pageCurr[idx].InitEvent(eventType, s1);
}

private void AddEvent(XmlEventType eventType, string s1, string s2) {
    int idx = NewEvent();
    this.pageCurr[idx].InitEvent(eventType, s1, s2);
}

private void AddEvent(XmlEventType eventType, string s1, string s2, string s3) {
    int idx = NewEvent();
    this.pageCurr[idx].InitEvent(eventType, s1, s2, s3);
}

private void AddEvent(XmlEventType eventType, string s1, string s2, string s3, object o) {
    int idx = NewEvent();
    this.pageCurr[idx].InitEvent(eventType, s1, s2, s3, o);
}

private void AddEvent(XmlEventType eventType, object o) {
    int idx = NewEvent();
    this.pageCurr[idx].InitEvent(eventType, o);
}
/// <summary>
/// Reserve a slot for a new event and return its index within pageCurr.
/// </summary>
private int NewEvent() {
    if (this.pages == null) {
        // First non-text event: allocate the first page. Any text accumulated
        // in singleText becomes a leading String event so ordering is preserved.
        this.pages = new List<XmlEvent[]>();
        this.pageCurr = new XmlEvent[InitialPageSize];
        this.pages.Add(this.pageCurr);

        if (this.singleText.Count != 0) {
            // Review: There is no need to concatenate the strings here
            this.pageCurr[0].InitEvent(XmlEventType.String, this.singleText.GetResult());
            this.pageSize++;
            this.singleText.Clear();
        }
    }
    else if (this.pageSize >= this.pageCurr.Length) {
        // Create new page (pages double in size as the cache grows).
        this.pageCurr = new XmlEvent[this.pageSize * 2];
        this.pages.Add(this.pageCurr);
        this.pageSize = 0;
    }

    // pageSize doubles as "next free index in the current page".
    return this.pageSize++;
}
/// <summary>
/// Create a standalone buffer that doesn't need an index or count passed along with it.
/// </summary>
/// <param name="buffer">Source buffer.</param>
/// <param name="index">Offset of the first byte of the requested window.</param>
/// <param name="count">Number of bytes requested.</param>
/// <returns>The original buffer when the window covers it entirely; otherwise a
/// copy containing exactly the requested bytes (clamped to the end of the buffer).</returns>
private static byte[] ToBytes(byte[] buffer, int index, int count) {
    if (index == 0 && count == buffer.Length) {
        // The requested window is the whole buffer; reuse it without copying.
        return buffer;
    }

    // Clamp count so the copy never reads past the end of the source buffer.
    // BUGFIX: the previous comparison was inverted (remaining > count), which
    // *grew* count and copied bytes beyond the requested window, so replaying
    // a cached WriteBase64/WriteBinHex emitted extra data.
    if (buffer.Length - index < count) {
        count = buffer.Length - index;
    }

    byte[] standalone = new byte[count];
    Array.Copy(buffer, index, standalone, 0, count);
    return standalone;
}
/// <summary>
/// Caches information for XML events like BeginElement, String, and EndAttribute so that they can be replayed later.
/// </summary>
private struct XmlEvent {
    private XmlEventType eventType; // Which writer call this slot records
    private string s1;              // Up to three string arguments of the call...
    private string s2;
    private string s3;
    private object o;               // ...plus one object argument (bytes, chars, boxed enum, etc.)

    // InitEvent overloads fill only the fields the event type needs; the rest
    // keep their default values (slots live inside pre-allocated page arrays).

    public void InitEvent(XmlEventType eventType) {
        this.eventType = eventType;
    }

    public void InitEvent(XmlEventType eventType, string s1) {
        this.eventType = eventType;
        this.s1 = s1;
    }

    public void InitEvent(XmlEventType eventType, string s1, string s2) {
        this.eventType = eventType;
        this.s1 = s1;
        this.s2 = s2;
    }

    public void InitEvent(XmlEventType eventType, string s1, string s2, string s3) {
        this.eventType = eventType;
        this.s1 = s1;
        this.s2 = s2;
        this.s3 = s3;
    }

    public void InitEvent(XmlEventType eventType, string s1, string s2, string s3, object o) {
        this.eventType = eventType;
        this.s1 = s1;
        this.s2 = s2;
        this.s3 = s3;
        this.o = o;
    }

    public void InitEvent(XmlEventType eventType, object o) {
        this.eventType = eventType;
        this.o = o;
    }

    public XmlEventType EventType {
        get { return this.eventType; }
    }

    public string String1 {
        get { return this.s1; }
    }

    public string String2 {
        get { return this.s2; }
    }

    public string String3 {
        get { return this.s3; }
    }

    public object Object {
        get { return this.o; }
    }
}
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using gcdv = Google.Cloud.DataFusion.V1;
using sys = System;
namespace Google.Cloud.DataFusion.V1
{
/// <summary>Resource name for the <c>Instance</c> resource.</summary>
/// <remarks>
/// Generated code. Equality, hashing and formatting are all driven by the canonical
/// string form of the resource name; do not hand-edit the pattern below.
/// </remarks>
public sealed partial class InstanceName : gax::IResourceName, sys::IEquatable<InstanceName>
{
/// <summary>The possible contents of <see cref="InstanceName"/>.</summary>
public enum ResourceNameType
{
/// <summary>An unparsed resource name.</summary>
Unparsed = 0,
/// <summary>
/// A resource name with pattern <c>projects/{project}/locations/{location}/instances/{instance}</c>.
/// </summary>
ProjectLocationInstance = 1,
}
// Compiled path template for the single known pattern; used by Format, TryParse and ToString.
private static gax::PathTemplate s_projectLocationInstance = new gax::PathTemplate("projects/{project}/locations/{location}/instances/{instance}");
/// <summary>Creates a <see cref="InstanceName"/> containing an unparsed resource name.</summary>
/// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
/// <returns>
/// A new instance of <see cref="InstanceName"/> containing the provided <paramref name="unparsedResourceName"/>
/// .
/// </returns>
public static InstanceName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
new InstanceName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));
/// <summary>
/// Creates a <see cref="InstanceName"/> with the pattern
/// <c>projects/{project}/locations/{location}/instances/{instance}</c>.
/// </summary>
/// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="instanceId">The <c>Instance</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>A new instance of <see cref="InstanceName"/> constructed from the provided ids.</returns>
public static InstanceName FromProjectLocationInstance(string projectId, string locationId, string instanceId) =>
new InstanceName(ResourceNameType.ProjectLocationInstance, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), instanceId: gax::GaxPreconditions.CheckNotNullOrEmpty(instanceId, nameof(instanceId)));
/// <summary>
/// Formats the IDs into the string representation of this <see cref="InstanceName"/> with pattern
/// <c>projects/{project}/locations/{location}/instances/{instance}</c>.
/// </summary>
/// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="instanceId">The <c>Instance</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="InstanceName"/> with pattern
/// <c>projects/{project}/locations/{location}/instances/{instance}</c>.
/// </returns>
public static string Format(string projectId, string locationId, string instanceId) =>
FormatProjectLocationInstance(projectId, locationId, instanceId);
/// <summary>
/// Formats the IDs into the string representation of this <see cref="InstanceName"/> with pattern
/// <c>projects/{project}/locations/{location}/instances/{instance}</c>.
/// </summary>
/// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="instanceId">The <c>Instance</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="InstanceName"/> with pattern
/// <c>projects/{project}/locations/{location}/instances/{instance}</c>.
/// </returns>
public static string FormatProjectLocationInstance(string projectId, string locationId, string instanceId) =>
s_projectLocationInstance.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), gax::GaxPreconditions.CheckNotNullOrEmpty(instanceId, nameof(instanceId)));
/// <summary>Parses the given resource name string into a new <see cref="InstanceName"/> instance.</summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>projects/{project}/locations/{location}/instances/{instance}</c></description></item>
/// </list>
/// </remarks>
/// <param name="instanceName">The resource name in string form. Must not be <c>null</c>.</param>
/// <returns>The parsed <see cref="InstanceName"/> if successful.</returns>
public static InstanceName Parse(string instanceName) => Parse(instanceName, false);
/// <summary>
/// Parses the given resource name string into a new <see cref="InstanceName"/> instance; optionally allowing an
/// unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>projects/{project}/locations/{location}/instances/{instance}</c></description></item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="instanceName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <returns>The parsed <see cref="InstanceName"/> if successful.</returns>
public static InstanceName Parse(string instanceName, bool allowUnparsed) =>
TryParse(instanceName, allowUnparsed, out InstanceName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="InstanceName"/> instance.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>projects/{project}/locations/{location}/instances/{instance}</c></description></item>
/// </list>
/// </remarks>
/// <param name="instanceName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="result">
/// When this method returns, the parsed <see cref="InstanceName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string instanceName, out InstanceName result) => TryParse(instanceName, false, out result);
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="InstanceName"/> instance; optionally
/// allowing an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>projects/{project}/locations/{location}/instances/{instance}</c></description></item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="instanceName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <param name="result">
/// When this method returns, the parsed <see cref="InstanceName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string instanceName, bool allowUnparsed, out InstanceName result)
{
gax::GaxPreconditions.CheckNotNull(instanceName, nameof(instanceName));
gax::TemplatedResourceName resourceName;
// Try the one known pattern first.
if (s_projectLocationInstance.TryParseName(instanceName, out resourceName))
{
result = FromProjectLocationInstance(resourceName[0], resourceName[1], resourceName[2]);
return true;
}
// Optionally fall back to storing the raw name unparsed.
if (allowUnparsed)
{
if (gax::UnparsedResourceName.TryParse(instanceName, out gax::UnparsedResourceName unparsedResourceName))
{
result = FromUnparsed(unparsedResourceName);
return true;
}
}
result = null;
return false;
}
// All factory methods and the public constructor funnel through here; id parameters
// that do not apply to the chosen ResourceNameType remain null.
private InstanceName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string instanceId = null, string locationId = null, string projectId = null)
{
Type = type;
UnparsedResource = unparsedResourceName;
InstanceId = instanceId;
LocationId = locationId;
ProjectId = projectId;
}
/// <summary>
/// Constructs a new instance of a <see cref="InstanceName"/> class from the component parts of pattern
/// <c>projects/{project}/locations/{location}/instances/{instance}</c>
/// </summary>
/// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="instanceId">The <c>Instance</c> ID. Must not be <c>null</c> or empty.</param>
public InstanceName(string projectId, string locationId, string instanceId) : this(ResourceNameType.ProjectLocationInstance, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), instanceId: gax::GaxPreconditions.CheckNotNullOrEmpty(instanceId, nameof(instanceId)))
{
}
/// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
public ResourceNameType Type { get; }
/// <summary>
/// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
/// unparsed resource name.
/// </summary>
public gax::UnparsedResourceName UnparsedResource { get; }
/// <summary>
/// The <c>Instance</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string InstanceId { get; }
/// <summary>
/// The <c>Location</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string LocationId { get; }
/// <summary>
/// The <c>Project</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string ProjectId { get; }
/// <summary>Whether this instance contains a resource name with a known pattern.</summary>
public bool IsKnownPattern => Type != ResourceNameType.Unparsed;
/// <summary>The string representation of the resource name.</summary>
/// <returns>The string representation of the resource name.</returns>
public override string ToString()
{
switch (Type)
{
case ResourceNameType.Unparsed: return UnparsedResource.ToString();
case ResourceNameType.ProjectLocationInstance: return s_projectLocationInstance.Expand(ProjectId, LocationId, InstanceId);
default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
}
}
// Identity is defined by the canonical string representation produced by ToString().
/// <summary>Returns a hash code for this resource name.</summary>
public override int GetHashCode() => ToString().GetHashCode();
/// <inheritdoc/>
public override bool Equals(object obj) => Equals(obj as InstanceName);
/// <inheritdoc/>
public bool Equals(InstanceName other) => ToString() == other?.ToString();
/// <inheritdoc/>
public static bool operator ==(InstanceName a, InstanceName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);
/// <inheritdoc/>
public static bool operator !=(InstanceName a, InstanceName b) => !(a == b);
}
public partial class Instance
{
    /// <summary>
    /// <see cref="gcdv::InstanceName"/>-typed view over the <see cref="Name"/> resource name property.
    /// </summary>
    public gcdv::InstanceName InstanceName
    {
        get
        {
            // An empty underlying resource name maps to a null typed view.
            if (string.IsNullOrEmpty(Name))
            {
                return null;
            }
            return gcdv::InstanceName.Parse(Name, allowUnparsed: true);
        }
        set
        {
            // A null typed view clears the underlying resource name.
            Name = value == null ? "" : value.ToString();
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Drawing;
using System.IO;
namespace Palaso.i18n
{
public class StringCatalog
{
    // Map from msgid (stored trimmed) to msgstr (stored trimmed).
    private Dictionary<string, string> _catalog;
    // Last-constructed catalog; the static Get() methods read from it.
    private static StringCatalog _singleton;
    private static Font _font;
    // True when "test" was given as the po path; translations are then prefixed with '*'.
    private static bool _inInternationalizationTestMode;

    /// <summary>
    /// Construct with no actual string file
    /// </summary>
    public StringCatalog(): this(String.Empty, 9)
    {
    }

    /// <summary>
    /// Construct with no actual string file
    /// </summary>
    public StringCatalog(string labelFontName, float labelFontSizeInPoints)
    {
        Init();
        SetupUIFont(labelFontName, labelFontSizeInPoints );
    }

    // Parser states for the line-oriented .po reader.
    private enum State
    {
        InMsgId,
        InMsgStr,
        Reset
    } ;

    /// <summary>
    /// Construct from a gettext .po file. Passing the literal path "test" switches the
    /// catalog into internationalization-test mode instead of reading a file.
    /// </summary>
    public StringCatalog(string pathToPoFile, string labelFontName, float labelFontSizeInPoints)
    {
        Init();
        _inInternationalizationTestMode = pathToPoFile == "test";
        if (!_inInternationalizationTestMode)
        {
            using (var reader = File.OpenText(pathToPoFile))
            {
                string id = "";
                string message = "";
                string line = reader.ReadLine();
                var state = State.Reset;
                while (line != null)
                {
                    switch (state)
                    {
                        case State.Reset:
                            if (line.StartsWith("msgid"))
                            {
                                state = State.InMsgId;
                                id = GetStringBetweenQuotes(line);
                            }
                            break;
                        case State.InMsgId:
                            if (line.StartsWith("msgstr"))
                            {
                                state = State.InMsgStr;
                                message = GetStringBetweenQuotes(line);
                            }
                            else if (line.StartsWith("\""))
                            {
                                // Continuation line of a multi-line msgid.
                                id += GetStringBetweenQuotes(line);
                            }
                            break;
                        case State.InMsgStr:
                            if (string.IsNullOrEmpty(line))
                            {
                                // A blank line terminates the entry.
                                state = State.Reset;
                                AddEntry(id, message);
                                id = "";
                                message = "";
                            }
                            else if (line.StartsWith("\""))
                            {
                                // Continuation line of a multi-line msgstr.
                                message += GetStringBetweenQuotes(line);
                            }
                            break;
                    }
                    line = reader.ReadLine();
                }
                // BUG FIX: the final entry (a file with no trailing blank line) was previously
                // added WITHOUT trimming, unlike entries added inside the loop. Route it
                // through the same helper so all entries are stored consistently.
                AddEntry(id, message);
            }
        }
        SetupUIFont(labelFontName, labelFontSizeInPoints);
    }

    /// <summary>
    /// Adds one msgid/msgstr pair to the catalog, trimming both and silently
    /// skipping empty or duplicate entries.
    /// </summary>
    private void AddEntry(string id, string message)
    {
        if (string.IsNullOrEmpty(id) || string.IsNullOrEmpty(message))
        {
            return;
        }
        string key = id.Trim();
        // BUG FIX: check the trimmed key (the form actually stored). The old code checked
        // the untrimmed id, so two ids that trim to the same key could slip past the check
        // and crash Dictionary.Add with a duplicate-key exception.
        if (!_catalog.ContainsKey(key))
        {
            _catalog.Add(key, message.Trim());
        }
    }

    /// <summary>
    /// Chooses the UI label font: a generic sans-serif fallback, or the requested
    /// font when one is named and can be created.
    /// </summary>
    private void SetupUIFont(string labelFontName, float labelFontSizeInPoints)
    {
        if (_inInternationalizationTestMode)
        {
            LabelFont = new Font(FontFamily.GenericSansSerif, 9);
            return;
        }
        // Default before attempting the requested font, so a failure below still leaves a usable font.
        LabelFont = new Font(FontFamily.GenericSansSerif, (float) 8.25, FontStyle.Regular);
        if(!String.IsNullOrEmpty(labelFontName ))
        {
            try
            {
                LabelFont = new Font(labelFontName, labelFontSizeInPoints, FontStyle.Regular);
            }
            catch (Exception)
            {
                Palaso.Reporting.ErrorReport.NotifyUserOfProblem(
                    "Could not find the requested UI font '{0}'. Will use a generic font instead.",
                    labelFontName);
            }
        }
    }

    /// <summary>
    /// Looks up the translation for <paramref name="id"/>; returns the id itself when
    /// no catalog is loaded or no translation exists.
    /// </summary>
    public static string Get(string id)
    {
        return Get(id, String.Empty);
    }

    /// <summary>
    /// Clients should use this rather than running string.Format themselves,
    /// because this has error checking and a helpful message, should the number
    /// of parameters be wrong.
    /// </summary>
    /// <param name="id"></param>
    /// <param name="translationNotes">just for the string scanner's use</param>
    /// <param name="args">arguments to the string, used in string.format</param>
    /// <returns></returns>
    public static string GetFormatted(string id, string translationNotes, params object[] args)
    {
        //todo: this doesn't notice if the catalog has too few arugment slots, e.g.
        //if it says "blah" when it should say "blah{0}"
        try
        {
            var s = Get(id, translationNotes);
            try
            {
                s = String.Format(s, args);
                return s;
            }
            catch(Exception e)
            {
                Reporting.ErrorReport.NotifyUserOfProblem(
                    "There was a problem localizing\r\n'{0}'\r\ninto this UI language... check number of parameters. The code expects there to be {1}. The current localized string is\r\n'{2}'.\r\nThe error was {3}", id, args.Length, s, e.Message);
                return "!!"+s; // show it without the formatting
            }
        }
        catch(Exception) // FIX: was "catch(Exception e)" with an unused variable (compiler warning CS0168)
        {
            return "Error localizing string '" + id + "' to this UI language";
        }
    }

    /// <summary>
    /// Looks up the translation for <paramref name="id"/>. A leading '~' is stripped
    /// before lookup; in test mode the result is prefixed with '*'.
    /// </summary>
    /// <param name="id">the msgid to look up</param>
    /// <param name="translationNotes">just for the string scanner's use; not used in the lookup</param>
    public static string Get(string id, string translationNotes)
    {
        if (!String.IsNullOrEmpty(id) && id[0] == '~')
        {
            id = id.Substring(1);
        }
        if (_singleton == null) //todo: this should not be needed
        {
            return id;
        }
        if (_inInternationalizationTestMode)
        {
            return "*"+_singleton[id];
        }
        else
        {
            return _singleton[id];
        }
    }

    // Registers this instance as the singleton and resets the catalog.
    private void Init()
    {
        _singleton = this;
        _catalog = new Dictionary<string, string>();
    }

    /// <summary>
    /// Returns the text between the first and last double quote on the line,
    /// or an empty string when the line has no quoted section.
    /// </summary>
    private static string GetStringBetweenQuotes(string line)
    {
        int s = line.IndexOf('"');
        int f = line.LastIndexOf('"');
        // ROBUSTNESS: a malformed line with fewer than two quotes previously caused an
        // ArgumentOutOfRangeException in Substring; treat it as an empty payload instead.
        if (s < 0 || f <= s)
        {
            return String.Empty;
        }
        return line.Substring(s + 1, f - (s + 1));
    }

    /// <summary>
    /// Returns the translation for <paramref name="id"/>, falling back to a lookup with
    /// "&amp;&amp;" collapsed to "&amp;", and finally to the id itself.
    /// </summary>
    public string this[string id]
    {
        get
        {
            string translation;
            // Use TryGetValue to avoid the double lookup of ContainsKey + indexer.
            if (_catalog.TryGetValue(id, out translation))
            {
                return translation;
            }
            //REVIEW: What's this about? It was id = id.Replace("&&", "&"); which was removing the && we need when it gets to the UI
            var idWithSingleAmpersand = id.Replace("&&", "&");
            if (_catalog.TryGetValue(idWithSingleAmpersand, out translation))
            {
                return translation;
            }
            return id;
        }
    }

    /// <summary>
    /// The font used for UI labels; lazily falls back to a 9pt generic sans-serif.
    /// </summary>
    public static Font LabelFont
    {
        get
        {
            if (_font == null)
            {
                _font = new Font(FontFamily.GenericSansSerif, 9);
            }
            return _font;
        }
        set
        {
            _font = value;
        }
    }

    // Font resizing is deprecated - obsolete API DG 2011-12
    public static Font ModifyFontForLocalization(Font incoming)
    {
        return incoming;
    }
}
}
| |
using System;
using System.Threading;
using NUnit.Framework;
using Tao.Sdl;
using System.Runtime.InteropServices;
namespace Tao.Sdl
{
#region SDL_mixer.h
/// <summary>
/// SDL Tests.
/// </summary>
[TestFixture]
public class SdlTestMixer
{
/// <summary>
/// Per-test setup: quits SDL so every test starts from a clean, uninitialized state.
/// </summary>
[SetUp]
public void Init()
{
Sdl.SDL_Quit();
}
/// <summary>
/// Verifies that the linked SDL_mixer library reports the expected version (1.2.7).
/// </summary>
[Test]
public void LinkedVersion()
{
Sdl.SDL_version version = SdlMixer.Mix_Linked_Version();
Console.WriteLine("Mixer version: " + version.ToString());
Assert.AreEqual(version.major.ToString()
+ "." + version.minor.ToString()
+ "." + version.patch.ToString(), "1.2.7");
}
/// <summary>
/// Helper: restarts the SDL audio subsystem and opens the mixer with default
/// frequency/format, 2 channels and a 1024-byte chunk size.
/// </summary>
private void InitAudio()
{
QuitAudio();
Sdl.SDL_Init(Sdl.SDL_INIT_AUDIO);
int results = SdlMixer.Mix_OpenAudio(
SdlMixer.MIX_DEFAULT_FREQUENCY,
(short) SdlMixer.MIX_DEFAULT_FORMAT,
2,
1024);
}
/// <summary>
/// Helper: shuts SDL down after a test. (Mix_CloseAudio is intentionally left commented out.)
/// </summary>
private void QuitAudio()
{
//SdlMixer.Mix_CloseAudio();
Sdl.SDL_Quit();
}
/// <summary>
/// Verifies that Mix_OpenAudio succeeds (returns 0) with the default settings.
/// </summary>
[Test]
public void OpenAudio()
{
Sdl.SDL_Quit();
Sdl.SDL_Init(Sdl.SDL_INIT_AUDIO);
int results = SdlMixer.Mix_OpenAudio(
SdlMixer.MIX_DEFAULT_FREQUENCY,
(short) SdlMixer.MIX_DEFAULT_FORMAT,
2,
1024);
Assert.AreEqual(results,0);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_AllocateChannels allocates exactly the requested number of channels.
/// </summary>
[Test]
public void AllocateChannels()
{
InitAudio();
//Console.WriteLine("channels allocated: " );
int results = SdlMixer.Mix_AllocateChannels(16);
Console.WriteLine("Channels Allocated: " + results.ToString());
Assert.AreEqual(results, 16);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_QuerySpec reports back the frequency, format and channel
/// count that InitAudio opened the device with.
/// </summary>
[Test]
public void QuerySpec()
{
InitAudio();
int frequency;
short format;
int channels;
int results = SdlMixer.Mix_QuerySpec(out frequency, out format, out channels);
// Console.WriteLine("freq: " + frequency.ToString());
// Console.WriteLine("format: " + format.ToString());
// Console.WriteLine("chan: " + channels.ToString());
// Console.WriteLine("results: " + results.ToString());
Assert.AreEqual(frequency, SdlMixer.MIX_DEFAULT_FREQUENCY);
Assert.AreEqual(format, (short) SdlMixer.MIX_DEFAULT_FORMAT);
Assert.AreEqual(channels, 2);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_LoadWAV_RW loads a sample from an SDL_RWops stream (non-null chunk pointer).
/// </summary>
[Test]
public void LoadWAV_RW()
{
InitAudio();
IntPtr resultPtr = SdlMixer.Mix_LoadWAV_RW(Sdl.SDL_RWFromFile("../../test.wav", "rb"), 1);
Assert.IsFalse(resultPtr == IntPtr.Zero);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_LoadWAV loads a .wav file (non-null chunk pointer).
/// </summary>
[Test]
public void LoadWAV()
{
InitAudio();
IntPtr resultPtr = SdlMixer.Mix_LoadWAV("../../test.wav");
Assert.IsFalse(resultPtr == IntPtr.Zero);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_LoadMUS loads music from a .wav file (non-null music pointer).
/// </summary>
[Test]
public void LoadMUSwav()
{
InitAudio();
IntPtr resultPtr = SdlMixer.Mix_LoadMUS("../../test.wav");
Assert.IsFalse(resultPtr == IntPtr.Zero);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_LoadMUS loads music from an .mp3 file (non-null music pointer).
/// </summary>
[Test]
public void LoadMUSmp3()
{
InitAudio();
IntPtr resultPtr = SdlMixer.Mix_LoadMUS("../../test.mp3");
Assert.IsFalse(resultPtr == IntPtr.Zero);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_LoadMUS loads music from an .ogg file (non-null music pointer).
/// </summary>
[Test]
public void LoadMUSOGG()
{
InitAudio();
IntPtr resultPtr = SdlMixer.Mix_LoadMUS("../../test.ogg");
Assert.IsFalse(resultPtr == IntPtr.Zero);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_GetError returns the string previously set with Mix_SetError.
/// </summary>
[Test]
public void SetGetError()
{
string error = "Hi there";
SdlMixer.Mix_SetError(error);
Assert.AreEqual(SdlMixer.Mix_GetError(), error);
}
/// <summary>
/// Verifies that Mix_QuickLoad_WAV loads a sample from a memory stream (non-null chunk pointer).
/// </summary>
[Test]
public void QuickLoad_WAV()
{
InitAudio();
IntPtr resultPtr = SdlMixer.Mix_QuickLoad_WAV(Sdl.SDL_RWFromFile("../../test.wav", "rb"));
Assert.IsFalse(resultPtr == IntPtr.Zero);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_QuickLoad_RAW loads raw sample data (non-null chunk pointer).
/// </summary>
[Test]
public void QuickLoad_RAW()
{
InitAudio();
IntPtr resultPtr = SdlMixer.Mix_QuickLoad_RAW(Sdl.SDL_RWFromFile("../../test.wav", "rb"), 1000);
Assert.IsFalse(resultPtr == IntPtr.Zero);
QuitAudio();
}
/// <summary>
/// Exercises Mix_FreeChunk; only checks that the call does not crash.
/// NOTE(review): the pointer passed is an SDL_RWops, not a Mix_Chunk — confirm intent.
/// </summary>
[Test]
public void FreeChunk()
{
InitAudio();
IntPtr wavPtr = Sdl.SDL_RWFromFile("../../test.wav", "rb");
SdlMixer.Mix_FreeChunk(wavPtr);
QuitAudio();
}
/// <summary>
/// Exercises Mix_FreeMusic; only checks that the call does not crash.
/// NOTE(review): the pointer passed is an SDL_RWops, not a Mix_Music — confirm intent.
/// </summary>
[Test]
public void FreeMusic()
{
InitAudio();
IntPtr wavPtr = Sdl.SDL_RWFromFile("../../test.wav", "rb");
SdlMixer.Mix_FreeMusic(wavPtr);
QuitAudio();
}
/// <summary>
/// Exercises Mix_GetMusicType on loaded music; the result is only printed, not asserted.
/// </summary>
[Test]
public void GetMusicType()
{
InitAudio();
IntPtr resultPtr = SdlMixer.Mix_LoadMUS("../../test.wav");
int musicType = SdlMixer.Mix_GetMusicType(resultPtr);
Console.WriteLine("musictype:" + musicType);
//Assert.IsFalse(resultPtr == IntPtr.Zero);
QuitAudio();
}
/// <summary>
/// Placeholder test for Mix_SetPostMix; not yet implemented.
/// </summary>
[Test]
[Ignore("Not finished")]
public void SetPostMix()
{
}
/// <summary>
/// Placeholder test for Mix_HookMusic; not yet implemented.
/// </summary>
[Test]
[Ignore("Not finished")]
public void HookMusic()
{
}
/// <summary>
/// Placeholder test for Mix_HookMusicFinished; not yet implemented.
/// </summary>
[Test]
[Ignore("Not finished")]
public void HookMusicFinished()
{
}
/// <summary>
/// Placeholder test for Mix_GetMusicHookData; not yet implemented.
/// </summary>
[Test]
[Ignore("Not finished")]
public void GetMusicHookData()
{
}
/// <summary>
/// Placeholder test for Mix_ChannelFinished; not yet implemented.
/// </summary>
[Test]
[Ignore("Not finished")]
public void ChannelFinished()
{
}
/// <summary>
/// Placeholder test for Mix_RegisterEffect; not yet implemented.
/// </summary>
[Test]
[Ignore("Not finished")]
public void RegisterEffect()
{
}
/// <summary>
/// Placeholder test for Mix_UnregisterEffect; not yet implemented.
/// </summary>
[Test]
[Ignore("Not finished")]
public void UnregisterEffect()
{
}
/// <summary>
/// Placeholder test for Mix_UnregisterAllEffects; not yet implemented.
/// </summary>
[Test]
[Ignore("Not finished")]
public void UnregisterAllEffects()
{
}
/// <summary>
/// Verifies that Mix_SetPanning succeeds (non-zero return) on channel 1.
/// </summary>
[Test]
public void SetPanning()
{
InitAudio();
int result = SdlMixer.Mix_SetPanning(1, 255,127);
Assert.IsTrue(result != 0);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_SetPosition succeeds (non-zero return) on channel 1.
/// </summary>
[Test]
public void SetPosition()
{
InitAudio();
int result = SdlMixer.Mix_SetPosition(1, 90, 100);
Assert.IsTrue(result != 0);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_SetDistance succeeds (non-zero return) on channel 1.
/// </summary>
[Test]
public void SetDistance()
{
InitAudio();
int result = SdlMixer.Mix_SetDistance(1, 140);
Assert.IsTrue(result != 0);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_SetReverseStereo succeeds (non-zero return) on the post-mix channel.
/// </summary>
[Test]
public void SetReverseStereo()
{
InitAudio();
int result = SdlMixer.Mix_SetReverseStereo(SdlMixer.MIX_CHANNEL_POST, 1);
Assert.IsTrue(result != 0);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_ReserveChannels reserves exactly the requested single channel.
/// </summary>
[Test]
public void ReserveChannels()
{
InitAudio();
int result = SdlMixer.Mix_ReserveChannels(1);
//Console.WriteLine("ReserveChannels: " + result.ToString());
Assert.IsTrue(result == 1);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_GroupChannel adds one channel to a group (returns 1).
/// </summary>
[Test]
public void GroupChannel()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannel(1, 1);
//Console.WriteLine("ReserveChannels: " + result.ToString());
Assert.IsTrue(result == 1);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_GroupChannels adds the full range 0-7 to a group (returns 8).
/// </summary>
[Test]
public void GroupChannels()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
//Console.WriteLine("ReserveChannels: " + result.ToString());
Assert.IsTrue(result == 8);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_GroupAvailable finds an available channel in group 1.
/// </summary>
[Test]
public void GroupAvailable()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannel(1, 1);
result = SdlMixer.Mix_GroupAvailable(1);
//Console.WriteLine("ReserveChannels: " + result.ToString());
Assert.IsTrue(result != -1);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_GroupCount reports one channel in group 1.
/// </summary>
[Test]
public void GroupCount()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannel(1, 1);
result = SdlMixer.Mix_GroupCount(1);
//Console.WriteLine("ReserveChannels: " + result.ToString());
Assert.IsTrue(result == 1);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_GroupOldest returns -1 when no channel in the group is playing.
/// </summary>
[Test]
public void GroupOldest()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
result = SdlMixer.Mix_GroupOldest(1);
//Console.WriteLine("GroupOldest: " + result.ToString());
Assert.IsTrue(result == -1);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_GroupNewer returns -1 when no channel in the group is playing.
/// </summary>
[Test]
public void GroupNewer()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
// BUG FIX: this test previously called Mix_GroupOldest (copy-paste from the
// GroupOldest test), so Mix_GroupNewer was never exercised. With no channels
// playing, Mix_GroupNewer also returns -1.
result = SdlMixer.Mix_GroupNewer(1);
//Console.WriteLine("GroupNewer: " + result.ToString());
Assert.IsTrue(result == -1);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_PlayChannelTimed starts playback for 500ms on any free channel.
/// </summary>
[Test]
public void PlayChannelTimed()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
IntPtr chunkPtr = SdlMixer.Mix_LoadWAV("test.wav");
result = SdlMixer.Mix_PlayChannelTimed(-1, chunkPtr, -1, 500);
Thread.Sleep(500);
Console.WriteLine("PlayChannelTimed: " + result.ToString());
Assert.IsTrue(result != -1);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_PlayChannel starts looping playback on any free channel.
/// </summary>
[Test]
public void PlayChannel()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
IntPtr chunkPtr = SdlMixer.Mix_LoadWAV("test.wav");
result = SdlMixer.Mix_PlayChannel(-1, chunkPtr, -1);
Thread.Sleep(500);
Console.WriteLine("PlayChannel: " + result.ToString());
Assert.IsTrue(result != -1);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_PlayMusic starts looping music playback without error.
/// </summary>
[Test]
public void PlayMusic()
{
InitAudio();
int result;
IntPtr chunkPtr = SdlMixer.Mix_LoadMUS("test.wav");
result = SdlMixer.Mix_PlayMusic( chunkPtr, -1);
Console.WriteLine("PlayMusic: " + result.ToString());
Assert.IsTrue(result != -1);
Thread.Sleep(1000);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_FadeInMusic starts music with a fade-in without error.
/// </summary>
[Test]
public void FadeInMusic()
{
InitAudio();
int result;
IntPtr chunkPtr = SdlMixer.Mix_LoadMUS("test.wav");
result = SdlMixer.Mix_FadeInMusic( chunkPtr, -1, 2);
Console.WriteLine("PlayMusic: " + result.ToString());
Assert.IsTrue(result != -1);
Thread.Sleep(5000);
QuitAudio();
}
/// <summary>
/// Test for Mix_FadeInMusicPos (fade in from a position); currently ignored.
/// </summary>
[Test]
[Ignore("Not finished")]
public void FadeInMusicPos()
{
InitAudio();
int result;
IntPtr chunkPtr = SdlMixer.Mix_LoadMUS("test.wav");
result = SdlMixer.Mix_FadeInMusicPos( chunkPtr, -1, 2, 1);
Console.WriteLine("PlayMusic: " + result.ToString());
Assert.IsTrue(result != -1);
Thread.Sleep(5000);
QuitAudio();
}
/// <summary>
/// Test for Mix_FadeInChannelTimed; currently ignored.
/// </summary>
[Test]
[Ignore("Not finished")]
public void FadeInChannelTimed()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
IntPtr chunkPtr = SdlMixer.Mix_LoadWAV("test.wav");
result = SdlMixer.Mix_FadeInChannelTimed(1, chunkPtr, -1, 0,-1);
Thread.Sleep(500);
Console.WriteLine("PlayChannel: " + result.ToString());
Assert.IsTrue(result != -1);
QuitAudio();
}
/// <summary>
/// Test for Mix_FadeInChannel; currently ignored.
/// </summary>
[Test]
[Ignore("Not finished")]
public void FadeInChannel()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
IntPtr chunkPtr = SdlMixer.Mix_LoadWAV("test.wav");
result = SdlMixer.Mix_FadeInChannel(1, chunkPtr, -1, 0);
Thread.Sleep(500);
Console.WriteLine("PlayChannel: " + result.ToString());
Assert.IsTrue(result != -1);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_Volume sets channel 1's volume to the maximum and reports it back.
/// </summary>
[Test]
public void Volume()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
IntPtr chunkPtr = SdlMixer.Mix_LoadWAV("test.wav");
result = SdlMixer.Mix_Volume(1, SdlMixer.MIX_MAX_VOLUME);
Console.WriteLine("Volume: " + result.ToString());
Assert.IsTrue(result == SdlMixer.MIX_MAX_VOLUME);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_VolumeChunk sets a chunk's volume to the maximum and reports it back.
/// </summary>
[Test]
public void VolumeChunk()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
IntPtr chunkPtr = SdlMixer.Mix_LoadWAV("test.wav");
result = SdlMixer.Mix_VolumeChunk(chunkPtr, SdlMixer.MIX_MAX_VOLUME);
Console.WriteLine("Volume: " + result.ToString());
Assert.IsTrue(result == SdlMixer.MIX_MAX_VOLUME);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_VolumeMusic sets the music volume to the maximum and reports it back.
/// </summary>
[Test]
public void VolumeMusic()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
IntPtr chunkPtr = SdlMixer.Mix_LoadMUS("test.wav");
result = SdlMixer.Mix_VolumeMusic(SdlMixer.MIX_MAX_VOLUME);
Console.WriteLine("Volume: " + result.ToString());
Assert.IsTrue(result == SdlMixer.MIX_MAX_VOLUME);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_HaltChannel succeeds (returns 0) on channel 1.
/// </summary>
[Test]
public void HaltChannel()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
result = SdlMixer.Mix_HaltChannel(1);
//Console.WriteLine("HaltChannel: " + result.ToString());
Assert.IsTrue(result == 0);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_HaltGroup succeeds (returns 0) on group 1.
/// </summary>
[Test]
public void HaltGroup()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
result = SdlMixer.Mix_HaltGroup(1);
//Console.WriteLine("HaltChannel: " + result.ToString());
Assert.IsTrue(result == 0);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_HaltMusic succeeds (returns 0).
/// </summary>
[Test]
public void HaltMusic()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
IntPtr chunkPtr = SdlMixer.Mix_LoadMUS("test.wav");
result = SdlMixer.Mix_HaltMusic();
Assert.IsTrue(result == 0);
QuitAudio();
}
/// <summary>
/// Verifies that Mix_ExpireChannel schedules channel 1 to stop (returns the channel count affected).
/// </summary>
[Test]
public void ExpireChannel()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
result = SdlMixer.Mix_ExpireChannel(1, 100);
//Console.WriteLine("HaltChannel: " + result.ToString());
Assert.IsTrue(result == 1);
QuitAudio();
}
/// <summary>
///
/// </summary>
[Test]
[Ignore("Not finished")]
public void FadeOutChannel()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
IntPtr chunkPtr = SdlMixer.Mix_LoadWAV("test.wav");
result = SdlMixer.Mix_FadeOutChannel(1, 100);
Thread.Sleep(500);
Console.WriteLine("PlayChannel: " + result.ToString());
Assert.IsTrue(result != -1);
QuitAudio();
}
/// <summary>
///
/// </summary>
[Test]
public void FadeOutGroup()
{
InitAudio();
int result = SdlMixer.Mix_GroupChannel(1, 1);
result = SdlMixer.Mix_FadeOutGroup(1, 100);
Thread.Sleep(100);
//Console.WriteLine("ReserveChannels: " + result.ToString());
Assert.IsTrue(result == 1);
QuitAudio();
}
/// <summary>
///
/// </summary>
[Test]
public void FadeOutMusic()
{
InitAudio();
int result;
IntPtr chunkPtr = SdlMixer.Mix_LoadMUS("test.wav");
result = SdlMixer.Mix_PlayMusic( chunkPtr, -1);
result = SdlMixer.Mix_FadeOutMusic(1000);
Thread.Sleep(2000);
Console.WriteLine("PlayMusic: " + result.ToString());
Assert.IsTrue(result == 1);
QuitAudio();
}
/// <summary>
/// Verifies the music fade state machine: MIX_NO_FADING while playing,
/// MIX_FADING_OUT immediately after Mix_FadeOutMusic, and MIX_NO_FADING again
/// once the fade has completed.
/// </summary>
[Test]
[Ignore("Works fine on its own, but something wrong when it runs as a test suite")]
public void FadingMusic()
{
    InitAudio();
    int result;
    int resultFading;
    IntPtr chunkPtr = SdlMixer.Mix_LoadMUS("test.wav");
    result = SdlMixer.Mix_PlayMusic( chunkPtr, -1);
    resultFading = SdlMixer.Mix_FadingMusic();
    //Console.WriteLine("FadingMusic1: " + resultFading.ToString());
    Assert.AreEqual(resultFading, SdlMixer.MIX_NO_FADING);
    result = SdlMixer.Mix_FadeOutMusic(1000);
    resultFading = SdlMixer.Mix_FadingMusic();
    Assert.AreEqual(resultFading, SdlMixer.MIX_FADING_OUT);
    //Console.WriteLine("FadingMusic2: " + resultFading.ToString());
    // Sleep past the 1000 ms fade so the final state check sees no fade.
    Thread.Sleep(2000);
    resultFading = SdlMixer.Mix_FadingMusic();
    Assert.AreEqual(resultFading, SdlMixer.MIX_NO_FADING);
    //Console.WriteLine("FadingMusic: " + resultFading.ToString());
    Assert.IsTrue(result == 1);
    QuitAudio();
}
/// <summary>
/// Verifies the channel fade state machine: MIX_NO_FADING while playing,
/// MIX_FADING_OUT immediately after Mix_FadeOutChannel, and MIX_NO_FADING
/// again once the fade has completed.
/// </summary>
[Test]
[Ignore("Something wrong.")]
public void FadingChannel()
{
    InitAudio();
    int result;
    int resultFading;
    // Mix_PlayChannel requires a sample chunk (Mix_LoadWAV); the previous code
    // passed a Mix_Music pointer from Mix_LoadMUS, which is the wrong type.
    IntPtr chunkPtr = SdlMixer.Mix_LoadWAV("test.wav");
    result = SdlMixer.Mix_PlayChannel(1, chunkPtr, -1);
    resultFading = SdlMixer.Mix_FadingChannel(1);
    Assert.AreEqual(resultFading, SdlMixer.MIX_NO_FADING);
    result = SdlMixer.Mix_FadeOutChannel(1, 1000);
    resultFading = SdlMixer.Mix_FadingChannel(1);
    Assert.AreEqual(resultFading, SdlMixer.MIX_FADING_OUT);
    // Sleep past the 1000 ms fade so the final state check sees no fade.
    Thread.Sleep(2000);
    resultFading = SdlMixer.Mix_FadingChannel(1);
    Assert.AreEqual(resultFading, SdlMixer.MIX_NO_FADING);
    Assert.IsTrue(result == 1);
    QuitAudio();
}
/// <summary>
/// Verifies that Mix_Pause(-1) pauses all channels without error while a
/// sample is playing.
/// </summary>
[Test]
public void Pause()
{
    InitAudio();
    // Mix_PlayChannel requires a sample chunk (Mix_LoadWAV); the previous code
    // passed a Mix_Music pointer from Mix_LoadMUS, which is the wrong type.
    IntPtr chunkPtr = SdlMixer.Mix_LoadWAV("test.wav");
    int result = SdlMixer.Mix_PlayChannel(1, chunkPtr, -1);
    SdlMixer.Mix_Pause(-1);
    QuitAudio();
}
/// <summary>
/// Verifies that a paused channel can be resumed with Mix_Resume(-1)
/// without error.
/// </summary>
[Test]
public void Resume()
{
    InitAudio();
    // Mix_PlayChannel requires a sample chunk (Mix_LoadWAV); the previous code
    // passed a Mix_Music pointer from Mix_LoadMUS, which is the wrong type.
    IntPtr chunkPtr = SdlMixer.Mix_LoadWAV("test.wav");
    int result = SdlMixer.Mix_PlayChannel(1, chunkPtr, -1);
    SdlMixer.Mix_Pause(-1);
    SdlMixer.Mix_Resume(-1);
    QuitAudio();
}
/// <summary>
/// Verifies that Mix_Paused(-1) reports one paused channel after channel 1
/// is paused.
/// </summary>
[Test]
[Ignore("Not Finished")]
public void Paused()
{
    InitAudio();
    // Mix_PlayChannel requires a sample chunk (Mix_LoadWAV); the previous code
    // passed a Mix_Music pointer from Mix_LoadMUS, which is the wrong type.
    IntPtr chunkPtr = SdlMixer.Mix_LoadWAV("test.wav");
    int result = SdlMixer.Mix_PlayChannel(1, chunkPtr, -1);
    SdlMixer.Mix_Pause(1);
    result = SdlMixer.Mix_Paused(-1);
    Assert.AreEqual(result, 1);
    result = SdlMixer.Mix_Paused(-1);
    Assert.AreEqual(result, 1);
    QuitAudio();
}
/// <summary>
/// Verifies that Mix_PauseMusic can be called while music is playing
/// without error. The play result is intentionally not asserted.
/// </summary>
[Test]
public void PauseMusic()
{
    InitAudio();
    int result;
    IntPtr chunkPtr = SdlMixer.Mix_LoadMUS("test.wav");
    result = SdlMixer.Mix_PlayMusic( chunkPtr, -1);
    SdlMixer.Mix_PauseMusic();
    QuitAudio();
}
/// <summary>
/// Verifies that paused music can be resumed with Mix_ResumeMusic
/// without error.
/// </summary>
[Test]
public void ResumeMusic()
{
    InitAudio();
    int result;
    IntPtr chunkPtr = SdlMixer.Mix_LoadMUS("test.wav");
    result = SdlMixer.Mix_PlayMusic( chunkPtr, -1);
    SdlMixer.Mix_PauseMusic();
    SdlMixer.Mix_ResumeMusic();
    QuitAudio();
}
/// <summary>
/// Verifies that Mix_RewindMusic can be called while music is playing.
/// Uses an Ogg file here (other tests use WAV) - per SDL_mixer, rewinding is
/// only supported for certain music formats.
/// </summary>
[Test]
public void RewindMusic()
{
    InitAudio();
    int result;
    IntPtr chunkPtr = SdlMixer.Mix_LoadMUS("test.ogg");
    result = SdlMixer.Mix_PlayMusic( chunkPtr, -1);
    SdlMixer.Mix_RewindMusic();
    QuitAudio();
}
/// <summary>
/// Calls Mix_PausedMusic after pausing the music stream. The returned
/// state is not asserted - this is a smoke test only.
/// </summary>
[Test]
public void PausedMusic()
{
    InitAudio();
    int result;
    IntPtr chunkPtr = SdlMixer.Mix_LoadMUS("test.wav");
    result = SdlMixer.Mix_PlayMusic( chunkPtr, -1);
    SdlMixer.Mix_PauseMusic();
    result = SdlMixer.Mix_PausedMusic();
    QuitAudio();
}
/// <summary>
/// Verifies that Mix_SetMusicPosition does not return an error.
/// NOTE(review): the position is set without any music playing, which is
/// presumably why the test is marked Ignore("Not finished").
/// </summary>
[Test]
[Ignore("Not finished")]
public void SetMusicPosition()
{
    InitAudio();
    int result;
    IntPtr chunkPtr = SdlMixer.Mix_LoadMUS("test.wav");
    result = SdlMixer.Mix_SetMusicPosition(1000);
    //Console.WriteLine("PlayMusic: " + result.ToString());
    Assert.IsTrue(result != -1);
    Thread.Sleep(5000);
    QuitAudio();
}
/// <summary>
/// Plays a chunk on the first free channel and checks that Mix_Playing(1)
/// does not report an error.
/// </summary>
[Test]
[Ignore("Not finished")]
public void Playing()
{
    InitAudio();
    int result = SdlMixer.Mix_GroupChannels(0, 7, 1);
    IntPtr chunkPtr = SdlMixer.Mix_LoadWAV("test.wav");
    result = SdlMixer.Mix_PlayChannel(-1, chunkPtr, -1);
    Thread.Sleep(500);
    //Console.WriteLine("PlayChannel: " + result.ToString());
    result = SdlMixer.Mix_Playing(1);
    Assert.IsTrue(result != -1);
    QuitAudio();
}
/// <summary>
/// Starts looping music and verifies Mix_PlayingMusic reports that music
/// is playing (returns 1).
/// </summary>
[Test]
public void PlayingMusic()
{
    InitAudio();
    int result;
    IntPtr chunkPtr = SdlMixer.Mix_LoadMUS("test.wav");
    result = SdlMixer.Mix_PlayMusic( chunkPtr, -1);
    Console.WriteLine("PlayMusic: " + result.ToString());
    result = SdlMixer.Mix_PlayingMusic();
    Assert.IsTrue(result == 1);
    Thread.Sleep(1000);
    QuitAudio();
}
/// <summary>
/// Smoke test for Mix_SetMusicCMD (registers an external music player
/// command). The return value is not asserted.
/// </summary>
[Test]
[Ignore("Not Finished")]
public void SetPlayingCMD()
{
    InitAudio();
    int result;
    //result = SdlMixer.Mix_PlayMusic( chunkPtr, -1);
    result = SdlMixer.Mix_SetMusicCMD("test");
    QuitAudio();
}
/// <summary>
/// Smoke test for Mix_GetChunk, which returns the chunk most recently
/// played on channel 1. The returned pointer is not asserted.
/// </summary>
[Test]
[Ignore("Not Finished")]
public void GetChunk()
{
    InitAudio();
    IntPtr resultPtr;
    //result = SdlMixer.Mix_PlayMusic( chunkPtr, -1);
    resultPtr= SdlMixer.Mix_GetChunk(1);
    QuitAudio();
}
/// <summary>
/// Smoke test for Mix_SetSynchroValue. The return value is not asserted.
/// </summary>
[Test]
[Ignore("Not Finished")]
public void SetSynchroValue()
{
    InitAudio();
    int result;
    result = SdlMixer.Mix_SetSynchroValue(1);
    QuitAudio();
}
/// <summary>
/// Smoke test for Mix_GetSynchroValue. The return value is not asserted.
/// </summary>
[Test]
[Ignore("Not Finished")]
public void GetSynchroValue()
{
    InitAudio();
    int result;
    result = SdlMixer.Mix_GetSynchroValue();
    QuitAudio();
}
/// <summary>
/// Verifies that the mixer can be shut down with Mix_CloseAudio.
/// Calls Sdl.SDL_Quit directly instead of QuitAudio so the audio subsystem
/// is torn down exactly once.
/// </summary>
[Test]
public void CloseAudio()
{
    InitAudio();
    SdlMixer.Mix_CloseAudio();
    Sdl.SDL_Quit();
}
}
#endregion SDL_mixer.h
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Text;
using System.Reflection;
using System.Diagnostics;
using System.Collections.Generic;
using System.Reflection.Runtime.General;
using System.Reflection.Runtime.TypeInfos;
using System.Reflection.Runtime.TypeInfos.EcmaFormat;
using System.Reflection.Runtime.ParameterInfos;
using System.Reflection.Runtime.ParameterInfos.EcmaFormat;
using System.Reflection.Runtime.CustomAttributes;
using System.Runtime;
using System.Runtime.InteropServices;
using Internal.Reflection.Core;
using Internal.Reflection.Core.Execution;
using Internal.Runtime.CompilerServices;
using Internal.Runtime.TypeLoader;
using System.Reflection.Metadata;
using System.Reflection.Metadata.Ecma335;
namespace System.Reflection.Runtime.MethodInfos.EcmaFormat
{
//
// Implements methods and properties common to RuntimeMethodInfo and RuntimeConstructorInfo.
//
internal struct EcmaFormatMethodCommon : IRuntimeMethodCommon<EcmaFormatMethodCommon>, IEquatable<EcmaFormatMethodCommon>
{
    /// <summary>True if this method declares its own generic parameters.</summary>
    public bool IsGenericMethodDefinition => GenericParameterCount != 0;

    /// <summary>
    /// Creates a new (uncached) invoker for this method closed over <paramref name="methodArguments"/>.
    /// <paramref name="exceptionPertainant"/> is the member reported in exceptions raised while
    /// constructing the invoker.
    /// </summary>
    public MethodInvoker GetUncachedMethodInvoker(RuntimeTypeInfo[] methodArguments, MemberInfo exceptionPertainant)
    {
        return ReflectionCoreExecution.ExecutionEnvironment.GetMethodInvoker(DeclaringType, new QMethodDefinition(Reader, MethodHandle), methodArguments, exceptionPertainant);
    }

    /// <summary>
    /// The method's signature type handles; slot 0 is the return type, slots 1..n are the parameters.
    /// </summary>
    public QSignatureTypeHandle[] QualifiedMethodSignature
    {
        get
        {
            return this.MethodSignature;
        }
    }

    /// <summary>
    /// This method re-expressed on its uninstantiated owner: the defining type
    /// serves as its own context (no generic substitutions applied).
    /// </summary>
    public EcmaFormatMethodCommon RuntimeMethodCommonOfUninstantiatedMethod
    {
        get
        {
            return new EcmaFormatMethodCommon(MethodHandle, _definingTypeInfo, _definingTypeInfo);
        }
    }

    /// <summary>
    /// Fills in ParameterInfos for the parameters that have Param metadata rows.
    /// Slots without metadata are left null for the caller to backfill with "thin" parameter infos.
    /// </summary>
    public void FillInMetadataDescribedParameters(ref VirtualRuntimeParameterInfoArray result, QSignatureTypeHandle[] typeSignatures, MethodBase contextMethod, TypeContext typeContext)
    {
        foreach (ParameterHandle parameterHandle in _method.GetParameters())
        {
            Parameter parameterRecord = _reader.GetParameter(parameterHandle);
            // SequenceNumber is 1-based for parameters; 0 denotes the return parameter,
            // so the position passed to the ParameterInfo factory is (index - 1).
            int index = parameterRecord.SequenceNumber;
            result[index] =
                EcmaFormatMethodParameterInfo.GetEcmaFormatMethodParameterInfo(
                    contextMethod,
                    _methodHandle,
                    index - 1,
                    parameterHandle,
                    typeSignatures[index],
                    typeContext);
        }
    }

    /// <summary>Number of generic parameters declared by this method (0 for non-generic methods).</summary>
    public int GenericParameterCount => _method.GetGenericParameters().Count;

    /// <summary>
    /// Materializes this method's generic parameter types, reporting
    /// <paramref name="owningMethod"/> as their declaring method.
    /// </summary>
    public RuntimeTypeInfo[] GetGenericTypeParametersWithSpecifiedOwningMethod(RuntimeNamedMethodInfo<EcmaFormatMethodCommon> owningMethod)
    {
        GenericParameterHandleCollection genericParameters = _method.GetGenericParameters();
        int genericParametersCount = genericParameters.Count;
        if (genericParametersCount == 0)
            return Array.Empty<RuntimeTypeInfo>();

        RuntimeTypeInfo[] genericTypeParameters = new RuntimeTypeInfo[genericParametersCount];
        int i = 0;
        foreach (GenericParameterHandle genericParameterHandle in genericParameters)
        {
            RuntimeTypeInfo genericParameterType = EcmaFormatRuntimeGenericParameterTypeInfoForMethods.GetRuntimeGenericParameterTypeInfoForMethods(owningMethod, Reader, genericParameterHandle);
            genericTypeParameters[i++] = genericParameterType;
        }
        return genericTypeParameters;
    }

    //
    // methodHandle - the "tkMethodDef" that identifies the method.
    // definingType - the "tkTypeDef" that defined the method (this is where you get the metadata reader that created methodHandle.)
    // contextType - the type that supplies the type context (i.e. substitutions for generic parameters.) Though you
    // get your raw information from "definingType", you report "contextType" as your DeclaringType property.
    //
    // For example:
    //
    // typeof(Foo<>).GetTypeInfo().DeclaredMembers
    //
    // The definingType and contextType are both Foo<>
    //
    // typeof(Foo<int,String>).GetTypeInfo().DeclaredMembers
    //
    // The definingType is "Foo<,>"
    // The contextType is "Foo<int,String>"
    //
    // We don't report any DeclaredMembers for arrays or generic parameters so those don't apply.
    //
    public EcmaFormatMethodCommon(MethodDefinitionHandle methodHandle, EcmaFormatRuntimeNamedTypeInfo definingTypeInfo, RuntimeTypeInfo contextTypeInfo)
    {
        _definingTypeInfo = definingTypeInfo;
        _methodHandle = methodHandle;
        _contextTypeInfo = contextTypeInfo;
        _reader = definingTypeInfo.Reader;
        _method = _reader.GetMethodDefinition(methodHandle);
    }

    /// <summary>Raw method attributes from metadata.</summary>
    public MethodAttributes Attributes
    {
        get
        {
            return _method.Attributes;
        }
    }

    /// <summary>
    /// Calling convention derived from the signature header: VarArgs or Standard,
    /// plus HasThis/ExplicitThis flags.
    /// </summary>
    public CallingConventions CallingConvention
    {
        get
        {
            BlobReader signatureBlob = _reader.GetBlobReader(_method.Signature);
            CallingConventions result;
            SignatureHeader sigHeader = signatureBlob.ReadSignatureHeader();

            if (sigHeader.CallingConvention == SignatureCallingConvention.VarArgs)
                result = CallingConventions.VarArgs;
            else
                result = CallingConventions.Standard;

            if (sigHeader.IsInstance)
                result |= CallingConventions.HasThis;

            if (sigHeader.HasExplicitThis)
                result |= CallingConventions.ExplicitThis;

            return result;
        }
    }

    /// <summary>The type supplying generic substitutions (see the constructor comment).</summary>
    public RuntimeTypeInfo ContextTypeInfo
    {
        get
        {
            return _contextTypeInfo;
        }
    }

    /// <summary>Reported as the method's DeclaringType; this is the context type, not the defining type.</summary>
    public RuntimeTypeInfo DeclaringType
    {
        get
        {
            return _contextTypeInfo;
        }
    }

    /// <summary>The type whose metadata actually defines this method.</summary>
    public RuntimeNamedTypeInfo DefiningTypeInfo
    {
        get
        {
            return _definingTypeInfo;
        }
    }

    /// <summary>Raw method-implementation flags from metadata.</summary>
    public MethodImplAttributes MethodImplementationFlags
    {
        get
        {
            return _method.ImplAttributes;
        }
    }

    /// <summary>The module that defines this method.</summary>
    public Module Module
    {
        get
        {
            return _definingTypeInfo.Module;
        }
    }

    /// <summary>The MethodDef metadata token for this method.</summary>
    public int MetadataToken
    {
        get
        {
            return MetadataTokens.GetToken(_methodHandle);
        }
    }

    /// <summary>
    /// Builds a RuntimeMethodHandle for this method instantiated over
    /// <paramref name="genericArgs"/> (null for non-generic methods).
    /// </summary>
    public RuntimeMethodHandle GetRuntimeMethodHandle(Type[] genericArgs)
    {
        Debug.Assert(genericArgs == null || genericArgs.Length > 0);

        RuntimeTypeHandle[] genericArgHandles;
        if (genericArgs != null)
        {
            genericArgHandles = new RuntimeTypeHandle[genericArgs.Length];
            for (int i = 0; i < genericArgHandles.Length; i++)
                genericArgHandles[i] = genericArgs[i].TypeHandle;
        }
        else
        {
            genericArgHandles = null;
        }

        IntPtr dynamicModule = ModuleList.Instance.GetModuleInfoForMetadataReader(Reader).DynamicModulePtrAsIntPtr;

        return TypeLoaderEnvironment.Instance.GetRuntimeMethodHandleForComponents(
            DeclaringType.TypeHandle,
            Name,
            RuntimeSignature.CreateFromMethodHandle(dynamicModule, MetadataToken),
            genericArgHandles);
    }

    //
    // Returns the ParameterInfo objects for the method parameters and return parameter.
    //
    // The ParameterInfo objects will report "contextMethod" as their Member property and use it to get type variable information from
    // the contextMethod's declaring type. The actual metadata, however, comes from "this."
    //
    // The methodTypeArguments provides the fill-ins for any method type variable elements in the parameter type signatures.
    //
    // Does not array-copy.
    //
    public RuntimeParameterInfo[] GetRuntimeParameters(MethodBase contextMethod, RuntimeTypeInfo[] methodTypeArguments, out RuntimeParameterInfo returnParameter)
    {
        TypeContext typeContext = contextMethod.DeclaringType.CastToRuntimeTypeInfo().TypeContext;
        typeContext = new TypeContext(typeContext.GenericTypeArguments, methodTypeArguments);
        QSignatureTypeHandle[] typeSignatures = this.MethodSignature;
        int count = typeSignatures.Length;

        VirtualRuntimeParameterInfoArray result = new VirtualRuntimeParameterInfoArray(count);

        // Fill in the parameters that have Param metadata rows (previously this loop
        // was duplicated inline; it is identical to the helper's body).
        FillInMetadataDescribedParameters(ref result, typeSignatures, contextMethod, typeContext);

        // Backfill any slot without a Param row with a "thin" parameter info built
        // from the signature alone.
        for (int i = 0; i < count; i++)
        {
            if (result[i] == null)
            {
                result[i] =
                    RuntimeThinMethodParameterInfo.GetRuntimeThinMethodParameterInfo(
                        contextMethod,
                        i - 1,
                        typeSignatures[i],
                        typeContext);
            }
        }

        returnParameter = result.First;
        return result.Remainder;
    }

    /// <summary>The method's simple name from metadata.</summary>
    public String Name
    {
        get
        {
            return _method.Name.GetString(_reader);
        }
    }

    /// <summary>The metadata reader that created <see cref="MethodHandle"/>.</summary>
    public MetadataReader Reader
    {
        get
        {
            return _reader;
        }
    }

    /// <summary>The MethodDef handle identifying this method.</summary>
    public MethodDefinitionHandle MethodHandle
    {
        get
        {
            return _methodHandle;
        }
    }

    /// <summary>
    /// True if both instances refer to the same metadata definition
    /// (same reader and handle); the context type is intentionally ignored.
    /// </summary>
    public bool HasSameMetadataDefinitionAs(EcmaFormatMethodCommon other)
    {
        if (!(_reader == other._reader))
            return false;
        if (!(_methodHandle.Equals(other._methodHandle)))
            return false;
        return true;
    }

    /// <summary>Custom attributes read directly from metadata (no synthesized attributes).</summary>
    public IEnumerable<CustomAttributeData> TrueCustomAttributes => RuntimeCustomAttributeData.GetCustomAttributes(_reader, _method.GetCustomAttributes());

    public override bool Equals(Object obj)
    {
        if (!(obj is EcmaFormatMethodCommon other))
            return false;
        return Equals(other);
    }

    /// <summary>
    /// Full equality: same metadata definition AND same context type.
    /// Compare with <see cref="HasSameMetadataDefinitionAs"/>, which ignores context.
    /// </summary>
    public bool Equals(EcmaFormatMethodCommon other)
    {
        if (!(_reader == other._reader))
            return false;
        if (!(_methodHandle.Equals(other._methodHandle)))
            return false;
        if (!(_contextTypeInfo.Equals(other._contextTypeInfo)))
            return false;
        return true;
    }

    public override int GetHashCode()
    {
        return _methodHandle.GetHashCode() ^ _contextTypeInfo.GetHashCode();
    }

    /// <summary>
    /// Parses the method signature blob into handles: slot 0 is the return type,
    /// slots 1..n are the parameter types.
    /// </summary>
    private QSignatureTypeHandle[] MethodSignature
    {
        get
        {
            BlobReader signatureBlob = _reader.GetBlobReader(_method.Signature);
            SignatureHeader header = signatureBlob.ReadSignatureHeader();
            if (header.Kind != SignatureKind.Method)
                throw new BadImageFormatException();

            int genericParameterCount = 0;
            if (header.IsGeneric)
                genericParameterCount = signatureBlob.ReadCompressedInteger();

            int numParameters = signatureBlob.ReadCompressedInteger();
            QSignatureTypeHandle[] signatureHandles = new QSignatureTypeHandle[checked(numParameters + 1)];
            // Capture the return type, then skip past it to reach the first parameter.
            signatureHandles[0] = new QSignatureTypeHandle(_reader, signatureBlob);
            EcmaMetadataHelpers.SkipType(ref signatureBlob);
            // NOTE(review): parameter handles share the same blob position advanced by the
            // QSignatureTypeHandle constructor - confirm it advances signatureBlob per type.
            for (int i = 0 ; i < numParameters; i++)
            {
                signatureHandles[i + 1] = new QSignatureTypeHandle(_reader, signatureBlob);
            }
            return signatureHandles;
        }
    }

    private readonly EcmaFormatRuntimeNamedTypeInfo _definingTypeInfo;   // type whose metadata defines the method
    private readonly MethodDefinitionHandle _methodHandle;               // MethodDef handle in _reader
    private readonly RuntimeTypeInfo _contextTypeInfo;                   // type supplying generic substitutions
    private readonly MetadataReader _reader;                             // reader that owns _methodHandle
    private readonly MethodDefinition _method;                           // cached MethodDef row
}
}
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using gcsv = Google.Cloud.ServiceDirectory.V1;
using sys = System;
namespace Google.Cloud.ServiceDirectory.V1
{
/// <summary>Resource name for the <c>Endpoint</c> resource.</summary>
// NOTE(review): generated code (see "DO NOT EDIT" banner above) - comments only added here.
public sealed partial class EndpointName : gax::IResourceName, sys::IEquatable<EndpointName>
{
    /// <summary>The possible contents of <see cref="EndpointName"/>.</summary>
    public enum ResourceNameType
    {
        /// <summary>An unparsed resource name.</summary>
        Unparsed = 0,

        /// <summary>
        /// A resource name with pattern
        /// <c>
        /// projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}/endpoints/{endpoint}</c>
        /// .
        /// </summary>
        ProjectLocationNamespaceServiceEndpoint = 1,
    }

    // Shared compiled template used by all Format/Parse operations for the known pattern.
    private static gax::PathTemplate s_projectLocationNamespaceServiceEndpoint = new gax::PathTemplate("projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}/endpoints/{endpoint}");

    /// <summary>Creates a <see cref="EndpointName"/> containing an unparsed resource name.</summary>
    /// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
    /// <returns>
    /// A new instance of <see cref="EndpointName"/> containing the provided <paramref name="unparsedResourceName"/>
    /// .
    /// </returns>
    public static EndpointName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
        new EndpointName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));

    /// <summary>
    /// Creates a <see cref="EndpointName"/> with the pattern
    /// <c>projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}/endpoints/{endpoint}</c>
    /// .
    /// </summary>
    /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="namespaceId">The <c>Namespace</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="serviceId">The <c>Service</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="endpointId">The <c>Endpoint</c> ID. Must not be <c>null</c> or empty.</param>
    /// <returns>A new instance of <see cref="EndpointName"/> constructed from the provided ids.</returns>
    public static EndpointName FromProjectLocationNamespaceServiceEndpoint(string projectId, string locationId, string namespaceId, string serviceId, string endpointId) =>
        new EndpointName(ResourceNameType.ProjectLocationNamespaceServiceEndpoint, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), namespaceId: gax::GaxPreconditions.CheckNotNullOrEmpty(namespaceId, nameof(namespaceId)), serviceId: gax::GaxPreconditions.CheckNotNullOrEmpty(serviceId, nameof(serviceId)), endpointId: gax::GaxPreconditions.CheckNotNullOrEmpty(endpointId, nameof(endpointId)));

    /// <summary>
    /// Formats the IDs into the string representation of this <see cref="EndpointName"/> with pattern
    /// <c>projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}/endpoints/{endpoint}</c>
    /// .
    /// </summary>
    /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="namespaceId">The <c>Namespace</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="serviceId">The <c>Service</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="endpointId">The <c>Endpoint</c> ID. Must not be <c>null</c> or empty.</param>
    /// <returns>
    /// The string representation of this <see cref="EndpointName"/> with pattern
    /// <c>projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}/endpoints/{endpoint}</c>
    /// .
    /// </returns>
    public static string Format(string projectId, string locationId, string namespaceId, string serviceId, string endpointId) =>
        FormatProjectLocationNamespaceServiceEndpoint(projectId, locationId, namespaceId, serviceId, endpointId);

    /// <summary>
    /// Formats the IDs into the string representation of this <see cref="EndpointName"/> with pattern
    /// <c>projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}/endpoints/{endpoint}</c>
    /// .
    /// </summary>
    /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="namespaceId">The <c>Namespace</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="serviceId">The <c>Service</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="endpointId">The <c>Endpoint</c> ID. Must not be <c>null</c> or empty.</param>
    /// <returns>
    /// The string representation of this <see cref="EndpointName"/> with pattern
    /// <c>projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}/endpoints/{endpoint}</c>
    /// .
    /// </returns>
    public static string FormatProjectLocationNamespaceServiceEndpoint(string projectId, string locationId, string namespaceId, string serviceId, string endpointId) =>
        s_projectLocationNamespaceServiceEndpoint.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), gax::GaxPreconditions.CheckNotNullOrEmpty(namespaceId, nameof(namespaceId)), gax::GaxPreconditions.CheckNotNullOrEmpty(serviceId, nameof(serviceId)), gax::GaxPreconditions.CheckNotNullOrEmpty(endpointId, nameof(endpointId)));

    /// <summary>Parses the given resource name string into a new <see cref="EndpointName"/> instance.</summary>
    /// <remarks>
    /// To parse successfully, the resource name must be formatted as one of the following:
    /// <list type="bullet">
    /// <item>
    /// <description>
    /// <c>projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}/endpoints/{endpoint}</c>
    /// </description>
    /// </item>
    /// </list>
    /// </remarks>
    /// <param name="endpointName">The resource name in string form. Must not be <c>null</c>.</param>
    /// <returns>The parsed <see cref="EndpointName"/> if successful.</returns>
    public static EndpointName Parse(string endpointName) => Parse(endpointName, false);

    /// <summary>
    /// Parses the given resource name string into a new <see cref="EndpointName"/> instance; optionally allowing an
    /// unparseable resource name.
    /// </summary>
    /// <remarks>
    /// To parse successfully, the resource name must be formatted as one of the following:
    /// <list type="bullet">
    /// <item>
    /// <description>
    /// <c>projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}/endpoints/{endpoint}</c>
    /// </description>
    /// </item>
    /// </list>
    /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
    /// </remarks>
    /// <param name="endpointName">The resource name in string form. Must not be <c>null</c>.</param>
    /// <param name="allowUnparsed">
    /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
    /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
    /// specified.
    /// </param>
    /// <returns>The parsed <see cref="EndpointName"/> if successful.</returns>
    public static EndpointName Parse(string endpointName, bool allowUnparsed) =>
        TryParse(endpointName, allowUnparsed, out EndpointName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");

    /// <summary>
    /// Tries to parse the given resource name string into a new <see cref="EndpointName"/> instance.
    /// </summary>
    /// <remarks>
    /// To parse successfully, the resource name must be formatted as one of the following:
    /// <list type="bullet">
    /// <item>
    /// <description>
    /// <c>projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}/endpoints/{endpoint}</c>
    /// </description>
    /// </item>
    /// </list>
    /// </remarks>
    /// <param name="endpointName">The resource name in string form. Must not be <c>null</c>.</param>
    /// <param name="result">
    /// When this method returns, the parsed <see cref="EndpointName"/>, or <c>null</c> if parsing failed.
    /// </param>
    /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
    public static bool TryParse(string endpointName, out EndpointName result) => TryParse(endpointName, false, out result);

    /// <summary>
    /// Tries to parse the given resource name string into a new <see cref="EndpointName"/> instance; optionally
    /// allowing an unparseable resource name.
    /// </summary>
    /// <remarks>
    /// To parse successfully, the resource name must be formatted as one of the following:
    /// <list type="bullet">
    /// <item>
    /// <description>
    /// <c>projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}/endpoints/{endpoint}</c>
    /// </description>
    /// </item>
    /// </list>
    /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
    /// </remarks>
    /// <param name="endpointName">The resource name in string form. Must not be <c>null</c>.</param>
    /// <param name="allowUnparsed">
    /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
    /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
    /// specified.
    /// </param>
    /// <param name="result">
    /// When this method returns, the parsed <see cref="EndpointName"/>, or <c>null</c> if parsing failed.
    /// </param>
    /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
    public static bool TryParse(string endpointName, bool allowUnparsed, out EndpointName result)
    {
        gax::GaxPreconditions.CheckNotNull(endpointName, nameof(endpointName));
        gax::TemplatedResourceName resourceName;
        // First try the single known pattern.
        if (s_projectLocationNamespaceServiceEndpoint.TryParseName(endpointName, out resourceName))
        {
            result = FromProjectLocationNamespaceServiceEndpoint(resourceName[0], resourceName[1], resourceName[2], resourceName[3], resourceName[4]);
            return true;
        }
        // Fall back to storing the raw string when unparsed names are permitted.
        if (allowUnparsed)
        {
            if (gax::UnparsedResourceName.TryParse(endpointName, out gax::UnparsedResourceName unparsedResourceName))
            {
                result = FromUnparsed(unparsedResourceName);
                return true;
            }
        }
        result = null;
        return false;
    }

    private EndpointName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string endpointId = null, string locationId = null, string namespaceId = null, string projectId = null, string serviceId = null)
    {
        Type = type;
        UnparsedResource = unparsedResourceName;
        EndpointId = endpointId;
        LocationId = locationId;
        NamespaceId = namespaceId;
        ProjectId = projectId;
        ServiceId = serviceId;
    }

    /// <summary>
    /// Constructs a new instance of a <see cref="EndpointName"/> class from the component parts of pattern
    /// <c>projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}/endpoints/{endpoint}</c>
    /// </summary>
    /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="namespaceId">The <c>Namespace</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="serviceId">The <c>Service</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="endpointId">The <c>Endpoint</c> ID. Must not be <c>null</c> or empty.</param>
    public EndpointName(string projectId, string locationId, string namespaceId, string serviceId, string endpointId) : this(ResourceNameType.ProjectLocationNamespaceServiceEndpoint, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), namespaceId: gax::GaxPreconditions.CheckNotNullOrEmpty(namespaceId, nameof(namespaceId)), serviceId: gax::GaxPreconditions.CheckNotNullOrEmpty(serviceId, nameof(serviceId)), endpointId: gax::GaxPreconditions.CheckNotNullOrEmpty(endpointId, nameof(endpointId)))
    {
    }

    /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
    public ResourceNameType Type { get; }

    /// <summary>
    /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
    /// unparsed resource name.
    /// </summary>
    public gax::UnparsedResourceName UnparsedResource { get; }

    /// <summary>
    /// The <c>Endpoint</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
    /// </summary>
    public string EndpointId { get; }

    /// <summary>
    /// The <c>Location</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
    /// </summary>
    public string LocationId { get; }

    /// <summary>
    /// The <c>Namespace</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
    /// </summary>
    public string NamespaceId { get; }

    /// <summary>
    /// The <c>Project</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
    /// </summary>
    public string ProjectId { get; }

    /// <summary>
    /// The <c>Service</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
    /// </summary>
    public string ServiceId { get; }

    /// <summary>Whether this instance contains a resource name with a known pattern.</summary>
    public bool IsKnownPattern => Type != ResourceNameType.Unparsed;

    /// <summary>The string representation of the resource name.</summary>
    /// <returns>The string representation of the resource name.</returns>
    public override string ToString()
    {
        switch (Type)
        {
            case ResourceNameType.Unparsed: return UnparsedResource.ToString();
            case ResourceNameType.ProjectLocationNamespaceServiceEndpoint: return s_projectLocationNamespaceServiceEndpoint.Expand(ProjectId, LocationId, NamespaceId, ServiceId, EndpointId);
            default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
        }
    }

    /// <summary>Returns a hash code for this resource name.</summary>
    // Equality and hashing are both delegated to the canonical string form.
    public override int GetHashCode() => ToString().GetHashCode();

    /// <inheritdoc/>
    public override bool Equals(object obj) => Equals(obj as EndpointName);

    /// <inheritdoc/>
    public bool Equals(EndpointName other) => ToString() == other?.ToString();

    /// <inheritdoc/>
    public static bool operator ==(EndpointName a, EndpointName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);

    /// <inheritdoc/>
    public static bool operator !=(EndpointName a, EndpointName b) => !(a == b);
}
public partial class Endpoint
{
    /// <summary>
    /// <see cref="gcsv::EndpointName"/>-typed view over the <see cref="Name"/> resource name property.
    /// </summary>
    public gcsv::EndpointName EndpointName
    {
        // An empty Name maps to a null typed view; unparseable names are preserved, not rejected.
        get => string.IsNullOrEmpty(Name) ? null : gcsv::EndpointName.Parse(Name, allowUnparsed: true);
        // Assigning null clears Name to the proto3 default (empty string).
        set => Name = value?.ToString() ?? "";
    }
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using JsonLD.Core;
using JsonLD.Util;
using Newtonsoft.Json.Linq;
namespace JsonLD.Core
{
internal class JsonLdUtils
{
// Upper bound on @context URLs processed at once; presumably guards remote
// context resolution against unbounded/cyclic fetches - confirm at usage sites.
private const int MaxContextUrls = 10;

// The JSON-LD keywords (including framing keywords such as @default/@embed)
// recognized by IsKeyword.
private static readonly IList<string> keywords = new[] {
    "@base",
    "@context",
    "@container",
    "@default",
    "@embed",
    "@explicit",
    "@graph",
    "@id",
    "@index",
    "@language",
    "@list",
    "@omitDefault",
    "@reverse",
    "@preserve",
    "@set",
    "@type",
    "@value",
    "@vocab"
};
/// <summary>Determines whether the given token is a JSON-LD keyword (or keyword alias).</summary>
/// <param name="key">The token to check.</param>
/// <returns>true if the token is a string naming a keyword; false otherwise.</returns>
internal static bool IsKeyword(JToken key)
{
    // Non-string tokens can never be keywords; otherwise check the keyword table.
    return IsString(key) && keywords.Contains((string)key);
}
/// <summary>
/// Recursively compares two JSON tokens for deep equality.
/// Objects compare by key set and per-key deep equality; arrays compare
/// element-wise (in order when <paramref name="listOrderMatters"/> is true,
/// otherwise as multisets); all other tokens fall back to a normalized
/// string comparison.
/// </summary>
public static bool DeepCompare(JToken v1, JToken v2, bool listOrderMatters)
{
    if (v1 == null)
    {
        return v2 == null;
    }
    else
    {
        if (v2 == null)
        {
            // v1 is non-null here, so the tokens differ.
            return v1 == null;
        }
        else
        {
            if (v1 is JObject && v2 is JObject)
            {
                JObject m1 = (JObject)v1;
                JObject m2 = (JObject)v2;
                if (m1.Count != m2.Count)
                {
                    return false;
                }
                // Equal counts plus every m1 key present-and-equal in m2 implies equality.
                foreach (string key in m1.GetKeys())
                {
                    if (!((IDictionary<string,JToken>)m2).ContainsKey(key) ||
                        !DeepCompare(m1[key], m2[key], listOrderMatters))
                    {
                        return false;
                    }
                }
                return true;
            }
            else
            {
                if (v1 is JArray && v2 is JArray)
                {
                    JArray l1 = (JArray)v1;
                    JArray l2 = (JArray)v2;
                    var l1Count = l1.Count;
                    var l2Count = l2.Count;
                    if (l1Count != l2Count)
                    {
                        return false;
                    }
                    // used to mark members of l2 that we have already matched to avoid
                    // matching the same item twice for lists that have duplicates
                    bool[] alreadyMatched = new bool[l2Count];
                    for (int i = 0; i < l1Count; i++)
                    {
                        JToken o1 = l1[i];
                        bool gotmatch = false;
                        if (listOrderMatters)
                        {
                            gotmatch = DeepCompare(o1, l2[i], listOrderMatters);
                        }
                        else
                        {
                            // Multiset match: find any unused element of l2 equal to o1.
                            for (int j = 0; j < l2Count; j++)
                            {
                                if (!alreadyMatched[j] && DeepCompare(o1, l2[j], listOrderMatters))
                                {
                                    alreadyMatched[j] = true;
                                    gotmatch = true;
                                    break;
                                }
                            }
                        }
                        if (!gotmatch)
                        {
                            return false;
                        }
                    }
                    return true;
                }
                else
                {
                    // Scalar fallback: compare normalized string forms.
                    // NOTE(review): the "http:" -> "https:" replacement makes IRI comparison
                    // scheme-insensitive; this is not part of the JSON-LD algorithm - confirm
                    // it is an intentional local workaround.
                    var v1String = v1.ToString().Replace("\r\n", "").Replace("\n", "").Replace("http:", "https:");
                    var v2String = v2.ToString().Replace("\r\n", "").Replace("\n", "").Replace("http:", "https:");
                    return v1String.Equals(v2String);
                }
            }
        }
    }
}
/// <summary>Deep-compares two tokens, ignoring the order of array elements.</summary>
public static bool DeepCompare(JToken v1, JToken v2)
{
    return DeepCompare(v1, v2, listOrderMatters: false);
}
/// <summary>Returns true if <paramref name="values"/> contains an element deep-equal to <paramref name="value"/>.</summary>
public static bool DeepContains(JArray values, JToken value)
{
    for (int i = 0; i < values.Count; i++)
    {
        if (DeepCompare(values[i], value, false))
        {
            return true;
        }
    }
    return false;
}
/// <summary>Appends <paramref name="value"/> to the array stored at <paramref name="key"/>, skipping duplicates.</summary>
internal static void MergeValue(JObject obj, string key, JToken value)
{
    MergeValue(obj, key, value, skipSetContainsCheck: false);
}

/// <summary>
/// Appends <paramref name="value"/> to the array stored at <paramref name="key"/>,
/// creating the array if needed. Duplicates are only filtered when
/// <paramref name="skipSetContainsCheck"/> is false and the value is not list-like.
/// </summary>
internal static void MergeValue(JObject obj, string key, JToken value, bool skipSetContainsCheck)
{
    if (obj == null)
    {
        return;
    }
    JArray values = (JArray)obj[key];
    if (values == null)
    {
        // First value for this key: start a fresh array.
        values = new JArray();
        obj[key] = values;
    }
    // @list entries and list-valued objects are always appended verbatim.
    bool alwaysAdd = skipSetContainsCheck
        || "@list".Equals(key)
        || (value is JObject && ((IDictionary<string, JToken>)value).ContainsKey("@list"));
    if (alwaysAdd || !DeepContains(values, value))
    {
        values.Add(value);
    }
}
/// <summary>
/// Merges <paramref name="value"/> into <paramref name="obj"/> under <paramref name="key"/>:
/// a missing entry is set directly; an existing scalar entry is promoted to an array
/// first; array values are concatenated element by element.
/// </summary>
/// <param name="obj">the object to merge into (no-op when null).</param>
/// <param name="key">the property to merge under.</param>
/// <param name="value">the value to merge.</param>
internal static void MergeCompactedValue(JObject obj, string
    key, JToken value)
{
    if (obj == null)
    {
        return;
    }
    JToken prop = obj[key];
    if (prop.IsNull())
    {
        obj[key] = value;
        return;
    }
    if (!(prop is JArray))
    {
        // BUG FIX: the promoted array was previously built but never written back
        // (and `prop` was left pointing at the scalar), so the casts below threw
        // InvalidCastException whenever the existing entry was not already an array.
        JArray tmp = new JArray();
        tmp.Add(prop);
        obj[key] = tmp;
        prop = tmp;
    }
    if (value is JArray)
    {
        JsonLD.Collections.AddAll(((JArray)prop), (JArray)value);
    }
    else
    {
        ((JArray)prop).Add(value);
    }
}
/// <summary>Returns true if the string looks like an absolute IRI (contains a scheme separator).</summary>
public static bool IsAbsoluteIri(string value)
{
    // TODO: this is a bit simplistic!
    return value != null && value.IndexOf(':') >= 0;
}
/// <summary>Returns true if the given value is a subject (node object) with properties.</summary>
/// <param name="v">the value to check.</param>
/// <returns>true if the value is a subject with properties, false if not.</returns>
internal static bool IsNode(JToken v)
{
    // A value is a subject if it is an object that carries none of @value, @set,
    // @list, and it either has more than one key or its single key is not @id.
    JObject obj = v as JObject;
    if (obj == null)
    {
        return false;
    }
    IDictionary<string, JToken> map = obj;
    if (map.ContainsKey("@value") || map.ContainsKey("@set") || map.ContainsKey("@list"))
    {
        return false;
    }
    return map.Count > 1 || !map.ContainsKey("@id");
}
/// <summary>Returns true if the given value is a subject reference.</summary>
/// <param name="v">the value to check.</param>
/// <returns>true if the value is an object whose only key is @id.</returns>
internal static bool IsNodeReference(JToken v)
{
    JObject obj = v as JObject;
    return obj != null
        && obj.Count == 1
        && ((IDictionary<string, JToken>)obj).ContainsKey("@id");
}
// TODO: fix this test
/// <summary>Returns true if the string is neither a keyword nor an absolute IRI.</summary>
public static bool IsRelativeIri(string value)
{
    return !(IsKeyword(value) || IsAbsoluteIri(value));
}
// //////////////////////////////////////////////////// OLD CODE BELOW
/// <summary>Adds a value to a subject; array values are added element by element.</summary>
/// <remarks>
/// If the value is a subject that already exists as a property of the given
/// subject, no deep merge is attempted: the duplicate is simply not added.
/// </remarks>
/// <param name="subject">the subject to add the value to.</param>
/// <param name="property">the property that relates the value to the subject.</param>
/// <param name="value">the value to add.</param>
/// <param name="propertyIsArray">true to always store the property as an array.</param>
/// <param name="allowDuplicate">true to add the value even if it is already present.</param>
internal static void AddValue(JObject subject, string property
    , JToken value, bool propertyIsArray, bool allowDuplicate)
{
    if (IsArray(value))
    {
        JArray items = (JArray)value;
        // An empty array still forces the property into existence when an
        // array-valued property was requested.
        if (items.Count == 0 && propertyIsArray && !subject.ContainsKey(property))
        {
            subject[property] = new JArray();
        }
        foreach (JToken item in items)
        {
            AddValue(subject, property, item, propertyIsArray, allowDuplicate);
        }
        return;
    }
    if (!subject.ContainsKey(property))
    {
        // First value for this property: store as a set or as a single value.
        if (propertyIsArray)
        {
            JArray wrapper = new JArray();
            wrapper.Add(value);
            subject[property] = wrapper;
        }
        else
        {
            subject[property] = value;
        }
        return;
    }
    // Check whether the subject already has the value (only when duplicates are
    // disallowed).
    bool hasValue = !allowDuplicate && HasValue(subject, property, value);
    // Promote the property to an array when the value is new or an array was requested.
    if (!IsArray(subject[property]) && (!hasValue || propertyIsArray))
    {
        JArray promoted = new JArray();
        promoted.Add(subject[property]);
        subject[property] = promoted;
    }
    if (!hasValue)
    {
        ((JArray)subject[property]).Add(value);
    }
}
/// <summary>Adds a value to a subject, allowing duplicates.</summary>
internal static void AddValue(JObject subject, string property
    , JToken value, bool propertyIsArray)
{
    AddValue(subject, property, value, propertyIsArray, allowDuplicate: true);
}

/// <summary>Adds a value to a subject as a single (non-array) entry, allowing duplicates.</summary>
internal static void AddValue(JObject subject, string property
    , JToken value)
{
    AddValue(subject, property, value, propertyIsArray: false, allowDuplicate: true);
}
/// <summary>Prepends a base IRI to the given relative IRI.</summary>
/// <remarks>
/// Resolution roughly follows RFC 3986 section 5: authority, path, query and
/// fragment of the relative reference are merged against the base.
/// </remarks>
/// <param name="baseobj">the base IRI, either as a string token or a pre-parsed <c>URL</c>.</param>
/// <param name="iri">the relative IRI.</param>
/// <returns>
/// the absolute IRI.
/// TODO: the URL class isn't as forgiving as the Node.js url parser,
/// we may need to re-implement the parser here to support the
/// flexibility required
/// </returns>
private static string PrependBase(JToken baseobj, string iri)
{
    // already an absolute IRI
    if (iri.IndexOf(":") != -1)
    {
        return iri;
    }
    // parse base if it is a string
    URL @base;
    if (IsString(baseobj))
    {
        @base = URL.Parse((string)baseobj);
    }
    else
    {
        // assume base is already a URL
        @base = baseobj.Value<URL>();
    }
    URL rel = URL.Parse(iri);
    // start hierarchical part
    string hierPart = @base.protocol;
    if (!string.Empty.Equals(rel.authority))
    {
        // relative reference supplies its own authority
        hierPart += "//" + rel.authority;
    }
    else
    {
        if (!string.Empty.Equals(@base.href))
        {
            // otherwise inherit the authority from the base
            hierPart += "//" + @base.authority;
        }
    }
    // per RFC3986 normalize
    string path;
    // IRI represents an absolute path
    if (rel.pathname.IndexOf("/") == 0)
    {
        path = rel.pathname;
    }
    else
    {
        path = @base.pathname;
        // append relative path to the end of the last directory from base
        if (!string.Empty.Equals(rel.pathname))
        {
            path = JsonLD.JavaCompat.Substring(path, 0, path.LastIndexOf("/") + 1);
            if (path.Length > 0 && !path.EndsWith("/"))
            {
                path += "/";
            }
            path += rel.pathname;
        }
    }
    // remove slashes anddots in path
    path = URL.RemoveDotSegments(path, !string.Empty.Equals(hierPart));
    // add query and hash
    if (!string.Empty.Equals(rel.query))
    {
        path += "?" + rel.query;
    }
    if (!string.Empty.Equals(rel.hash))
    {
        path += rel.hash;
    }
    string rval = hierPart + path;
    if (string.Empty.Equals(rval))
    {
        // empty resolution means "current document"
        return "./";
    }
    return rval;
}
/// <summary>Expands a language map.</summary>
/// <remarks>
/// Each entry of the map ("lang" -> value or array of string values) becomes one
/// expanded value object <c>{"@value": ..., "@language": lang}</c>; languages are
/// emitted in lexicographic order and lower-cased.
/// </remarks>
/// <param name="languageMap">the language map to expand.</param>
/// <returns>the expanded language map.</returns>
/// <exception cref="JsonLdError">thrown when a mapped value is not a string (SyntaxError).</exception>
/// <exception cref="JsonLD.Core.JsonLdError"></exception>
internal static JArray ExpandLanguageMap(JObject languageMap
    )
{
    JArray rval = new JArray();
    IList<string> keys = new List<string>(languageMap.GetKeys());
    keys.SortInPlace();
    // lexicographically sort languages
    foreach (string key in keys)
    {
        JToken val;
        // normalize a scalar entry to a one-element array
        if (!IsArray(languageMap[key]))
        {
            val = new JArray();
            ((JArray)val).Add(languageMap[key]);
        }
        else
        {
            val = (JArray)languageMap[key];
        }
        foreach (JToken item in val)
        {
            // only plain strings are legal in a language map
            if (!IsString(item))
            {
                throw new JsonLdError(JsonLdError.Error.SyntaxError);
            }
            JObject tmp = new JObject();
            tmp["@value"] = item;
            tmp["@language"] = key.ToLower();
            rval.Add(tmp);
        }
    }
    return rval;
}
/// <summary>Throws an exception if the given value is not a valid @type value.</summary>
/// <remarks>
/// A valid @type value is a string, a subject reference (object with @id), an
/// empty object, or an array of strings / subject references.
/// </remarks>
/// <param name="v">the value to check.</param>
/// <returns>true when the value is valid (otherwise an exception is thrown).</returns>
/// <exception cref="ArgumentNullException">thrown when the token is null.</exception>
/// <exception cref="JsonLD.Core.JsonLdError">thrown with SyntaxError when the value is invalid.</exception>
internal static bool ValidateTypeValue(JToken v)
{
    if (v.IsNull())
    {
        // BUG FIX: the message was previously passed as the parameter *name*,
        // producing a garbled exception text.
        throw new ArgumentNullException("v", "\"@type\" value cannot be null");
    }
    // must be a string, subject reference, or empty object
    // BUG FIX: the empty-object test previously cast the JObject to JArray
    // ("((JArray)v).Count == 0"), which threw InvalidCastException for any
    // object without an @id key.
    if (v.Type == JTokenType.String || (v is JObject && (((JObject)v).ContainsKey
        ("@id") || ((JObject)v).Count == 0)))
    {
        return true;
    }
    // must be an array of strings / subject references
    bool isValid = false;
    if (v is JArray)
    {
        isValid = true;
        foreach (JToken i in (JArray)v)
        {
            if (!(i.Type == JTokenType.String || i is JObject && ((JObject)i).ContainsKey
                ("@id")))
            {
                isValid = false;
                break;
            }
        }
    }
    if (!isValid)
    {
        throw new JsonLdError(JsonLdError.Error.SyntaxError);
    }
    return true;
}
/// <summary>Removes a base IRI from the given absolute IRI.</summary>
/// <remarks>
/// Produces a relative reference by stripping the base's scheme/authority, dropping
/// shared leading path segments, and emitting "../" for each remaining base segment.
/// </remarks>
/// <param name="baseobj">the base IRI, either as a string token or a pre-parsed <c>URL</c>.</param>
/// <param name="iri">the absolute IRI.</param>
/// <returns>the relative IRI if relative to base, otherwise the absolute IRI.</returns>
private static string RemoveBase(JToken baseobj, string iri)
{
    URL @base;
    if (IsString(baseobj))
    {
        @base = URL.Parse((string)baseobj);
    }
    else
    {
        @base = baseobj.Value<URL>();
    }
    // establish base root
    string root = string.Empty;
    if (!string.Empty.Equals(@base.href))
    {
        root += (@base.protocol) + "//" + @base.authority;
    }
    else
    {
        // support network-path reference with empty base
        if (iri.IndexOf("//") != 0)
        {
            root += "//";
        }
    }
    // IRI not relative to base
    if (iri.IndexOf(root) != 0)
    {
        return iri;
    }
    // remove root from IRI and parse remainder
    URL rel = URL.Parse(JsonLD.JavaCompat.Substring(iri, root.Length));
    // remove path segments that match
    IList<string> baseSegments = _split(@base.normalizedPath, "/");
    IList<string> iriSegments = _split(rel.normalizedPath, "/");
    while (baseSegments.Count > 0 && iriSegments.Count > 0)
    {
        // stop at the first differing segment
        if (!baseSegments[0].Equals(iriSegments[0]))
        {
            break;
        }
        if (baseSegments.Count > 0)
        {
            baseSegments.RemoveAt(0);
        }
        if (iriSegments.Count > 0)
        {
            iriSegments.RemoveAt(0);
        }
    }
    // use '../' for each non-matching base segment
    string rval = string.Empty;
    if (baseSegments.Count > 0)
    {
        // don't count the last segment if it isn't a path (doesn't end in
        // '/')
        // don't count empty first segment, it means base began with '/'
        if ([email protected]("/") || string.Empty.Equals(baseSegments[0]))
        {
            baseSegments.RemoveAt(baseSegments.Count - 1);
        }
        for (int i = 0; i < baseSegments.Count; ++i)
        {
            rval += "../";
        }
    }
    // prepend remaining segments
    rval += _join(iriSegments, "/");
    // add query and hash
    if (!string.Empty.Equals(rel.query))
    {
        rval += "?" + rel.query;
    }
    if (!string.Empty.Equals(rel.hash))
    {
        rval += rel.hash;
    }
    if (string.Empty.Equals(rval))
    {
        // base and IRI were identical
        rval = "./";
    }
    return rval;
}
/// <summary>Removes the @preserve keywords as the last step of the framing algorithm.
/// </summary>
/// <remarks>
/// Recurses through arrays, @lists, and object properties; @preserve wrappers are
/// unwrapped ("@null" becomes null), and single-element arrays are collapsed when
/// compactArrays is enabled and the property has no container mapping.
/// </remarks>
/// <param name="ctx">the active context used to compact the input.</param>
/// <param name="input">the framed, compacted output.</param>
/// <param name="opts">the compaction options used.</param>
/// <returns>the resulting output.</returns>
/// <exception cref="JsonLdError">JsonLdError</exception>
/// <exception cref="JsonLD.Core.JsonLdError"></exception>
internal static JToken RemovePreserve(Context ctx, JToken input, JsonLdOptions opts
    )
{
    // recurse through arrays
    if (IsArray(input))
    {
        JArray output = new JArray();
        foreach (JToken i in (JArray)input)
        {
            JToken result = RemovePreserve(ctx, i, opts);
            // drop nulls from arrays
            if (!result.IsNull())
            {
                output.Add(result);
            }
        }
        input = output;
    }
    else
    {
        if (IsObject(input))
        {
            // remove @preserve
            if (((JObject)input).ContainsKey("@preserve"))
            {
                // "@null" marks an explicitly removed value
                if (((JObject)input)["@preserve"].SafeCompare("@null"))
                {
                    return null;
                }
                return ((JObject)input)["@preserve"];
            }
            // skip @values
            if (IsValue(input))
            {
                return input;
            }
            // recurse through @lists
            if (IsList(input))
            {
                ((JObject)input)["@list"] = RemovePreserve(ctx, ((JObject)input)["@list"], opts);
                return input;
            }
            // recurse through properties
            // NOTE(review): properties are reassigned while iterating GetKeys();
            // this assumes GetKeys() snapshots the key list -- confirm.
            foreach (string prop in input.GetKeys())
            {
                JToken result = RemovePreserve(ctx, ((JObject)input)[prop], opts
                    );
                string container = ctx.GetContainer(prop);
                // compact singleton arrays when allowed and no container mapping exists
                if (opts.GetCompactArrays() && IsArray(result) && ((JArray)result).Count ==
                    1 && container == null)
                {
                    result = ((JArray)result)[0];
                }
                ((JObject)input)[prop] = result;
            }
        }
    }
    return input;
}
/// <summary>Replicates JavaScript's Array.prototype.join.</summary>
/// <param name="list">the segments to join.</param>
/// <param name="joiner">the separator placed between segments.</param>
/// <returns>the joined string (empty for an empty list).</returns>
private static string _join(IList<string> list, string joiner)
{
    // string.Join yields exactly the same result as the manual concatenation:
    // no separator for a single element, empty string for an empty list.
    return string.Join(joiner, list);
}
/// <summary>
/// replicates the functionality of javascript .split, which has different
/// results to java's String.split if there is a trailing /
/// </summary>
/// <param name="string">the string to split.</param>
/// <param name="delim">the delimiter to split on.</param>
/// <returns>the list of segments, including a trailing empty entry when the string ends with "/".</returns>
private static IList<string> _split(string @string, string delim)
{
    // NOTE(review): relies on a JavaCompat Split(string) extension; whether the
    // delimiter is treated literally or as a regex is not visible here -- confirm.
    IList<string> rval = new List<string>(System.Linq.Enumerable.ToList(@string.Split
        (delim)));
    if (@string.EndsWith("/"))
    {
        // javascript .split includes a blank entry if the string ends with
        // the delimiter, java .split does not so we need to add it manually
        rval.Add(string.Empty);
    }
    return rval;
}
/// <summary>Compares two strings first based on length and then lexicographically.</summary>
/// <param name="a">the first string.</param>
/// <param name="b">the second string.</param>
/// <returns>-1 if a sorts before b, 1 if after, 0 if equal.</returns>
internal static int CompareShortestLeast(string a, string b)
{
    // Shorter strings always sort first; only equal lengths fall through to
    // an ordinal comparison.
    if (a.Length != b.Length)
    {
        return a.Length < b.Length ? -1 : 1;
    }
    return System.Math.Sign(string.CompareOrdinal(a, b));
}
/// <summary>Determines if the given value is a property of the given subject.</summary>
/// <remarks>
/// Compares against each element when the property holds an array or an @list;
/// otherwise compares directly (array-valued <paramref name="value"/> never matches
/// a scalar property).
/// </remarks>
/// <param name="subject">the subject to check.</param>
/// <param name="property">the property to check.</param>
/// <param name="value">the value to check.</param>
/// <returns>true if the value exists, false if not.</returns>
internal static bool HasValue(JObject subject, string property
    , JToken value)
{
    bool rval = false;
    if (HasProperty(subject, property))
    {
        JToken val = subject[property];
        bool isList = IsList(val);
        if (isList || val is JArray)
        {
            if (isList)
            {
                // NOTE(review): the @list entry is cast to JObject even though
                // @list conventionally holds a JArray; confirm this path is
                // correct / exercised before relying on it.
                val = (JObject)val["@list"];
            }
            foreach (JToken i in (JArray)val)
            {
                if (CompareValues(value, i))
                {
                    rval = true;
                    break;
                }
            }
        }
        else
        {
            // scalar property: an array value can never equal it
            if (!(value is JArray))
            {
                rval = CompareValues(value, val);
            }
        }
    }
    return rval;
}
/// <summary>Returns true if the subject has the property with a non-empty value.</summary>
private static bool HasProperty(JObject subject, string property
    )
{
    JToken value;
    if (!subject.TryGetValue(property, out value))
    {
        return false;
    }
    // An empty array counts as "no value".
    JArray asArray = value as JArray;
    return asArray == null || asArray.Count > 0;
}
/// <summary>Compares two JSON-LD values for equality.</summary>
/// <remarks>
/// Two JSON-LD values are considered equal if:
/// 1. they are equal primitives, OR
/// 2. they are both @value objects with the same @value, @type, @language and
/// @index, OR
/// 3. they both have equal @ids.
/// </remarks>
/// <param name="v1">the first value.</param>
/// <param name="v2">the second value.</param>
/// <returns>true if v1 and v2 are considered equal, false if not.</returns>
internal static bool CompareValues(JToken v1, JToken v2)
{
    if (v1.Equals(v2))
    {
        return true;
    }
    if (IsValue(v1) && IsValue(v2))
    {
        JObject o1 = (JObject)v1;
        JObject o2 = (JObject)v2;
        if (Obj.Equals(o1["@value"], o2["@value"])
            && Obj.Equals(o1["@type"], o2["@type"])
            && Obj.Equals(o1["@language"], o2["@language"])
            && Obj.Equals(o1["@index"], o2["@index"]))
        {
            return true;
        }
    }
    JObject m1 = v1 as JObject;
    JObject m2 = v2 as JObject;
    if (m1 != null && m2 != null
        && m1.ContainsKey("@id") && m2.ContainsKey("@id")
        && m1["@id"].Equals(m2["@id"]))
    {
        return true;
    }
    return false;
}
/// <summary>Removes a value from a subject, collapsing the property to a scalar if possible.</summary>
/// <param name="subject">the subject.</param>
/// <param name="property">the property that relates the value to the subject.</param>
/// <param name="value">the value to remove.</param>
internal static void RemoveValue(JObject subject, string property
    , JObject value)
{
    RemoveValue(subject, property, value, propertyIsArray: false);
}
/// <summary>Removes every occurrence of <paramref name="value"/> from the property.</summary>
/// <param name="subject">the subject.</param>
/// <param name="property">the property that relates the value to the subject.</param>
/// <param name="value">the value to remove.</param>
/// <param name="propertyIsArray">true to keep the property as an array even with one element.</param>
internal static void RemoveValue(JObject subject, string property
    , JObject value, bool propertyIsArray)
{
    // filter out value
    JArray values = new JArray();
    if (subject[property] is JArray)
    {
        foreach (JToken e in ((JArray)subject[property]))
        {
            if (!e.SafeCompare(value))
            {
                // BUG FIX: previously added `value` (the token being removed)
                // instead of the surviving element `e`, so the property ended
                // up filled with copies of the removed value.
                values.Add(e);
            }
        }
    }
    else
    {
        if (!subject[property].SafeCompare(value))
        {
            values.Add(subject[property]);
        }
    }
    if (values.Count == 0)
    {
        // nothing left: drop the property entirely
        JsonLD.Collections.Remove(subject, property);
    }
    else
    {
        if (values.Count == 1 && !propertyIsArray)
        {
            subject[property] = values[0];
        }
        else
        {
            subject[property] = values;
        }
    }
}
/// <summary>Returns true if the given value is a blank node.</summary>
/// <param name="v">the value to check.</param>
/// <returns>true if the value is a blank node, false if not.</returns>
internal static bool IsBlankNode(JToken v)
{
    // A blank node is an object that either carries an @id beginning with "_:",
    // or has no keys at all / none of the @value, @set, @list keywords.
    JObject obj = v as JObject;
    if (obj == null)
    {
        return false;
    }
    if (obj.ContainsKey("@id"))
    {
        return ((string)obj["@id"]).StartsWith("_:");
    }
    return obj.Count == 0
        || !(obj.ContainsKey("@value") || obj.ContainsKey("@set") || obj.ContainsKey("@list"));
}
/// <summary>Resolves external @context URLs in place.</summary>
/// <remarks>
/// Each instance of @context in the input that refers to a URL will be replaced
/// with the JSON @context found at that URL. Resolution is recursive and guarded
/// against cycles (see <see cref="MaxContextUrls"/>).
/// </remarks>
/// <param name="input">the JSON-LD input with possible contexts.</param>
/// <exception cref="JsonLdError">JsonLdError</exception>
/// <exception cref="JsonLD.Core.JsonLdError"></exception>
internal static void ResolveContextUrls(JToken input)
{
    // start with an empty cycle-tracking map
    Resolve(input, new JObject());
}
/// <summary>
/// Recursively fetches and inlines remote @context URLs found in <paramref name="input"/>.
/// <paramref name="cycles"/> tracks URLs already being resolved to detect context loops.
/// </summary>
/// <exception cref="JsonLD.Core.JsonLdError">on cycle, malformed URL, fetch failure, or too many contexts.</exception>
private static void Resolve(JToken input, JObject cycles)
{
    // crude URL validator for candidate @context references
    Pattern regex = Pattern.Compile("(http|https)://(\\w+:{0,1}\\w*@)?(\\S+)(:[0-9]+)?(/|/([\\w#!:.?+=&%@!\\-/]))?"
        );
    if (cycles.Count > MaxContextUrls)
    {
        throw new JsonLdError(JsonLdError.Error.UnknownError);
    }
    // for tracking the URLs to resolve
    JObject urls = new JObject();
    // find all URLs in the given input
    if (!FindContextUrls(input, urls, false))
    {
        // finished
        FindContextUrls(input, urls, true);
    }
    // queue all unresolved URLs
    JArray queue = new JArray();
    foreach (string url in urls.GetKeys())
    {
        // `false` marks a URL that has not been fetched yet
        if (urls[url].SafeCompare(false))
        {
            // validate URL
            if (!regex.Matcher(url).Matches())
            {
                throw new JsonLdError(JsonLdError.Error.UnknownError);
            }
            queue.Add(url);
        }
    }
    // resolve URLs in queue
    int count = queue.Count;
    foreach (string url_1 in queue)
    {
        // check for context URL cycle
        if (cycles.ContainsKey(url_1))
        {
            throw new JsonLdError(JsonLdError.Error.UnknownError);
        }
        // each branch of the recursion gets its own copy of the cycle map
        JObject _cycles = (JObject)Clone(cycles);
        _cycles[url_1] = true;
        try
        {
            // fetch the remote document (network I/O)
            JObject ctx = (JObject)new DocumentLoader().LoadDocument(url_1).Document;
            if (!ctx.ContainsKey("@context"))
            {
                // treat a document without @context as an empty context
                ctx = new JObject();
                ctx["@context"] = new JObject();
            }
            // recursively resolve any nested remote contexts
            Resolve(ctx, _cycles);
            urls[url_1] = ctx["@context"];
            count -= 1;
            if (count == 0)
            {
                // all URLs fetched: substitute them back into the input
                FindContextUrls(input, urls, true);
            }
        }
        //catch (JsonParseException)
        //{
        //    throw new JsonLdError(JsonLdError.Error.UnknownError);
        //}
        //catch (MalformedURLException)
        //{
        //    throw new JsonLdError(JsonLdError.Error.UnknownError);
        //}
        catch (IOException)
        {
            throw new JsonLdError(JsonLdError.Error.UnknownError);
        }
    }
}
/// <summary>Finds all @context URLs in the given JSON-LD input.</summary>
/// <remarks>
/// Walks the input recursively. In collection mode (replace == false) every string
/// @context is recorded in <paramref name="urls"/> with value false; in replace mode
/// the recorded contexts are substituted back into the input.
/// </remarks>
/// <param name="input">the JSON-LD input.</param>
/// <param name="urls">a map of URLs (url => false/@contexts).</param>
/// <param name="replace">true to replace the URLs in the given input with the</param>
/// <contexts>from the urls map, false not to.</contexts>
/// <returns>true if new URLs to resolve were found, false if not.</returns>
private static bool FindContextUrls(JToken input, JObject urls
    , bool replace)
{
    // snapshot so we can detect whether this call discovered new URLs
    int count = urls.Count;
    if (input is JArray)
    {
        foreach (JToken i in (JArray)input)
        {
            FindContextUrls(i, urls, replace);
        }
        return count < urls.Count;
    }
    else
    {
        if (input is JObject)
        {
            foreach (string key in input.GetKeys())
            {
                if (!"@context".Equals(key))
                {
                    // not a context: recurse into the value
                    FindContextUrls(((JObject)input)[key], urls, replace);
                    continue;
                }
                // get @context
                JToken ctx = ((JObject)input)[key];
                // array @context
                if (ctx is JArray)
                {
                    // `length` and `i` are adjusted in-loop when a flattened
                    // context is spliced into the array
                    int length = ((JArray)ctx).Count;
                    for (int i = 0; i < length; i++)
                    {
                        JToken _ctx = ((JArray)ctx)[i];
                        if (_ctx.Type == JTokenType.String)
                        {
                            // replace w/@context if requested
                            if (replace)
                            {
                                _ctx = urls[(string)_ctx];
                                if (_ctx is JArray)
                                {
                                    // add flattened context
                                    ((JArray)ctx).RemoveAt(i);
                                    JsonLD.Collections.AddAllObj(((JArray)ctx), (ICollection)_ctx);
                                    // skip past the inserted elements
                                    i += ((JArray)_ctx).Count;
                                    length += ((JArray)_ctx).Count;
                                }
                                else
                                {
                                    ((JArray)ctx)[i] = _ctx;
                                }
                            }
                            else
                            {
                                // @context URL found
                                if (!urls.ContainsKey((string)_ctx))
                                {
                                    urls[(string)_ctx] = false;
                                }
                            }
                        }
                    }
                }
                else
                {
                    // string @context
                    if (ctx.Type == JTokenType.String)
                    {
                        // replace w/@context if requested
                        if (replace)
                        {
                            ((JObject)input)[key] = urls[(string)ctx];
                        }
                        else
                        {
                            // @context URL found
                            if (!urls.ContainsKey((string)ctx))
                            {
                                urls[(string)ctx] = false;
                            }
                        }
                    }
                }
            }
            return (count < urls.Count);
        }
    }
    // scalars contain no contexts
    return false;
}
/// <summary>Returns a deep copy of the given JSON token.</summary>
internal static JToken Clone(JToken value)
{
    return value.DeepClone();
}
/// <summary>Returns true if the given value is a JSON-LD Array</summary>
/// <param name="v">the value to check.</param>
/// <returns>true if the token is a JSON array.</returns>
internal static bool IsArray(JToken v)
{
    return v is JArray;
}

/// <summary>Returns true if the given value is a JSON-LD List</summary>
/// <param name="v">the value to check.</param>
/// <returns>true if the token is an object carrying an @list key.</returns>
internal static bool IsList(JToken v)
{
    JObject obj = v as JObject;
    return obj != null && ((IDictionary<string, JToken>)obj).ContainsKey("@list");
}

/// <summary>Returns true if the given value is a JSON-LD Object</summary>
/// <param name="v">the value to check.</param>
/// <returns>true if the token is a JSON object.</returns>
internal static bool IsObject(JToken v)
{
    return v is JObject;
}

/// <summary>Returns true if the given value is a JSON-LD value</summary>
/// <param name="v">the value to check.</param>
/// <returns>true if the token is an object carrying an @value key.</returns>
internal static bool IsValue(JToken v)
{
    JObject obj = v as JObject;
    return obj != null && ((IDictionary<string, JToken>)obj).ContainsKey("@value");
}

/// <summary>Returns true if the given value is a JSON-LD string</summary>
/// <param name="v">the value to check.</param>
/// <returns>true if the token is a JSON string.</returns>
internal static bool IsString(JToken v)
{
    // TODO: should this return true for arrays of strings as well?
    return v.Type == JTokenType.String;
}
}
}
| |
/*
* Copyright 2006 Jeremias Maerki.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
namespace ZXing.Datamatrix.Encoder
{
/// <summary>
/// Symbol Character Placement Program. Adapted from Annex M.1 in ISO/IEC 16022:2000(E).
/// </summary>
public class DefaultPlacement
{
private readonly String codewords;
private readonly int numrows;
private readonly int numcols;
private readonly byte[] bits;
/// <summary>
/// Main constructor
/// </summary>
/// <param name="codewords">the codewords to place</param>
/// <param name="numcols">the number of columns</param>
/// <param name="numrows">the number of rows</param>
public DefaultPlacement(String codewords, int numcols, int numrows)
{
    this.codewords = codewords;
    this.numcols = numcols;
    this.numrows = numrows;
    // Row-major module grid, one byte per module:
    // 0 = bit clear, 1 = bit set, 2 = not yet written.
    this.bits = new byte[numcols * numrows];
    SupportClass.Fill(this.bits, (byte)2); //Initialize with "not set" value
}
/// <summary>
/// Gets the number of rows in the symbol.
/// </summary>
public int Numrows
{
    get { return numrows; }
}
/// <summary>
/// Gets the number of columns in the symbol.
/// </summary>
public int Numcols
{
    get { return numcols; }
}
/// <summary>
/// Gets the placed module grid (row-major, one byte per module).
/// </summary>
public byte[] Bits
{
    get { return bits; }
}
/// <summary>
/// get a specific bit
/// </summary>
/// <param name="col">module column</param>
/// <param name="row">module row</param>
/// <returns>true if the module at (col, row) is set</returns>
public bool getBit(int col, int row)
{
    // row-major indexing into the flat module grid
    return bits[row * numcols + col] == 1;
}
// Writes the module at (col, row): 1 when set, 0 when clear.
private void setBit(int col, int row, bool bit)
{
    bits[row * numcols + col] = (byte)(bit ? 1 : 0);
}
// True while the module is still in its initial "not set" state (2).
private bool noBit(int col, int row)
{
    return bits[row * numcols + col] == 2;
}
/// <summary>
/// Places all codewords into the module grid following the symbol character
/// placement algorithm of ISO/IEC 16022 Annex M.1: special corner patterns are
/// interleaved with alternating upward and downward diagonal sweeps.
/// </summary>
public void place()
{
    int pos = 0;
    int row = 4;
    int col = 0;
    do
    {
        // repeatedly first check for one of the special corner cases, then...
        if ((row == numrows) && (col == 0))
        {
            corner1(pos++);
        }
        if ((row == numrows - 2) && (col == 0) && ((numcols % 4) != 0))
        {
            corner2(pos++);
        }
        if ((row == numrows - 2) && (col == 0) && (numcols % 8 == 4))
        {
            corner3(pos++);
        }
        if ((row == numrows + 4) && (col == 2) && ((numcols % 8) == 0))
        {
            corner4(pos++);
        }
        // sweep upward diagonally, inserting successive characters...
        do
        {
            if ((row < numrows) && (col >= 0) && noBit(col, row))
            {
                utah(row, col, pos++);
            }
            row -= 2;
            col += 2;
        } while (row >= 0 && (col < numcols));
        row++;
        col += 3;
        // and then sweep downward diagonally, inserting successive characters, ...
        do
        {
            if ((row >= 0) && (col < numcols) && noBit(col, row))
            {
                utah(row, col, pos++);
            }
            row += 2;
            col -= 2;
        } while ((row < numrows) && (col >= 0));
        row += 3;
        col++;
        // ...until the entire array is scanned
    } while ((row < numrows) || (col < numcols));
    // Lastly, if the lower right-hand corner is untouched, fill in fixed pattern
    if (noBit(numcols - 1, numrows - 1))
    {
        setBit(numcols - 1, numrows - 1, true);
        setBit(numcols - 2, numrows - 2, true);
    }
}
// Places one bit of codeword `pos` at (row, col). `bit` is 1-based with bit 1
// being the most significant: the mask below is 1 << (8 - bit).
private void module(int row, int col, int pos, int bit)
{
    // Coordinates that fell off the top/left edge wrap back into the symbol
    // per the placement algorithm's boundary rule.
    if (row < 0)
    {
        row += numrows;
        col += 4 - ((numrows + 4) % 8);
    }
    if (col < 0)
    {
        col += numcols;
        row += 4 - ((numcols + 4) % 8);
    }
    // Note the conversion:
    int v = codewords[pos];
    v &= 1 << (8 - bit);
    setBit(col, row, v != 0);
}
/// <summary>
/// Places the 8 bits of a utah-shaped symbol character in ECC200.
/// </summary>
/// <param name="row">The row.</param>
/// <param name="col">The col.</param>
/// <param name="pos">character position</param>
private void utah(int row, int col, int pos)
{
    // The 8 modules of one codeword form the L-shaped "utah" pattern anchored
    // at (row, col); bits are numbered MSB (1) to LSB (8).
    module(row - 2, col - 2, pos, 1);
    module(row - 2, col - 1, pos, 2);
    module(row - 1, col - 2, pos, 3);
    module(row - 1, col - 1, pos, 4);
    module(row - 1, col, pos, 5);
    module(row, col - 2, pos, 6);
    module(row, col - 1, pos, 7);
    module(row, col, pos, 8);
}
// Corner patterns 1-4: fixed module layouts from ISO/IEC 16022 Annex M.1 used
// when a codeword straddles the symbol corners. Do not reorder these calls --
// bit numbers (1 = MSB .. 8 = LSB) are tied to specific module positions.
private void corner1(int pos)
{
    module(numrows - 1, 0, pos, 1);
    module(numrows - 1, 1, pos, 2);
    module(numrows - 1, 2, pos, 3);
    module(0, numcols - 2, pos, 4);
    module(0, numcols - 1, pos, 5);
    module(1, numcols - 1, pos, 6);
    module(2, numcols - 1, pos, 7);
    module(3, numcols - 1, pos, 8);
}
private void corner2(int pos)
{
    module(numrows - 3, 0, pos, 1);
    module(numrows - 2, 0, pos, 2);
    module(numrows - 1, 0, pos, 3);
    module(0, numcols - 4, pos, 4);
    module(0, numcols - 3, pos, 5);
    module(0, numcols - 2, pos, 6);
    module(0, numcols - 1, pos, 7);
    module(1, numcols - 1, pos, 8);
}
private void corner3(int pos)
{
    module(numrows - 3, 0, pos, 1);
    module(numrows - 2, 0, pos, 2);
    module(numrows - 1, 0, pos, 3);
    module(0, numcols - 2, pos, 4);
    module(0, numcols - 1, pos, 5);
    module(1, numcols - 1, pos, 6);
    module(2, numcols - 1, pos, 7);
    module(3, numcols - 1, pos, 8);
}
private void corner4(int pos)
{
    module(numrows - 1, 0, pos, 1);
    module(numrows - 1, numcols - 1, pos, 2);
    module(0, numcols - 3, pos, 3);
    module(0, numcols - 2, pos, 4);
    module(0, numcols - 1, pos, 5);
    module(1, numcols - 3, pos, 6);
    module(1, numcols - 2, pos, 7);
    module(1, numcols - 1, pos, 8);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Tester.PCL.Http.Request;
using Tester.PCL.Http.Response;
using unirest_net.http;
using UniHttpRequest = unirest_net.request.HttpRequest;
using UniHttpMethod = System.Net.Http.HttpMethod;
namespace Tester.PCL.Http.Client
{
public class UnirestClient: IHttpClient
{
// Process-wide default client instance, created eagerly at type initialization.
public static IHttpClient SharedClient { get; set; }
static UnirestClient() {
    SharedClient = new UnirestClient();
}
#region Execute methods
/// <summary>
/// Synchronously executes the request, materializing the response body as a string.
/// Fires the before-request and after-response events around the call.
/// </summary>
public HttpResponse ExecuteAsString(HttpRequest request)
{
    raiseOnBeforeHttpRequestEvent(request);
    var uniRequest = ConvertRequest(request);
    var response = ConvertResponse(uniRequest.asString());
    raiseOnAfterHttpResponseEvent(response);
    return response;
}
/// <summary>Asynchronously executes the request, materializing the body as a string.</summary>
public Task<HttpResponse> ExecuteAsStringAsync(HttpRequest request)
{
    return Task.Factory.StartNew(() => ExecuteAsString(request));
}
/// <summary>
/// Synchronously executes the request, materializing the response body as a binary stream.
/// Fires the before-request and after-response events around the call.
/// </summary>
public HttpResponse ExecuteAsBinary(HttpRequest request)
{
    raiseOnBeforeHttpRequestEvent(request);
    var uniRequest = ConvertRequest(request);
    var response = ConvertResponse(uniRequest.asBinary());
    raiseOnAfterHttpResponseEvent(response);
    return response;
}
/// <summary>Asynchronously executes the request, materializing the body as a binary stream.</summary>
public Task<HttpResponse> ExecuteAsBinaryAsync(HttpRequest request)
{
    // BUG FIX: previously delegated to ExecuteAsString, so callers asking for a
    // binary response received a string-decoded body instead.
    return Task.Factory.StartNew(() => ExecuteAsBinary(request));
}
#endregion
#region Http request and response events
// Raised immediately before a request is handed to the underlying client.
public event OnBeforeHttpRequestEventHandler OnBeforeHttpRequestEvent;
// Raised immediately after a response has been converted.
public event OnAfterHttpResponseEventHandler OnAfterHttpResponseEvent;
// Fires the before-request event when both a subscriber and a request exist.
private void raiseOnBeforeHttpRequestEvent(HttpRequest request)
{
    if ((null != OnBeforeHttpRequestEvent) && (null != request))
        OnBeforeHttpRequestEvent(this, request);
}
// Fires the after-response event when both a subscriber and a response exist.
private void raiseOnAfterHttpResponseEvent(HttpResponse response)
{
    if ((null != OnAfterHttpResponseEvent) && (null != response))
        OnAfterHttpResponseEvent(this, response);
}
#endregion
#region Http methods
/// <summary>Builds a GET request with headers and optional basic-auth credentials.</summary>
public HttpRequest Get(string queryUrl, Dictionary<string, string> headers, string username = null, string password = null)
{
    return new HttpRequest(HttpMethod.GET, queryUrl, headers, username, password);
}
/// <summary>Builds a bare GET request for the given URL.</summary>
public HttpRequest Get(string queryUrl)
{
    return new HttpRequest(HttpMethod.GET, queryUrl);
}
/// <summary>Builds a bare POST request for the given URL.</summary>
public HttpRequest Post(string queryUrl)
{
    return new HttpRequest(HttpMethod.POST, queryUrl);
}
/// <summary>Builds a bare PUT request for the given URL.</summary>
public HttpRequest Put(string queryUrl)
{
    return new HttpRequest(HttpMethod.PUT, queryUrl);
}
/// <summary>Builds a bare DELETE request for the given URL.</summary>
public HttpRequest Delete(string queryUrl)
{
    return new HttpRequest(HttpMethod.DELETE, queryUrl);
}
/// <summary>Builds a bare PATCH request for the given URL.</summary>
public HttpRequest Patch(string queryUrl)
{
    return new HttpRequest(HttpMethod.PATCH, queryUrl);
}
public HttpRequest Post(string queryUrl, Dictionary<string, string> headers, Dictionary<string, object> formParameters, string username = null,
string password = null)
{
return new HttpRequest(HttpMethod.POST, queryUrl, headers, formParameters, username, password);
}
public HttpRequest PostBody(string queryUrl, Dictionary<string, string> headers, string body, string username = null, string password = null)
{
return new HttpRequest(HttpMethod.POST, queryUrl, headers, body, username, password);
}
public HttpRequest Put(string queryUrl, Dictionary<string, string> headers, Dictionary<string, object> formParameters, string username = null,
string password = null)
{
return new HttpRequest(HttpMethod.PUT, queryUrl, headers, formParameters, username, password);
}
public HttpRequest PutBody(string queryUrl, Dictionary<string, string> headers, string body, string username = null, string password = null)
{
return new HttpRequest(HttpMethod.PUT, queryUrl, headers, body, username, password);
}
public HttpRequest Patch(string queryUrl, Dictionary<string, string> headers, Dictionary<string, object> formParameters, string username = null,
string password = null)
{
return new HttpRequest(HttpMethod.PATCH, queryUrl, headers, formParameters, username, password);
}
public HttpRequest PatchBody(string queryUrl, Dictionary<string, string> headers, string body, string username = null, string password = null)
{
return new HttpRequest(HttpMethod.PATCH, queryUrl, headers, body, username, password);
}
public HttpRequest Delete(string queryUrl, Dictionary<string, string> headers, Dictionary<string, object> formParameters, string username = null,
string password = null)
{
return new HttpRequest(HttpMethod.DELETE, queryUrl, headers, formParameters, username, password);
}
public HttpRequest DeleteBody(string queryUrl, Dictionary<string, string> headers, string body, string username = null, string password = null)
{
return new HttpRequest(HttpMethod.DELETE, queryUrl, headers, body, username, password);
}
#endregion
#region Helper methods
private static UniHttpMethod ConvertHttpMethod(HttpMethod method)
{
switch (method)
{
case HttpMethod.GET:
case HttpMethod.POST:
case HttpMethod.PUT:
case HttpMethod.PATCH:
case HttpMethod.DELETE:
return new UniHttpMethod(method.ToString().ToUpperInvariant());
default:
throw new ArgumentOutOfRangeException("Unkown method" + method.ToString());
}
}
private static UniHttpRequest ConvertRequest(HttpRequest request)
{
var uniMethod = ConvertHttpMethod(request.HttpMethod);
var queryUrl = request.QueryUrl;
//instantiate unirest request object
UniHttpRequest uniRequest = new UniHttpRequest(uniMethod,queryUrl);
//set request payload
if (request.Body != null)
{
uniRequest.body(request.Body);
}
else if (request.FormParameters != null)
{
if (request.FormParameters.Any(p => p.Value is Stream || p.Value is FileStreamInfo))
{
//multipart
foreach (var kvp in request.FormParameters)
{
if (kvp.Value is FileStreamInfo){
var fileInfo = (FileStreamInfo) kvp.Value;
uniRequest.field(kvp.Key,fileInfo.FileStream);
continue;
}
uniRequest.field(kvp.Key,kvp.Value);
}
}
else
{
//URL Encode params
var paramsString = string.Join("&",
request.FormParameters.Select(kvp =>
string.Format("{0}={1}", Uri.EscapeDataString(kvp.Key), Uri.EscapeDataString(kvp.Value.ToString()))));
uniRequest.body(paramsString);
uniRequest.header("Content-Type", "application/x-www-form-urlencoded");
}
}
//set request headers
Dictionary<string, Object> headers = request.Headers.ToDictionary(item=> item.Key,item=> (Object) item.Value);
uniRequest.headers(headers);
//Set basic auth credentials if any
if (!string.IsNullOrWhiteSpace(request.Username))
{
uniRequest.basicAuth(request.Username, request.Password);
}
return uniRequest;
}
private static HttpResponse ConvertResponse(HttpResponse<Stream> binaryResponse)
{
return new HttpResponse
{
Headers = binaryResponse.Headers,
RawBody = binaryResponse.Body,
StatusCode = binaryResponse.Code
};
}
private static HttpResponse ConvertResponse(HttpResponse<string> stringResponse)
{
return new HttpStringResponse
{
Headers = stringResponse.Headers,
RawBody = stringResponse.Raw,
Body = stringResponse.Body,
StatusCode = stringResponse.Code
};
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
**
** Purpose: Implements a generic, dynamically sized list as an
** array.
**
**
===========================================================*/
namespace System.Collections.Generic {
using System;
using System.Runtime;
using System.Runtime.Versioning;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Collections.ObjectModel;
using System.Security.Permissions;
// Implements a variable-size List that uses an array of objects to store the
// elements. A List has a capacity, which is the allocated length
// of the internal array. As elements are added to a List, the capacity
// of the List is automatically increased as required by reallocating the
// internal array.
//
[DebuggerTypeProxy(typeof(Mscorlib_CollectionDebugView<>))]
[DebuggerDisplay("Count = {Count}")]
[Serializable]
public class List<T> : IList<T>, System.Collections.IList, IReadOnlyList<T>
{
        // Capacity used for the first allocation once the first element is added.
        private const int _defaultCapacity = 4;

        // Backing store; its length is the list's capacity and may exceed _size.
        private T[] _items;
        [ContractPublicPropertyName("Count")]
        // Number of elements actually in use within _items.
        private int _size;
        // Bumped on every mutation; enumerators snapshot it to detect concurrent modification.
        private int _version;
        [NonSerialized]
        // Lazily created lock object backing ICollection.SyncRoot.
        private Object _syncRoot;

        // Shared zero-length array so empty lists allocate nothing.
        static readonly T[] _emptyArray = new T[0];
        // Constructs a List. The list is initially empty and has a capacity
        // of zero. Upon adding the first element to the list the capacity is
        // increased to _defaultCapacity, and then increased in multiples of two
        // as required.
        public List() {
            _items = _emptyArray;
        }

        // Constructs a List with a given initial capacity. The list is
        // initially empty, but will have room for the given number of elements
        // before any reallocations are required.
        //
        public List(int capacity) {
            if (capacity < 0) ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.capacity, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
            Contract.EndContractBlock();

            // Capacity 0 shares the static empty array instead of allocating.
            if (capacity == 0)
                _items = _emptyArray;
            else
                _items = new T[capacity];
        }
        // Constructs a List, copying the contents of the given collection. The
        // size and capacity of the new list will both be equal to the size of the
        // given collection.
        //
        public List(IEnumerable<T> collection) {
            if (collection==null)
                ThrowHelper.ThrowArgumentNullException(ExceptionArgument.collection);
            Contract.EndContractBlock();

            // Fast path: an ICollection<T> exposes Count and CopyTo, so the
            // contents can be copied in a single bulk operation.
            ICollection<T> c = collection as ICollection<T>;
            if( c != null) {
                int count = c.Count;
                if (count == 0)
                {
                    _items = _emptyArray;
                }
                else {
                    _items = new T[count];
                    c.CopyTo(_items, 0);
                    _size = count;
                }
            }
            else {
                // Slow path: unknown length, append one element at a time.
                _size = 0;
                _items = _emptyArray;
                // This enumerable could be empty. Let Add allocate a new array, if needed.
                // Note it will also go to _defaultCapacity first, not 1, then 2, etc.

                using(IEnumerator<T> en = collection.GetEnumerator()) {
                    while(en.MoveNext()) {
                        Add(en.Current);
                    }
                }
            }
        }
        // Gets and sets the capacity of this list. The capacity is the size of
        // the internal array used to hold items. When set, the internal
        // array of the list is reallocated to the given capacity.
        // Setting a capacity smaller than Count throws; setting a different
        // capacity reallocates (or reverts to the shared empty array for 0).
        //
        public int Capacity {
            get {
                Contract.Ensures(Contract.Result<int>() >= 0);
                return _items.Length;
            }
            set {
                if (value < _size) {
                    ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.value, ExceptionResource.ArgumentOutOfRange_SmallCapacity);
                }
                Contract.EndContractBlock();

                if (value != _items.Length) {
                    if (value > 0) {
                        T[] newItems = new T[value];
                        if (_size > 0) {
                            Array.Copy(_items, 0, newItems, 0, _size);
                        }
                        _items = newItems;
                    }
                    else {
                        _items = _emptyArray;
                    }
                }
            }
        }

        // Read-only property describing how many elements are in the List.
        public int Count {
            get {
                Contract.Ensures(Contract.Result<int>() >= 0);
                return _size;
            }
        }
        // A List is never fixed-size; it grows on demand.
        bool System.Collections.IList.IsFixedSize {
            get { return false; }
        }

        // Is this List read-only?
        bool ICollection<T>.IsReadOnly {
            get { return false; }
        }

        bool System.Collections.IList.IsReadOnly {
            get { return false; }
        }

        // Is this List synchronized (thread-safe)?
        bool System.Collections.ICollection.IsSynchronized {
            get { return false; }
        }

        // Synchronization root for this object. Created lazily and exactly once
        // via an interlocked compare-exchange, so concurrent callers agree on it.
        Object System.Collections.ICollection.SyncRoot {
            get {
                if( _syncRoot == null) {
                    System.Threading.Interlocked.CompareExchange<Object>(ref _syncRoot, new Object(), null);
                }
                return _syncRoot;
            }
        }
        // Sets or Gets the element at the given index.
        //
        public T this[int index] {
            get {
                // Following trick can reduce the range check by one:
                // the unsigned cast makes negative indices compare as huge values.
                if ((uint) index >= (uint)_size) {
                    ThrowHelper.ThrowArgumentOutOfRangeException();
                }
                Contract.EndContractBlock();
                return _items[index];
            }

            set {
                if ((uint) index >= (uint)_size) {
                    ThrowHelper.ThrowArgumentOutOfRangeException();
                }
                Contract.EndContractBlock();
                _items[index] = value;
                _version++;
            }
        }

        // True when the boxed value can legally be stored in a T slot.
        private static bool IsCompatibleObject(object value) {
            // Non-null values are fine. Only accept nulls if T is a class or Nullable<U>.
            // Note that default(T) is not equal to null for value types except when T is Nullable<U>.
            return ((value is T) || (value == null && default(T) == null));
        }

        // Non-generic indexer: delegates to the typed indexer, translating a bad
        // cast into the standard wrong-value-type argument exception.
        Object System.Collections.IList.this[int index] {
            get {
                return this[index];
            }
            set {
                ThrowHelper.IfNullAndNullsAreIllegalThenThrow<T>(value, ExceptionArgument.value);

                try {
                    this[index] = (T)value;
                }
                catch (InvalidCastException) {
                    ThrowHelper.ThrowWrongValueTypeArgumentException(value, typeof(T));
                }
            }
        }
        // Adds the given object to the end of this list. The size of the list is
        // increased by one. If required, the capacity of the list is doubled
        // before adding the new element.
        //
        public void Add(T item) {
            if (_size == _items.Length) EnsureCapacity(_size + 1);
            _items[_size++] = item;
            _version++;
        }

        // Non-generic Add: appends after a runtime type check and returns the
        // index at which the item was stored.
        int System.Collections.IList.Add(Object item)
        {
            ThrowHelper.IfNullAndNullsAreIllegalThenThrow<T>(item, ExceptionArgument.item);

            try {
                Add((T) item);
            }
            catch (InvalidCastException) {
                ThrowHelper.ThrowWrongValueTypeArgumentException(item, typeof(T));
            }

            return Count - 1;
        }
        // Adds the elements of the given collection to the end of this list. If
        // required, the capacity of the list is increased to twice the previous
        // capacity or the new size, whichever is larger.
        // Implemented as an insertion at the end of the list.
        //
        public void AddRange(IEnumerable<T> collection) {
            Contract.Ensures(Count >= Contract.OldValue(Count));

            InsertRange(_size, collection);
        }

        // Returns a read-only wrapper over this list (a live view, not a copy).
        public ReadOnlyCollection<T> AsReadOnly() {
            Contract.Ensures(Contract.Result<ReadOnlyCollection<T>>() != null);
            return new ReadOnlyCollection<T>(this);
        }
        // Searches a section of the list for a given element using a binary search
        // algorithm. Elements of the list are compared to the search value using
        // the given IComparer interface. If comparer is null, elements of
        // the list are compared to the search value using the IComparable
        // interface, which in that case must be implemented by all elements of the
        // list and the given search value. This method assumes that the given
        // section of the list is already sorted; if this is not the case, the
        // result will be incorrect.
        //
        // The method returns the index of the given value in the list. If the
        // list does not contain the given value, the method returns a negative
        // integer. The bitwise complement operator (~) can be applied to a
        // negative result to produce the index of the first element (if any) that
        // is larger than the given search value. This is also the index at which
        // the search value should be inserted into the list in order for the list
        // to remain sorted.
        //
        // The method uses the Array.BinarySearch method to perform the
        // search.
        //
        public int BinarySearch(int index, int count, T item, IComparer<T> comparer) {
            if (index < 0)
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
            if (count < 0)
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
            if (_size - index < count)
                ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidOffLen);
            Contract.Ensures(Contract.Result<int>() <= index + count);
            Contract.EndContractBlock();

            return Array.BinarySearch<T>(_items, index, count, item, comparer);
        }

        // Binary search over the whole list using the default comparer.
        public int BinarySearch(T item)
        {
            Contract.Ensures(Contract.Result<int>() <= Count);
            return BinarySearch(0, Count, item, null);
        }

        // Binary search over the whole list using the given comparer.
        public int BinarySearch(T item, IComparer<T> comparer)
        {
            Contract.Ensures(Contract.Result<int>() <= Count);
            return BinarySearch(0, Count, item, comparer);
        }
        // Clears the contents of List. Capacity is retained; only the element
        // slots are reset.
        public void Clear() {
            if (_size > 0)
            {
                Array.Clear(_items, 0, _size); // Don't need to doc this but we clear the elements so that the gc can reclaim the references.
                _size = 0;
            }
            _version++;
        }
        // Contains returns true if the specified element is in the List.
        // It does a linear, O(n) search. Equality is determined by calling
        // item.Equals().
        //
        public bool Contains(T item) {
            // Null is matched by reference against each slot; non-null values go
            // through EqualityComparer<T>.Default.
            if ((Object) item == null) {
                for(int i=0; i<_size; i++)
                    if ((Object) _items[i] == null)
                        return true;
                return false;
            }
            else {
                EqualityComparer<T> c = EqualityComparer<T>.Default;
                for(int i=0; i<_size; i++) {
                    if (c.Equals(_items[i], item)) return true;
                }
                return false;
            }
        }

        // Non-generic Contains: false for values that can never be a T.
        bool System.Collections.IList.Contains(Object item)
        {
            if(IsCompatibleObject(item)) {
                return Contains((T) item);
            }
            return false;
        }

        // Maps every element through the converter into a new list of equal size.
        public List<TOutput> ConvertAll<TOutput>(Converter<T,TOutput> converter) {
            if( converter == null) {
                ThrowHelper.ThrowArgumentNullException(ExceptionArgument.converter);
            }
            Contract.EndContractBlock();

            // Write directly into the destination's backing array, then publish
            // the size once all conversions have succeeded.
            List<TOutput> list = new List<TOutput>(_size);
            for( int i = 0; i< _size; i++) {
                list._items[i] = converter(_items[i]);
            }
            list._size = _size;
            return list;
        }
        // Copies this List into array, which must be of a
        // compatible array type.
        //
        public void CopyTo(T[] array) {
            CopyTo(array, 0);
        }

        // Copies this List into array, which must be of a
        // compatible array type. Non-generic overload: only rank-1 arrays are
        // supported; element-type mismatches surface as ArgumentException.
        //
        void System.Collections.ICollection.CopyTo(Array array, int arrayIndex) {
            if ((array != null) && (array.Rank != 1)) {
                ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_RankMultiDimNotSupported);
            }
            Contract.EndContractBlock();

            try {
                // Array.Copy will check for NULL.
                Array.Copy(_items, 0, array, arrayIndex, _size);
            }
            catch(ArrayTypeMismatchException){
                ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidArrayType);
            }
        }

        // Copies a section of this list to the given array at the given index.
        //
        // The method uses the Array.Copy method to copy the elements.
        //
        public void CopyTo(int index, T[] array, int arrayIndex, int count) {
            if (_size - index < count) {
                ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidOffLen);
            }
            Contract.EndContractBlock();

            // Delegate rest of error checking to Array.Copy.
            Array.Copy(_items, index, array, arrayIndex, count);
        }

        // Copies the whole list into array starting at arrayIndex.
        public void CopyTo(T[] array, int arrayIndex) {
            // Delegate rest of error checking to Array.Copy.
            Array.Copy(_items, 0, array, arrayIndex, _size);
        }
        // Ensures that the capacity of this list is at least the given minimum
        // value. If the currect capacity of the list is less than min, the
        // capacity is increased to twice the current capacity or to min,
        // whichever is larger.
        private void EnsureCapacity(int min) {
            if (_items.Length < min) {
                // Double the capacity; start from _defaultCapacity for an empty list.
                int newCapacity = _items.Length == 0? _defaultCapacity : _items.Length * 2;
                // Allow the list to grow to maximum possible capacity (~2G elements) before encountering overflow.
                // Note that this check works even when _items.Length overflowed thanks to the (uint) cast
                if ((uint)newCapacity > Array.MaxArrayLength) newCapacity = Array.MaxArrayLength;
                if (newCapacity < min) newCapacity = min;
                Capacity = newCapacity;
            }
        }
public bool Exists(Predicate<T> match) {
return FindIndex(match) != -1;
}
        // Returns the first element matching the predicate, or default(T) if
        // no element matches.
        public T Find(Predicate<T> match) {
            if( match == null) {
                ThrowHelper.ThrowArgumentNullException(ExceptionArgument.match);
            }
            Contract.EndContractBlock();

            for(int i = 0 ; i < _size; i++) {
                if(match(_items[i])) {
                    return _items[i];
                }
            }
            return default(T);
        }

        // Returns a new list containing every element matching the predicate
        // (possibly empty, never null).
        public List<T> FindAll(Predicate<T> match) {
            if( match == null) {
                ThrowHelper.ThrowArgumentNullException(ExceptionArgument.match);
            }
            Contract.EndContractBlock();

            List<T> list = new List<T>();
            for(int i = 0 ; i < _size; i++) {
                if(match(_items[i])) {
                    list.Add(_items[i]);
                }
            }
            return list;
        }
        // Index of the first element matching the predicate, or -1.
        public int FindIndex(Predicate<T> match) {
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(Contract.Result<int>() < Count);
            return FindIndex(0, _size, match);
        }

        // Same, searching from startIndex to the end of the list.
        public int FindIndex(int startIndex, Predicate<T> match) {
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(Contract.Result<int>() < startIndex + Count);
            return FindIndex(startIndex, _size - startIndex, match);
        }

        // Same, searching count elements starting at startIndex.
        public int FindIndex(int startIndex, int count, Predicate<T> match) {
            if( (uint)startIndex > (uint)_size ) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.startIndex, ExceptionResource.ArgumentOutOfRange_Index);
            }

            if (count < 0 || startIndex > _size - count) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_Count);
            }

            if( match == null) {
                ThrowHelper.ThrowArgumentNullException(ExceptionArgument.match);
            }
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(Contract.Result<int>() < startIndex + count);
            Contract.EndContractBlock();

            int endIndex = startIndex + count;
            for( int i = startIndex; i < endIndex; i++) {
                if( match(_items[i])) return i;
            }
            return -1;
        }
        // Returns the last element matching the predicate, or default(T).
        public T FindLast(Predicate<T> match) {
            if( match == null) {
                ThrowHelper.ThrowArgumentNullException(ExceptionArgument.match);
            }
            Contract.EndContractBlock();

            for(int i = _size - 1 ; i >= 0; i--) {
                if(match(_items[i])) {
                    return _items[i];
                }
            }
            return default(T);
        }

        // Index of the last element matching the predicate, or -1.
        public int FindLastIndex(Predicate<T> match) {
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(Contract.Result<int>() < Count);
            return FindLastIndex(_size - 1, _size, match);
        }

        // Same, searching backwards from startIndex to the head of the list.
        public int FindLastIndex(int startIndex, Predicate<T> match) {
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(Contract.Result<int>() <= startIndex);
            return FindLastIndex(startIndex, startIndex + 1, match);
        }

        // Same, searching count elements backwards starting at startIndex.
        public int FindLastIndex(int startIndex, int count, Predicate<T> match) {
            if( match == null) {
                ThrowHelper.ThrowArgumentNullException(ExceptionArgument.match);
            }
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(Contract.Result<int>() <= startIndex);
            Contract.EndContractBlock();

            if(_size == 0) {
                // Special case for 0 length List
                if( startIndex != -1) {
                    ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.startIndex, ExceptionResource.ArgumentOutOfRange_Index);
                }
            }
            else {
                // Make sure we're not out of range
                if ( (uint)startIndex >= (uint)_size) {
                    ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.startIndex, ExceptionResource.ArgumentOutOfRange_Index);
                }
            }

            // 2nd half of this also catches when startIndex == MAXINT, so MAXINT - 0 + 1 == -1, which is < 0.
            if (count < 0 || startIndex - count + 1 < 0) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_Count);
            }

            int endIndex = startIndex - count;
            for( int i = startIndex; i > endIndex; i--) {
                if( match(_items[i])) {
                    return i;
                }
            }
            return -1;
        }
        // Invokes the action on every element in order. Starting with desktop
        // 4.5 compatibility mode, mutating the list from inside the action
        // aborts the loop and throws.
        public void ForEach(Action<T> action) {
            if( action == null) {
                ThrowHelper.ThrowArgumentNullException(ExceptionArgument.action);
            }
            Contract.EndContractBlock();

            int version = _version;

            for(int i = 0 ; i < _size; i++) {
                if (version != _version && BinaryCompatibility.TargetsAtLeast_Desktop_V4_5) {
                    break;
                }
                action(_items[i]);
            }

            if (version != _version && BinaryCompatibility.TargetsAtLeast_Desktop_V4_5)
                ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion);
        }

        // Returns an enumerator for this list with the given
        // permission for removal of elements. If modifications made to the list
        // while an enumeration is in progress, the MoveNext and
        // GetObject methods of the enumerator will throw an exception.
        //
        public Enumerator GetEnumerator() {
            return new Enumerator(this);
        }

        /// <internalonly/>
        IEnumerator<T> IEnumerable<T>.GetEnumerator() {
            return new Enumerator(this);
        }

        System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() {
            return new Enumerator(this);
        }
        // Returns a shallow copy of count elements starting at index as a new list.
        public List<T> GetRange(int index, int count) {
            if (index < 0) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
            }

            if (count < 0) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
            }

            if (_size - index < count) {
                ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidOffLen);
            }
            Contract.Ensures(Contract.Result<List<T>>() != null);
            Contract.EndContractBlock();

            List<T> list = new List<T>(count);
            Array.Copy(_items, index, list._items, 0, count);
            list._size = count;
            return list;
        }
        // Returns the index of the first occurrence of a given value in a range of
        // this list. The list is searched forwards from beginning to end.
        // The elements of the list are compared to the given value using the
        // Object.Equals method.
        //
        // This method uses the Array.IndexOf method to perform the
        // search.
        //
        public int IndexOf(T item) {
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(Contract.Result<int>() < Count);
            return Array.IndexOf(_items, item, 0, _size);
        }

        // Non-generic IndexOf: -1 for values that can never be a T.
        int System.Collections.IList.IndexOf(Object item)
        {
            if(IsCompatibleObject(item)) {
                return IndexOf((T)item);
            }
            return -1;
        }

        // Returns the index of the first occurrence of a given value in a range of
        // this list. The list is searched forwards, starting at index
        // index and ending at count number of elements. The
        // elements of the list are compared to the given value using the
        // Object.Equals method.
        //
        // This method uses the Array.IndexOf method to perform the
        // search.
        //
        public int IndexOf(T item, int index) {
            if (index > _size)
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_Index);
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(Contract.Result<int>() < Count);
            Contract.EndContractBlock();
            return Array.IndexOf(_items, item, index, _size - index);
        }

        // Returns the index of the first occurrence of a given value in a range of
        // this list. The list is searched forwards, starting at index
        // index and upto count number of elements. The
        // elements of the list are compared to the given value using the
        // Object.Equals method.
        //
        // This method uses the Array.IndexOf method to perform the
        // search.
        //
        public int IndexOf(T item, int index, int count) {
            if (index > _size)
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_Index);

            if (count <0 || index > _size - count) ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_Count);
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(Contract.Result<int>() < Count);
            Contract.EndContractBlock();

            return Array.IndexOf(_items, item, index, count);
        }
        // Inserts an element into this list at a given index. The size of the list
        // is increased by one. If required, the capacity of the list is doubled
        // before inserting the new element.
        //
        public void Insert(int index, T item) {
            // Note that insertions at the end are legal.
            if ((uint) index > (uint)_size) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_ListInsert);
            }
            Contract.EndContractBlock();

            if (_size == _items.Length) EnsureCapacity(_size + 1);
            // Shift the tail right by one to open a slot at index.
            if (index < _size) {
                Array.Copy(_items, index, _items, index + 1, _size - index);
            }
            _items[index] = item;
            _size++;
            _version++;
        }

        // Non-generic Insert: inserts after a runtime type check.
        void System.Collections.IList.Insert(int index, Object item)
        {
            ThrowHelper.IfNullAndNullsAreIllegalThenThrow<T>(item, ExceptionArgument.item);

            try {
                Insert(index, (T) item);
            }
            catch (InvalidCastException) {
                ThrowHelper.ThrowWrongValueTypeArgumentException(item, typeof(T));
            }
        }
        // Inserts the elements of the given collection at a given index. If
        // required, the capacity of the list is increased to twice the previous
        // capacity or the new size, whichever is larger. Ranges may be added
        // to the end of the list by setting index to the List's size.
        //
        public void InsertRange(int index, IEnumerable<T> collection) {
            if (collection==null) {
                ThrowHelper.ThrowArgumentNullException(ExceptionArgument.collection);
            }

            if ((uint)index > (uint)_size) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_Index);
            }
            Contract.EndContractBlock();

            ICollection<T> c = collection as ICollection<T>;
            if( c != null ) {    // if collection is ICollection<T>
                int count = c.Count;
                if (count > 0) {
                    EnsureCapacity(_size + count);
                    // Open a gap of `count` slots at `index` by shifting the tail right.
                    if (index < _size) {
                        Array.Copy(_items, index, _items, index + count, _size - index);
                    }

                    // If we're inserting a List into itself, we want to be able to deal with that.
                    // The two copies below reconstruct the original contents into the gap
                    // from the (already shifted) array itself.
                    if (this == c) {
                        // Copy first part of _items to insert location
                        Array.Copy(_items, 0, _items, index, index);
                        // Copy last part of _items back to inserted location
                        Array.Copy(_items, index+count, _items, index*2, _size-index);
                    }
                    else {
                        T[] itemsToInsert = new T[count];
                        c.CopyTo(itemsToInsert, 0);
                        itemsToInsert.CopyTo(_items, index);
                    }
                    _size += count;
                }
            }
            else {
                // Unknown length: insert one element at a time (O(n) per element).
                using(IEnumerator<T> en = collection.GetEnumerator()) {
                    while(en.MoveNext()) {
                        Insert(index++, en.Current);
                    }
                }
            }
            _version++;
        }
        // Returns the index of the last occurrence of a given value in a range of
        // this list. The list is searched backwards, starting at the end
        // and ending at the first element in the list. The elements of the list
        // are compared to the given value using the Object.Equals method.
        //
        // This method uses the Array.LastIndexOf method to perform the
        // search.
        //
        public int LastIndexOf(T item)
        {
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(Contract.Result<int>() < Count);
            if (_size == 0) {  // Special case for empty list
                return -1;
            }
            else {
                return LastIndexOf(item, _size - 1, _size);
            }
        }

        // Returns the index of the last occurrence of a given value in a range of
        // this list. The list is searched backwards, starting at index
        // index and ending at the first element in the list. The
        // elements of the list are compared to the given value using the
        // Object.Equals method.
        //
        // This method uses the Array.LastIndexOf method to perform the
        // search.
        //
        public int LastIndexOf(T item, int index)
        {
            if (index >= _size)
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_Index);
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(((Count == 0) && (Contract.Result<int>() == -1)) || ((Count > 0) && (Contract.Result<int>() <= index)));
            Contract.EndContractBlock();
            return LastIndexOf(item, index, index + 1);
        }

        // Returns the index of the last occurrence of a given value in a range of
        // this list. The list is searched backwards, starting at index
        // index and upto count elements. The elements of
        // the list are compared to the given value using the Object.Equals
        // method.
        //
        // This method uses the Array.LastIndexOf method to perform the
        // search.
        //
        public int LastIndexOf(T item, int index, int count) {
            if ((Count != 0) && (index < 0)) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
            }

            if ((Count !=0) && (count < 0)) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
            }
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(((Count == 0) && (Contract.Result<int>() == -1)) || ((Count > 0) && (Contract.Result<int>() <= index)));
            Contract.EndContractBlock();

            if (_size == 0) {  // Special case for empty list
                return -1;
            }

            if (index >= _size) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_BiggerThanCollection);
            }

            if (count > index + 1) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_BiggerThanCollection);
            }

            return Array.LastIndexOf(_items, item, index, count);
        }
        // Removes the first occurrence of the given element, if found. Returns
        // true when an element was removed, false when the item was not present.
        // (The original reference-source comment here described RemoveAt.)
        //
        public bool Remove(T item) {
            int index = IndexOf(item);
            if (index >= 0) {
                RemoveAt(index);
                return true;
            }

            return false;
        }

        // Non-generic Remove: no-op for values that can never be a T.
        void System.Collections.IList.Remove(Object item)
        {
            if(IsCompatibleObject(item)) {
                Remove((T) item);
            }
        }

        // This method removes all items which matches the predicate.
        // The complexity is O(n).
        public int RemoveAll(Predicate<T> match) {
            if( match == null) {
                ThrowHelper.ThrowArgumentNullException(ExceptionArgument.match);
            }
            Contract.Ensures(Contract.Result<int>() >= 0);
            Contract.Ensures(Contract.Result<int>() <= Contract.OldValue(Count));
            Contract.EndContractBlock();

            int freeIndex = 0;   // the first free slot in items array

            // Find the first item which needs to be removed.
            while( freeIndex < _size && !match(_items[freeIndex])) freeIndex++;
            if( freeIndex >= _size) return 0;

            // Compact the survivors down over the removed slots in a single pass.
            int current = freeIndex + 1;
            while( current < _size) {
                // Find the first item which needs to be kept.
                while( current < _size && match(_items[current])) current++;

                if( current < _size) {
                    // copy item to the free slot.
                    _items[freeIndex++] = _items[current++];
                }
            }

            // Clear the tail so the GC can reclaim the removed references.
            Array.Clear(_items, freeIndex, _size - freeIndex);
            int result = _size - freeIndex;
            _size = freeIndex;
            _version++;
            return result;
        }
        // Removes the element at the given index. The size of the list is
        // decreased by one.
        //
        public void RemoveAt(int index) {
            if ((uint)index >= (uint)_size) {
                ThrowHelper.ThrowArgumentOutOfRangeException();
            }
            Contract.EndContractBlock();
            _size--;
            // Shift the tail left over the removed slot.
            if (index < _size) {
                Array.Copy(_items, index + 1, _items, index, _size - index);
            }
            // Clear the vacated slot so the GC can reclaim the reference.
            _items[_size] = default(T);
            _version++;
        }
// Removes a range of elements from this list.
//
public void RemoveRange(int index, int count) {
if (index < 0) {
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
}
if (count < 0) {
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
}
if (_size - index < count)
ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidOffLen);
Contract.EndContractBlock();
if (count > 0) {
int i = _size;
_size -= count;
if (index < _size) {
Array.Copy(_items, index + count, _items, index, _size - index);
}
Array.Clear(_items, _size, count);
_version++;
}
}
        // Reverses the elements in this list.
        public void Reverse() {
            Reverse(0, Count);
        }

        // Reverses the elements in a range of this list. Following a call to this
        // method, an element in the range given by index and count
        // which was previously located at index i will now be located at
        // index index + (index + count - i - 1).
        //
        public void Reverse(int index, int count) {
            if (index < 0) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
            }

            if (count < 0) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
            }

            if (_size - index < count)
                ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidOffLen);
            Contract.EndContractBlock();

            // The non-generic Array.Reverse is not used because it does not perform
            // well for non-primitive value types.
            // If/when a generic Array.Reverse<T> becomes available, the below code
            // can be deleted and replaced with a call to Array.Reverse<T>.
            // In-place two-pointer swap from both ends toward the middle.
            int i = index;
            int j = index + count - 1;
            T[] array = _items;
            while (i < j)
            {
                T temp = array[i];
                array[i] = array[j];
                array[j] = temp;
                i++;
                j--;
            }
            _version++;
        }
// Sorts the elements in this list. Uses the default comparer and
// Array.Sort.
        /// <summary>Sorts the whole list using the default comparer.</summary>
        public void Sort()
        {
            Sort(0, Count, null);
        }
// Sorts the elements in this list. Uses Array.Sort with the
// provided comparer.
        /// <summary>
        /// Sorts the whole list with the provided comparer (null means the
        /// default comparer / IComparable implementation of the elements).
        /// </summary>
        public void Sort(IComparer<T> comparer)
        {
            Sort(0, Count, comparer);
        }
// Sorts the elements in a section of this list. The sort compares the
// elements to each other using the given IComparer interface. If
// comparer is null, the elements are compared to each other using
// the IComparable interface, which in that case must be implemented by all
// elements of the list.
//
// This method uses the Array.Sort method to sort the elements.
//
        /// <summary>
        /// Sorts the range [index, index + count) using Array.Sort with the
        /// given comparer; a null comparer falls back to the elements'
        /// IComparable implementation.
        /// </summary>
        /// <param name="index">Zero-based start of the range to sort.</param>
        /// <param name="count">Number of elements to sort.</param>
        /// <param name="comparer">Comparer to use, or null for the default.</param>
        public void Sort(int index, int count, IComparer<T> comparer) {
            if (index < 0) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
            }
            if (count < 0) {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
            }
            if (_size - index < count)
                ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidOffLen);
            Contract.EndContractBlock();
            Array.Sort<T>(_items, index, count, comparer);
            // Invalidate any live enumerators.
            _version++;
        }
public void Sort(Comparison<T> comparison) {
if( comparison == null) {
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.comparison);
}
Contract.EndContractBlock();
if( _size > 0) {
IComparer<T> comparer = Comparer<T>.Create(comparison);
Array.Sort(_items, 0, _size, comparer);
}
}
// ToArray returns an array containing the contents of the List.
// This requires copying the List, which is an O(n) operation.
        /// <summary>
        /// Returns a new array of length Count containing a copy of the list's
        /// contents. O(n).
        /// </summary>
        public T[] ToArray() {
            Contract.Ensures(Contract.Result<T[]>() != null);
            Contract.Ensures(Contract.Result<T[]>().Length == Count);
#if FEATURE_CORECLR
            // Avoid allocating for the common empty case on CoreCLR.
            if (_size == 0)
            {
                return _emptyArray;
            }
#endif
            T[] array = new T[_size];
            Array.Copy(_items, 0, array, 0, _size);
            return array;
        }
// Sets the capacity of this list to the size of the list. This method can
// be used to minimize a list's memory overhead once it is known that no
// new elements will be added to the list. To completely clear a list and
// release all memory referenced by the list, execute the following
// statements:
//
// list.Clear();
// list.TrimExcess();
//
public void TrimExcess() {
int threshold = (int)(((double)_items.Length) * 0.9);
if( _size < threshold ) {
Capacity = _size;
}
}
public bool TrueForAll(Predicate<T> match) {
if( match == null) {
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.match);
}
Contract.EndContractBlock();
for(int i = 0 ; i < _size; i++) {
if( !match(_items[i])) {
return false;
}
}
return true;
}
        /// <summary>
        /// Value-type enumerator for List&lt;T&gt;. Snapshots the list's
        /// version at construction and fails fast with an
        /// InvalidOperationException if the list is structurally modified
        /// during enumeration.
        /// </summary>
        [Serializable]
        public struct Enumerator : IEnumerator<T>, System.Collections.IEnumerator
        {
            private List<T> list;
            // Position of the NEXT element to yield; 0 = before first element,
            // list._size + 1 = enumeration finished.
            private int index;
            // Snapshot of list._version used to detect concurrent modification.
            private int version;
            private T current;
            internal Enumerator(List<T> list) {
                this.list = list;
                index = 0;
                version = list._version;
                current = default(T);
            }
            public void Dispose() {
            }
            public bool MoveNext() {
                List<T> localList = list;
                // Hot path: version still valid and more elements remain.
                // The unsigned compare also rejects the "finished" sentinel.
                if (version == localList._version && ((uint)index < (uint)localList._size))
                {
                    current = localList._items[index];
                    index++;
                    return true;
                }
                return MoveNextRare();
            }
            // Cold path: either the list changed (throw) or enumeration ended.
            private bool MoveNextRare()
            {
                if (version != list._version) {
                    ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion);
                }
                index = list._size + 1;
                current = default(T);
                return false;
            }
            public T Current {
                get {
                    return current;
                }
            }
            Object System.Collections.IEnumerator.Current {
                get {
                    // The non-generic Current must throw before the first
                    // MoveNext and after enumeration has completed.
                    if( index == 0 || index == list._size + 1) {
                        ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumOpCantHappen);
                    }
                    return Current;
                }
            }
            void System.Collections.IEnumerator.Reset() {
                if (version != list._version) {
                    ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion);
                }
                index = 0;
                current = default(T);
            }
        }
}
}
| |
using System;
using System.Collections;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using PatchKit.Api;
using PatchKit.Unity.Patcher.AppUpdater;
using PatchKit.Unity.Patcher.AppUpdater.Commands;
using PatchKit.Unity.Patcher.Cancellation;
using PatchKit.Unity.Utilities;
using PatchKit.Unity.Patcher.Debug;
using PatchKit.Unity.Patcher.UI.Dialogs;
using UniRx;
using UnityEngine;
using CancellationToken = PatchKit.Unity.Patcher.Cancellation.CancellationToken;
using System.IO;
using PatchKit.Api.Models.Main;
using PatchKit.Network;
using PatchKit.Unity.Patcher.AppData;
using PatchKit.Unity.Patcher.AppData.FileSystem;
using PatchKit.Unity.Patcher.AppData.Local;
using PatchKit.Unity.Patcher.AppUpdater.Status;
namespace PatchKit.Unity.Patcher
{
// Assumptions:
// - this component is always enabled (coroutines are always executed)
// - this component is destroyed only when application quits
public class Patcher : MonoBehaviour
{
        // Placeholder secret permitted in editor builds; any other secret in a
        // non-editor build triggers CheckEditorAppSecretSecure's safety quit.
        public const string EditorAllowedSecret = "94987833c3b51565ce7bf85e9a747571";
        /// <summary>
        /// Actions the patcher thread can execute; set either by the UI or
        /// automatically (the *Automatically variants) by the decision logic.
        /// </summary>
        public enum UserDecision
        {
            None,
            RepairApp,
            StartApp,
            StartAppAutomatically,
            InstallApp,
            InstallAppAutomatically,
            CheckForAppUpdates,
            CheckForAppUpdatesAutomatically,
            UninstallApp,
            VerifyFiles
        }
        private static readonly DebugLogger DebugLogger = new DebugLogger(typeof(Patcher));
        private static Patcher _instance;
        /// <summary>
        /// Singleton accessor; lazily resolved via FindObjectOfType the first
        /// time it is requested (Awake also assigns it).
        /// </summary>
        public static Patcher Instance
        {
            get
            {
                if (_instance == null)
                {
                    _instance = FindObjectOfType<Patcher>();
                }
                return _instance;
            }
        }
        // Cleared during ForceQuit so a dying patcher never spawns a new thread.
        private bool _canStartThread = true;
        private readonly PatchKit.Unity.Patcher.Cancellation.CancellationTokenSource _threadCancellationTokenSource = new PatchKit.Unity.Patcher.Cancellation.CancellationTokenSource();
        // Background worker that runs the whole patcher state machine.
        private Thread _thread;
        private bool _isForceQuitting;
        private App _app;
        private PatcherConfiguration _configuration;
        private UserDecision _userDecision = UserDecision.None;
        // Signalled when the UI (or the automatic logic) has picked a decision.
        private readonly ManualResetEvent _userDecisionSetEvent = new ManualResetEvent(false);
        private readonly IRequestTimeoutCalculator _requestTimeoutCalculator = new SimpleRequestTimeoutCalculator();
        // One-shot flags so automatic install/update/start happen at most once.
        private bool _hasAutomaticallyInstalledApp;
        private bool _hasAutomaticallyCheckedForAppUpdate;
        private bool _hasAutomaticallyStartedApp;
        private bool _wasUpdateSuccessfulOrNotNecessary = false;
        private bool _hasGameBeenStarted = false;
        // Held open for the process lifetime to enforce a single instance.
        private FileStream _lockFileStream;
        // Non-null only while an update/verify/uninstall operation is running.
        private PatchKit.Unity.Patcher.Cancellation.CancellationTokenSource _updateAppCancellationTokenSource;
        /// <summary>
        /// App secret with the middle masked, safe for logs/UI; built lazily.
        /// </summary>
        public string TraceableAppSecret
        {
            get
            {
                if (_traceableAppSecret == null)
                {
                    _traceableAppSecret = BuildTraceableAppSecret();
                }
                return _traceableAppSecret;
            }
        }
        private string _traceableAppSecret;
        public string AppSecret { get; private set; }
        // Wired up in the Unity inspector.
        public ErrorDialog ErrorDialog;
        // Editor-only overrides used when running inside the Unity editor.
        public string EditorAppSecret;
        public int EditorOverrideLatestVersionId;
        public PatcherConfiguration DefaultConfiguration;
        // Extra command-line arguments passed when starting the game.
        public string StartAppCustomArgs { get; set; }
        // Reactive state below is exposed read-only to the UI layer; the
        // private backing properties are mutated only by the patcher thread.
        private readonly ReactiveProperty<IReadOnlyUpdaterStatus> _updaterStatus = new ReactiveProperty<IReadOnlyUpdaterStatus>();
        public IReadOnlyReactiveProperty<IReadOnlyUpdaterStatus> UpdaterStatus
        {
            get { return _updaterStatus; }
        }
        private readonly BoolReactiveProperty _canRepairApp = new BoolReactiveProperty(false);
        public IReadOnlyReactiveProperty<bool> CanRepairApp
        {
            get { return _canRepairApp; }
        }
        private readonly BoolReactiveProperty _canStartApp = new BoolReactiveProperty(false);
        public IReadOnlyReactiveProperty<bool> CanStartApp
        {
            get { return _canStartApp; }
        }
        private readonly BoolReactiveProperty _isAppInstalled = new BoolReactiveProperty(false);
        public IReadOnlyReactiveProperty<bool> IsAppInstalled
        {
            get { return _isAppInstalled; }
        }
        private readonly BoolReactiveProperty _canInstallApp = new BoolReactiveProperty(false);
        public IReadOnlyReactiveProperty<bool> CanInstallApp
        {
            get { return _canInstallApp; }
        }
        private readonly BoolReactiveProperty _canCheckForAppUpdates = new BoolReactiveProperty(false);
        public IReadOnlyReactiveProperty<bool> CanCheckForAppUpdates
        {
            get { return _canCheckForAppUpdates; }
        }
        private readonly ReactiveProperty<PatcherState> _state = new ReactiveProperty<PatcherState>(PatcherState.None);
        public IReadOnlyReactiveProperty<PatcherState> State
        {
            get { return _state; }
        }
        private readonly ReactiveProperty<PatcherData> _data = new ReactiveProperty<PatcherData>();
        public IReadOnlyReactiveProperty<PatcherData> Data
        {
            get { return _data; }
        }
        private readonly ReactiveProperty<string> _warning = new ReactiveProperty<string>();
        public IReadOnlyReactiveProperty<string> Warning
        {
            get { return _warning; }
        }
        private readonly ReactiveProperty<int?> _remoteVersionId = new ReactiveProperty<int?>();
        public IReadOnlyReactiveProperty<int?> RemoteVersionId
        {
            get { return _remoteVersionId; }
        }
        private readonly ReactiveProperty<int?> _localVersionId = new ReactiveProperty<int?>();
        public IReadOnlyReactiveProperty<int?> LocalVersionId
        {
            get { return _localVersionId; }
        }
        private readonly ReactiveProperty<Api.Models.Main.App> _appInfo = new ReactiveProperty<Api.Models.Main.App>();
        public IReadOnlyReactiveProperty<Api.Models.Main.App> AppInfo
        {
            get { return _appInfo; }
        }
public void SetUserDecision(UserDecision userDecision)
{
DebugLogger.Log(string.Format("User deicision set to {0}.", userDecision));
_userDecision = userDecision;
_userDecisionSetEvent.Set();
}
public void CancelUpdateApp()
{
if (_updateAppCancellationTokenSource != null)
{
DebugLogger.Log("Cancelling update app execution.");
_updateAppCancellationTokenSource.Cancel();
}
}
        /// <summary>
        /// Quits the application; inside the Unity editor this stops play
        /// mode instead of exiting the editor process.
        /// </summary>
        public void Quit()
        {
            DebugLogger.Log("Quitting application.");
#if UNITY_EDITOR
            if (Application.isEditor)
            {
                UnityEditor.EditorApplication.isPlaying = false;
            }
            else
#endif
            {
                Application.Quit();
            }
        }
private void CloseLockFile()
{
try
{
if (_lockFileStream != null)
{
_lockFileStream.Close();
DebugLogger.Log("Deleting the lock file.");
if (File.Exists(_data.Value.LockFilePath))
{
FileOperations.Delete(_data.Value.LockFilePath, CancellationToken.Empty);
}
}
}
catch(Exception e)
{
DebugLogger.LogWarning("Lock file closing error - " + e);
}
}
private void Awake()
{
UnityEngine.Assertions.Assert.raiseExceptions = true;
Assert.IsNull(_instance, "There must be only one instance of Patcher component.");
Assert.IsNotNull(ErrorDialog, "ErrorDialog must be set.");
_instance = this;
UnityDispatcher.Initialize();
Application.runInBackground = true;
DebugLogger.LogFormat("patchkit-patcher-unity: {0}", Version.Value);
DebugLogger.LogFormat("System version: {0}", EnvironmentInfo.GetSystemVersion());
DebugLogger.LogFormat("Runtime version: {0}", EnvironmentInfo.GetSystemVersion());
// In .NET API ProcessPriorityClass.Idle is really 'Low' process priority
Process.GetCurrentProcess().PriorityClass = System.Diagnostics.ProcessPriorityClass.Idle;
DebugLogger.LogFormat("Process priority has been set to Low");
CheckEditorAppSecretSecure();
if (_canStartThread)
{
StartThread();
}
gameObject.AddComponent<DebugMenu>();
}
/// <summary>
/// During patcher testing somebody may replace the secret with real game secret. If that would happen,
/// patcher should quit immediatelly with following error.
/// </summary>
private void CheckEditorAppSecretSecure()
{
if (!Application.isEditor)
{
if (!string.IsNullOrEmpty(EditorAppSecret) && EditorAppSecret.Trim() != EditorAllowedSecret)
{
DebugLogger.LogError("Security issue: EditorAppSecret is set to not allowed value. " +
"Please change it inside Unity editor to " + EditorAllowedSecret +
" and build the project again.");
Quit();
}
}
}
        // Watchdog: the patcher is useless without its worker thread, so if
        // the thread has died (or never started) the application quits.
        private void Update()
        {
            if (_thread == null || !_thread.IsAlive)
            {
                DebugLogger.Log("Quitting application because patcher thread is not alive.");
                Quit();
            }
        }
        // Intercepts the quit request so the shutdown sequence (thread
        // teardown, lock-file cleanup, statistics) can run as a coroutine
        // before the process actually exits.
        private void OnApplicationQuit()
        {
            Application.CancelQuit();
            StartCoroutine(ForceQuit());
        }
        // Shutdown sequence: prevents new threads, closes the lock file,
        // kills the worker thread, optionally reports statistics, and then
        // kills the process (outside the editor). Guarded against re-entry
        // because OnApplicationQuit can fire more than once.
        private IEnumerator ForceQuit()
        {
            if (_isForceQuitting)
            {
                yield break;
            }
            _isForceQuitting = true;
            try
            {
                _canStartThread = false;
                CloseLockFile();
                yield return StartCoroutine(KillThread());
                // Report "succeeded and closed" only when the update finished
                // (or was unnecessary) and the game itself was not launched.
                if (_wasUpdateSuccessfulOrNotNecessary && !_hasGameBeenStarted)
                {
                    yield return StartCoroutine(PatcherStatistics.SendEvent(PatcherStatistics.Event.PatcherSucceededClosed));
                }
                if (!Application.isEditor)
                {
                    Process.GetCurrentProcess().Kill();
                }
            }
            finally
            {
                _isForceQuitting = false;
            }
        }
private IEnumerator KillThread()
{
if (_thread == null)
{
yield break;
}
while (_thread.IsAlive)
{
CancelThread();
float startWaitTime = Time.unscaledTime;
while (Time.unscaledTime - startWaitTime < 1.0f && _thread.IsAlive)
{
yield return null;
}
if (!_thread.IsAlive)
{
break;
}
InterruptThread();
startWaitTime = Time.unscaledTime;
while (Time.unscaledTime - startWaitTime < 1.0f && _thread.IsAlive)
{
yield return null;
}
if (!_thread.IsAlive)
{
break;
}
AbortThread();
startWaitTime = Time.unscaledTime;
while (Time.unscaledTime - startWaitTime < 1.0f && _thread.IsAlive)
{
yield return null;
}
}
_thread = null;
}
        // Spawns the background worker that runs ThreadExecution. Marked as a
        // background thread so it cannot keep the process alive by itself.
        private void StartThread()
        {
            DebugLogger.Log("Starting patcher thread...");
            _thread = new Thread(() => ThreadExecution(_threadCancellationTokenSource.Token))
            {
                IsBackground = true
            };
            _thread.Start();
        }
        // Step 1 of thread teardown: cooperative cancellation via the token.
        private void CancelThread()
        {
            DebugLogger.Log("Cancelling patcher thread...");
            _threadCancellationTokenSource.Cancel();
        }
        // Step 2: interrupts blocking waits (raises ThreadInterruptedException
        // on the worker thread).
        private void InterruptThread()
        {
            DebugLogger.Log("Interrupting patcher thread...");
            _thread.Interrupt();
        }
        // Step 3, last resort: aborts the thread (ThreadAbortException).
        private void AbortThread()
        {
            DebugLogger.Log("Aborting patcher thread...");
            _thread.Abort();
        }
        // Main loop of the patcher worker thread: load data/configuration,
        // create the App, then alternate between waiting for a user decision
        // and executing it until cancelled. All thread-termination exceptions
        // are caught here so the thread always ends cleanly.
        private void ThreadExecution(CancellationToken cancellationToken)
        {
            try
            {
                _state.Value = PatcherState.None;
                DebugLogger.Log("Patcher thread started.");
                try
                {
                    ThreadLoadPatcherData();
                }
                catch (NonLauncherExecutionException)
                {
                    // Started directly instead of through the launcher: hand
                    // over to the launcher and quit this process either way.
                    try
                    {
                        LauncherUtilities.ExecuteLauncher();
                        return;
                    }
                    catch (ApplicationException)
                    {
                        ThreadDisplayError(PatcherError.NonLauncherExecution(), cancellationToken);
                        return;
                    }
                    finally
                    {
                        Quit();
                    }
                }
                EnsureSingleInstance();
                ThreadLoadPatcherConfiguration();
                // App construction must happen on the Unity main thread.
                UnityDispatcher.Invoke(() => _app = new App(_data.Value.AppDataPath, _data.Value.AppSecret, _data.Value.OverrideLatestVersionId, _requestTimeoutCalculator)).WaitOne();
                PatcherStatistics.TryDispatchSendEvent(PatcherStatistics.Event.PatcherStarted);
                while (true)
                {
                    cancellationToken.ThrowIfCancellationRequested();
                    ThreadWaitForUserDecision(cancellationToken);
                    cancellationToken.ThrowIfCancellationRequested();
                    ThreadExecuteUserDecision(cancellationToken);
                }
            }
            catch (OperationCanceledException)
            {
                DebugLogger.Log("Patcher thread finished: thread has been cancelled.");
            }
            catch (ThreadInterruptedException)
            {
                DebugLogger.Log("Patcher thread finished: thread has been interrupted.");
            }
            catch (ThreadAbortException)
            {
                DebugLogger.Log("Patcher thread finished: thread has been aborted.");
            }
            catch (MultipleInstancesException exception)
            {
                DebugLogger.LogException(exception);
                Quit();
            }
            catch (Exception exception)
            {
                DebugLogger.LogError("Patcher thread failed: an exception has occured.");
                DebugLogger.LogException(exception);
            }
            finally
            {
                _state.Value = PatcherState.None;
            }
        }
        // Populates _data either from editor settings (inside the Unity
        // editor) or from command-line arguments (standalone builds), and
        // switches to offline mode when requested.
        private void ThreadLoadPatcherData()
        {
            try
            {
                DebugLogger.Log("Loading patcher data...");
                _state.Value = PatcherState.LoadingPatcherData;
#if UNITY_EDITOR
                // Editor settings are read on the main thread.
                UnityDispatcher.Invoke(() =>
                {
                    DebugLogger.Log("Using Unity Editor patcher data.");
                    _data.Value = new PatcherData
                    {
                        AppSecret = EditorAppSecret,
                        AppDataPath =
                            Application.dataPath.Replace("/Assets",
                                string.Format("/Temp/PatcherApp{0}", EditorAppSecret)),
                        OverrideLatestVersionId = EditorOverrideLatestVersionId,
                        IsOnline = null
                    };
                }).WaitOne();
#else
                DebugLogger.Log("Using command line patcher data reader.");
                var inputArgumentsPatcherDataReader = new InputArgumentsPatcherDataReader();
                _data.Value = inputArgumentsPatcherDataReader.Read();
#endif
                AppSecret = _data.Value.AppSecret;
                DebugLogger.LogVariable(_data.Value.AppSecret, "Data.AppSecret");
                DebugLogger.LogVariable(_data.Value.AppDataPath, "Data.AppDataPath");
                DebugLogger.LogVariable(_data.Value.OverrideLatestVersionId, "Data.OverrideLatestVersionId");
                DebugLogger.LogVariable(_data.Value.LockFilePath, "Data.LockFilePath");
                DebugLogger.LogVariable(_data.Value.IsOnline, "Data.IsOnline");
                if (_data.Value.IsOnline.HasValue &&
                    !_data.Value.IsOnline.Value)
                {
                    // Offline mode: pretend the automatic steps already ran so
                    // no network-dependent action is triggered.
                    DebugLogger.Log("Disabling auto-updating because patcher is in offline mode.");
                    _hasAutomaticallyInstalledApp = true;
                    _hasAutomaticallyCheckedForAppUpdate = true;
                    _warning.Value = "Working in offline mode";
                }
                DebugLogger.Log("Patcher data loaded.");
            }
            catch (ThreadInterruptedException)
            {
                DebugLogger.Log("Loading patcher data interrupted: thread has been interrupted. Rethrowing exception.");
                throw;
            }
            catch (ThreadAbortException)
            {
                DebugLogger.Log("Loading patcher data aborted: thread has been aborted. Rethrowing exception.");
                throw;
            }
            catch (Exception)
            {
                DebugLogger.LogError("Error while loading patcher data: an exception has occured. Rethrowing exception.");
                throw;
            }
        }
private void EnsureSingleInstance()
{
string lockFilePath = Data.Value.LockFilePath;
DebugLogger.LogFormat("Opening lock file: {0}", lockFilePath);
if (!string.IsNullOrEmpty(lockFilePath))
{
try
{
_lockFileStream = File.Open(lockFilePath, FileMode.Append);
DebugLogger.Log("Lock file open success");
}
catch (UnauthorizedAccessException exception)
{
DebugLogger.LogError("Patcher does not have permission to create the .lock file");
DebugLogger.LogException(exception);
}
catch
{
throw new MultipleInstancesException("Another instance of Patcher spotted");
}
}
else
{
DebugLogger.LogWarning("LockFile is missing");
}
}
        // Loads the patcher configuration; currently just copies the
        // inspector-assigned defaults (see TODO below).
        private void ThreadLoadPatcherConfiguration()
        {
            try
            {
                DebugLogger.Log("Loading patcher configuration...");
                _state.Value = PatcherState.LoadingPatcherConfiguration;
                // TODO: Use PatcherConfigurationReader
                _configuration = DefaultConfiguration;
                DebugLogger.Log("Patcher configuration loaded.");
            }
            catch (ThreadInterruptedException)
            {
                DebugLogger.Log("Loading patcher configuration interrupted: thread has been interrupted. Rethrowing exception.");
                throw;
            }
            catch (ThreadAbortException)
            {
                DebugLogger.Log("Loading patcher configuration aborted: thread has been aborted. Rethrowing exception.");
                throw;
            }
            catch (Exception)
            {
                DebugLogger.LogError("Error while loading patcher configuration: an exception has occured. Rethrowing exception.");
                throw;
            }
        }
        // Decides what to do next: either picks an automatic decision (first
        // install / first update check / auto start, each at most once) or
        // enables the UI buttons and blocks until SetUserDecision is called.
        private void ThreadWaitForUserDecision(CancellationToken cancellationToken)
        {
            try
            {
                DebugLogger.Log("Waiting for user decision...");
                _state.Value = PatcherState.WaitingForUserDecision;
                bool isInstalled = _app.IsFullyInstalled();
                DebugLogger.LogVariable(isInstalled, "isInstalled");
                bool canRepairApp = false; // not implemented
                bool canInstallApp = !isInstalled;
                bool canCheckForAppUpdates = isInstalled;
                bool canStartApp = isInstalled;
                // Disable all UI actions while the automatic decisions below
                // are considered; they are re-enabled only when we actually
                // wait for the user.
                _isAppInstalled.Value = isInstalled;
                _canRepairApp.Value = false;
                _canInstallApp.Value = false;
                _canCheckForAppUpdates.Value = false;
                _canStartApp.Value = false;
                if (canInstallApp && _configuration.AutomaticallyInstallApp && !_hasAutomaticallyInstalledApp)
                {
                    DebugLogger.Log("Automatically deciding to install app.");
                    _hasAutomaticallyInstalledApp = true;
                    _hasAutomaticallyCheckedForAppUpdate = true;
                    _userDecision = UserDecision.InstallAppAutomatically;
                    return;
                }
                if (canCheckForAppUpdates && _configuration.AutomaticallyCheckForAppUpdates &&
                    !_hasAutomaticallyCheckedForAppUpdate)
                {
                    DebugLogger.Log("Automatically deciding to check for app updates.");
                    _hasAutomaticallyInstalledApp = true;
                    _hasAutomaticallyCheckedForAppUpdate = true;
                    _userDecision = UserDecision.CheckForAppUpdatesAutomatically;
                    return;
                }
                if (canStartApp && _configuration.AutomaticallyStartApp && !_hasAutomaticallyStartedApp)
                {
                    DebugLogger.Log("Automatically deciding to start app.");
                    _hasAutomaticallyStartedApp = true;
                    _userDecision = UserDecision.StartAppAutomatically;
                    return;
                }
                // No automatic decision applies: enable the UI and block until
                // the user picks something (or the thread is cancelled).
                _canRepairApp.Value = canRepairApp;
                _canInstallApp.Value = canInstallApp;
                _canCheckForAppUpdates.Value = canCheckForAppUpdates;
                _canStartApp.Value = canStartApp;
                _userDecisionSetEvent.Reset();
                // Cancellation also sets the event so the wait below wakes up.
                using (cancellationToken.Register(() => _userDecisionSetEvent.Set()))
                {
                    cancellationToken.ThrowIfCancellationRequested();
                    _userDecisionSetEvent.WaitOne();
                }
                _canRepairApp.Value = false;
                _canInstallApp.Value = false;
                _canCheckForAppUpdates.Value = false;
                _canStartApp.Value = false;
                cancellationToken.ThrowIfCancellationRequested();
                DebugLogger.Log(string.Format("Waiting for user decision result: {0}.", _userDecision));
            }
            catch (OperationCanceledException)
            {
                // Swallowed on purpose; the caller's loop re-checks the token.
                DebugLogger.Log("Waiting for user decision cancelled.");
            }
            catch (ThreadInterruptedException)
            {
                DebugLogger.Log("Waiting for user decision interrupted: thread has been interrupted. Rethrowing exception.");
                throw;
            }
            catch (ThreadAbortException)
            {
                DebugLogger.Log("Waiting for user decision aborted: thread has been aborted. Rethrowing exception.");
                throw;
            }
            catch (Exception)
            {
                DebugLogger.LogWarning("Error while waiting for user decision: an exception has occured. Rethrowing exception.");
                throw;
            }
        }
        // Dispatches the previously chosen decision and maps every failure
        // mode to either a warning (automatic update on an installed app) or
        // a blocking error dialog.
        private void ThreadExecuteUserDecision(CancellationToken cancellationToken)
        {
            // When an automatic update fails on an already-installed app, the
            // user can still play, so only a warning is shown.
            bool displayWarningInsteadOfError = false;
            try
            {
                _warning.Value = string.Empty;
                DebugLogger.Log(string.Format("Executing user decision {0}...", _userDecision));
                switch (_userDecision)
                {
                    case UserDecision.None:
                        break;
                    case UserDecision.RepairApp:
                        // Not implemented (see canRepairApp in the wait logic).
                        break;
                    case UserDecision.StartAppAutomatically:
                    case UserDecision.StartApp:
                        ThreadStartApp();
                        break;
                    case UserDecision.InstallAppAutomatically:
                        displayWarningInsteadOfError = _app.IsFullyInstalled();
                        ThreadUpdateApp(true, cancellationToken);
                        break;
                    case UserDecision.InstallApp:
                        ThreadUpdateApp(false, cancellationToken);
                        break;
                    case UserDecision.CheckForAppUpdatesAutomatically:
                        displayWarningInsteadOfError = _app.IsFullyInstalled();
                        ThreadUpdateApp(true, cancellationToken);
                        break;
                    case UserDecision.CheckForAppUpdates:
                        ThreadUpdateApp(false, cancellationToken);
                        break;
                    case UserDecision.UninstallApp:
                        ThreadUninstallApp(cancellationToken);
                        break;
                    case UserDecision.VerifyFiles:
                        ThreadVerifyAllAppFiles(cancellationToken);
                        break;
                }
                DebugLogger.Log(string.Format("User decision {0} execution done.", _userDecision));
            }
            catch (OperationCanceledException)
            {
                DebugLogger.Log(string.Format("User decision {0} execution cancelled.", _userDecision));
            }
            catch (UnauthorizedAccessException e)
            {
                DebugLogger.Log(string.Format("User decision {0} execution issue: permissions failure.",
                    _userDecision));
                DebugLogger.LogException(e);
                // Try to relaunch elevated; if that works this instance quits.
                if (ThreadTryRestartWithRequestForPermissions())
                {
                    UnityDispatcher.Invoke(Quit);
                }
                else
                {
                    ThreadDisplayError(PatcherError.NoPermissions(), cancellationToken);
                }
            }
            catch (ApiConnectionException e)
            {
                DebugLogger.LogException(e);
                if (displayWarningInsteadOfError)
                {
                    _warning.Value = "Unable to check for updates. Please check your internet connection.";
                }
                else
                {
                    ThreadDisplayError(PatcherError.NoInternetConnection(), cancellationToken);
                }
            }
            catch (NotEnoughtDiskSpaceException e)
            {
                DebugLogger.LogException(e);
                ThreadDisplayError(PatcherError.NotEnoughDiskSpace(e.RequiredSpace - e.AvailableSpace), cancellationToken);
            }
            catch (CannotRepairDiskFilesException e)
            {
                DebugLogger.LogException(e);
                ThreadDisplayError(PatcherError.CannotRepairDiskFilesException(), cancellationToken);
            }
            catch (FilePathTooLongException e)
            {
                DebugLogger.LogException(e);
                ThreadDisplayError(PatcherError.FilePathTooLong(), cancellationToken);
            }
            catch (ThreadInterruptedException)
            {
                DebugLogger.Log(string.Format(
                    "User decision {0} execution interrupted: thread has been interrupted. Rethrowing exception.",
                    _userDecision));
                throw;
            }
            catch (ThreadAbortException)
            {
                DebugLogger.Log(string.Format(
                    "User decision {0} execution aborted: thread has been aborted. Rethrowing exception.",
                    _userDecision));
                throw;
            }
            catch (Exception exception)
            {
                DebugLogger.LogWarning(string.Format(
                    "Error while executing user decision {0}: an exception has occured.", _userDecision));
                DebugLogger.LogException(exception);
                if (displayWarningInsteadOfError)
                {
                    _warning.Value = "Unable to check for updates. Please check your internet connection.";
                }
                else
                {
                    ThreadDisplayError(PatcherError.Other(), cancellationToken);
                }
            }
        }
        // Reports a failure event and blocks on the modal error dialog until
        // the user dismisses it (or the thread is cancelled).
        private void ThreadDisplayError(PatcherError error, CancellationToken cancellationToken)
        {
            PatcherStatistics.DispatchSendEvent(PatcherStatistics.Event.PatcherFailed);
            try
            {
                _state.Value = PatcherState.DisplayingError;
                DebugLogger.Log(string.Format("Displaying patcher error {0}...", error));
                ErrorDialog.Display(error, cancellationToken);
                DebugLogger.Log(string.Format("Patcher error {0} displayed.", error));
            }
            catch (OperationCanceledException)
            {
                DebugLogger.Log(string.Format("Displaying patcher error {0} cancelled.", _userDecision));
            }
            catch (ThreadInterruptedException)
            {
                DebugLogger.Log(string.Format("Displaying patcher error {0} interrupted: thread has been interrupted. Rethrowing exception.", error));
                throw;
            }
            catch (ThreadAbortException)
            {
                DebugLogger.Log(string.Format("Displaying patcher error {0} aborted: thread has been aborted. Rethrowing exception.", error));
                throw;
            }
            catch (Exception)
            {
                DebugLogger.LogWarning(string.Format("Error while displaying patcher error {0}: an exception has occured. Rethrowing exception.", error));
                throw;
            }
        }
        // Launches the installed game (with any custom arguments), reports
        // the success event, and quits the patcher on the main thread.
        private void ThreadStartApp()
        {
            _state.Value = PatcherState.StartingApp;
            var appStarter = new AppStarter(_app);
            appStarter.Start(StartAppCustomArgs);
            PatcherStatistics.DispatchSendEvent(PatcherStatistics.Event.PatcherSucceededGameStarted);
            _hasGameBeenStarted = true;
            UnityDispatcher.Invoke(Quit);
        }
        // Fetches remote metadata, runs the updater, and records the main
        // executable of the resulting version. A dedicated cancellation
        // source is linked to the thread token so CancelUpdateApp() can stop
        // just this operation.
        private void ThreadUpdateApp(bool automatically, CancellationToken cancellationToken)
        {
            _state.Value = PatcherState.Connecting;
            _updateAppCancellationTokenSource = new PatchKit.Unity.Patcher.Cancellation.CancellationTokenSource();
            using (cancellationToken.Register(() => _updateAppCancellationTokenSource.Cancel()))
            {
                // For manual updates (!automatically) the API may retry/ask
                // differently - hence the negated flag.
                _appInfo.Value = _app.RemoteMetaData.GetAppInfo(!automatically, _updateAppCancellationTokenSource.Token);
                _remoteVersionId.Value = _app.GetLatestVersionId(!automatically, _updateAppCancellationTokenSource.Token);
                if (_app.IsFullyInstalled())
                {
                    _localVersionId.Value = _app.GetInstalledVersionId();
                }
                var appUpdater = new AppUpdater.AppUpdater( new AppUpdaterContext( _app, _configuration.AppUpdaterConfiguration ) );
                try
                {
                    _updaterStatus.Value = appUpdater.Status;
                    // Switch to UpdatingApp on the first status emission only.
                    using (_updaterStatus.Take(1).Subscribe((status) => _state.Value = PatcherState.UpdatingApp))
                    {
                        appUpdater.Update(_updateAppCancellationTokenSource.Token);
                        _wasUpdateSuccessfulOrNotNecessary = true;
                    }
                    AppVersion latestAppVersion =
                        _app.RemoteMetaData.GetAppVersionInfo(_remoteVersionId.Value.Value, false, cancellationToken);
                    _app.LocalMetaData.SetMainExecutableAndArgs(latestAppVersion.MainExecutable, latestAppVersion.MainExecutableArgs);
                }
                catch (OperationCanceledException)
                {
                    PatcherStatistics.DispatchSendEvent(PatcherStatistics.Event.PatcherCanceled);
                    throw;
                }
                finally
                {
                    _state.Value = PatcherState.None;
                    _updaterStatus.Value = null;
                    _updateAppCancellationTokenSource = null;
                }
            }
        }
private void ThreadVerifyAllAppFiles(CancellationToken cancellationToken)
{
// TODO: Introduce here a new state
_state.Value = PatcherState.UpdatingApp;
_updateAppCancellationTokenSource = new PatchKit.Unity.Patcher.Cancellation.CancellationTokenSource();
using (cancellationToken.Register(() => _updateAppCancellationTokenSource.Cancel()))
{
_isAppInstalled.Value = false;
var appUpdater = new AppUpdater.AppUpdater(new AppUpdaterContext(_app, _configuration.AppUpdaterConfiguration));
try
{
_updaterStatus.Value = appUpdater.Status;
using (_updaterStatus.Take(1).Subscribe((status) => _state.Value = PatcherState.UpdatingApp))
{
appUpdater.VerifyFiles(cancellationToken);
}
}
catch (OperationCanceledException)
{
PatcherStatistics.DispatchSendEvent(PatcherStatistics.Event.PatcherCanceled);
throw;
}
finally
{
_state.Value = PatcherState.None;
_updaterStatus.Value = null;
_updateAppCancellationTokenSource = null;
}
}
}
private void ThreadUninstallApp(CancellationToken cancellationToken)
{
// TODO: Introduce here a new state
_state.Value = PatcherState.UpdatingApp;
_updateAppCancellationTokenSource = new PatchKit.Unity.Patcher.Cancellation.CancellationTokenSource();
using (cancellationToken.Register(() => _updateAppCancellationTokenSource.Cancel()))
{
_isAppInstalled.Value = false;
var appUpdater = new AppUpdater.AppUpdater(new AppUpdaterContext(_app, _configuration.AppUpdaterConfiguration));
try
{
_updaterStatus.Value = appUpdater.Status;
using (_updaterStatus.Take(1).Subscribe((status) => _state.Value = PatcherState.UpdatingApp))
{
appUpdater.Uninstall(cancellationToken);
}
}
catch (OperationCanceledException)
{
PatcherStatistics.DispatchSendEvent(PatcherStatistics.Event.PatcherCanceled);
throw;
}
finally
{
_state.Value = PatcherState.None;
_updaterStatus.Value = null;
_updateAppCancellationTokenSource = null;
}
}
}
        // Attempts to relaunch the patcher elevated ("runas") with the same
        // command line. Returns true when the new process was started
        // (Windows standalone only); false on unsupported platforms or error.
        private bool ThreadTryRestartWithRequestForPermissions()
        {
            DebugLogger.Log("Restarting patcher with request for permissions.");
            try
            {
                RuntimePlatform applicationPlatform = default(RuntimePlatform);
                string applicationDataPath = string.Empty;
                // Unity API access must happen on the main thread.
                UnityDispatcher.Invoke(() =>
                {
                    applicationPlatform = Application.platform;
                    applicationDataPath = Application.dataPath;
                }).WaitOne();
                if (applicationPlatform == RuntimePlatform.WindowsPlayer)
                {
                    var info = new ProcessStartInfo
                    {
                        // dataPath ends with "<exe name>_Data" on Windows, so
                        // this recovers the executable path.
                        FileName = applicationDataPath.Replace("_Data", ".exe"),
                        Arguments =
                            string.Join(" ", Environment.GetCommandLineArgs().Select(s => "\"" + s + "\"").ToArray()),
                        UseShellExecute = true,
                        Verb = "runas"
                    };
                    Process.Start(info);
                    DebugLogger.Log("Patcher restarted with request for permissions.");
                    return true;
                }
                DebugLogger.Log(string.Format("Restarting patcher with request for permissions not possible: unsupported platform {0}.", applicationPlatform));
                return false;
            }
            catch (ThreadInterruptedException)
            {
                DebugLogger.Log("Restarting patcher with request for permissions interrupted: thread has been interrupted. Rethrowing exception.");
                throw;
            }
            catch (ThreadAbortException)
            {
                DebugLogger.Log("Restarting patcher with request for permissions aborted: thread has been aborted. Rethrowing exception.");
                throw;
            }
            catch (Exception exception)
            {
                DebugLogger.LogWarning("Error while restarting patcher with request for permissions: an exception has occured.");
                DebugLogger.LogException(exception);
                return false;
            }
        }
private string BuildTraceableAppSecret()
{
return string.Format("{0}...{1}", AppSecret.Substring(0, 6),
AppSecret.Substring(AppSecret.Length - 6));
}
}
}
| |
// MIT License
//
// Copyright (c) 2009-2017 Luca Piccioni
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
// This file is automatically generated
#pragma warning disable 649, 1572, 1573
// ReSharper disable RedundantUsingDirective
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Security;
using System.Text;
using Khronos;
// ReSharper disable CheckNamespace
// ReSharper disable InconsistentNaming
// ReSharper disable JoinDeclarationAndInitializer
namespace OpenGL
{
public partial class Glx
{
/// <summary>
/// [GLX] Value of GLX_EXTENSION_NAME symbol.
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const string EXTENSION_NAME = "GLX";
/// <summary>
/// [GLX] Value of GLX_PbufferClobber symbol.
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int PbufferClobber = 0;
/// <summary>
/// [GLX] Value of GLX_BufferSwapComplete symbol.
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int BufferSwapComplete = 1;
/// <summary>
/// [GLX] Value of __GLX_NUMBER_EVENTS symbol.
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int __GLX_NUMBER_EVENTS = 17;
/// <summary>
/// [GLX] Value of GLX_BAD_SCREEN symbol.
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int BAD_SCREEN = 1;
/// <summary>
/// [GLX] Value of GLX_BAD_ATTRIBUTE symbol.
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int BAD_ATTRIBUTE = 2;
/// <summary>
/// [GLX] Value of GLX_NO_EXTENSION symbol.
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int NO_EXTENSION = 3;
/// <summary>
/// [GLX] Value of GLX_BAD_VISUAL symbol.
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int BAD_VISUAL = 4;
/// <summary>
/// [GLX] Value of GLX_BAD_CONTEXT symbol.
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int BAD_CONTEXT = 5;
/// <summary>
/// [GLX] Value of GLX_BAD_VALUE symbol.
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int BAD_VALUE = 6;
/// <summary>
/// [GLX] Value of GLX_BAD_ENUM symbol.
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int BAD_ENUM = 7;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: Ignored. Only visuals that can be rendered with GLX are considered.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Glx. if OpenGL rendering is supported by this visual, Glx.e otherwise.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int USE_GL = 1;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseFBConfig: Must be followed by a nonnegative integer that indicates the desired color index buffer
/// size. The smallest index buffer of at least the specified size is preferred. This attribute is ignored if
/// Glx.COLOR_INDEX_BIT is not set in Glx.RENDER_TYPE. The default value is 0.
/// </para>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: Must be followed by a nonnegative integer that indicates the desired color index buffer
/// size. The smallest index buffer of at least the specified size is preferred. Ignored if Glx.RGBA is asserted.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Number of bits per color buffer. For RGBA visuals, Glx.BUFFER_SIZE is the sum of Glx.RED_SIZE,
/// Glx.GREEN_SIZE, Glx.BLUE_SIZE, and Glx.ALPHA_SIZE. For color index visuals, Glx.BUFFER_SIZE is the size of the color
/// indexes.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Number of bits per color buffer. If the frame buffer configuration supports RGBA
/// contexts, then Glx.BUFFER_SIZE is the sum of Glx.RED_SIZE, Glx.GREEN_SIZE, Glx.BLUE_SIZE, and Glx.ALPHA_SIZE. If the
/// frame buffer configuration supports only color index contexts, Glx.BUFFER_SIZE is the size of the color indexes.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int BUFFER_SIZE = 2;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseFBConfig: Must be followed by an integer buffer-level specification. This specification is honored
/// exactly. Buffer level 0 corresponds to the default frame buffer of the display. Buffer level 1 is the first overlay
/// frame buffer, level two the second overlay frame buffer, and so on. Negative buffer levels correspond to underlay frame
/// buffers. The default value is 0.
/// </para>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: Must be followed by an integer buffer-level specification. This specification is honored
/// exactly. Buffer level zero corresponds to the main frame buffer of the display. Buffer level one is the first overlay
/// frame buffer, level two the second overlay frame buffer, and so on. Negative buffer levels correspond to underlay frame
/// buffers.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Frame buffer level of the visual. Level zero is the default frame buffer. Positive levels
/// correspond to frame buffers that overlay the default buffer, and negative levels correspond to frame buffers that
/// underlay the default buffer.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Frame buffer level of the configuration. Level zero is the default frame buffer. Positive
/// levels correspond to frame buffers that overlay the default buffer, and negative levels correspond to frame buffers that
/// underlie the default buffer.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int LEVEL = 3;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: If present, only TrueColor and DirectColor visuals are considered. Otherwise, only
/// PseudoColor and StaticColor visuals are considered.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Glx. if color buffers store red, green, blue, and alpha values. Glx.e if they store color
/// indexes.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int RGBA = 4;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseFBConfig: Must be followed by Glx. or Glx.e. If Glx. is specified, then only double-buffered frame
/// buffer configurations are considered; if Glx.e is specified, then only single-buffered frame buffer configurations are
/// considered. The default value is Glx.DONT_CARE.
/// </para>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: If present, only double-buffered visuals are considered. Otherwise, only single-buffered
/// visuals are considered.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Glx. if color buffers exist in front/back pairs that can be swapped, Glx.e otherwise.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Glx. if color buffers exist in front/back pairs that can be swapped, Glx.e otherwise.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int DOUBLEBUFFER = 5;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseFBConfig: Must be followed by Glx. or Glx.e. If Glx. is specified, then only stereo frame buffer
/// configurations are considered; if Glx.e is specified, then only monoscopic frame buffer configurations are considered.
/// The default value is Glx.e.
/// </para>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: If present, only stereo visuals are considered. Otherwise, only monoscopic visuals are
/// considered.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Glx. if color buffers exist in left/right pairs, Glx.e otherwise.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Glx. if color buffers exist in left/right pairs, Glx.e otherwise.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int STEREO = 6;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseFBConfig: Must be followed by a nonnegative integer that indicates the desired number of auxiliary
/// buffers. Configurations with the smallest number of auxiliary buffers that meet or exceed the specified number are
/// preferred. The default value is 0.
/// </para>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: Must be followed by a nonnegative integer that indicates the desired number of auxiliary
/// buffers. Visuals with the smallest number of auxiliary buffers that meets or exceeds the specified number are preferred.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Number of auxiliary color buffers that are available. Zero indicates that no auxiliary color
/// buffers exist.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Number of auxiliary color buffers that are available. Zero indicates that no auxiliary
/// color buffers exist.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int AUX_BUFFERS = 7;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseFBConfig: Each attribute, if present, must be followed by a nonnegative minimum size specification or
/// Glx.DONT_CARE. The largest available total RGBA color buffer size (sum of Glx.RED_SIZE, Glx.GREEN_SIZE, Glx.BLUE_SIZE,
/// and Glx.ALPHA_SIZE) of at least the minimum size specified for each color component is preferred. If the requested
/// number of bits for a color component is 0 or Glx.DONT_CARE, it is not considered. The default value for each color
/// component is 0.
/// </para>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: Must be followed by a nonnegative minimum size specification. If this value is zero, the
/// smallest available red buffer is preferred. Otherwise, the largest available red buffer of at least the minimum size is
/// preferred.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Number of bits of red stored in each color buffer. Undefined if Glx.RGBA is Glx.e.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Number of bits of red stored in each color buffer. Undefined if RGBA contexts are not
/// supported by the frame buffer configuration.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int RED_SIZE = 8;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: Must be followed by a nonnegative minimum size specification. If this value is zero, the
/// smallest available green buffer is preferred. Otherwise, the largest available green buffer of at least the minimum size
/// is preferred.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Number of bits of green stored in each color buffer. Undefined if Glx.RGBA is Glx.e.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Number of bits of green stored in each color buffer. Undefined if RGBA contexts are not
/// supported by the frame buffer configuration.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int GREEN_SIZE = 9;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: Must be followed by a nonnegative minimum size specification. If this value is zero, the
/// smallest available blue buffer is preferred. Otherwise, the largest available blue buffer of at least the minimum size
/// is preferred.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Number of bits of blue stored in each color buffer. Undefined if Glx.RGBA is Glx.e.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Number of bits of blue stored in each color buffer. Undefined if RGBA contexts are not
/// supported by the frame buffer configuration.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int BLUE_SIZE = 10;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: Must be followed by a nonnegative minimum size specification. If this value is zero, the
/// smallest available alpha buffer is preferred. Otherwise, the largest available alpha buffer of at least the minimum size
/// is preferred.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Number of bits of alpha stored in each color buffer. Undefined if Glx.RGBA is Glx.e.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Number of bits of alpha stored in each color buffer. Undefined if RGBA contexts are not
/// supported by the frame buffer configuration.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int ALPHA_SIZE = 11;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseFBConfig: Must be followed by a nonnegative minimum size specification. If this value is zero, frame
/// buffer configurations with no depth buffer are preferred. Otherwise, the largest available depth buffer of at least the
/// minimum size is preferred. The default value is 0.
/// </para>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: Must be followed by a nonnegative minimum size specification. If this value is zero,
/// visuals with no depth buffer are preferred. Otherwise, the largest available depth buffer of at least the minimum size
/// is preferred.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Number of bits in the depth buffer.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Number of bits in the depth buffer.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int DEPTH_SIZE = 12;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseFBConfig: Must be followed by a nonnegative integer that indicates the desired number of stencil
/// bitplanes. The smallest stencil buffer of at least the specified size is preferred. If the desired value is zero, frame
/// buffer configurations with no stencil buffer are preferred. The default value is 0.
/// </para>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: Must be followed by a nonnegative integer that indicates the desired number of stencil
/// bitplanes. The smallest stencil buffer of at least the specified size is preferred. If the desired value is zero,
/// visuals with no stencil buffer are preferred.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Number of bits in the stencil buffer.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Number of bits in the stencil buffer.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int STENCIL_SIZE = 13;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseFBConfig: Must be followed by a nonnegative minimum size specification. If this value is zero, frame
/// buffer configurations with no red accumulation buffer are preferred. Otherwise, the largest possible red accumulation
/// buffer of at least the minimum size is preferred. The default value is 0.
/// </para>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: Must be followed by a nonnegative minimum size specification. If this value is zero,
/// visuals with no red accumulation buffer are preferred. Otherwise, the largest possible red accumulation buffer of at
/// least the minimum size is preferred.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Number of bits of red stored in the accumulation buffer.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Number of bits of red stored in the accumulation buffer.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int ACCUM_RED_SIZE = 14;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseFBConfig: Must be followed by a nonnegative minimum size specification. If this value is zero, frame
/// buffer configurations with no green accumulation buffer are preferred. Otherwise, the largest possible green
/// accumulation buffer of at least the minimum size is preferred. The default value is 0.
/// </para>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: Must be followed by a nonnegative minimum size specification. If this value is zero,
/// visuals with no green accumulation buffer are preferred. Otherwise, the largest possible green accumulation buffer of at
/// least the minimum size is preferred.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Number of bits of green stored in the accumulation buffer.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Number of bits of green stored in the accumulation buffer.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int ACCUM_GREEN_SIZE = 15;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseFBConfig: Must be followed by a nonnegative minimum size specification. If this value is zero, frame
/// buffer configurations with no blue accumulation buffer are preferred. Otherwise, the largest possible blue accumulation
/// buffer of at least the minimum size is preferred. The default value is 0.
/// </para>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: Must be followed by a nonnegative minimum size specification. If this value is zero,
/// visuals with no blue accumulation buffer are preferred. Otherwise, the largest possible blue accumulation buffer of at
/// least the minimum size is preferred.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Number of bits of blue stored in the accumulation buffer.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Number of bits of blue stored in the accumulation buffer.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int ACCUM_BLUE_SIZE = 16;
/// <summary>
/// <para>
/// [GL2.1] Glx.ChooseFBConfig: Must be followed by a nonnegative minimum size specification. If this value is zero, frame
/// buffer configurations with no alpha accumulation buffer are preferred. Otherwise, the largest possible alpha
/// accumulation buffer of at least the minimum size is preferred. The default value is 0.
/// </para>
/// <para>
/// [GL2.1] Glx.ChooseVisualCore: Must be followed by a nonnegative minimum size specification. If this value is zero,
/// visuals with no alpha accumulation buffer are preferred. Otherwise, the largest possible alpha accumulation buffer of at
/// least the minimum size is preferred.
/// </para>
/// <para>
/// [GL2.1] Glx.GetConfig: Number of bits of alpha stored in the accumulation buffer.
/// </para>
/// <para>
/// [GL2.1] Glx.GetFBConfigAttrib: Number of bits of alpha stored in the accumulation buffer.
/// </para>
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public const int ACCUM_ALPHA_SIZE = 17;
/// <summary>
/// [GL2.1] glXChooseVisual: return a visual that matches specified attributes
/// </summary>
/// <param name="dpy">Specifies the connection to the X server.</param>
/// <param name="screen">Specifies the screen number.</param>
/// <param name="attribList">
/// Specifies a list of boolean attributes and integer attribute/value pairs terminating the list.
/// </param>
[RequiredByFeature("GLX_VERSION_1_0")]
private static IntPtr ChooseVisualCore(IntPtr dpy, int screen, int[] attribList)
{
	Debug.Assert(Delegates.pglXChooseVisual != null, "pglXChooseVisual not implemented");
	IntPtr visual;
	unsafe {
		fixed (int* attribListPtr = attribList)
		{
			visual = Delegates.pglXChooseVisual(dpy, screen, attribListPtr);
			LogCommand("glXChooseVisual", visual, dpy, screen, attribList);
		}
	}
	DebugCheckErrors(visual);
	return visual;
}
/// <summary>
/// [GL2.1] glXCreateContext: create a new GLX rendering context
/// </summary>
/// <param name="dpy">Specifies the connection to the X server.</param>
/// <param name="vis">
/// Specifies the visual that defines the frame buffer resources available to the rendering context
/// (a pointer to an XVisualInfo structure, not a visual ID).
/// </param>
/// <param name="shareList">Specifies the context with which to share display lists (zero for no sharing).</param>
/// <param name="direct">
/// Specifies whether rendering is done with a direct connection to the graphics system if possible,
/// rather than through the X server.
/// </param>
[RequiredByFeature("GLX_VERSION_1_0")]
private static IntPtr CreateContext(IntPtr dpy, IntPtr vis, IntPtr shareList, bool direct)
{
	Debug.Assert(Delegates.pglXCreateContext != null, "pglXCreateContext not implemented");
	IntPtr context = Delegates.pglXCreateContext(dpy, vis, shareList, direct);
	LogCommand("glXCreateContext", context, dpy, vis, shareList, direct);
	DebugCheckErrors(context);
	return context;
}
/// <summary>
/// [GL2.1] glXDestroyContext: destroy a GLX context
/// </summary>
/// <param name="dpy">
/// Specifies the connection to the X server.
/// </param>
/// <param name="ctx">
/// Specifies the GLX context to be destroyed.
/// </param>
[RequiredByFeature("GLX_VERSION_1_0")]
public static void DestroyContext(IntPtr dpy, IntPtr ctx)
{
// Generated thin wrapper: forward to the dynamically resolved native entry
// point, then trace the call and check for GL errors in debug builds.
Debug.Assert(Delegates.pglXDestroyContext != null, "pglXDestroyContext not implemented");
Delegates.pglXDestroyContext(dpy, ctx);
LogCommand("glXDestroyContext", null, dpy, ctx );
DebugCheckErrors(null);
}
/// <summary>
/// [GL2.1] glXMakeCurrent: attach a GLX context to a window or a GLX pixmap
/// </summary>
/// <param name="dpy">Specifies the connection to the X server.</param>
/// <param name="drawable">Specifies a GLX drawable (an X window ID or a GLX pixmap ID).</param>
/// <param name="ctx">Specifies a GLX rendering context to attach to <paramref name="drawable"/>.</param>
[RequiredByFeature("GLX_VERSION_1_0")]
public static bool MakeCurrent(IntPtr dpy, IntPtr drawable, IntPtr ctx)
{
	Debug.Assert(Delegates.pglXMakeCurrent != null, "pglXMakeCurrent not implemented");
	bool success = Delegates.pglXMakeCurrent(dpy, drawable, ctx);
	LogCommand("glXMakeCurrent", success, dpy, drawable, ctx);
	DebugCheckErrors(success);
	return success;
}
/// <summary>
/// [GL2.1] glXCopyContext: copy state from one rendering context to another
/// </summary>
/// <param name="dpy">
/// Specifies the connection to the X server.
/// </param>
/// <param name="src">
/// Specifies the source context.
/// </param>
/// <param name="dst">
/// Specifies the destination context.
/// </param>
/// <param name="mask">
/// Specifies which portions of <paramref name="src"/> state are to be copied to <paramref name="dst"/>.
/// </param>
[RequiredByFeature("GLX_VERSION_1_0")]
public static void CopyContext(IntPtr dpy, IntPtr src, IntPtr dst, uint mask)
{
// Generated thin wrapper: forward to the dynamically resolved native entry
// point, then trace the call and check for GL errors in debug builds.
Debug.Assert(Delegates.pglXCopyContext != null, "pglXCopyContext not implemented");
Delegates.pglXCopyContext(dpy, src, dst, mask);
LogCommand("glXCopyContext", null, dpy, src, dst, mask );
DebugCheckErrors(null);
}
/// <summary>
/// [GL2.1] glXSwapBuffers: exchange front and back buffers
/// </summary>
/// <param name="dpy">
/// Specifies the connection to the X server.
/// </param>
/// <param name="drawable">
/// Specifies the drawable whose buffers are to be swapped.
/// </param>
[RequiredByFeature("GLX_VERSION_1_0")]
public static void SwapBuffers(IntPtr dpy, IntPtr drawable)
{
// Generated thin wrapper: forward to the dynamically resolved native entry
// point, then trace the call and check for GL errors in debug builds.
Debug.Assert(Delegates.pglXSwapBuffers != null, "pglXSwapBuffers not implemented");
Delegates.pglXSwapBuffers(dpy, drawable);
LogCommand("glXSwapBuffers", null, dpy, drawable );
DebugCheckErrors(null);
}
/// <summary>
/// [GL2.1] glXCreateGLXPixmap: create an off-screen GLX rendering area
/// </summary>
/// <param name="dpy">Specifies the connection to the X server.</param>
/// <param name="visual">A <see cref="T:IntPtr"/>.</param>
/// <param name="pixmap">
/// Specifies the X pixmap that will be used as the front left color buffer of the off-screen rendering area.
/// </param>
[RequiredByFeature("GLX_VERSION_1_0")]
private static IntPtr CreateGLXPixmap(IntPtr dpy, IntPtr visual, IntPtr pixmap)
{
	Debug.Assert(Delegates.pglXCreateGLXPixmap != null, "pglXCreateGLXPixmap not implemented");
	IntPtr glxPixmap = Delegates.pglXCreateGLXPixmap(dpy, visual, pixmap);
	LogCommand("glXCreateGLXPixmap", glxPixmap, dpy, visual, pixmap);
	DebugCheckErrors(glxPixmap);
	return glxPixmap;
}
/// <summary>
/// [GL2.1] glXDestroyGLXPixmap: destroy a GLX pixmap
/// </summary>
/// <param name="dpy">
/// Specifies the connection to the X server.
/// </param>
/// <param name="pixmap">
/// A <see cref="T:IntPtr"/>.
/// </param>
[RequiredByFeature("GLX_VERSION_1_0")]
public static void DestroyGLXPixmap(IntPtr dpy, IntPtr pixmap)
{
// Generated thin wrapper: forward to the dynamically resolved native entry
// point, then trace the call and check for GL errors in debug builds.
Debug.Assert(Delegates.pglXDestroyGLXPixmap != null, "pglXDestroyGLXPixmap not implemented");
Delegates.pglXDestroyGLXPixmap(dpy, pixmap);
LogCommand("glXDestroyGLXPixmap", null, dpy, pixmap );
DebugCheckErrors(null);
}
/// <summary>
/// [GL2.1] glXQueryExtension: indicate whether the GLX extension is supported
/// </summary>
/// <param name="dpy">Specifies the connection to the X server.</param>
/// <param name="errorb">A <see cref="T:int[]"/> receiving the error base.</param>
/// <param name="event">A <see cref="T:int[]"/> receiving the event base.</param>
[RequiredByFeature("GLX_VERSION_1_0")]
public static bool Query(IntPtr dpy, int[] errorb, int[] @event)
{
	Debug.Assert(Delegates.pglXQueryExtension != null, "pglXQueryExtension not implemented");
	bool supported;
	unsafe {
		fixed (int* errorbPtr = errorb)
		fixed (int* eventPtr = @event)
		{
			supported = Delegates.pglXQueryExtension(dpy, errorbPtr, eventPtr);
			LogCommand("glXQueryExtension", supported, dpy, errorb, @event);
		}
	}
	DebugCheckErrors(supported);
	return supported;
}
/// <summary>
/// [GL2.1] glXQueryVersion: return the version numbers of the GLX extension
/// </summary>
/// <param name="dpy">Specifies the connection to the X server.</param>
/// <param name="maj">A <see cref="T:int[]"/> receiving the major version.</param>
/// <param name="min">A <see cref="T:int[]"/> receiving the minor version.</param>
[RequiredByFeature("GLX_VERSION_1_0")]
public static bool QueryVersion(IntPtr dpy, int[] maj, int[] min)
{
	Debug.Assert(Delegates.pglXQueryVersion != null, "pglXQueryVersion not implemented");
	bool success;
	unsafe {
		fixed (int* majPtr = maj)
		fixed (int* minPtr = min)
		{
			success = Delegates.pglXQueryVersion(dpy, majPtr, minPtr);
			LogCommand("glXQueryVersion", success, dpy, maj, min);
		}
	}
	DebugCheckErrors(success);
	return success;
}
/// <summary>
/// [GL2.1] glXIsDirect: indicate whether direct rendering is enabled
/// </summary>
/// <param name="dpy">Specifies the connection to the X server.</param>
/// <param name="ctx">Specifies the GLX context that is being queried.</param>
[RequiredByFeature("GLX_VERSION_1_0")]
public static bool IsDirect(IntPtr dpy, IntPtr ctx)
{
	Debug.Assert(Delegates.pglXIsDirect != null, "pglXIsDirect not implemented");
	bool isDirect = Delegates.pglXIsDirect(dpy, ctx);
	LogCommand("glXIsDirect", isDirect, dpy, ctx);
	DebugCheckErrors(isDirect);
	return isDirect;
}
/// <summary>
/// [GL2.1] glXGetConfig: return information about GLX visuals
/// </summary>
/// <param name="dpy">Specifies the connection to the X server.</param>
/// <param name="visual">A <see cref="T:IntPtr"/>.</param>
/// <param name="attrib">Specifies the visual attribute to be returned.</param>
/// <param name="value">Returns the requested value.</param>
[RequiredByFeature("GLX_VERSION_1_0")]
private static int GetConfig(IntPtr dpy, IntPtr visual, int attrib, [Out] int[] value)
{
	Debug.Assert(Delegates.pglXGetConfig != null, "pglXGetConfig not implemented");
	int status;
	unsafe {
		fixed (int* valuePtr = value)
		{
			status = Delegates.pglXGetConfig(dpy, visual, attrib, valuePtr);
			LogCommand("glXGetConfig", status, dpy, visual, attrib, value);
		}
	}
	DebugCheckErrors(status);
	return status;
}
/// <summary>
/// [GL2.1] glXGetCurrentContext: return the current context
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public static IntPtr GetCurrentContext()
{
	Debug.Assert(Delegates.pglXGetCurrentContext != null, "pglXGetCurrentContext not implemented");
	IntPtr currentContext = Delegates.pglXGetCurrentContext();
	LogCommand("glXGetCurrentContext", currentContext);
	DebugCheckErrors(currentContext);
	return currentContext;
}
/// <summary>
/// [GL2.1] glXGetCurrentDrawable: return the current drawable
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public static IntPtr GetCurrentDrawable()
{
	Debug.Assert(Delegates.pglXGetCurrentDrawable != null, "pglXGetCurrentDrawable not implemented");
	IntPtr currentDrawable = Delegates.pglXGetCurrentDrawable();
	LogCommand("glXGetCurrentDrawable", currentDrawable);
	DebugCheckErrors(currentDrawable);
	return currentDrawable;
}
/// <summary>
/// [GL2.1] glXWaitGL: complete GL execution prior to subsequent X calls
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public static void WaitGL()
{
// Generated thin wrapper: forward to the dynamically resolved native entry
// point, then trace the call and check for GL errors in debug builds.
Debug.Assert(Delegates.pglXWaitGL != null, "pglXWaitGL not implemented");
Delegates.pglXWaitGL();
LogCommand("glXWaitGL", null );
DebugCheckErrors(null);
}
/// <summary>
/// [GL2.1] glXWaitX: complete X execution prior to subsequent GL calls
/// </summary>
[RequiredByFeature("GLX_VERSION_1_0")]
public static void WaitX()
{
// Generated thin wrapper: forward to the dynamically resolved native entry
// point, then trace the call and check for GL errors in debug builds.
Debug.Assert(Delegates.pglXWaitX != null, "pglXWaitX not implemented");
Delegates.pglXWaitX();
LogCommand("glXWaitX", null );
DebugCheckErrors(null);
}
/// <summary>
/// [GL2.1] glXUseXFont: create bitmap display lists from an X font
/// </summary>
/// <param name="font">
/// Specifies the font from which character glyphs are to be taken.
/// </param>
/// <param name="first">
/// Specifies the index of the first glyph to be taken.
/// </param>
/// <param name="count">
/// Specifies the number of glyphs to be taken.
/// </param>
/// <param name="list">
/// A <see cref="T:int"/>.
/// </param>
[RequiredByFeature("GLX_VERSION_1_0")]
public static void UseXFont(int font, int first, int count, int list)
{
// Generated thin wrapper: forward to the dynamically resolved native entry
// point, then trace the call and check for GL errors in debug builds.
Debug.Assert(Delegates.pglXUseXFont != null, "pglXUseXFont not implemented");
Delegates.pglXUseXFont(font, first, count, list);
LogCommand("glXUseXFont", null, font, first, count, list );
DebugCheckErrors(null);
}
/// <summary>
/// Holds the unmanaged glX entry points. Each native function is represented by
/// a delegate type matching its native signature plus a static field that is
/// populated at run time with the resolved function pointer (resolution happens
/// elsewhere in the library; fields remain null until then, which is why the
/// managed wrappers guard each call with Debug.Assert).
/// </summary>
internal static unsafe partial class Delegates
{
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate IntPtr glXChooseVisual(IntPtr dpy, int screen, int* attribList);
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXChooseVisual pglXChooseVisual;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate IntPtr glXCreateContext(IntPtr dpy, IntPtr vis, IntPtr shareList, bool direct);
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXCreateContext pglXCreateContext;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate void glXDestroyContext(IntPtr dpy, IntPtr ctx);
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXDestroyContext pglXDestroyContext;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate bool glXMakeCurrent(IntPtr dpy, IntPtr drawable, IntPtr ctx);
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXMakeCurrent pglXMakeCurrent;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate void glXCopyContext(IntPtr dpy, IntPtr src, IntPtr dst, uint mask);
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXCopyContext pglXCopyContext;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate void glXSwapBuffers(IntPtr dpy, IntPtr drawable);
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXSwapBuffers pglXSwapBuffers;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate IntPtr glXCreateGLXPixmap(IntPtr dpy, IntPtr visual, IntPtr pixmap);
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXCreateGLXPixmap pglXCreateGLXPixmap;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate void glXDestroyGLXPixmap(IntPtr dpy, IntPtr pixmap);
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXDestroyGLXPixmap pglXDestroyGLXPixmap;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate bool glXQueryExtension(IntPtr dpy, int* errorb, int* @event);
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXQueryExtension pglXQueryExtension;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate bool glXQueryVersion(IntPtr dpy, int* maj, int* min);
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXQueryVersion pglXQueryVersion;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate bool glXIsDirect(IntPtr dpy, IntPtr ctx);
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXIsDirect pglXIsDirect;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate int glXGetConfig(IntPtr dpy, IntPtr visual, int attrib, int* value);
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXGetConfig pglXGetConfig;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate IntPtr glXGetCurrentContext();
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXGetCurrentContext pglXGetCurrentContext;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate IntPtr glXGetCurrentDrawable();
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXGetCurrentDrawable pglXGetCurrentDrawable;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate void glXWaitGL();
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXWaitGL pglXWaitGL;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate void glXWaitX();
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXWaitX pglXWaitX;
[RequiredByFeature("GLX_VERSION_1_0")]
[SuppressUnmanagedCodeSecurity]
internal delegate void glXUseXFont(int font, int first, int count, int list);
[RequiredByFeature("GLX_VERSION_1_0")]
internal static glXUseXFont pglXUseXFont;
}
}
}
| |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
namespace SpaceMarbles.V5
{
/// <summary>
/// Global holder for a shared canvas GameObject.
/// NOTE(review): the type name shadows UnityEngine.Canvas inside this
/// namespace — confirm that is intentional.
/// </summary>
public static class Canvas
{
    // Shared canvas reference; no assignment to it is visible in this file.
    public static GameObject canvas;
}
/// <summary>
/// Scene-persistent singleton MonoBehaviour that tracks level/game state,
/// caches the scene cameras, and watches the tagged target spheres to detect
/// level completion. Driven from Unity's Awake/FixedUpdate callbacks.
/// </summary>
public class GameManager : MonoBehaviour
{
    static GameObject canvas;   // cached canvas reference (lookup currently disabled)
    GameGUI myGameGUI;          // GUI controller fetched from the "GameManager" object
    GameObject myCapsule;       // reserved for capsule rotation (currently unused)

    public static bool levelOver = false;
    public static bool gameOver = false;

    public static GameObject cameraMain;
    public static GameObject cameraWide;
    public const string cameraTag = "MainCamera";
    public const string cameraMainName = "Main Camera";
    public const string cameraWideName = "Wide Camera";

    public static List<GameObject> targetsList = new List<GameObject>();
    public static List<GameObject> targetsHitList = new List<GameObject>();
    public static List<float> targetsStartsList = new List<float>(); // only checking the x coordinate
    public const string targetSphereTag = "TargetSphere";

    public static string currentLevel = "";
    public static int coins = 0;
    public static bool isMenuScene = true;
    public static bool hasMenuLoadedBefore = false;

    /// <summary>Singleton instance; duplicate components destroy themselves in Awake.</summary>
    public static GameManager Instance
    {
        get; private set;
    }

    void Awake()
    {
        if (Instance == null)
        {
            Instance = this;
        }
        else
        {
            // Duplicate manager in the scene: destroy it and stop initialising.
            // BUGFIX: Destroy() is deferred to end of frame, so without this
            // early return the duplicate would still run DontDestroyOnLoad and
            // LevelLoad(), resetting the surviving instance's static state.
            Destroy(gameObject);
            return;
        }
        DontDestroyOnLoad(this);
        LevelLoad();
    }

    /// <summary>
    /// Resets per-level state: flags, GUI reference, cameras and target lists.
    /// Called from Awake and again whenever FixedUpdate detects a scene change.
    /// </summary>
    void LevelLoad()
    {
        IsMenuSceneCheck();
        currentLevel = SceneManager.GetActiveScene().name;
        levelOver = false;
        gameOver = false;
        myGameGUI = GameObject.Find("GameManager").GetComponent<GameGUI>();
        if (!isMenuScene)
        {
            FindCameras(); // TODO: this not running? error in buttons on game zooming
            ListTargetSpheres();
        }
        GunScript.shotPower = 0;
    }

    /// <summary>Caches the main and wide cameras (matched by name) from the MainCamera tag.</summary>
    void FindCameras()
    {
        foreach (GameObject camera in GameObject.FindGameObjectsWithTag(cameraTag))
        {
            if (camera.name == cameraMainName)
            {
                cameraMain = camera;
            }
            else if (camera.name == cameraWideName)
            {
                cameraWide = camera;
            }
        }
    }

    /// <summary>
    /// Rebuilds the target-sphere bookkeeping lists from the scene.
    /// If no tagged spheres remain, the level is considered finished.
    /// </summary>
    private void ListTargetSpheres()
    {
        targetsList.Clear();
        targetsHitList.Clear();
        targetsStartsList.Clear();
        int count = 0;
        foreach (GameObject targetSphere in GameObject.FindGameObjectsWithTag(targetSphereTag))
        {
            count++;
            targetsList.Add(targetSphere);
            targetsHitList.Add(targetSphere);
            targetsStartsList.Add(targetSphere.transform.position.x);
        }
        if (count == 0)
        {
            LevelOver();
        }
    }

    public static int frame = 0;
    public static int frameCheck = 50; // FixedUpdate normally ticks 50 times per second
    public static bool minimumFramesRun = false;

    /// <summary>
    /// Counts FixedUpdate ticks; once frameCheck ticks have elapsed the
    /// scene-name check is trusted (minimumFramesRun) and the counter restarts.
    /// </summary>
    public static void FrameReset()
    {
        if (frame > frameCheck)
        {
            minimumFramesRun = true;
            IsMenuSceneCheck();
            print("Min frames run");
            frame = 0;
        }
        frame++;
    }

    private void FixedUpdate()
    {
        // Level finished but the game continues.
        if (levelOver && gameOver == false)
        {
            myGameGUI.LevelOver();
        }
        // Last level finished: both level and game are over.
        if (levelOver && gameOver)
        {
            myGameGUI.GameOver();
        }
        // Scene changed since the last tick -> re-run level initialisation.
        if (currentLevel != SceneManager.GetActiveScene().name)
        {
            LevelLoad();
        }
        if (!levelOver && !isMenuScene)
        {
            if (targetsList.Count == 0)
            {
                ListTargetSpheres(); // also checks for level end
            }
            if (targetsList.Count != 0)
            {
                CheckTargetSpheresMoved();
            }
        }
        FrameReset();
    }

    /// <summary>
    /// Treats a sphere whose x position differs from its recorded start as hit,
    /// and ends the level once every sphere has been hit.
    /// </summary>
    private void CheckTargetSpheresMoved()
    {
        // Entries can go stale (destroyed objects) between loads; rebuild if so.
        if (targetsList != null && targetsList[0] == null)
        {
            ListTargetSpheres();
        }
        foreach (GameObject targetSphere in targetsList)
        {
            try
            {
                if (targetsHitList.Count == 0)
                {
                    LevelOver();
                    return;
                }
                if (targetSphere.transform.position.x != targetsStartsList[targetsList.IndexOf(targetSphere)])
                {
                    targetsHitList.Remove(targetSphere);
                    Destroy(targetSphere, 2f);
                }
            }
            catch (System.Exception)
            {
                // Best effort: the sphere may already have been destroyed elsewhere.
                print("Failed to remove hit target");
            }
        }
    }

    /// <summary>Marks the level over; on the last level also marks the game over.</summary>
    void LevelOver()
    {
        levelOver = true;
        print("LevelOver");
        if (SceneManager.GetActiveScene().name == ButtonsActions.lastLevel)
        {
            gameOver = true;
            print("GameOver");
        }
    }

    /// <summary>Raises the sphere limit, clamped to GunScript.maxSpheresMaximum.</summary>
    public static void MaxSpheresPlus()
    {
        if (GunScript.maxSpheres < GunScript.maxSpheresMaximum)
        {
            GunScript.maxSpheres++;
        }
    }

    /// <summary>Lowers the sphere limit, clamped to GunScript.maxSpheresMinimum.</summary>
    public static void MaxSpheresMinus()
    {
        if (GunScript.maxSpheres > GunScript.maxSpheresMinimum)
        {
            GunScript.maxSpheres--;
        }
    }

    /// <summary>
    /// Updates isMenuScene from the active scene name, but only after enough
    /// frames have run for the scene to be considered fully loaded.
    /// </summary>
    public static void IsMenuSceneCheck()
    {
        if (minimumFramesRun)
        {
            isMenuScene = SceneManager.GetActiveScene().name == ButtonsActions.mainMenuName;
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Runtime.InteropServices;
using Test.Cryptography;
using Xunit;
namespace System.Security.Cryptography.X509Certificates.Tests
{
internal static class TestData
{
// DER-encoded X.509 certificate bytes, stored as a hex literal and decoded at
// type-initialisation time via HexToByteArray.
public static byte[] MsCertificate = (
"308204ec308203d4a003020102021333000000b011af0a8bd03b9fdd00010000" +
"00b0300d06092a864886f70d01010505003079310b3009060355040613025553" +
"311330110603550408130a57617368696e67746f6e3110300e06035504071307" +
"5265646d6f6e64311e301c060355040a13154d6963726f736f667420436f7270" +
"6f726174696f6e312330210603550403131a4d6963726f736f667420436f6465" +
"205369676e696e6720504341301e170d3133303132343232333333395a170d31" +
"34303432343232333333395a308183310b300906035504061302555331133011" +
"0603550408130a57617368696e67746f6e3110300e060355040713075265646d" +
"6f6e64311e301c060355040a13154d6963726f736f667420436f72706f726174" +
"696f6e310d300b060355040b13044d4f5052311e301c060355040313154d6963" +
"726f736f667420436f72706f726174696f6e30820122300d06092a864886f70d" +
"01010105000382010f003082010a0282010100e8af5ca2200df8287cbc057b7f" +
"adeeeb76ac28533f3adb407db38e33e6573fa551153454a5cfb48ba93fa837e1" +
"2d50ed35164eef4d7adb137688b02cf0595ca9ebe1d72975e41b85279bf3f82d" +
"9e41362b0b40fbbe3bbab95c759316524bca33c537b0f3eb7ea8f541155c0865" +
"1d2137f02cba220b10b1109d772285847c4fb91b90b0f5a3fe8bf40c9a4ea0f5" +
"c90a21e2aae3013647fd2f826a8103f5a935dc94579dfb4bd40e82db388f12fe" +
"e3d67a748864e162c4252e2aae9d181f0e1eb6c2af24b40e50bcde1c935c49a6" +
"79b5b6dbcef9707b280184b82a29cfbfa90505e1e00f714dfdad5c238329ebc7" +
"c54ac8e82784d37ec6430b950005b14f6571c50203010001a38201603082015c" +
"30130603551d25040c300a06082b06010505070303301d0603551d0e04160414" +
"5971a65a334dda980780ff841ebe87f9723241f230510603551d11044a3048a4" +
"463044310d300b060355040b13044d4f5052313330310603550405132a333135" +
"39352b34666166306237312d616433372d346161332d613637312d3736626330" +
"35323334346164301f0603551d23041830168014cb11e8cad2b4165801c9372e" +
"331616b94c9a0a1f30560603551d1f044f304d304ba049a0478645687474703a" +
"2f2f63726c2e6d6963726f736f66742e636f6d2f706b692f63726c2f70726f64" +
"756374732f4d6963436f645369675043415f30382d33312d323031302e63726c" +
"305a06082b06010505070101044e304c304a06082b06010505073002863e6874" +
"74703a2f2f7777772e6d6963726f736f66742e636f6d2f706b692f6365727473" +
"2f4d6963436f645369675043415f30382d33312d323031302e637274300d0609" +
"2a864886f70d0101050500038201010031d76e2a12573381d59dc6ebf93ad444" +
"4d089eee5edf6a5bb779cf029cbc76689e90a19c0bc37fa28cf14dba9539fb0d" +
"e0e19bf45d240f1b8d88153a7cdbadceb3c96cba392c457d24115426300d0dff" +
"47ea0307e5e4665d2c7b9d1da910fa1cb074f24f696b9ea92484daed96a0df73" +
"a4ef6a1aac4b629ef17cc0147f48cd4db244f9f03c936d42d8e87ce617a09b68" +
"680928f90297ef1103ba6752adc1e9b373a6d263cd4ae23ee4f34efdffa1e0bb" +
"02133b5d20de553fa3ae9040313875285e04a9466de6f57a7940bd1fcde845d5" +
"aee25d3ef575c7e6666360ccd59a84878d2430f7ef34d0631db142674a0e4bbf" +
"3a0eefb6953aa738e4259208a6886682").HexToByteArray();
// PEM-encoded certificate as ASCII bytes; by its name this is the PEM form of
// MsCertificate above (not independently verified here).
public static readonly byte[] MsCertificatePemBytes = ByteUtils.AsciiBytes(
@"-----BEGIN CERTIFICATE-----
MIIE7DCCA9SgAwIBAgITMwAAALARrwqL0Duf3QABAAAAsDANBgkqhkiG9w0BAQUF
ADB5MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMH
UmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSMwIQYDVQQD
ExpNaWNyb3NvZnQgQ29kZSBTaWduaW5nIFBDQTAeFw0xMzAxMjQyMjMzMzlaFw0x
NDA0MjQyMjMzMzlaMIGDMQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3Rv
bjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0
aW9uMQ0wCwYDVQQLEwRNT1BSMR4wHAYDVQQDExVNaWNyb3NvZnQgQ29ycG9yYXRp
b24wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDor1yiIA34KHy8BXt/
re7rdqwoUz8620B9s44z5lc/pVEVNFSlz7SLqT+oN+EtUO01Fk7vTXrbE3aIsCzw
WVyp6+HXKXXkG4Unm/P4LZ5BNisLQPu+O7q5XHWTFlJLyjPFN7Dz636o9UEVXAhl
HSE38Cy6IgsQsRCddyKFhHxPuRuQsPWj/ov0DJpOoPXJCiHiquMBNkf9L4JqgQP1
qTXclFed+0vUDoLbOI8S/uPWenSIZOFixCUuKq6dGB8OHrbCryS0DlC83hyTXEmm
ebW22875cHsoAYS4KinPv6kFBeHgD3FN/a1cI4Mp68fFSsjoJ4TTfsZDC5UABbFP
ZXHFAgMBAAGjggFgMIIBXDATBgNVHSUEDDAKBggrBgEFBQcDAzAdBgNVHQ4EFgQU
WXGmWjNN2pgHgP+EHr6H+XIyQfIwUQYDVR0RBEowSKRGMEQxDTALBgNVBAsTBE1P
UFIxMzAxBgNVBAUTKjMxNTk1KzRmYWYwYjcxLWFkMzctNGFhMy1hNjcxLTc2YmMw
NTIzNDRhZDAfBgNVHSMEGDAWgBTLEejK0rQWWAHJNy4zFha5TJoKHzBWBgNVHR8E
TzBNMEugSaBHhkVodHRwOi8vY3JsLm1pY3Jvc29mdC5jb20vcGtpL2NybC9wcm9k
dWN0cy9NaWNDb2RTaWdQQ0FfMDgtMzEtMjAxMC5jcmwwWgYIKwYBBQUHAQEETjBM
MEoGCCsGAQUFBzAChj5odHRwOi8vd3d3Lm1pY3Jvc29mdC5jb20vcGtpL2NlcnRz
L01pY0NvZFNpZ1BDQV8wOC0zMS0yMDEwLmNydDANBgkqhkiG9w0BAQUFAAOCAQEA
MdduKhJXM4HVncbr+TrURE0Inu5e32pbt3nPApy8dmiekKGcC8N/oozxTbqVOfsN
4OGb9F0kDxuNiBU6fNutzrPJbLo5LEV9JBFUJjANDf9H6gMH5eRmXSx7nR2pEPoc
sHTyT2lrnqkkhNrtlqDfc6TvahqsS2Ke8XzAFH9IzU2yRPnwPJNtQtjofOYXoJto
aAko+QKX7xEDumdSrcHps3Om0mPNSuI+5PNO/f+h4LsCEztdIN5VP6OukEAxOHUo
XgSpRm3m9Xp5QL0fzehF1a7iXT71dcfmZmNgzNWahIeNJDD37zTQYx2xQmdKDku/
Og7vtpU6pzjkJZIIpohmgg==
-----END CERTIFICATE-----
");
// Password for the PfxData blob below (paired by name).
public const string PfxDataPassword = "12345";
// Hex-encoded PFX/PKCS#12 blob; by its name it is protected with PfxDataPassword.
public static byte[] PfxData = (
"308206a20201033082065e06092a864886f70d010701a082064f0482064b3082" +
"0647308203c006092a864886f70d010701a08203b1048203ad308203a9308203" +
"a5060b2a864886f70d010c0a0102a08202b6308202b2301c060a2a864886f70d" +
"010c0103300e04085052002c7da2c2a6020207d0048202907d485e3bfc6e6457" +
"c811394c145d0e8a18325646854e4ff0097bc5a98547f5ad616c8efda8505aa8" +
"7564ed4800a3139759497c60c6688b51f376acae906429c8771cb1428226b68a" +
"6297207bcc9dd7f9563478dd83880aab2304b545759b2275305df4eff9fac24a" +
"3cc9d3b2d672efe45d8f48e24a16506c1d7566fc6d1b269fbf201b3ac3309d3e" +
"bc6fd606257a7a707aa2f790ea3fe7a94a51138540c5319010cba6de9fb9d85f" +
"cdc78da60e33df2f21c46fb9a8554b4f82e0a6edba4db5585d77d331d35daaed" +
"51b6a5a3e000a299880fb799182c8ca3004b7837a9feb8bfc76778089993f3d1" +
"1d70233608af7c50722d680623d2bf54bd4b1e7a604184d9f44e0af8099ffa47" +
"1e5536e7902793829db9902ddb61264a62962950ad274ea516b2d44be9036530" +
"016e607b73f341aeefed2211f6330364738b435b0d2ed6c57747f6c8230a053f" +
"78c4dd65db83b26c6a47836a6cbbab92cbb262c6fb6d08632b4457f5fa8eabfa" +
"65db34157e1d301e9085cc443582cdd15404314872748545eb3fc3c574882655" +
"8c9a85f966e315775bbe9da34d1e8b6dadc3c9e120c6d6a2e1cffe4eb014c3ce" +
"fbc19356ce33dac60f93d67a4de247b0dae13cd8b8c9f15604cc0ec9968e3ad7" +
"f57c9f53c45e2ecb0a0945ec0ba04baa15b48d8596edc9f5fe9165a5d21949fb" +
"5fe30a920ad2c0f78799f6443c300629b8ca4dca19b9dbf1e27aab7b12271228" +
"119a95c9822be6439414beeae24002b46eb97e030e18bd810ade0bcf4213a355" +
"038b56584b2fbcc3f5ea215d0cf667ffd823ea03ab62c3b193dfb4450aabb50b" +
"af306e8088ee7384fa2fdff03e0dd7acd61832223e806a94d46e196462522808" +
"3163f1caf333fdbbe2d54ca86968867ce0b6dd5e5b7f0633c6fab4a19cc14f64" +
"5ec14d0b1436f7623181db301306092a864886f70d0109153106040401000000" +
"305d06092b060104018237110131501e4e004d006900630072006f0073006f00" +
"6600740020005300740072006f006e0067002000430072007900700074006f00" +
"67007200610070006800690063002000500072006f0076006900640065007230" +
"6506092a864886f70d01091431581e5600500076006b0054006d0070003a0034" +
"0064006100340061003100650036002d0066003700380062002d003400360061" +
"0035002d0039003800380033002d003500320063003800330032006100340063" +
"0030006100623082027f06092a864886f70d010706a08202703082026c020100" +
"3082026506092a864886f70d010701301c060a2a864886f70d010c0106300e04" +
"08e0c117e67a75d8eb020207d080820238292882408b31826f0dc635f9bbe7c1" +
"99a48a3b4fefc729dbf95508d6a7d04805a8dd612427f93124f522ac7d3c6f4d" +
"db74d937f57823b5b1e8cfae4ece4a1fffd801558d77ba31985aa7f747d834cb" +
"e84464ef777718c9865c819d6c9daa0fa25e2a2a80b3f2aaa67d40e382eb084c" +
"ca85e314ea40c3ef3ed1593904d7a16f37807c99af06c917093f6c5aaebb12a6" +
"c58c9956d4fbbdde1f1e389989c36e19dd38d4b978d6f47131e458ab68e237e4" +
"0cb6a87f21c8773de845780b50995a51f041106f47c740b3bd946038984f1ac9" +
"e91230616480962f11b0683f8802173c596c4bd554642f51a76f9dfff9053def" +
"7b3c3f759fc7eeac3f2386106c4b8cb669589e004fb235f0357ea5cf0b5a6fc7" +
"8a6d941a3ae44af7b601b59d15cd1ec61bccc481fbb83eae2f83153b41e71ef7" +
"6a2814ab59347f116ab3e9c1621668a573013d34d13d3854e604286733c6bad0" +
"f511d7f8fd6356f7c3198d0cb771af27f4b5a3c3b571fdd083fd68a9a1eea783" +
"152c436f7513613a7e399a1da48d7e55db7504dc47d1145df8d7b6d32eaa4cce" +
"e06f98bb3dda2cc0d0564a962f86dfb122e4f7e2ed6f1b509c58d4a3b2d0a687" +
"88f7e313aecfbdef456c31b96fc13586e02aeb65807ed83bb0cb7c28f157bc95" +
"c9c593c9194691539ae3c620ed1d4d4af0177f6b9483a5341d7b084bc5b425af" +
"b658168ee2d8fb2bfab07a3ba061687a5ecd1f8da9001dd3e7be793923094abb" +
"0f2cf4d24cb071b9e568b18336bb4dc541352c9785c48d0f0e53066eb2009efc" +
"b3e5644ed12252c1bc303b301f300706052b0e03021a0414f90827ae93fd3a91" +
"54c3c0840d7950b0e30ffbaf0414e147930b932899741c92d765226893877025" +
"4a2b020207d0").HexToByteArray();
// Hex-encoded bytes of a certificate store exported in PFX format (per the name).
public static byte[] StoreSavedAsPfxData = (
"3082070406092a864886f70d010702a08206f5308206f10201013100300b0609" +
"2a864886f70d010701a08206d9308201e530820152a0030201020210d5b5bc1c" +
"458a558845bff51cb4dff31c300906052b0e03021d05003011310f300d060355" +
"040313064d794e616d65301e170d3130303430313038303030305a170d313130" +
"3430313038303030305a3011310f300d060355040313064d794e616d6530819f" +
"300d06092a864886f70d010101050003818d0030818902818100b11e30ea8742" +
"4a371e30227e933ce6be0e65ff1c189d0d888ec8ff13aa7b42b68056128322b2" +
"1f2b6976609b62b6bc4cf2e55ff5ae64e9b68c78a3c2dacc916a1bc7322dd353" +
"b32898675cfb5b298b176d978b1f12313e3d865bc53465a11cca106870a4b5d5" +
"0a2c410938240e92b64902baea23eb093d9599e9e372e48336730203010001a3" +
"46304430420603551d01043b3039801024859ebf125e76af3f0d7979b4ac7a96" +
"a1133011310f300d060355040313064d794e616d658210d5b5bc1c458a558845" +
"bff51cb4dff31c300906052b0e03021d0500038181009bf6e2cf830ed485b86d" +
"6b9e8dffdcd65efc7ec145cb9348923710666791fcfa3ab59d689ffd7234b787" +
"2611c5c23e5e0714531abadb5de492d2c736e1c929e648a65cc9eb63cd84e57b" +
"5909dd5ddf5dbbba4a6498b9ca225b6e368b94913bfc24de6b2bd9a26b192b95" +
"7304b89531e902ffc91b54b237bb228be8afcda26476308204ec308203d4a003" +
"020102021333000000b011af0a8bd03b9fdd0001000000b0300d06092a864886" +
"f70d01010505003079310b300906035504061302555331133011060355040813" +
"0a57617368696e67746f6e3110300e060355040713075265646d6f6e64311e30" +
"1c060355040a13154d6963726f736f667420436f72706f726174696f6e312330" +
"210603550403131a4d6963726f736f667420436f6465205369676e696e672050" +
"4341301e170d3133303132343232333333395a170d3134303432343232333333" +
"395a308183310b3009060355040613025553311330110603550408130a576173" +
"68696e67746f6e3110300e060355040713075265646d6f6e64311e301c060355" +
"040a13154d6963726f736f667420436f72706f726174696f6e310d300b060355" +
"040b13044d4f5052311e301c060355040313154d6963726f736f667420436f72" +
"706f726174696f6e30820122300d06092a864886f70d01010105000382010f00" +
"3082010a0282010100e8af5ca2200df8287cbc057b7fadeeeb76ac28533f3adb" +
"407db38e33e6573fa551153454a5cfb48ba93fa837e12d50ed35164eef4d7adb" +
"137688b02cf0595ca9ebe1d72975e41b85279bf3f82d9e41362b0b40fbbe3bba" +
"b95c759316524bca33c537b0f3eb7ea8f541155c08651d2137f02cba220b10b1" +
"109d772285847c4fb91b90b0f5a3fe8bf40c9a4ea0f5c90a21e2aae3013647fd" +
"2f826a8103f5a935dc94579dfb4bd40e82db388f12fee3d67a748864e162c425" +
"2e2aae9d181f0e1eb6c2af24b40e50bcde1c935c49a679b5b6dbcef9707b2801" +
"84b82a29cfbfa90505e1e00f714dfdad5c238329ebc7c54ac8e82784d37ec643" +
"0b950005b14f6571c50203010001a38201603082015c30130603551d25040c30" +
"0a06082b06010505070303301d0603551d0e041604145971a65a334dda980780" +
"ff841ebe87f9723241f230510603551d11044a3048a4463044310d300b060355" +
"040b13044d4f5052313330310603550405132a33313539352b34666166306237" +
"312d616433372d346161332d613637312d373662633035323334346164301f06" +
"03551d23041830168014cb11e8cad2b4165801c9372e331616b94c9a0a1f3056" +
"0603551d1f044f304d304ba049a0478645687474703a2f2f63726c2e6d696372" +
"6f736f66742e636f6d2f706b692f63726c2f70726f64756374732f4d6963436f" +
"645369675043415f30382d33312d323031302e63726c305a06082b0601050507" +
"0101044e304c304a06082b06010505073002863e687474703a2f2f7777772e6d" +
"6963726f736f66742e636f6d2f706b692f63657274732f4d6963436f64536967" +
"5043415f30382d33312d323031302e637274300d06092a864886f70d01010505" +
"00038201010031d76e2a12573381d59dc6ebf93ad4444d089eee5edf6a5bb779" +
"cf029cbc76689e90a19c0bc37fa28cf14dba9539fb0de0e19bf45d240f1b8d88" +
"153a7cdbadceb3c96cba392c457d24115426300d0dff47ea0307e5e4665d2c7b" +
"9d1da910fa1cb074f24f696b9ea92484daed96a0df73a4ef6a1aac4b629ef17c" +
"c0147f48cd4db244f9f03c936d42d8e87ce617a09b68680928f90297ef1103ba" +
"6752adc1e9b373a6d263cd4ae23ee4f34efdffa1e0bb02133b5d20de553fa3ae" +
"9040313875285e04a9466de6f57a7940bd1fcde845d5aee25d3ef575c7e66663" +
"60ccd59a84878d2430f7ef34d0631db142674a0e4bbf3a0eefb6953aa738e425" +
"9208a68866823100").HexToByteArray();
// Hex-encoded bytes of a single certificate exported in CER (DER) format (per the name).
public static byte[] StoreSavedAsCerData = (
"308201e530820152a0030201020210d5b5bc1c458a558845bff51cb4dff31c30" +
"0906052b0e03021d05003011310f300d060355040313064d794e616d65301e17" +
"0d3130303430313038303030305a170d3131303430313038303030305a301131" +
"0f300d060355040313064d794e616d6530819f300d06092a864886f70d010101" +
"050003818d0030818902818100b11e30ea87424a371e30227e933ce6be0e65ff" +
"1c189d0d888ec8ff13aa7b42b68056128322b21f2b6976609b62b6bc4cf2e55f" +
"f5ae64e9b68c78a3c2dacc916a1bc7322dd353b32898675cfb5b298b176d978b" +
"1f12313e3d865bc53465a11cca106870a4b5d50a2c410938240e92b64902baea" +
"23eb093d9599e9e372e48336730203010001a346304430420603551d01043b30" +
"39801024859ebf125e76af3f0d7979b4ac7a96a1133011310f300d0603550403" +
"13064d794e616d658210d5b5bc1c458a558845bff51cb4dff31c300906052b0e" +
"03021d0500038181009bf6e2cf830ed485b86d6b9e8dffdcd65efc7ec145cb93" +
"48923710666791fcfa3ab59d689ffd7234b7872611c5c23e5e0714531abadb5d" +
"e492d2c736e1c929e648a65cc9eb63cd84e57b5909dd5ddf5dbbba4a6498b9ca" +
"225b6e368b94913bfc24de6b2bd9a26b192b957304b89531e902ffc91b54b237" +
"bb228be8afcda26476").HexToByteArray();
// Hex-encoded bytes of a certificate exported in serialized-certificate format (per the name).
public static byte[] StoreSavedAsSerializedCerData = (
"0200000001000000bc0000001c0000006c000000010000000000000000000000" +
"00000000020000007b00370037004500420044003000320044002d0044003800" +
"440045002d0034003700350041002d0038003800360037002d00440032003000" +
"4200300030003600340045003400390046007d00000000004d00690063007200" +
"6f0073006f006600740020005300740072006f006e0067002000430072007900" +
"700074006f0067007200610070006800690063002000500072006f0076006900" +
"64006500720000002000000001000000e9010000308201e530820152a0030201" +
"020210d5b5bc1c458a558845bff51cb4dff31c300906052b0e03021d05003011" +
"310f300d060355040313064d794e616d65301e170d3130303430313038303030" +
"305a170d3131303430313038303030305a3011310f300d060355040313064d79" +
"4e616d6530819f300d06092a864886f70d010101050003818d00308189028181" +
"00b11e30ea87424a371e30227e933ce6be0e65ff1c189d0d888ec8ff13aa7b42" +
"b68056128322b21f2b6976609b62b6bc4cf2e55ff5ae64e9b68c78a3c2dacc91" +
"6a1bc7322dd353b32898675cfb5b298b176d978b1f12313e3d865bc53465a11c" +
"ca106870a4b5d50a2c410938240e92b64902baea23eb093d9599e9e372e48336" +
"730203010001a346304430420603551d01043b3039801024859ebf125e76af3f" +
"0d7979b4ac7a96a1133011310f300d060355040313064d794e616d658210d5b5" +
"bc1c458a558845bff51cb4dff31c300906052b0e03021d0500038181009bf6e2" +
"cf830ed485b86d6b9e8dffdcd65efc7ec145cb9348923710666791fcfa3ab59d" +
"689ffd7234b7872611c5c23e5e0714531abadb5de492d2c736e1c929e648a65c" +
"c9eb63cd84e57b5909dd5ddf5dbbba4a6498b9ca225b6e368b94913bfc24de6b" +
"2bd9a26b192b957304b89531e902ffc91b54b237bb228be8afcda26476").HexToByteArray();
// Hex-encoded bytes of a certificate store exported in serialized-store format (per the name).
public static byte[] StoreSavedAsSerializedStoreData = (
"00000000434552540200000001000000bc0000001c0000006c00000001000000" +
"000000000000000000000000020000007b003700370045004200440030003200" +
"44002d0044003800440045002d0034003700350041002d003800380036003700" +
"2d004400320030004200300030003600340045003400390046007d0000000000" +
"4d006900630072006f0073006f006600740020005300740072006f006e006700" +
"2000430072007900700074006f00670072006100700068006900630020005000" +
"72006f007600690064006500720000002000000001000000e9010000308201e5" +
"30820152a0030201020210d5b5bc1c458a558845bff51cb4dff31c300906052b" +
"0e03021d05003011310f300d060355040313064d794e616d65301e170d313030" +
"3430313038303030305a170d3131303430313038303030305a3011310f300d06" +
"0355040313064d794e616d6530819f300d06092a864886f70d01010105000381" +
"8d0030818902818100b11e30ea87424a371e30227e933ce6be0e65ff1c189d0d" +
"888ec8ff13aa7b42b68056128322b21f2b6976609b62b6bc4cf2e55ff5ae64e9" +
"b68c78a3c2dacc916a1bc7322dd353b32898675cfb5b298b176d978b1f12313e" +
"3d865bc53465a11cca106870a4b5d50a2c410938240e92b64902baea23eb093d" +
"9599e9e372e48336730203010001a346304430420603551d01043b3039801024" +
"859ebf125e76af3f0d7979b4ac7a96a1133011310f300d060355040313064d79" +
"4e616d658210d5b5bc1c458a558845bff51cb4dff31c300906052b0e03021d05" +
"00038181009bf6e2cf830ed485b86d6b9e8dffdcd65efc7ec145cb9348923710" +
"666791fcfa3ab59d689ffd7234b7872611c5c23e5e0714531abadb5de492d2c7" +
"36e1c929e648a65cc9eb63cd84e57b5909dd5ddf5dbbba4a6498b9ca225b6e36" +
"8b94913bfc24de6b2bd9a26b192b957304b89531e902ffc91b54b237bb228be8" +
"afcda264762000000001000000f0040000308204ec308203d4a0030201020213" +
"33000000b011af0a8bd03b9fdd0001000000b0300d06092a864886f70d010105" +
"05003079310b3009060355040613025553311330110603550408130a57617368" +
"696e67746f6e3110300e060355040713075265646d6f6e64311e301c06035504" +
"0a13154d6963726f736f667420436f72706f726174696f6e3123302106035504" +
"03131a4d6963726f736f667420436f6465205369676e696e6720504341301e17" +
"0d3133303132343232333333395a170d3134303432343232333333395a308183" +
"310b3009060355040613025553311330110603550408130a57617368696e6774" +
"6f6e3110300e060355040713075265646d6f6e64311e301c060355040a13154d" +
"6963726f736f667420436f72706f726174696f6e310d300b060355040b13044d" +
"4f5052311e301c060355040313154d6963726f736f667420436f72706f726174" +
"696f6e30820122300d06092a864886f70d01010105000382010f003082010a02" +
"82010100e8af5ca2200df8287cbc057b7fadeeeb76ac28533f3adb407db38e33" +
"e6573fa551153454a5cfb48ba93fa837e12d50ed35164eef4d7adb137688b02c" +
"f0595ca9ebe1d72975e41b85279bf3f82d9e41362b0b40fbbe3bbab95c759316" +
"524bca33c537b0f3eb7ea8f541155c08651d2137f02cba220b10b1109d772285" +
"847c4fb91b90b0f5a3fe8bf40c9a4ea0f5c90a21e2aae3013647fd2f826a8103" +
"f5a935dc94579dfb4bd40e82db388f12fee3d67a748864e162c4252e2aae9d18" +
"1f0e1eb6c2af24b40e50bcde1c935c49a679b5b6dbcef9707b280184b82a29cf" +
"bfa90505e1e00f714dfdad5c238329ebc7c54ac8e82784d37ec6430b950005b1" +
"4f6571c50203010001a38201603082015c30130603551d25040c300a06082b06" +
"010505070303301d0603551d0e041604145971a65a334dda980780ff841ebe87" +
"f9723241f230510603551d11044a3048a4463044310d300b060355040b13044d" +
"4f5052313330310603550405132a33313539352b34666166306237312d616433" +
"372d346161332d613637312d373662633035323334346164301f0603551d2304" +
"1830168014cb11e8cad2b4165801c9372e331616b94c9a0a1f30560603551d1f" +
"044f304d304ba049a0478645687474703a2f2f63726c2e6d6963726f736f6674" +
"2e636f6d2f706b692f63726c2f70726f64756374732f4d6963436f6453696750" +
"43415f30382d33312d323031302e63726c305a06082b06010505070101044e30" +
"4c304a06082b06010505073002863e687474703a2f2f7777772e6d6963726f73" +
"6f66742e636f6d2f706b692f63657274732f4d6963436f645369675043415f30" +
"382d33312d323031302e637274300d06092a864886f70d010105050003820101" +
"0031d76e2a12573381d59dc6ebf93ad4444d089eee5edf6a5bb779cf029cbc76" +
"689e90a19c0bc37fa28cf14dba9539fb0de0e19bf45d240f1b8d88153a7cdbad" +
"ceb3c96cba392c457d24115426300d0dff47ea0307e5e4665d2c7b9d1da910fa" +
"1cb074f24f696b9ea92484daed96a0df73a4ef6a1aac4b629ef17cc0147f48cd" +
"4db244f9f03c936d42d8e87ce617a09b68680928f90297ef1103ba6752adc1e9" +
"b373a6d263cd4ae23ee4f34efdffa1e0bb02133b5d20de553fa3ae9040313875" +
"285e04a9466de6f57a7940bd1fcde845d5aee25d3ef575c7e6666360ccd59a84" +
"878d2430f7ef34d0631db142674a0e4bbf3a0eefb6953aa738e4259208a68866" +
"82000000000000000000000000").HexToByteArray();
// DER-encoded certificate; by its name it uses a DSS/DSA key (not verified here).
public static byte[] DssCer = (
"3082025d3082021da00302010202101e9ae1e91e07de8640ac7af21ac22e8030" +
"0906072a8648ce380403300e310c300a06035504031303466f6f301e170d3135" +
"303232343232313734375a170d3136303232343232313734375a300e310c300a" +
"06035504031303466f6f308201b73082012c06072a8648ce3804013082011f02" +
"818100871018cc42552d14a5a9286af283f3cfba959b8835ec2180511d0dceb8" +
"b979285708c800fc10cb15337a4ac1a48ed31394072015a7a6b525986b49e5e1" +
"139737a794833c1aa1e0eaaa7e9d4efeb1e37a65dbc79f51269ba41e8f0763aa" +
"613e29c81c3b977aeeb3d3c3f6feb25c270cdcb6aee8cd205928dfb33c44d2f2" +
"dbe819021500e241edcf37c1c0e20aadb7b4e8ff7aa8fde4e75d02818100859b" +
"5aeb351cf8ad3fabac22ae0350148fd1d55128472691709ec08481584413e9e5" +
"e2f61345043b05d3519d88c021582ccef808af8f4b15bd901a310fefd518af90" +
"aba6f85f6563db47ae214a84d0b7740c9394aa8e3c7bfef1beedd0dafda079bf" +
"75b2ae4edb7480c18b9cdfa22e68a06c0685785f5cfb09c2b80b1d05431d0381" +
"8400028180089a43f439b924bef3529d8d6206d1fca56a55caf52b41d6ce371e" +
"bf07bda132c8eadc040007fcf4da06c1f30504ebd8a77d301f5a4702f01f0d2a" +
"0707ac1da38dd3251883286e12456234da62eda0df5fe2fa07cd5b16f3638bec" +
"ca7786312da7d3594a4bb14e353884da0e9aecb86e3c9bdb66fca78ea85e1cc3" +
"f2f8bf0963300906072a8648ce380403032f00302c021461f6d143a47a4f7e0e" +
"0ef9848b7f83eacbf83ffd021420e2ac47e656874633e01b0d207a99280c1127" +
"01").HexToByteArray();
// DER-encoded certificate; by its name it carries a certificate-policies extension.
public static byte[] CertWithPolicies = (
"308201f33082015ca0030201020210134fb7082cf69bbb4930bfc8e1ca446130" +
"0d06092a864886f70d0101050500300e310c300a06035504031303466f6f301e" +
"170d3135303330313232343735385a170d3136303330313034343735385a300e" +
"310c300a06035504031303466f6f30819f300d06092a864886f70d0101010500" +
"03818d0030818902818100c252d52fb96658ddbb7d19dd9caaf203ec0376f77c" +
"3012bd93e14bb22a6ff2b5ce8060a197e3fd8289fbff826746baae0db8d68b47" +
"a1cf13678717d7db9a16dab028927173a3e843b3a7df8c5a4ff675957ea20703" +
"6389a60a83d643108bd1293e2135a672a1cff10b7d5b3c78ab44d35e20ca6a5c" +
"5b6f714c5bfd66ed4307070203010001a3523050301b06092b06010401823714" +
"02040e1e0c00480065006c006c006f0000301a06092b0601040182371507040d" +
"300b060357080902010302010230150603551d20040e300c3004060262133004" +
"06027021300d06092a864886f70d0101050500038181001be04e59fbea63acfb" +
"c8b6fd3d02dd7442532344cfbc124e924c0bacf23865e4ce2f442ad60ae457d8" +
"4f7a1f05d50fb867c20e778e412a25237054555669ced01c1ce1ba8e8e57510f" +
"73e1167c920f78aa5415dc5281f0c761fb25bb1ebc707bc003dd90911e649915" +
"918cfe4f3176972f8afdc1cccd9705e7fb307a0c17d273").HexToByteArray();
// DER-encoded certificate; by its name it carries certificate-template data.
public static byte[] CertWithTemplateData = (
"308201dc30820145a00302010202105101b8242daf6cae4c53bac68a948b0130" +
"0d06092a864886f70d0101050500300e310c300a06035504031303466f6f301e" +
"170d3135303330313232333133395a170d3136303330313034333133395a300e" +
"310c300a06035504031303466f6f30819f300d06092a864886f70d0101010500" +
"03818d0030818902818100a6dcff50bd1fe420301fea5fa56be93a7a53f2599c" +
"e453cf3422bec797bac0ed78a03090a3754569e6494bcd585ac16a5ea5086344" +
"3f25521085ca09580579cf0b46bd6e50015319fba5d2bd3724c53b20cdddf604" +
"74bd7ef426aead9ca5ffea275a4b2b1b6f87c203ab8783559b75e319722886fb" +
"eb784f5f06823906b2a9950203010001a33b3039301b06092b06010401823714" +
"02040e1e0c00480065006c006c006f0000301a06092b0601040182371507040d" +
"300b0603570809020103020102300d06092a864886f70d010105050003818100" +
"962594da079523c26e2d3fc573fd17189ca33bedbeb2c38c92508fc2a865973b" +
"e85ba686f765101aea0a0391b22fcfa6c0760eece91a0eb75501bf6871553f8d" +
"6b089cf2ea63c872e0b4a178795b71826c4569857b45994977895e506dfb8075" +
"ed1b1096987f2c8f65f2d6bbc788b1847b6ba13bee17ef6cb9c6a3392e13003f").HexToByteArray();
// Hex-encoded DER X.509 certificate fixture with a deliberately "complex" name layout.
// NOTE(review): the hex contains ASCII for names such as cn.issuer.example.org /
// cn.subject.example.org and multiple dNSName/rfc822Name/URI SAN entries — presumably
// used to exercise X500/SAN name parsing; verify against the consuming tests.
// Do not modify the hex: tests depend on these exact bytes.
public static byte[] ComplexNameInfoCert = (
"308204BE30820427A00302010202080123456789ABCDEF300D06092A864886F70" +
"D01010505003081A43110300E06035504061307436F756E747279310E300C0603" +
"550408130553746174653111300F060355040713084C6F63616C6974793111300" +
"F060355040A13084578616D706C654F31123010060355040B13094578616D706C" +
"654F55311E301C06035504031315636E2E6973737565722E6578616D706C652E6" +
"F72673126302406092A864886F70D0109011617697373756572656D61696C4065" +
"78616D706C652E6F7267301E170D3133313131323134313531365A170D3134313" +
"231333135313631375A3081A63110300E06035504061307436F756E747279310E" +
"300C0603550408130553746174653111300F060355040713084C6F63616C69747" +
"93111300F060355040A13084578616D706C654F31123010060355040B13094578" +
"616D706C654F55311F301D06035504031316636E2E7375626A6563742E6578616" +
"D706C652E6F72673127302506092A864886F70D01090116187375626A65637465" +
"6D61696C406578616D706C652E6F7267305C300D06092A864886F70D010101050" +
"0034B003048024100DC6FBBDA0300520DFBC9F046CC865D8876AEAC353807EA84" +
"F58F92FE45EE03C22E970CAF41031D47F97C8A5117C62718482911A8A31B58D92" +
"328BA3CF9E605230203010001A382023730820233300B0603551D0F0404030200" +
"B0301D0603551D250416301406082B0601050507030106082B060105050703023" +
"081FD0603551D120481F53081F28217646E73312E6973737565722E6578616D70" +
"6C652E6F72678217646E73322E6973737565722E6578616D706C652E6F7267811" +
"569616E656D61696C31406578616D706C652E6F7267811569616E656D61696C32" +
"406578616D706C652E6F7267A026060A2B060104018237140203A0180C1669737" +
"375657275706E31406578616D706C652E6F7267A026060A2B0601040182371402" +
"03A0180C1669737375657275706E32406578616D706C652E6F7267861F6874747" +
"03A2F2F757269312E6973737565722E6578616D706C652E6F72672F861F687474" +
"703A2F2F757269322E6973737565722E6578616D706C652E6F72672F308201030" +
"603551D110481FB3081F88218646E73312E7375626A6563742E6578616D706C65" +
"2E6F72678218646E73322E7375626A6563742E6578616D706C652E6F726781157" +
"3616E656D61696C31406578616D706C652E6F7267811573616E656D61696C3240" +
"6578616D706C652E6F7267A027060A2B060104018237140203A0190C177375626" +
"A65637475706E31406578616D706C652E6F7267A027060A2B0601040182371402" +
"03A0190C177375626A65637475706E32406578616D706C652E6F7267862068747" +
"4703A2F2F757269312E7375626A6563742E6578616D706C652E6F72672F862068" +
"7474703A2F2F757269322E7375626A6563742E6578616D706C652E6F72672F300" +
"D06092A864886F70D0101050500038181005CD44A247FF4DFBF2246CC04D7D57C" +
"EF2B6D3A4BC83FF685F6B5196B65AFC8F992BE19B688E53E353EEA8B63951EC40" +
"29008DE8B851E2C30B6BF73F219BCE651E5972E62D651BA171D1DA9831A449D99" +
"AF4E2F4B9EE3FD0991EF305ADDA633C44EB5E4979751280B3F54F9CCD561AC27D" +
"3426BC6FF32E8E1AAF9F7C0150A726B").HexToByteArray();
// Hex-encoded PKCS#12 (PFX) fixture; decoded to bytes by HexToByteArray().
// NOTE(review): name and structure (two pkcs-12 keyBag/shroudedKeyBag sections, plus
// UTF-16 strings for "Microsoft Enhanced Cryptographic Provider v1.0" and
// "Microsoft Strong Cryptographic Provider" visible in the hex) suggest a PFX
// containing more than one private key — confirm against the consuming tests.
// Do not modify the hex: tests depend on these exact bytes.
internal static readonly byte[] MultiPrivateKeyPfx = (
"30820FD102010330820F9106092A864886F70D010701A0820F8204820F7E3082" +
"0F7A3082076306092A864886F70D010701A0820754048207503082074C308203" +
"A9060B2A864886F70D010C0A0102A08202B6308202B2301C060A2A864886F70D" +
"010C0103300E0408ED42EEFCD77BB2EB020207D00482029048F341D409492D23" +
"D89C0C01DEE7EFFB6715B15D2BB558E9045D635CADFFFEC85C10A4849AB0657D" +
"A17FE7EC578F779BA2DC129FA959664DC7E85DFD13CAC673E487208FE457223A" +
"75732915FFCF3FF70F557B0846D62AD507300EA1770EDED82F7D8E6E75075728" +
"A29D3BF829E75F09EF283A9DDEDDFBABC2E25698DA8C24E4FE34CD43C87554BF" +
"55B1D4B2B0979F399AEC95B781C62CBE9E412329F9A9BCABF20F716A95F1D795" +
"7C379A27587F6BBFA44A0B75FAAC15CA3730629C55E87990EE521BC4657EE2A4" +
"41AF099A226D31707685A89A28EB27CA65512B70DEC09231369AA1A265D4F5C3" +
"C5D17CB11DB54C70AB83EA28F4740D1F79D490F46F926FB267D5F0E4B2FE096D" +
"F161A4FF9E9AC068EFCA999B3ED0A3BD05D8D1E3B67CF51E6A478154B427D87D" +
"C861D0FE2A7A42600483D7B979DC71E8A00D0E805E3BB86E8673234DC1D14987" +
"99272754A5FD5FEC118CF1E2B2A539B604FED5486A4E4D73FAAFF69023263B84" +
"6870D6B8DB01E31CB3A1E4BA3588C1FA81C786745A33B95573D5381AB307827A" +
"549A36AF535FD05E1247BB92C6C6FCB0E76E87F2E4C8136F37C9C19BE3001F59" +
"FC5CB459C620B8E73711BF102D78F665F40E4D1A341370BC1FB7A5567C29359C" +
"FFB938237002904BE59F5605AF96E8A670E2248AB71D27FE63E327077144F095" +
"4CA815E0284E2FF5E1A11B2946276A99B91BF138A79B057436798AF72FD86842" +
"881C5A5ECDA8A961A21553CC930703047F1F45699CEFEF26AAB6B7DBC65C8C62" +
"4CA3286094596F2AA48268B9F5411058613185507332833AFB312D5780CEFF96" +
"6DD05A2CB6E1B252D9656D8E92E63E6C0360F119232E954E11DE777D2DE1C208" +
"F704DDB16E1351F49B42A859E3B6B2D94E1E2B3CD97F06B1123E9CCA049201E6" +
"DB7273C0BDE63CC93181DF301306092A864886F70D0109153106040401000000" +
"305B06092A864886F70D010914314E1E4C007B00310036004200340039004300" +
"320045002D0036004400390043002D0034003200440042002D00410034004500" +
"39002D003000320036003000430030004100450032003600300034007D306B06" +
"092B0601040182371101315E1E5C004D006900630072006F0073006F00660074" +
"00200045006E00680061006E006300650064002000430072007900700074006F" +
"0067007200610070006800690063002000500072006F00760069006400650072" +
"002000760031002E00303082039B060B2A864886F70D010C0A0102A08202B630" +
"8202B2301C060A2A864886F70D010C0103300E04081F85B7ED57F6F934020207" +
"D00482029051A5ADA683AAE06A699761CCF05CB081A4398A7B1256A25084DBE1" +
"115BFAB07A5A9146BC22F2E4223FF25BCA1836AE218691815F20A27A1B98D1FC" +
"78F84AFA7E90A55954EE5BEA47FFA35928A990CB47346767F6F4212DBCD03FFF" +
"1E4D137979006B46B19A9FC3BC9B5036ED6F8582E2007D08DB94B2B576E15471" +
"9CAC90DFB6F238CA875FCBEBCF9E9F933E4451E6A2B60C2A0A8A35B5FD20E5DD" +
"A000008DCCE95BBDF604A8F93001F594E402FF8649A6582DE5901EDF9DED7D6F" +
"9657C5A184D82690EFCFB2F25BFCE02BC56F0FF00595996EBF1BA25475AB6134" +
"61280DD641186237D8A3AB257BD6FB1BDC3768B00719D233E0D5FD26D08BA6EA" +
"B29D732B990FB9423E643E4663ABBA0D8885DD2A276EE02C92778261C7853F70" +
"8E2B9AF8D2E96416F676D0191BD24D0C8430BD419049F43C8E2A0C32F862207B" +
"3DA661577CE5933460D0EF69FAD7323098B55FEF3A9955FE632FBCE8452BB5F3" +
"430AE2A9021EBF756CC7FDFC3E63581C8B0D7AB77760F447F868B5923614DAA9" +
"C36AEBC67DC854B93C38E8A6D3AC11B1EE1D02855CE96ADEB840B626BFC4B3BF" +
"D6487C9073F8A15F55BA945D58AD1636A7AED476EBDB5227A71144BF8745192E" +
"F5CD177818F61836717ED9EB0A83BEEE582ADEDD407035E453083B17E7C23700" +
"9D9F04F355CEAB0C0E9AD6F13A3B54459FA05B19E02275FE2588258B63A125F5" +
"49D1B44C827CDC94260A02F4A1B42A30E675B9760D876685D6CA05C25803BDE1" +
"F33D325CF6020A662B0F5DCCC8D77B941B273AC462F0D3E050CEB5AEF7107C45" +
"372F7063EF1AB420CA555A6C9BE6E1067966755584346CDDE7C05B6132E553B1" +
"1C374DB90B54E5C096062349A1F6CB78A1A2D995C483541750CFA956DEA0EB36" +
"67DE7AD78931C65B6E039B5DE461810B68C344D2723181D1301306092A864886" +
"F70D0109153106040402000000305B06092A864886F70D010914314E1E4C007B" +
"00390044004500340033003500380036002D0039003100320043002D00340036" +
"00370036002D0042003500410041002D00420046004200360030003900370030" +
"0035003800350041007D305D06092B060104018237110131501E4E004D006900" +
"630072006F0073006F006600740020005300740072006F006E00670020004300" +
"72007900700074006F0067007200610070006800690063002000500072006F00" +
"7600690064006500723082080F06092A864886F70D010706A0820800308207FC" +
"020100308207F506092A864886F70D010701301C060A2A864886F70D010C0106" +
"300E04089ADEE71816BCD023020207D0808207C851AA1EA533FECABB26D3846F" +
"AEE8DEDB919C29F8B98BBBF785BC306C12A8ACB1437786C4689161683718BB7E" +
"40EB60D9BE0C87056B5ECF20ACCB8BF7F36033B8FCB84ED1474E97DE0A8709B5" +
"63B6CF8E69DF4B3F970C92324946723C32D08B7C3A76C871C6B6C8C56F2D3C4C" +
"00B8A809E65A4EB5EFECC011E2B10F0E44ECDA07B325417B24924080844F6D7F" +
"1F6E420346EA85825EB830C7E05A5383412A9502A51F1AC07F315ADE357F1F9F" +
"B2E6427976E78B8FF9CD6C2F9841F2D84658AC8747694EFD0C451B7AC5B83D5F" +
"0780808417501666BB452B53CEB0698162D94541DE181A7968DB139F17A1076E" +
"DEB70B38B8881DBC6DE2B694070A5A1AA71E4CDFBF7F4D5DBCF16646768364D3" +
"C74FA212E40CBE3BE7C51A74D271164D00E89F997FD418C51A7C2D73130D7C6F" +
"CAA2CA65082CE38BFB753BB30CC71656529E8DBA4C4D0B7E1A79CF2A052FFEFA" +
"2DEE3373115472AFD1F40A80B23AA6141D5CDE0A378FE6210D4EE69B8771D3E1" +
"92FD989AEC14C26EA4845D261B8A45ABC1C8FA305449DCDEDA9882DD4DDC69B2" +
"DE315645FBC3EE52090907E7687A22A63F538E030AB5A5413CA415F1D70E70CB" +
"567261FB892A8B3BAFC72D632CD2FDCC0559E01D5C246CC27C934863CCFA5249" +
"0E1F01D8D2D0AF2587E4D04011140A494FFA3CA42C5F645B94EE30100DE019B2" +
"7F66FFC035E49A65B2A3F6CB14EB1E2FFF1F25B5C87481BD8506F307E0B042A2" +
"C85B99ECA520B4AAC7DFF2B11C1213E4128A01765DDB27B867336B8CCF148CE7" +
"38465D46E7A0BEA466CD8BBCCE2E11B16E0F9D24FF2F2D7C9F852779ADBB818F" +
"87E4AFF7C21A9C2BC20D38209322A34B0B393B187C96583D3D73D9440F994B2F" +
"320D3274848AB7167942179CFF725C2C7556CCC289A5E788C5B863E6FCDD5E4B" +
"87E41458BEB3F43D14C7E5196C38CA36322F8B83064862178D58925AEF34F444" +
"A31A4FB18431D7D37C65ED519643BC7BD025F801390430022253AAFCEA670726" +
"512C3532EA9F410DB8AA6628CC455E4AB3F478A6981DB9180B7A2A24B365F375" +
"54CE04B08F22B3539D98BF9A1AC623BBF9A08DBEC951E9730C131802B2C40750" +
"AAE6A791B3219A96A5BAC7AE17A2F7EA02FF66D6FB36C2E6B6AB90D821A6322B" +
"F3E8D82969756A474551DB9EAA8C587FC878F996F5FA1E1C39E983F164B0A678" +
"97EB3755C378807FFDFE964C5C0F290784A08E8C925E85775A9B892E278F68C3" +
"C1DE72622AC10EA56D88C909EF4AC9F47ED61376737C1E43DBF0F89337F0684F" +
"A0B96E7A993EC328A6A5FBCDCB809ACBFDAE4ECE192A45480104ED12820238AB" +
"6AC9C88CC9A82585FD29A81A7BC5BC591738A4D49A86D06B4E18BDC83DFFAA60" +
"D8A0D4F70CC63D4E83812CB6753F3744545592D04223793E5B305125AAD8807A" +
"753D235769BD0280E2DE808B0CEE2B98B0F5562FF9EF68161A6B7E08C8B10576" +
"6EBCFC44AC858B1A89E34C099B194A8B24D1DBABC13909EFAF5B9A9E77AEAF7D" +
"D9BE772FA01AB9518EB8864AE6D07D7DD7451797541D2F723BC71A9C14ED1D81" +
"1594E2C4A57017D4CB90FD82C195FA9B823DF1E2FFD965E3139F9A6E8AAC36FA" +
"39CFA4C52E85D2A661F9D0D466720C5AB7ECDE968FF51B535B019A3E9C76058E" +
"6F673A49CDD89EA7EC998BDADE71186EA084020A897A328753B72E213A9D8244" +
"3F7E34D94508199A2A63E71A12BD441C132201E9A3829B2727F23E65C519F4DA" +
"2C40162A3A501B1BD57568ED75447FEAF8B42988CE25407644BFA0B76059D275" +
"EC994BB336055E271751B32233D79A6E5E3AA700F3803CCA50586D28934E3D41" +
"35FA043AF7DFAB977477283602B1739C4AF40E3856E75C34EB98C69A928ADE05" +
"B67A679630EFA14E64B2957EDD1AB4EC0B0E7BC38D4851EBF6792833EACB62FB" +
"6C862B089E3066AE5EAAFD2A8B7FC712DE9BD2F488222EEB1FB91B4E57C2D240" +
"92818965621C123280453EDCFA2EC9D9B50AFA437D1ED09EC36FD232B169ED30" +
"1E0DB0BABE562B67130F90EBC85D325A90931A5B5A94736A4B3AADB8CA295F59" +
"AF7FF08CCFADE5AFBBC2346BC6D78D9E5F470E9BDFF547F2574B10A48DD9D56B" +
"5B03E9E24D65C367B6E342A26A344111A66B1908EDAECD0834930DA74E1CFE2E" +
"4B0636A7C18E51A27AD21992A2DCF466BAACAC227B90B5E61BED799C97DEE7ED" +
"B33CCAF5DAD7AAD3CACCDE59478CF69AE64B9065FCB436E1993514C42872DD48" +
"6ABB75A07A4ED46CDF0E12C0D73FAB83564CF1A814791971EC9C7C6A08A13CE0" +
"453C2C3236C8B2E146D242E3D37A3ECF6C350D0B2AB956CB21057FDC630750A7" +
"1C61C66DE3D4A6DB187BEE2F86DEB93E723C5943EA17E699E93555756920416B" +
"D6B267A4CFAC4EE90E96A6419302B4C0A3B9705509CA09EE92F184FD2817BA09" +
"BE29E465909DB6C93E3C1CAF6DC29E1A5838F3C32CCB220235EF829CD21D1B3E" +
"960518A80D08AE7FF08D3AFB7451C823E9B8D49DAF66F503E4AE5399FECFC958" +
"429D758C06EFF8338BC02457F6FE5053AA3C2F27D360058FD935663B55F026B5" +
"04E39D86E7CE15F04B1C62BBFA0B1CA5E64FF0BD088D94FB1518E05B2F40BF9D" +
"71C61FC43E3AF8440570C44030F59D14B8858B7B8506B136E7E39BB04F9AFEAF" +
"2FA292D28A8822046CEFDE381F2399370BDE9B97BC700418585C31E9C353635A" +
"DAA6A00A833899D0EDA8F5FFC558D822AEB99C7E35526F5297F333F9E758D4CD" +
"53277316608B1F7DB6AC71309A8542A356D407531BA1D3071BA9DC02AE91C7DF" +
"2561AEBC3845A118B00D21913B4A401DDDC40CE983178EF26C4A41343037301F" +
"300706052B0E03021A041438351C5D7948F9BEA3ACECC0F54AF460EC01093B04" +
"14B610EC75D16EA23BF253AAD061FAC376E1EAF684").HexToByteArray();
// Hex-encoded PKCS#12 (PFX) fixture; name indicates it contains no certificates or keys.
// Do not modify the hex: tests depend on these exact bytes.
internal static readonly byte[] EmptyPfx = (
"304F020103301106092A864886F70D010701A004040230003037301F30070605" +
"2B0E03021A0414822078BC83E955E314BDA908D76D4C5177CC94EB0414711018" +
"F2897A44A90E92779CB655EA11814EC598").HexToByteArray();
// Password protecting the ChainPfxBytes PFX below — presumably paired with it; the
// literal value is part of the fixture and must not be changed.
internal const string ChainPfxPassword = "test";
// Hex-encoded PKCS#12 (PFX) fixture; decoded to bytes by HexToByteArray().
// NOTE(review): name suggests it holds a certificate chain and is opened with
// ChainPfxPassword above — confirm against the consuming tests.
// Do not modify the hex: tests depend on these exact bytes.
internal static readonly byte[] ChainPfxBytes = (
"308213D80201033082139406092A864886F70D010701A0821385048213813082" +
"137D308203C606092A864886F70D010701A08203B7048203B3308203AF308203" +
"AB060B2A864886F70D010C0A0102A08202B6308202B2301C060A2A864886F70D" +
"010C0103300E040811E8B9808BA6E96C020207D004820290D11DA8713602105C" +
"95792D65BCDFC1B7E3708483BF6CD83008082F89DAE4D003F86081B153BD4D4A" +
"C122E802752DEA29F07D0B7E8F0FB8A762B4CAA63360F9F72CA5846771980A6F" +
"AE2643CD412E6E4A101625371BBD48CC6E2D25191D256B531B06DB7CDAC04DF3" +
"E10C6DC556D5FE907ABF32F2966A561C988A544C19B46DF1BE531906F2CC2263" +
"A301302A857075C7A9C48A395241925C6A369B60D176419D75E320008D5EFD91" +
"5257B160F6CD643953E85F19EBE4E4F72B9B787CF93E95F819D1E43EF01CCFA7" +
"48F0E7260734EA9BC6039BA7557BE6328C0149718A1D9ECF3355082DE697B6CD" +
"630A9C224D831B7786C7E904F1EF2D9D004E0E825DD74AC4A576CDFCA7CECD14" +
"D8E2E6CCAA3A302871AE0BA979BB25559215D771FAE647905878E797BBA9FC62" +
"50F30F518A8008F5A12B35CE526E31032B56EFE5A4121E1E39DC7339A0CE8023" +
"24CDDB7E9497BA37D8B9F8D826F901C52708935B4CA5B0D4D760A9FB33B0442D" +
"008444D5AEB16E5C32187C7038F29160DD1A2D4DB1F9E9A6C035CF5BCED45287" +
"C5DEBAB18743AAF90E77201FEA67485BA3BBCE90CEA4180C447EE588AC19C855" +
"638B9552D47933D2760351174D9C3493DCCE9708B3EFE4BE398BA64051BF52B7" +
"C1DCA44D2D0ED5A6CFB116DDA41995FA99373C254F3F3EBF0F0049F1159A8A76" +
"4CFE9F9CC56C5489DD0F4E924158C9B1B626030CB492489F6AD0A9DCAF3E141D" +
"B4D4821B2D8A384110B6B0B522F62A9DC0C1315A2A73A7F25F96C530E2F700F9" +
"86829A839B944AE6758B8DD1A1E9257F91C160878A255E299C18424EB9983EDE" +
"6DD1C5F4D5453DD5A56AC87DB1EFA0806E3DBFF10A9623FBAA0BAF352F50AB5D" +
"B16AB1171145860D21E2AB20B45C8865B48390A66057DE3A1ABE45EA65376EF6" +
"A96FE36285C2328C3181E1301306092A864886F70D0109153106040401000000" +
"305D06092A864886F70D01091431501E4E006C0065002D006100340034003100" +
"30003300610064002D0033003500620032002D0034003800340061002D003900" +
"3600610036002D00650030006600610036006600330035006500650065003230" +
"6B06092B0601040182371101315E1E5C004D006900630072006F0073006F0066" +
"007400200045006E00680061006E006300650064002000430072007900700074" +
"006F0067007200610070006800690063002000500072006F0076006900640065" +
"0072002000760031002E003030820FAF06092A864886F70D010706A0820FA030" +
"820F9C02010030820F9506092A864886F70D010701301C060A2A864886F70D01" +
"0C0106300E0408FFCC41FD8C8414F6020207D080820F68092C6010873CF9EC54" +
"D4676BCFB5FA5F523D03C981CB4A3DC096074E7D04365DDD1E80BF366B8F9EC4" +
"BC056E8CE0CAB516B9C28D17B55E1EB744C43829D0E06217852FA99CCF549617" +
"6DEF9A48967C1EEB4A384DB7783E643E35B5B9A50533B76B8D53581F02086B78" +
"2895097860D6CA512514E10D004165C85E561DF5F9AEFD2D89B64F178A7385C7" +
"FA40ECCA899B4B09AE40EE60DAE65B31FF2D1EE204669EFF309A1C7C8D7B0751" +
"AE57276D1D0FB3E8344A801AC5226EA4ED97FCD9399A4EB2E778918B81B17FE4" +
"F65B502595195C79E6B0E37EB8BA36DB12435587E10037D31173285D45304F6B" +
"0056512B3E147D7B5C397709A64E1D74F505D2BD72ED99055161BC57B6200F2F" +
"48CF128229EFBEBFC2707678C0A8C51E3C373271CB4FD8EF34A1345696BF3950" +
"E8CE9831F667D68184F67FE4D30332E24E5C429957694AF23620EA7742F08A38" +
"C9A517A7491083A367B31C60748D697DFA29635548C605F898B64551A48311CB" +
"2A05B1ACA8033128D48E4A5AA263D970FE59FBA49017F29049CF80FFDBD19295" +
"B421FEFF6036B37D2F8DC8A6E36C4F5D707FB05274CC0D8D94AFCC8C6AF546A0" +
"CF49FBD3A67FB6D20B9FE6FDA6321E8ABF5F7CC794CFCC46005DC57A7BAFA899" +
"54E43230402C8100789F11277D9F05C78DF0509ECFBF3A85114FD35F4F17E798" +
"D60C0008064E2557BA7BF0B6F8663A6C014E0220693AE29E2AB4BDE5418B6108" +
"89EC02FF5480BD1B344C87D73E6E4DB98C73F881B22C7D298059FE9D7ADA2192" +
"BB6C87F8D25F323A70D234E382F6C332FEF31BB11C37E41903B9A59ADEA5E0CB" +
"AB06DFB835257ABC179A897DEAD9F19B7DF861BE94C655DC73F628E065F921E5" +
"DE98FFCBDF2A54AC01E677E365DD8B932B5BDA761A0032CE2127AB2A2B9DCB63" +
"F1EA8A51FC360AB5BC0AD435F21F9B6842980D795A6734FDB27A4FA8209F7362" +
"DD632FC5FB1F6DE762473D6EA68BFC4BCF983865E66E6D93159EFACC40AB31AA" +
"178806CF893A76CAAA3279C988824A33AF734FAF8E21020D988640FAB6DB10DF" +
"21D93D01776EEA5DAECF695E0C690ED27AD386E6F2D9C9482EA38946008CCB8F" +
"0BD08F9D5058CF8057CA3AD50BB537116A110F3B3ACD9360322DB4D242CC1A6E" +
"15FA2A95192FC65886BE2672031D04A4FB0B1F43AE8476CF82638B61B416AA97" +
"925A0110B736B4D83D7977456F35D947B3D6C9571D8E2DA0E9DEE1E665A84425" +
"9C17E01E044FAB898AA170F99157F7B525D524B01BD0710D23A7689A6157038A" +
"0697BD48FFE0253ABD6F862093574B2FC9BA38E1A6EC60AF187F10D79FF71F7C" +
"50E87A07CC0A51099899F7336FE742ADEF25E720B8E0F8781EC7957D414CF5D4" +
"4D6998E7E35D2433AFD86442CCA637A1513BE3020B5334614277B3101ED7AD22" +
"AFE50DE99A2AD0E690596C93B881E2962D7E52EE0A770FAF6917106A8FF0298D" +
"F38D6DE926C30834C5D96854FFD053BDB020F7827FB81AD04C8BC2C773B2A59F" +
"DD6DDF7298A052B3486E03FECA5AA909479DDC7FED972192792888F49C40F391" +
"0140C5BE264D3D07BEBF3275117AF51A80C9F66C7028A2C3155414CF93999726" +
"8A1F0AA9059CC3AA7C8BBEF880187E3D1BA8978CBB046E43289A020CAE11B251" +
"40E2247C15A32CF70C7AA186CBB68B258CF2397D2971F1632F6EBC4846444DE4" +
"45673B942F1F110C7D586B6728ECA5B0A62D77696BF25E21ED9196226E5BDA5A" +
"80ECCC785BEEDE917EBC6FFDC2F7124FE8F719B0A937E35E9A720BB9ED72D212" +
"13E68F058D80E9F8D7162625B35CEC4863BD47BC2D8D80E9B9048811BDD8CBB7" +
"0AB215962CD9C40D56AE50B7003630AE26341C6E243B3D12D5933F73F78F15B0" +
"14C5B1C36B6C9F410A77CA997931C8BD5CCB94C332F6723D53A4CCC630BFC9DE" +
"96EFA7FDB66FA519F967D6A2DB1B4898BB188DEB98A41FFA7907AE7601DDE230" +
"E241779A0FDF551FB84D80AAEE3D979F0510CD026D4AE2ED2EFB7468418CCDB3" +
"BD2A29CD7C7DC6419B4637412304D5DA2DC178C0B4669CA8330B9713A812E652" +
"E812135D807E361167F2A6814CEF2A8A9591EFE2C18216A517473B9C3BF2B751" +
"E47844893DA30F7DCD4222D1A55D570C1B6F6A99AD1F9213BA8F84C0B14A6DED" +
"6A26EAFF8F89DF733EEB44117DF0FD357186BA4A15BD5C669F60D6D4C3402832" +
"2D4DDF035302131AB6FD08683804CC90C1791182F1AE3281EE69DDBBCC12B81E" +
"60942FD082286B16BE27DC11E3BB0F18C281E02F3BA66E48C5FD8E8EA3B731BD" +
"B12A4A3F2D9E1F833DD204372003532E1BB11298BDF5092F2959FC439E6BD2DC" +
"6C37E3E775DCBE821B9CBB02E95D84C15E736CEA2FDDAD63F5CD47115B4AD552" +
"27C2A02886CD2700540EBFD5BF18DC5F94C5874972FD5424FE62B30500B1A875" +
"21EA3798D11970220B2BE7EFC915FCB7A6B8962F09ABA005861E839813EDA3E5" +
"9F70D1F9C277B73928DFFC84A1B7B0F78A8B001164EB0824F2510885CA269FDC" +
"BB2C3AE91BDE91A8BBC648299A3EB626E6F4236CCE79E14C803498562BAD6028" +
"F5B619125F80925A2D3B1A56790795D04F417003A8E9E53320B89D3A3109B19B" +
"B17B34CC9700DA138FABB5997EC34D0A44A26553153DBCFF8F6A1B5432B15058" +
"F7AD87C6B37537796C95369DAD53BE5543D86D940892F93983153B4031D4FAB2" +
"5DAB02C1091ACC1DAE2118ABD26D19435CD4F1A02BDE1896236C174743BCA6A3" +
"3FB5429E627EB3FD9F513E81F7BD205B81AAE627C69CF227B043722FA0514139" +
"347D202C9B7B4E55612FC27164F3B5F287F29C443793E22F6ED6D2F353ED82A9" +
"F33EDBA8F5F1B2958F1D6A3943A9614E7411FDBCA597965CD08A8042307081BA" +
"C5A070B467E52D5B91CA58F986C5A33502236B5BAE6DB613B1A408D16B29D356" +
"0F1E94AD840CFA93E83412937A115ABF68322538DA8082F0192D19EAAA41C929" +
"9729D487A9404ECDB6396DDA1534841EAE1E7884FA43574E213AE656116D9EF7" +
"591AA7BDE2B44733DFE27AA59949E5DC0EE00FDF42130A748DDD0FB0053C1A55" +
"986983C8B9CEAC023CAD7EDFFA1C20D3C437C0EF0FC9868D845484D8BE6538EA" +
"ADA6365D48BA776EE239ED045667B101E3798FE53E1D4B9A2ACBBE6AF1E5C88A" +
"3FB03AD616404013E249EC34458F3A7C9363E7772151119FE058BD0939BAB764" +
"A2E545B0B2FDAA650B7E849C8DD4033922B2CAE46D0461C04A2C87657CB4C0FF" +
"BA23DED69D097109EC8BFDC25BB64417FEEB32842DE3EFEF2BF4A47F08B9FCD1" +
"907BC899CA9DA604F5132FB420C8D142D132E7E7B5A4BD0EF4A56D9E9B0ACD88" +
"F0E862D3F8F0440954879FFE3AA7AA90573C6BFDC6D6474C606ACA1CD94C1C34" +
"04349DD83A639B786AFCDEA1779860C05400E0479708F4A9A0DD51429A3F35FB" +
"D5FB9B68CECC1D585F3E35B7BBFC469F3EAEEB8020A6F0C8E4D1804A3EB32EB3" +
"909E80B0A41571B23931E164E0E1D0D05379F9FD3BF51AF04D2BE78BDB84BD78" +
"7D419E85626297CB35FCFB6ED64042EAD2EBC17BB65677A1A33A5C48ADD28023" +
"7FB2451D0EFB3A3C32354222C7AB77A3C92F7A45B5FB10092698D88725864A36" +
"85FBDD0DC741424FCCD8A00B928F3638150892CAAB535CC2813D13026615B999" +
"77F7B8240E914ACA0FF2DCB1A9274BA1F55DF0D24CCD2BAB7741C9EA8B1ECDE9" +
"7477C45F88F034FDF73023502944AEE1FF370260C576992826C4B2E5CE992484" +
"E3B85170FCCAC3413DC0FF6F093593219E637F699A98BD29E8EE4550C128CA18" +
"2680FDA3B10BC07625734EE8A8274B43B170FC3AEC9AA58CD92709D388E166AB" +
"4ADFD5A4876DC47C17DE51FDD42A32AF672515B6A81E7ABECFE748912B321AFD" +
"0CBF4880298DD79403900A4002B5B436230EB6E49192DF49FAE0F6B60EBA75A5" +
"4592587C141AD3B319129006367E9532861C2893E7A2D0D2832DF4377C31845C" +
"B02A1D020282C3D2B7F77221F71FEA7FF0A988FEF15C4B2F6637159EEC5752D8" +
"A7F4AB971117666A977370E754A4EB0DC52D6E8901DC60FCD87B5B6EF9A91AF8" +
"D9A4E11E2FFDAB55FC11AF6EEB5B36557FC8945A1E291B7FF8931BE4A57B8E68" +
"F04B9D4A9A02FC61AE913F2E2DDBEE42C065F4D30F568834D5BB15FDAF691F19" +
"7EF6C25AE87D8E968C6D15351093AAC4813A8E7B191F77E6B19146F839A43E2F" +
"40DE8BE28EB22C0272545BADF3BD396D383B8DA8388147100B347999DDC4125A" +
"B0AA1159BC6776BD2BF51534C1B40522D41466F414BDE333226973BAD1E6D576" +
"39D30AD94BEA1F6A98C047F1CE1294F0067B771778D59E7C722C73C2FF100E13" +
"603206A694BF0ED07303BE0655DC984CA29893FD0A088B122B67AABDC803E73E" +
"5729E868B1CA26F5D05C818D9832C70F5992E7D15E14F9775C6AD24907CF2F21" +
"1CF87167861F94DCF9E3D365CB600B336D93AD44B8B89CA24E59C1F7812C84DB" +
"E3EE57A536ED0D4BF948F7662E5BCBBB388C72243CFCEB720852D5A4A52F018C" +
"2C087E4DB43410FE9ABA3A8EF737B6E8FFDB1AB9832EBF606ED5E4BD62A86BBC" +
"AE115C67682EDEA93E7845D0D6962C146B411F7784545851D2F327BEC7E4344D" +
"68F137CDA217A3F0FF3B752A34C3B5339C79CB8E1AC690C038E85D6FC1337909" +
"0198D3555394D7A2159A23BD5EEF06EB0BCC729BB29B5BE911D02DA78FDA56F0" +
"35E508C722139AD6F25A6C84BED0E98893370164B033A2B52BC40D9BF5163AF9" +
"650AB55EABB23370492A7D3A87E17C11B4D07A7296273F33069C835FD208BA8F" +
"989A3CF8659054E2CCCFB0C983531DC6590F27C4A1D2C3A780FE945F7E52BB9F" +
"FD2E324640E3E348541A620CD62605BBDB284AF97C621A00D5D1D2C31D6BD611" +
"49137B8A0250BC426417A92445A52574E999FB9102C16671914A1542E92DDE54" +
"1B2A0457112AF936DA84707CADFEA43BFEDAE5F58859908640420948086E57FF" +
"D1B867C241D40197CB0D4AD58BB69B3724772E0079406A1272858AAA620668F6" +
"96955102639F3E95CFFC637EAF8AB54F0B5B2131AB292438D06E15F3826352DE" +
"DC653DA5A4AACE2BB97061A498F3B6789A2310471B32F91A6B7A9944DDBB7031" +
"525B3AE387214DC85A1C7749E9168F41272680D0B3C331D61175F23B623EEC40" +
"F984C35C831268036680DE0821E5DEE5BB250C6984775D49B7AF94057371DB72" +
"F81D2B0295FC6A51BCD00A697649D4346FDD59AC0DFAF21BFCC942C23C6134FF" +
"BA2ABABC141FF700B52C5B26496BF3F42665A5B71BAC7F0C19870BD987389023" +
"9C578CDDD8E08A1B0A429312FB24F151A11E4D180359A7FA043E8155453F6726" +
"5CB2812B1C98C144E7675CFC86413B40E35445AE7710227D13DC0B5550C87010" +
"B363C492DA316FB40D3928570BF71BF47638F1401549369B1255DB080E5DFA18" +
"EA666B9ECBE5C9768C06B3FF125D0E94B98BB24B4FD44E770B78D7B336E0214F" +
"D72E77C1D0BE9F313EDCD147957E3463C62E753C10BB98584C85871AAEA9D1F3" +
"97FE9F1A639ADE31D40EAB391B03B588B8B031BCAC6C837C61B06E4B74505247" +
"4D33531086519C39EDD6310F3079EB5AC83289A6EDCBA3DC97E36E837134F730" +
"3B301F300706052B0E03021A04143EE801FCFB9F6CD2B975E2B2BB37DA8E6F29" +
"369B0414DF1D90CD18B3FBC72226B3C66EC2CB1AB351D4D2020207D0").HexToByteArray();
// Hex-encoded DER PKCS#7 (CMS SignedData, OID 1.2.840.113549.1.7.2 visible at the start)
// fixture; decoded to bytes by HexToByteArray().
// NOTE(review): name suggests it carries a certificate chain ("MS Passport Test" root/sub
// CA names are visible in the hex) and appears to be the DER counterpart of
// Pkcs7ChainPemBytes below — confirm against the consuming tests.
// Do not modify the hex: tests depend on these exact bytes.
internal static readonly byte[] Pkcs7ChainDerBytes = (
"30820E1606092A864886F70D010702A0820E0730820E030201013100300B0609" +
"2A864886F70D010701A0820DEB3082050B30820474A003020102020A15EAA83A" +
"000100009291300D06092A864886F70D010105050030818131133011060A0992" +
"268993F22C6401191603636F6D31193017060A0992268993F22C64011916096D" +
"6963726F736F667431143012060A0992268993F22C6401191604636F72703117" +
"3015060A0992268993F22C64011916077265646D6F6E643120301E0603550403" +
"13174D532050617373706F7274205465737420537562204341301E170D313330" +
"3131303231333931325A170D3331313231333232323630375A308185310B3009" +
"060355040613025553310B30090603550408130257413110300E060355040713" +
"075265646D6F6E64310D300B060355040A130454455354310D300B060355040B" +
"130454455354311330110603550403130A746573742E6C6F63616C3124302206" +
"092A864886F70D010901161563726973706F70406D6963726F736F66742E636F" +
"6D30819F300D06092A864886F70D010101050003818D0030818902818100B406" +
"851089E9CF7CDB438DD77BEBD819197BEEFF579C35EF9C4652DF9E6330AA7E2E" +
"24B181C59DA4AF10E97220C1DF99F66CE6E97247E9126A016AC647BD2EFD136C" +
"31470C7BE01A20E381243BEEC8530B7F6466C50A051DCE37274ED7FF2AFFF4E5" +
"8AABA61D5A448F4A8A9B3765D1D769F627ED2F2DE9EE67B1A7ECA3D288C90203" +
"010001A38202823082027E300E0603551D0F0101FF0404030204F0301D060355" +
"1D250416301406082B0601050507030106082B06010505070302301D0603551D" +
"0E04160414FB3485708CBF6188F720EF948489405C8D0413A7301F0603551D23" +
"0418301680146A6678620A4FF49CA8B75FD566348F3371E42B133081D0060355" +
"1D1F0481C83081C53081C2A081BFA081BC865F687474703A2F2F707074657374" +
"73756263612E7265646D6F6E642E636F72702E6D6963726F736F66742E636F6D" +
"2F43657274456E726F6C6C2F4D5325323050617373706F727425323054657374" +
"25323053756225323043412831292E63726C865966696C653A2F2F5C5C707074" +
"65737473756263612E7265646D6F6E642E636F72702E6D6963726F736F66742E" +
"636F6D5C43657274456E726F6C6C5C4D532050617373706F7274205465737420" +
"5375622043412831292E63726C3082013806082B060105050701010482012A30" +
"82012630819306082B06010505073002868186687474703A2F2F707074657374" +
"73756263612E7265646D6F6E642E636F72702E6D6963726F736F66742E636F6D" +
"2F43657274456E726F6C6C2F70707465737473756263612E7265646D6F6E642E" +
"636F72702E6D6963726F736F66742E636F6D5F4D5325323050617373706F7274" +
"2532305465737425323053756225323043412831292E63727430818D06082B06" +
"01050507300286818066696C653A2F2F5C5C70707465737473756263612E7265" +
"646D6F6E642E636F72702E6D6963726F736F66742E636F6D5C43657274456E72" +
"6F6C6C5C70707465737473756263612E7265646D6F6E642E636F72702E6D6963" +
"726F736F66742E636F6D5F4D532050617373706F727420546573742053756220" +
"43412831292E637274300D06092A864886F70D0101050500038181009DEBB8B5" +
"A41ED54859795F68EF767A98A61EF7B07AAC190FCC0275228E4CAD360C9BA98B" +
"0AE153C75522EEF42D400E813B4E49E7ACEB963EEE7B61D3C8DA05C183471544" +
"725B2EBD1889877F62134827FB5993B8FDF618BD421ABA18D70D1C5B41ECDD11" +
"695A48CB42EB501F96DA905471830C612B609126559120F6E18EA44830820358" +
"308202C1A00302010202101B9671A4BC128B8341B0E314EAD9A191300D06092A" +
"864886F70D01010505003081A13124302206092A864886F70D01090116156173" +
"6D656D6F6E406D6963726F736F66742E636F6D310B3009060355040613025553" +
"310B30090603550408130257413110300E060355040713075265646D6F6E6431" +
"123010060355040A13094D6963726F736F667431163014060355040B130D5061" +
"7373706F727420546573743121301F060355040313184D532050617373706F72" +
"74205465737420526F6F74204341301E170D3035303132363031333933325A17" +
"0D3331313231333232323630375A3081A13124302206092A864886F70D010901" +
"161561736D656D6F6E406D6963726F736F66742E636F6D310B30090603550406" +
"13025553310B30090603550408130257413110300E060355040713075265646D" +
"6F6E6431123010060355040A13094D6963726F736F667431163014060355040B" +
"130D50617373706F727420546573743121301F060355040313184D5320506173" +
"73706F7274205465737420526F6F7420434130819F300D06092A864886F70D01" +
"0101050003818D0030818902818100C4673C1226254F6BBD01B01D21BB05264A" +
"9AA5B77AC51748EAC52048706DA6B890DCE043C6426FC44E76D70F9FE3A4AC85" +
"5F533E3D08E140853DB769EE24DBDB7269FABEC0FDFF6ADE0AA85F0085B78864" +
"58E7585E433B0924E81600433CB1177CE6AD5F2477B2A0E2D1A34B41F6C6F5AD" +
"E4A9DD7D565C65F02C2AAA01C8E0C10203010001A3818E30818B301306092B06" +
"0104018237140204061E0400430041300B0603551D0F040403020186300F0603" +
"551D130101FF040530030101FF301D0603551D0E04160414F509C1D6267FC39F" +
"CA1DE648C969C74FB111FE10301206092B060104018237150104050203010002" +
"302306092B0601040182371502041604147F7A5208411D4607C0057C98F0C473" +
"07010CB3DE300D06092A864886F70D0101050500038181004A8EAC73D8EA6D7E" +
"893D5880945E0E3ABFC79C40BFA60A680CF8A8BF63EDC3AD9C11C081F1F44408" +
"9581F5C8DCB23C0AEFA27571D971DBEB2AA9A1B3F7B9B0877E9311D36098A65B" +
"7D03FC69A835F6C3096DEE135A864065F9779C82DEB0C777B9C4DB49F0DD11A0" +
"EAB287B6E352F7ECA467D0D3CA2A8081119388BAFCDD25573082057C308204E5" +
"A003020102020A6187C7F200020000001B300D06092A864886F70D0101050500" +
"3081A13124302206092A864886F70D010901161561736D656D6F6E406D696372" +
"6F736F66742E636F6D310B3009060355040613025553310B3009060355040813" +
"0257413110300E060355040713075265646D6F6E6431123010060355040A1309" +
"4D6963726F736F667431163014060355040B130D50617373706F727420546573" +
"743121301F060355040313184D532050617373706F7274205465737420526F6F" +
"74204341301E170D3039313032373231333133395A170D333131323133323232" +
"3630375A30818131133011060A0992268993F22C6401191603636F6D31193017" +
"060A0992268993F22C64011916096D6963726F736F667431143012060A099226" +
"8993F22C6401191604636F727031173015060A0992268993F22C640119160772" +
"65646D6F6E643120301E060355040313174D532050617373706F727420546573" +
"742053756220434130819F300D06092A864886F70D010101050003818D003081" +
"8902818100A6A4918F93C5D23B3C3A325AD8EC77043D207A0DDC294AD3F5BDE0" +
"4033FADD4097BB1DB042B1D3B2F26A42CC3CB88FA9357710147AB4E1020A0DFB" +
"2597AB8031DB62ABDC48398067EB79E4E2BBE5762F6B4C5EA7629BAC23F70269" +
"06D46EC106CC6FBB4D143F7D5ADADEDE19B021EEF4A6BCB9D01DAEBB9A947703" +
"40B748A3490203010001A38202D7308202D3300F0603551D130101FF04053003" +
"0101FF301D0603551D0E041604146A6678620A4FF49CA8B75FD566348F3371E4" +
"2B13300B0603551D0F040403020186301206092B060104018237150104050203" +
"010001302306092B060104018237150204160414A0A485AE8296EA4944C6F6F3" +
"886A8603FD07472C301906092B0601040182371402040C1E0A00530075006200" +
"430041301F0603551D23041830168014F509C1D6267FC39FCA1DE648C969C74F" +
"B111FE103081D60603551D1F0481CE3081CB3081C8A081C5A081C28663687474" +
"703A2F2F70617373706F72747465737463612E7265646D6F6E642E636F72702E" +
"6D6963726F736F66742E636F6D2F43657274456E726F6C6C2F4D532532305061" +
"7373706F727425323054657374253230526F6F7425323043412831292E63726C" +
"865B66696C653A2F2F50415353504F52545445535443412E7265646D6F6E642E" +
"636F72702E6D6963726F736F66742E636F6D2F43657274456E726F6C6C2F4D53" +
"2050617373706F7274205465737420526F6F742043412831292E63726C308201" +
"4406082B06010505070101048201363082013230819A06082B06010505073002" +
"86818D687474703A2F2F70617373706F72747465737463612E7265646D6F6E64" +
"2E636F72702E6D6963726F736F66742E636F6D2F43657274456E726F6C6C2F50" +
"415353504F52545445535443412E7265646D6F6E642E636F72702E6D6963726F" +
"736F66742E636F6D5F4D5325323050617373706F727425323054657374253230" +
"526F6F7425323043412832292E63727430819206082B06010505073002868185" +
"66696C653A2F2F50415353504F52545445535443412E7265646D6F6E642E636F" +
"72702E6D6963726F736F66742E636F6D2F43657274456E726F6C6C2F50415353" +
"504F52545445535443412E7265646D6F6E642E636F72702E6D6963726F736F66" +
"742E636F6D5F4D532050617373706F7274205465737420526F6F742043412832" +
"292E637274300D06092A864886F70D010105050003818100C44788F8C4F5C2DC" +
"84976F66417CBAE19FBFA82C257DA4C7FED6267BC711D113C78B1C097154A62A" +
"B462ADC84A434AEBAE38DEB9605FAB534A3CAF7B72C199448E58640388911296" +
"115ED6B3478D0E741D990F2D59D66F12E58669D8983489AB0406E37462164B56" +
"6AA1D9B273C406FA694A2556D1D3ACE723382C19871B8C143100").HexToByteArray();
internal static readonly byte[] Pkcs7ChainPemBytes = ByteUtils.AsciiBytes(
@"-----BEGIN PKCS7-----
MIIOFgYJKoZIhvcNAQcCoIIOBzCCDgMCAQExADALBgkqhkiG9w0BBwGggg3rMIIF
CzCCBHSgAwIBAgIKFeqoOgABAACSkTANBgkqhkiG9w0BAQUFADCBgTETMBEGCgmS
JomT8ixkARkWA2NvbTEZMBcGCgmSJomT8ixkARkWCW1pY3Jvc29mdDEUMBIGCgmS
JomT8ixkARkWBGNvcnAxFzAVBgoJkiaJk/IsZAEZFgdyZWRtb25kMSAwHgYDVQQD
ExdNUyBQYXNzcG9ydCBUZXN0IFN1YiBDQTAeFw0xMzAxMTAyMTM5MTJaFw0zMTEy
MTMyMjI2MDdaMIGFMQswCQYDVQQGEwJVUzELMAkGA1UECBMCV0ExEDAOBgNVBAcT
B1JlZG1vbmQxDTALBgNVBAoTBFRFU1QxDTALBgNVBAsTBFRFU1QxEzARBgNVBAMT
CnRlc3QubG9jYWwxJDAiBgkqhkiG9w0BCQEWFWNyaXNwb3BAbWljcm9zb2Z0LmNv
bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAtAaFEInpz3zbQ43Xe+vYGRl7
7v9XnDXvnEZS355jMKp+LiSxgcWdpK8Q6XIgwd+Z9mzm6XJH6RJqAWrGR70u/RNs
MUcMe+AaIOOBJDvuyFMLf2RmxQoFHc43J07X/yr/9OWKq6YdWkSPSoqbN2XR12n2
J+0vLenuZ7Gn7KPSiMkCAwEAAaOCAoIwggJ+MA4GA1UdDwEB/wQEAwIE8DAdBgNV
HSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwHQYDVR0OBBYEFPs0hXCMv2GI9yDv
lISJQFyNBBOnMB8GA1UdIwQYMBaAFGpmeGIKT/ScqLdf1WY0jzNx5CsTMIHQBgNV
HR8EgcgwgcUwgcKggb+ggbyGX2h0dHA6Ly9wcHRlc3RzdWJjYS5yZWRtb25kLmNv
cnAubWljcm9zb2Z0LmNvbS9DZXJ0RW5yb2xsL01TJTIwUGFzc3BvcnQlMjBUZXN0
JTIwU3ViJTIwQ0EoMSkuY3JshllmaWxlOi8vXFxwcHRlc3RzdWJjYS5yZWRtb25k
LmNvcnAubWljcm9zb2Z0LmNvbVxDZXJ0RW5yb2xsXE1TIFBhc3Nwb3J0IFRlc3Qg
U3ViIENBKDEpLmNybDCCATgGCCsGAQUFBwEBBIIBKjCCASYwgZMGCCsGAQUFBzAC
hoGGaHR0cDovL3BwdGVzdHN1YmNhLnJlZG1vbmQuY29ycC5taWNyb3NvZnQuY29t
L0NlcnRFbnJvbGwvcHB0ZXN0c3ViY2EucmVkbW9uZC5jb3JwLm1pY3Jvc29mdC5j
b21fTVMlMjBQYXNzcG9ydCUyMFRlc3QlMjBTdWIlMjBDQSgxKS5jcnQwgY0GCCsG
AQUFBzAChoGAZmlsZTovL1xccHB0ZXN0c3ViY2EucmVkbW9uZC5jb3JwLm1pY3Jv
c29mdC5jb21cQ2VydEVucm9sbFxwcHRlc3RzdWJjYS5yZWRtb25kLmNvcnAubWlj
cm9zb2Z0LmNvbV9NUyBQYXNzcG9ydCBUZXN0IFN1YiBDQSgxKS5jcnQwDQYJKoZI
hvcNAQEFBQADgYEAneu4taQe1UhZeV9o73Z6mKYe97B6rBkPzAJ1Io5MrTYMm6mL
CuFTx1Ui7vQtQA6BO05J56zrlj7ue2HTyNoFwYNHFURyWy69GImHf2ITSCf7WZO4
/fYYvUIauhjXDRxbQezdEWlaSMtC61AfltqQVHGDDGErYJEmVZEg9uGOpEgwggNY
MIICwaADAgECAhAblnGkvBKLg0Gw4xTq2aGRMA0GCSqGSIb3DQEBBQUAMIGhMSQw
IgYJKoZIhvcNAQkBFhVhc21lbW9uQG1pY3Jvc29mdC5jb20xCzAJBgNVBAYTAlVT
MQswCQYDVQQIEwJXQTEQMA4GA1UEBxMHUmVkbW9uZDESMBAGA1UEChMJTWljcm9z
b2Z0MRYwFAYDVQQLEw1QYXNzcG9ydCBUZXN0MSEwHwYDVQQDExhNUyBQYXNzcG9y
dCBUZXN0IFJvb3QgQ0EwHhcNMDUwMTI2MDEzOTMyWhcNMzExMjEzMjIyNjA3WjCB
oTEkMCIGCSqGSIb3DQEJARYVYXNtZW1vbkBtaWNyb3NvZnQuY29tMQswCQYDVQQG
EwJVUzELMAkGA1UECBMCV0ExEDAOBgNVBAcTB1JlZG1vbmQxEjAQBgNVBAoTCU1p
Y3Jvc29mdDEWMBQGA1UECxMNUGFzc3BvcnQgVGVzdDEhMB8GA1UEAxMYTVMgUGFz
c3BvcnQgVGVzdCBSb290IENBMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDE
ZzwSJiVPa70BsB0huwUmSpqlt3rFF0jqxSBIcG2muJDc4EPGQm/ETnbXD5/jpKyF
X1M+PQjhQIU9t2nuJNvbcmn6vsD9/2reCqhfAIW3iGRY51heQzsJJOgWAEM8sRd8
5q1fJHeyoOLRo0tB9sb1reSp3X1WXGXwLCqqAcjgwQIDAQABo4GOMIGLMBMGCSsG
AQQBgjcUAgQGHgQAQwBBMAsGA1UdDwQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0G
A1UdDgQWBBT1CcHWJn/Dn8od5kjJacdPsRH+EDASBgkrBgEEAYI3FQEEBQIDAQAC
MCMGCSsGAQQBgjcVAgQWBBR/elIIQR1GB8AFfJjwxHMHAQyz3jANBgkqhkiG9w0B
AQUFAAOBgQBKjqxz2Optfok9WICUXg46v8ecQL+mCmgM+Ki/Y+3DrZwRwIHx9EQI
lYH1yNyyPArvonVx2XHb6yqpobP3ubCHfpMR02CYplt9A/xpqDX2wwlt7hNahkBl
+Xecgt6wx3e5xNtJ8N0RoOqyh7bjUvfspGfQ08oqgIERk4i6/N0lVzCCBXwwggTl
oAMCAQICCmGHx/IAAgAAABswDQYJKoZIhvcNAQEFBQAwgaExJDAiBgkqhkiG9w0B
CQEWFWFzbWVtb25AbWljcm9zb2Z0LmNvbTELMAkGA1UEBhMCVVMxCzAJBgNVBAgT
AldBMRAwDgYDVQQHEwdSZWRtb25kMRIwEAYDVQQKEwlNaWNyb3NvZnQxFjAUBgNV
BAsTDVBhc3Nwb3J0IFRlc3QxITAfBgNVBAMTGE1TIFBhc3Nwb3J0IFRlc3QgUm9v
dCBDQTAeFw0wOTEwMjcyMTMxMzlaFw0zMTEyMTMyMjI2MDdaMIGBMRMwEQYKCZIm
iZPyLGQBGRYDY29tMRkwFwYKCZImiZPyLGQBGRYJbWljcm9zb2Z0MRQwEgYKCZIm
iZPyLGQBGRYEY29ycDEXMBUGCgmSJomT8ixkARkWB3JlZG1vbmQxIDAeBgNVBAMT
F01TIFBhc3Nwb3J0IFRlc3QgU3ViIENBMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCB
iQKBgQCmpJGPk8XSOzw6MlrY7HcEPSB6DdwpStP1veBAM/rdQJe7HbBCsdOy8mpC
zDy4j6k1dxAUerThAgoN+yWXq4Ax22Kr3Eg5gGfreeTiu+V2L2tMXqdim6wj9wJp
BtRuwQbMb7tNFD99Wtre3hmwIe70pry50B2uu5qUdwNAt0ijSQIDAQABo4IC1zCC
AtMwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUamZ4YgpP9Jyot1/VZjSPM3Hk
KxMwCwYDVR0PBAQDAgGGMBIGCSsGAQQBgjcVAQQFAgMBAAEwIwYJKwYBBAGCNxUC
BBYEFKCkha6ClupJRMb284hqhgP9B0csMBkGCSsGAQQBgjcUAgQMHgoAUwB1AGIA
QwBBMB8GA1UdIwQYMBaAFPUJwdYmf8Ofyh3mSMlpx0+xEf4QMIHWBgNVHR8Egc4w
gcswgciggcWggcKGY2h0dHA6Ly9wYXNzcG9ydHRlc3RjYS5yZWRtb25kLmNvcnAu
bWljcm9zb2Z0LmNvbS9DZXJ0RW5yb2xsL01TJTIwUGFzc3BvcnQlMjBUZXN0JTIw
Um9vdCUyMENBKDEpLmNybIZbZmlsZTovL1BBU1NQT1JUVEVTVENBLnJlZG1vbmQu
Y29ycC5taWNyb3NvZnQuY29tL0NlcnRFbnJvbGwvTVMgUGFzc3BvcnQgVGVzdCBS
b290IENBKDEpLmNybDCCAUQGCCsGAQUFBwEBBIIBNjCCATIwgZoGCCsGAQUFBzAC
hoGNaHR0cDovL3Bhc3Nwb3J0dGVzdGNhLnJlZG1vbmQuY29ycC5taWNyb3NvZnQu
Y29tL0NlcnRFbnJvbGwvUEFTU1BPUlRURVNUQ0EucmVkbW9uZC5jb3JwLm1pY3Jv
c29mdC5jb21fTVMlMjBQYXNzcG9ydCUyMFRlc3QlMjBSb290JTIwQ0EoMikuY3J0
MIGSBggrBgEFBQcwAoaBhWZpbGU6Ly9QQVNTUE9SVFRFU1RDQS5yZWRtb25kLmNv
cnAubWljcm9zb2Z0LmNvbS9DZXJ0RW5yb2xsL1BBU1NQT1JUVEVTVENBLnJlZG1v
bmQuY29ycC5taWNyb3NvZnQuY29tX01TIFBhc3Nwb3J0IFRlc3QgUm9vdCBDQSgy
KS5jcnQwDQYJKoZIhvcNAQEFBQADgYEAxEeI+MT1wtyEl29mQXy64Z+/qCwlfaTH
/tYme8cR0RPHixwJcVSmKrRirchKQ0rrrjjeuWBfq1NKPK97csGZRI5YZAOIkRKW
EV7Ws0eNDnQdmQ8tWdZvEuWGadiYNImrBAbjdGIWS1Zqodmyc8QG+mlKJVbR06zn
IzgsGYcbjBQxAA==
-----END PKCS7-----");
// PEM encoding of an empty/degenerate PKCS#7 message (no certificates, no signers);
// DER form of the same fixture is Pkcs7EmptyDerBytes below.
internal static readonly byte[] Pkcs7EmptyPemBytes = ByteUtils.AsciiBytes(
@"-----BEGIN PKCS7-----
MCcGCSqGSIb3DQEHAqAaMBgCAQExADALBgkqhkiG9w0BBwGgAKEAMQA=
-----END PKCS7-----");
// DER encoding of the same empty PKCS#7 message as Pkcs7EmptyPemBytes.
internal static readonly byte[] Pkcs7EmptyDerBytes = (
"302706092A864886F70D010702A01A30180201013100300B06092A864886F70D" +
"010701A000A1003100").HexToByteArray();
internal static readonly byte[] Pkcs7SingleDerBytes = (
"3082021406092A864886F70D010702A0820205308202010201013100300B0609" +
"2A864886F70D010701A08201E9308201E530820152A0030201020210D5B5BC1C" +
"458A558845BFF51CB4DFF31C300906052B0E03021D05003011310F300D060355" +
"040313064D794E616D65301E170D3130303430313038303030305A170D313130" +
"3430313038303030305A3011310F300D060355040313064D794E616D6530819F" +
"300D06092A864886F70D010101050003818D0030818902818100B11E30EA8742" +
"4A371E30227E933CE6BE0E65FF1C189D0D888EC8FF13AA7B42B68056128322B2" +
"1F2B6976609B62B6BC4CF2E55FF5AE64E9B68C78A3C2DACC916A1BC7322DD353" +
"B32898675CFB5B298B176D978B1F12313E3D865BC53465A11CCA106870A4B5D5" +
"0A2C410938240E92B64902BAEA23EB093D9599E9E372E48336730203010001A3" +
"46304430420603551D01043B3039801024859EBF125E76AF3F0D7979B4AC7A96" +
"A1133011310F300D060355040313064D794E616D658210D5B5BC1C458A558845" +
"BFF51CB4DFF31C300906052B0E03021D0500038181009BF6E2CF830ED485B86D" +
"6B9E8DFFDCD65EFC7EC145CB9348923710666791FCFA3AB59D689FFD7234B787" +
"2611C5C23E5E0714531ABADB5DE492D2C736E1C929E648A65CC9EB63CD84E57B" +
"5909DD5DDF5DBBBA4A6498B9CA225B6E368B94913BFC24DE6B2BD9A26B192B95" +
"7304B89531E902FFC91B54B237BB228BE8AFCDA264763100").HexToByteArray();
internal static readonly byte[] Pkcs7SinglePemBytes = ByteUtils.AsciiBytes(
@"-----BEGIN PKCS7-----
MIICFAYJKoZIhvcNAQcCoIICBTCCAgECAQExADALBgkqhkiG9w0BBwGgggHpMIIB
5TCCAVKgAwIBAgIQ1bW8HEWKVYhFv/UctN/zHDAJBgUrDgMCHQUAMBExDzANBgNV
BAMTBk15TmFtZTAeFw0xMDA0MDEwODAwMDBaFw0xMTA0MDEwODAwMDBaMBExDzAN
BgNVBAMTBk15TmFtZTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAsR4w6odC
SjceMCJ+kzzmvg5l/xwYnQ2Ijsj/E6p7QraAVhKDIrIfK2l2YJtitrxM8uVf9a5k
6baMeKPC2syRahvHMi3TU7MomGdc+1spixdtl4sfEjE+PYZbxTRloRzKEGhwpLXV
CixBCTgkDpK2SQK66iPrCT2VmenjcuSDNnMCAwEAAaNGMEQwQgYDVR0BBDswOYAQ
JIWevxJedq8/DXl5tKx6lqETMBExDzANBgNVBAMTBk15TmFtZYIQ1bW8HEWKVYhF
v/UctN/zHDAJBgUrDgMCHQUAA4GBAJv24s+DDtSFuG1rno3/3NZe/H7BRcuTSJI3
EGZnkfz6OrWdaJ/9cjS3hyYRxcI+XgcUUxq6213kktLHNuHJKeZIplzJ62PNhOV7
WQndXd9du7pKZJi5yiJbbjaLlJE7/CTeayvZomsZK5VzBLiVMekC/8kbVLI3uyKL
6K/NomR2MQA=
-----END PKCS7-----");
internal static readonly byte[] MicrosoftDotComSslCertBytes = (
"308205943082047CA00302010202103DF70C5D9903F8D8868B9B8CCF20DF6930" +
"0D06092A864886F70D01010B05003077310B3009060355040613025553311D30" +
"1B060355040A131453796D616E74656320436F72706F726174696F6E311F301D" +
"060355040B131653796D616E746563205472757374204E6574776F726B312830" +
"260603550403131F53796D616E74656320436C61737320332045562053534C20" +
"4341202D204733301E170D3134313031353030303030305A170D313631303135" +
"3233353935395A3082010F31133011060B2B0601040182373C02010313025553" +
"311B3019060B2B0601040182373C0201020C0A57617368696E67746F6E311D30" +
"1B060355040F131450726976617465204F7267616E697A6174696F6E31123010" +
"06035504051309363030343133343835310B3009060355040613025553310E30" +
"0C06035504110C0539383035323113301106035504080C0A57617368696E6774" +
"6F6E3110300E06035504070C075265646D6F6E643118301606035504090C0F31" +
"204D6963726F736F667420576179311E301C060355040A0C154D6963726F736F" +
"667420436F72706F726174696F6E310E300C060355040B0C054D53434F4D311A" +
"301806035504030C117777772E6D6963726F736F66742E636F6D30820122300D" +
"06092A864886F70D01010105000382010F003082010A0282010100A46861FA9D" +
"5DB763633BF5A64EF6E7C2C2367F48D2D46643A22DFCFCCB24E58A14D0F06BDC" +
"956437F2A56BA4BEF70BA361BF12964A0D665AFD84B0F7494C8FA4ABC5FCA2E0" +
"17C06178AEF2CDAD1B5F18E997A14B965C074E8F564970607276B00583932240" +
"FE6E2DD013026F9AE13D7C91CC07C4E1E8E87737DC06EF2B575B89D62EFE4685" +
"9F8255A123692A706C68122D4DAFE11CB205A7B3DE06E553F7B95F978EF8601A" +
"8DF819BF32040BDF92A0DE0DF269B4514282E17AC69934E8440A48AB9D1F5DF8" +
"9A502CEF6DFDBE790045BD45E0C94E5CA8ADD76A013E9C978440FC8A9E2A9A49" +
"40B2460819C3E302AA9C9F355AD754C86D3ED77DDAA3DA13810B4D0203010001" +
"A38201803082017C30310603551D11042A302882117777772E6D6963726F736F" +
"66742E636F6D821377777771612E6D6963726F736F66742E636F6D3009060355" +
"1D1304023000300E0603551D0F0101FF0404030205A0301D0603551D25041630" +
"1406082B0601050507030106082B0601050507030230660603551D20045F305D" +
"305B060B6086480186F84501071706304C302306082B06010505070201161768" +
"747470733A2F2F642E73796D63622E636F6D2F637073302506082B0601050507" +
"020230191A1768747470733A2F2F642E73796D63622E636F6D2F727061301F06" +
"03551D230418301680140159ABE7DD3A0B59A66463D6CF200757D591E76A302B" +
"0603551D1F042430223020A01EA01C861A687474703A2F2F73722E73796D6362" +
"2E636F6D2F73722E63726C305706082B06010505070101044B3049301F06082B" +
"060105050730018613687474703A2F2F73722E73796D63642E636F6D30260608" +
"2B06010505073002861A687474703A2F2F73722E73796D63622E636F6D2F7372" +
"2E637274300D06092A864886F70D01010B0500038201010015F8505B627ED7F9" +
"F96707097E93A51E7A7E05A3D420A5C258EC7A1CFE1843EC20ACF728AAFA7A1A" +
"1BC222A7CDBF4AF90AA26DEEB3909C0B3FB5C78070DAE3D645BFCF840A4A3FDD" +
"988C7B3308BFE4EB3FD66C45641E96CA3352DBE2AEB4488A64A9C5FB96932BA7" +
"0059CE92BD278B41299FD213471BD8165F924285AE3ECD666C703885DCA65D24" +
"DA66D3AFAE39968521995A4C398C7DF38DFA82A20372F13D4A56ADB21B582254" +
"9918015647B5F8AC131CC5EB24534D172BC60218A88B65BCF71C7F388CE3E0EF" +
"697B4203720483BB5794455B597D80D48CD3A1D73CBBC609C058767D1FF060A6" +
"09D7E3D4317079AF0CD0A8A49251AB129157F9894A036487").HexToByteArray();
internal static readonly byte[] MicrosoftDotComIssuerBytes = (
"3082052B30820413A00302010202107EE14A6F6FEFF2D37F3FAD654D3ADAB430" +
"0D06092A864886F70D01010B05003081CA310B30090603550406130255533117" +
"3015060355040A130E566572695369676E2C20496E632E311F301D060355040B" +
"1316566572695369676E205472757374204E6574776F726B313A303806035504" +
"0B1331286329203230303620566572695369676E2C20496E632E202D20466F72" +
"20617574686F72697A656420757365206F6E6C79314530430603550403133C56" +
"6572695369676E20436C6173732033205075626C6963205072696D6172792043" +
"657274696669636174696F6E20417574686F72697479202D204735301E170D31" +
"33313033313030303030305A170D3233313033303233353935395A3077310B30" +
"09060355040613025553311D301B060355040A131453796D616E74656320436F" +
"72706F726174696F6E311F301D060355040B131653796D616E74656320547275" +
"7374204E6574776F726B312830260603550403131F53796D616E74656320436C" +
"61737320332045562053534C204341202D20473330820122300D06092A864886" +
"F70D01010105000382010F003082010A0282010100D8A1657423E82B64E232D7" +
"33373D8EF5341648DD4F7F871CF84423138EFB11D8445A18718E601626929BFD" +
"170BE1717042FEBFFA1CC0AAA3A7B571E8FF1883F6DF100A1362C83D9CA7DE2E" +
"3F0CD91DE72EFB2ACEC89A7F87BFD84C041532C9D1CC9571A04E284F84D935FB" +
"E3866F9453E6728A63672EBE69F6F76E8E9C6004EB29FAC44742D27898E3EC0B" +
"A592DCB79ABD80642B387C38095B66F62D957A86B2342E859E900E5FB75DA451" +
"72467013BF67F2B6A74D141E6CB953EE231A4E8D48554341B189756A4028C57D" +
"DDD26ED202192F7B24944BEBF11AA99BE3239AEAFA33AB0A2CB7F46008DD9F1C" +
"CDDD2D016680AFB32F291D23B88AE1A170070C340F0203010001A382015D3082" +
"0159302F06082B0601050507010104233021301F06082B060105050730018613" +
"687474703A2F2F73322E73796D63622E636F6D30120603551D130101FF040830" +
"060101FF02010030650603551D20045E305C305A0604551D2000305230260608" +
"2B06010505070201161A687474703A2F2F7777772E73796D617574682E636F6D" +
"2F637073302806082B06010505070202301C1A1A687474703A2F2F7777772E73" +
"796D617574682E636F6D2F72706130300603551D1F042930273025A023A02186" +
"1F687474703A2F2F73312E73796D63622E636F6D2F706361332D67352E63726C" +
"300E0603551D0F0101FF04040302010630290603551D1104223020A41E301C31" +
"1A30180603550403131153796D616E746563504B492D312D353333301D060355" +
"1D0E041604140159ABE7DD3A0B59A66463D6CF200757D591E76A301F0603551D" +
"230418301680147FD365A7C2DDECBBF03009F34339FA02AF333133300D06092A" +
"864886F70D01010B050003820101004201557BD0161A5D58E8BB9BA84DD7F3D7" +
"EB139486D67F210B47BC579B925D4F059F38A4107CCF83BE0643468D08BC6AD7" +
"10A6FAABAF2F61A863F265DF7F4C8812884FB369D9FF27C00A97918F56FB89C4" +
"A8BB922D1B73B0C6AB36F4966C2008EF0A1E6624454F670040C8075474333BA6" +
"ADBB239F66EDA2447034FB0EEA01FDCF7874DFA7AD55B75F4DF6D63FE086CE24" +
"C742A9131444354BB6DFC960AC0C7FD993214BEE9CE4490298D3607B5CBCD530" +
"2F07CE4442C40B99FEE69FFCB07886516DD12C9DC696FB8582BB042FF76280EF" +
"62DA7FF60EAC90B856BD793FF2806EA3D9B90F5D3A071D9193864B294CE1DCB5" +
"E1E0339DB3CB36914BFEA1B4EEF0F9").HexToByteArray();
internal static readonly byte[] MicrosoftDotComRootBytes = (
"308204D3308203BBA003020102021018DAD19E267DE8BB4A2158CDCC6B3B4A30" +
"0D06092A864886F70D01010505003081CA310B30090603550406130255533117" +
"3015060355040A130E566572695369676E2C20496E632E311F301D060355040B" +
"1316566572695369676E205472757374204E6574776F726B313A303806035504" +
"0B1331286329203230303620566572695369676E2C20496E632E202D20466F72" +
"20617574686F72697A656420757365206F6E6C79314530430603550403133C56" +
"6572695369676E20436C6173732033205075626C6963205072696D6172792043" +
"657274696669636174696F6E20417574686F72697479202D204735301E170D30" +
"36313130383030303030305A170D3336303731363233353935395A3081CA310B" +
"300906035504061302555331173015060355040A130E566572695369676E2C20" +
"496E632E311F301D060355040B1316566572695369676E205472757374204E65" +
"74776F726B313A3038060355040B133128632920323030362056657269536967" +
"6E2C20496E632E202D20466F7220617574686F72697A656420757365206F6E6C" +
"79314530430603550403133C566572695369676E20436C617373203320507562" +
"6C6963205072696D6172792043657274696669636174696F6E20417574686F72" +
"697479202D20473530820122300D06092A864886F70D01010105000382010F00" +
"3082010A0282010100AF240808297A359E600CAAE74B3B4EDC7CBC3C451CBB2B" +
"E0FE2902F95708A364851527F5F1ADC831895D22E82AAAA642B38FF8B955B7B1" +
"B74BB3FE8F7E0757ECEF43DB66621561CF600DA4D8DEF8E0C362083D5413EB49" +
"CA59548526E52B8F1B9FEBF5A191C23349D843636A524BD28FE870514DD18969" +
"7BC770F6B3DC1274DB7B5D4B56D396BF1577A1B0F4A225F2AF1C926718E5F406" +
"04EF90B9E400E4DD3AB519FF02BAF43CEEE08BEB378BECF4D7ACF2F6F03DAFDD" +
"759133191D1C40CB7424192193D914FEAC2A52C78FD50449E48D6347883C6983" +
"CBFE47BD2B7E4FC595AE0E9DD4D143C06773E314087EE53F9F73B8330ACF5D3F" +
"3487968AEE53E825150203010001A381B23081AF300F0603551D130101FF0405" +
"30030101FF300E0603551D0F0101FF040403020106306D06082B060105050701" +
"0C0461305FA15DA05B3059305730551609696D6167652F6769663021301F3007" +
"06052B0E03021A04148FE5D31A86AC8D8E6BC3CF806AD448182C7B192E302516" +
"23687474703A2F2F6C6F676F2E766572697369676E2E636F6D2F76736C6F676F" +
"2E676966301D0603551D0E041604147FD365A7C2DDECBBF03009F34339FA02AF" +
"333133300D06092A864886F70D0101050500038201010093244A305F62CFD81A" +
"982F3DEADC992DBD77F6A5792238ECC4A7A07812AD620E457064C5E797662D98" +
"097E5FAFD6CC2865F201AA081A47DEF9F97C925A0869200DD93E6D6E3C0D6ED8" +
"E606914018B9F8C1EDDFDB41AAE09620C9CD64153881C994EEA284290B136F8E" +
"DB0CDD2502DBA48B1944D2417A05694A584F60CA7E826A0B02AA251739B5DB7F" +
"E784652A958ABD86DE5E8116832D10CCDEFDA8822A6D281F0D0BC4E5E71A2619" +
"E1F4116F10B595FCE7420532DBCE9D515E28B69E85D35BEFA57D4540728EB70E" +
"6B0E06FB33354871B89D278BC4655F0D86769C447AF6955CF65D320833A454B6" +
"183F685CF2424A853854835FD1E82CF2AC11D6A8ED636A").HexToByteArray();
// 32-byte coordinates => P-256 test key.
// NOTE(review): despite the "PublicKey" name this also carries the private scalar D.
internal static readonly ECDsaCngKeyValues ECDsaCng256PublicKey =
new ECDsaCngKeyValues()
{
QX = "448d98ee08aeba0d8b40f3c6dbd500e8b69f07c70c661771655228ea5a178a91".HexToByteArray(),
QY = "0ef5cb1759f6f2e062021d4f973f5bb62031be87ae915cff121586809e3219af".HexToByteArray(),
D = "692837e9cf613c0e290462a6f08faadcc7002398f75598d5554698a0cb51cf47".HexToByteArray(),
};
internal static readonly byte[] ECDsa256Certificate =
("308201223081c9a00302010202106a3c9e85ba6af1ac4f08111d8bdda340300906072a8648ce3d0401301431123010060355"
+ "04031309456332353655736572301e170d3135303931303231333533305a170d3136303931303033333533305a3014311230"
+ "10060355040313094563323536557365723059301306072a8648ce3d020106082a8648ce3d03010703420004448d98ee08ae"
+ "ba0d8b40f3c6dbd500e8b69f07c70c661771655228ea5a178a910ef5cb1759f6f2e062021d4f973f5bb62031be87ae915cff"
+ "121586809e3219af300906072a8648ce3d04010349003046022100f221063dca71955d17c8f0e0f63a144c4065578fd9f68e"
+ "1ae6a7683e209ea742022100ed1db6a8be27cfb20ab43e0ca061622ceff26f7249a0f791e4d6be1a4e52adfa").HexToByteArray();
// 48-byte coordinates => P-384 test key.
// NOTE(review): despite the "PublicKey" name this also carries the private scalar D.
internal static readonly ECDsaCngKeyValues ECDsaCng384PublicKey =
new ECDsaCngKeyValues()
{
QX = "c59eca607aa5559e6b2f8ac2eeb12d9ab47f420feabeb444c3f71520d7f2280439979323ab5a67344811d296fef6d1bd".HexToByteArray(),
QY = "d15f307cc6cc6c8baeeeb168bfb02c34d6eb0621efb3d06ad31c06b29eaf6ec2ec67bf288455e729d82e5a6439f70901".HexToByteArray(),
D = "f55ba33e28cea32a014e2fe1213bb4d41cef361f1fee022116b15be50feb96bc946b10a46a9a7a94176787e0928a3e1d".HexToByteArray(),
};
internal static readonly byte[] ECDsa384Certificate =
("3082015f3081e6a00302010202101e78eb573e70a2a64744672296988ad7300906072a8648ce3d0401301431123010060355"
+ "04031309456333383455736572301e170d3135303931303231333634365a170d3136303931303033333634365a3014311230"
+ "10060355040313094563333834557365723076301006072a8648ce3d020106052b8104002203620004c59eca607aa5559e6b"
+ "2f8ac2eeb12d9ab47f420feabeb444c3f71520d7f2280439979323ab5a67344811d296fef6d1bdd15f307cc6cc6c8baeeeb1"
+ "68bfb02c34d6eb0621efb3d06ad31c06b29eaf6ec2ec67bf288455e729d82e5a6439f70901300906072a8648ce3d04010369"
+ "003066023100a8fbaeeae61953897eae5f0beeeffaca48e89bc0cb782145f39f4ba5b03390ce6a28e432e664adf5ebc6a802"
+ "040b238b023100dcc19109383b9482fdda68f40a63ee41797dbb8f25c0284155cc4238d682fbb3fb6e86ea0933297e850a26"
+ "16f6c39bbf").HexToByteArray();
// 66-byte coordinates => P-521 test key.
// NOTE(review): despite the "PublicKey" name this also carries the private scalar D.
internal static readonly ECDsaCngKeyValues ECDsaCng521PublicKey =
new ECDsaCngKeyValues()
{
QX = "0134af29d1fe5e581fd2ff6194263abcb6f8cb4d9c08bdb384ede9b8663ae2f4e1af6c85eacc69dc768fbfcd856630792e05484cefb1fefb693081dc6490dac579c0".HexToByteArray(),
QY = "00bfe103f53cbcb039873b1a3e81a9da9abd71995e722318367281d30b35a338bf356662342b653eff38e85881863b7128ddbb856d8ae158365550bb6330b93d4ef0".HexToByteArray(),
D = "0153603164bcef5c9f62388d06dcbf5681479be4397c07ff6f44bb848465e3397537d5f61abc7bc9266d4df6bae1df4847fcfd3dabdda37a2fe549b821ea858d088d".HexToByteArray(),
};
internal static readonly byte[] ECDsa521Certificate =
("308201a93082010ca00302010202102c3134fe79bb9daa48df6431f4c1e4f3300906072a8648ce3d04013014311230100603"
+ "5504031309456335323155736572301e170d3135303931303231333832305a170d3136303931303033333832305a30143112"
+ "30100603550403130945633532315573657230819b301006072a8648ce3d020106052b8104002303818600040134af29d1fe"
+ "5e581fd2ff6194263abcb6f8cb4d9c08bdb384ede9b8663ae2f4e1af6c85eacc69dc768fbfcd856630792e05484cefb1fefb"
+ "693081dc6490dac579c000bfe103f53cbcb039873b1a3e81a9da9abd71995e722318367281d30b35a338bf356662342b653e"
+ "ff38e85881863b7128ddbb856d8ae158365550bb6330b93d4ef0300906072a8648ce3d040103818b0030818702420090bdf5"
+ "dfb328501910da4b02ba3ccd41f2bb073608c55f0f2b2e1198496c59b44db9e516a6a63ba7841d22cf590e39d3f09636d0eb"
+ "cd59a92c105f499e1329615602414285111634719b9bbd10eb7d08655b2fa7d7eb5e225bfdafef15562ae2f9f0c6a943a7bd"
+ "f0e39223d807b5e2e617a8e424294d90869567326531bcad0f893a0f3a").HexToByteArray();
internal static readonly byte[] EccCert_KeyAgreement = (
"308201553081FDA00302010202105A1C956450FFED894E85DC61E11CD968300A" +
"06082A8648CE3D04030230143112301006035504030C09454344482054657374" +
"301E170D3135303433303138303131325A170D3136303433303138323131325A" +
"30143112301006035504030C094543444820546573743059301306072A8648CE" +
"3D020106082A8648CE3D0301070342000477DE73EA00A82250B69E3F24A14CDD" +
"C4C47C83993056DD0A2C6C17D5C8E7A054216B9253533D12C082E0C8B91B3B10" +
"CDAB564820D417E6D056E4E34BCCA87301A331302F300E0603551D0F0101FF04" +
"0403020009301D0603551D0E0416041472DE05F588BF2741C8A28FF99EA399F7" +
"AAB2C1B3300A06082A8648CE3D040302034700304402203CDF0CC71C63747BDA" +
"2D2D563115AE68D34867E74BCA02738086C316B846CDF2022079F3990E5DCCEE" +
"627B2E6E42317D4D279181EE695EE239D0C8516DD53A896EC3").HexToByteArray();
internal static readonly byte[] ECDsa224Certificate = (
"3082026630820214A003020102020900B94BCCE3179BAA21300A06082A8648CE" +
"3D040302308198310B30090603550406130255533113301106035504080C0A57" +
"617368696E67746F6E3110300E06035504070C075265646D6F6E64311E301C06" +
"0355040A0C154D6963726F736F667420436F72706F726174696F6E3120301E06" +
"0355040B0C172E4E4554204672616D65776F726B2028436F7265465829312030" +
"1E06035504030C174E4953542F53454320502D3232342054657374204B657930" +
"1E170D3135313233313232353532345A170D3136303133303232353532345A30" +
"8198310B30090603550406130255533113301106035504080C0A57617368696E" +
"67746F6E3110300E06035504070C075265646D6F6E64311E301C060355040A0C" +
"154D6963726F736F667420436F72706F726174696F6E3120301E060355040B0C" +
"172E4E4554204672616D65776F726B2028436F72654658293120301E06035504" +
"030C174E4953542F53454320502D3232342054657374204B6579304E30100607" +
"2A8648CE3D020106052B81040021033A000452FF02B55AE35AA7FFF1B0A82DC2" +
"260083DD7D5893E85FBAD1D663B718176F7D5D9A04B8AEA968E9FECFEE348CDB" +
"49A938401783BADAC484A350304E301D0603551D0E041604140EA9C5C4681A6E" +
"48CE64E47EE8BBB0BA5FF8AB3E301F0603551D230418301680140EA9C5C4681A" +
"6E48CE64E47EE8BBB0BA5FF8AB3E300C0603551D13040530030101FF300A0608" +
"2A8648CE3D040302034000303D021D00AC10B79B6FD6BEE113573A1B68A3B771" +
"3B9DA2719A9588376E334811021C1AAC3CA829DA79CE223FA83283E6F0A5A59D" +
"2399E140D957C1C9DDAF").HexToByteArray();
// Raw ECDSA key material used by the ECDsaCng* fixtures above:
// QX/QY are the affine coordinates of the public point Q; D is the private scalar.
internal struct ECDsaCngKeyValues
{
public byte[] QX; // X coordinate of the public point
public byte[] QY; // Y coordinate of the public point
public byte[] D;  // private key scalar
}
}
}
| |
//
// Copyright (c) 2004-2011 Jaroslaw Kowalski <[email protected]>
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
namespace NLog.Targets
{
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Text;
using System.Threading;
using NLog.Common;
using NLog.Internal.NetworkSenders;
using NLog.Layouts;
/// <summary>
/// Sends log messages over the network.
/// </summary>
/// <seealso href="http://nlog-project.org/wiki/Network_target">Documentation on NLog Wiki</seealso>
/// <example>
/// <p>
/// To set up the target in the <a href="config.html">configuration file</a>,
/// use the following syntax:
/// </p>
/// <code lang="XML" source="examples/targets/Configuration File/Network/NLog.config" />
/// <p>
/// This assumes just one target and a single rule. More configuration
/// options are described <a href="config.html">here</a>.
/// </p>
/// <p>
/// To set up the log target programmatically use code like this:
/// </p>
/// <code lang="C#" source="examples/targets/Configuration API/Network/Simple/Example.cs" />
/// <p>
/// To print the results, use any application that's able to receive messages over
/// TCP or UDP. <a href="http://m.nu/program/util/netcat/netcat.html">NetCat</a> is
/// a simple but very powerful command-line tool that can be used for that. This image
/// demonstrates the NetCat tool receiving log messages from Network target.
/// </p>
/// <img src="examples/targets/Screenshots/Network/Output.gif" />
/// <p>
/// NOTE: If your receiver application is ever likely to be off-line, don't use TCP protocol
/// or you'll get TCP timeouts and your application will be very slow.
/// Either switch to UDP transport or use <a href="target.AsyncWrapper.html">AsyncWrapper</a> target
/// so that your application threads will not be blocked by the timing-out connection attempts.
/// </p>
/// <p>
/// There are two specialized versions of the Network target: <a href="target.Chainsaw.html">Chainsaw</a>
/// and <a href="target.NLogViewer.html">NLogViewer</a> which write to instances of Chainsaw log4j viewer
/// or NLogViewer application respectively.
/// </p>
/// </example>
[Target("Network")]
public class NetworkTarget : TargetWithLayout
{
// Most-recently-used sender per rendered address; bounded by ConnectionCacheSize
// (see GetCachedNetworkSender for the eviction logic).
private Dictionary<string, NetworkSender> currentSenderCache = new Dictionary<string, NetworkSender>();
// Every sender currently open (cached or one-shot in flight); enumerated by
// FlushAsync and CloseTarget. Always locked, and when combined with
// currentSenderCache, the cache lock is taken first.
private List<NetworkSender> openNetworkSenders = new List<NetworkSender>();
/// <summary>
/// Initializes a new instance of the <see cref="NetworkTarget" /> class
/// with the default settings (persistent UTF-8 connections, oversized
/// messages split into chunks).
/// </summary>
/// <remarks>
/// The default value of the layout is: <code>${longdate}|${level:uppercase=true}|${logger}|${message}</code>
/// </remarks>
public NetworkTarget()
{
    // All of these match the [DefaultValue] attributes on the corresponding properties.
    this.KeepConnection = true;
    this.MaxMessageSize = 65000;
    this.OnOverflow = NetworkTargetOverflowAction.Split;
    this.ConnectionCacheSize = 5;
    this.Encoding = Encoding.UTF8;
    this.SenderFactory = NetworkSenderFactory.Default;
}
/// <summary>
/// Gets or sets the network address.
/// </summary>
/// <remarks>
/// The network address can be:
/// <ul>
/// <li>tcp://host:port - TCP (auto select IPv4/IPv6) (not supported on Windows Phone 7.0)</li>
/// <li>tcp4://host:port - force TCP/IPv4 (not supported on Windows Phone 7.0)</li>
/// <li>tcp6://host:port - force TCP/IPv6 (not supported on Windows Phone 7.0)</li>
/// <li>udp://host:port - UDP (auto select IPv4/IPv6, not supported on Silverlight and on Windows Phone 7.0)</li>
/// <li>udp4://host:port - force UDP/IPv4 (not supported on Silverlight and on Windows Phone 7.0)</li>
/// <li>udp6://host:port - force UDP/IPv6 (not supported on Silverlight and on Windows Phone 7.0)</li>
/// <li>http://host:port/pageName - HTTP using POST verb</li>
/// <li>https://host:port/pageName - HTTPS using POST verb</li>
/// </ul>
/// For SOAP-based webservice support over HTTP use WebService target.
/// This is a <see cref="Layout"/>, so it is re-rendered for every log event.
/// </remarks>
/// <docgen category='Connection Options' order='10' />
public Layout Address { get; set; }

/// <summary>
/// Gets or sets a value indicating whether to keep connection open whenever possible.
/// When false, a new connection is opened and closed for every write.
/// </summary>
/// <docgen category='Connection Options' order='10' />
[DefaultValue(true)]
public bool KeepConnection { get; set; }

/// <summary>
/// Gets or sets a value indicating whether to append newline at the end of log message.
/// </summary>
/// <docgen category='Layout Options' order='10' />
[DefaultValue(false)]
public bool NewLine { get; set; }

/// <summary>
/// Gets or sets the maximum message size in bytes. Payloads larger than this
/// are handled according to <see cref="OnOverflow"/>.
/// </summary>
/// <docgen category='Layout Options' order='10' />
[DefaultValue(65000)]
public int MaxMessageSize { get; set; }

/// <summary>
/// Gets or sets the size of the connection cache (number of connections which are kept alive).
/// </summary>
/// <docgen category="Connection Options" order="10"/>
[DefaultValue(5)]
public int ConnectionCacheSize { get; set; }

/// <summary>
/// Gets or sets the maximum queue size.
/// </summary>
/// <remarks>
/// NOTE(review): passed straight to the sender factory; 0 presumably means
/// "unbounded" - confirm against the NetworkSender implementation.
/// </remarks>
[DefaultValue(0)]
public int MaxQueueSize { get; set; }

/// <summary>
/// Gets or sets the action that should be taken if the message is larger than
/// maxMessageSize.
/// </summary>
/// <docgen category='Layout Options' order='10' />
public NetworkTargetOverflowAction OnOverflow { get; set; }

/// <summary>
/// Gets or sets the encoding to be used when converting the rendered layout to bytes.
/// </summary>
/// <docgen category='Layout Options' order='10' />
[DefaultValue("utf-8")]
public Encoding Encoding { get; set; }

// Factory for NetworkSender instances; internal setter so a different factory
// can be substituted (NOTE(review): presumably for unit tests - confirm).
internal INetworkSenderFactory SenderFactory { get; set; }
/// <summary>
/// Flush any pending log messages asynchronously (in case of asynchronous targets).
/// Calls FlushAsync() on every currently open sender and invokes
/// <paramref name="asyncContinuation"/> exactly once after all of them report back.
/// </summary>
/// <param name="asyncContinuation">The asynchronous continuation; always invoked with null
/// (individual sender flush errors are ignored).</param>
protected override void FlushAsync(AsyncContinuation asyncContinuation)
{
    int remainingCount = 0;

    // Shared countdown: each sender's callback decrements; whichever callback
    // brings the count to zero fires the caller's continuation.
    AsyncContinuation continuation =
        ex =>
        {
            // ignore exception
            if (Interlocked.Decrement(ref remainingCount) == 0)
            {
                asyncContinuation(null);
            }
        };

    lock (this.openNetworkSenders)
    {
        // Count is fixed before any FlushAsync call, so even a synchronously
        // completing sender cannot make the countdown hit zero early.
        remainingCount = this.openNetworkSenders.Count;
        if (remainingCount == 0)
        {
            // nothing to flush
            asyncContinuation(null);
        }
        else
        {
            // otherwise call FlushAsync() on all senders
            // and invoke continuation at the very end
            foreach (var openSender in this.openNetworkSenders)
            {
                openSender.FlushAsync(continuation);
            }
        }
    }
}
/// <summary>
/// Closes the target: shuts down every open network sender
/// (ignoring close errors) and empties the open-sender list.
/// </summary>
protected override void CloseTarget()
{
    base.CloseTarget();

    lock (this.openNetworkSenders)
    {
        for (int i = 0; i < this.openNetworkSenders.Count; i++)
        {
            // Fire-and-forget close; errors during shutdown are ignored.
            this.openNetworkSenders[i].Close(ex => { });
        }

        this.openNetworkSenders.Clear();
    }
}
/// <summary>
/// Sends the rendered logging event over the network, optionally appending
/// a newline character (see <see cref="NewLine"/> via GetBytesToWrite).
/// </summary>
/// <param name="logEvent">The logging event.</param>
protected override void Write(AsyncLogEventInfo logEvent)
{
    string address = this.Address.Render(logEvent.LogEvent);
    byte[] bytes = this.GetBytesToWrite(logEvent.LogEvent);

    if (this.KeepConnection)
    {
        // Reuse (or create) a cached sender for this address.
        var sender = this.GetCachedNetworkSender(address);

        this.ChunkedSend(
            sender,
            bytes,
            ex =>
            {
                if (ex != null)
                {
                    // On failure, evict the sender so the next write
                    // establishes a fresh connection.
                    InternalLogger.Error("Error when sending {0}", ex);
                    this.ReleaseCachedConnection(sender);
                }

                logEvent.Continuation(ex);
            });
    }
    else
    {
        // One-shot sender: open, send, then close regardless of outcome.
        var sender = this.SenderFactory.Create(address, MaxQueueSize);
        sender.Initialize();

        lock (this.openNetworkSenders)
        {
            // Track the sender while in flight so FlushAsync()/CloseTarget() see it.
            this.openNetworkSenders.Add(sender);
            this.ChunkedSend(
                sender,
                bytes,
                ex =>
                {
                    lock (this.openNetworkSenders)
                    {
                        this.openNetworkSenders.Remove(sender);
                    }

                    if (ex != null)
                    {
                        InternalLogger.Error("Error when sending {0}", ex);
                    }

                    // Always close the one-shot sender, even on success.
                    sender.Close(ex2 => { });
                    logEvent.Continuation(ex);
                });
        }
    }
}
/// <summary>
/// Gets the bytes to be written: the rendered layout, encoded with
/// <see cref="Encoding"/>, with a trailing CR-LF when <see cref="NewLine"/> is set.
/// </summary>
/// <param name="logEvent">Log event.</param>
/// <returns>Byte array.</returns>
protected virtual byte[] GetBytesToWrite(LogEventInfo logEvent)
{
    var rendered = this.Layout.Render(logEvent);

    if (this.NewLine)
    {
        rendered += "\r\n";
    }

    return this.Encoding.GetBytes(rendered);
}
// Returns the cached sender for this address, creating one (and evicting the
// least recently used entry if the cache is full) when none exists.
// Lock order: currentSenderCache first, then openNetworkSenders - the same
// order ReleaseCachedConnection uses.
private NetworkSender GetCachedNetworkSender(string address)
{
    lock (this.currentSenderCache)
    {
        NetworkSender sender;

        // already have address
        if (this.currentSenderCache.TryGetValue(address, out sender))
        {
            sender.CheckSocket();
            return sender;
        }

        if (this.currentSenderCache.Count >= this.ConnectionCacheSize)
        {
            // make room in the cache by closing the least recently used connection
            // (linear scan over LastSendTime; first entry wins on ties)
            int minAccessTime = int.MaxValue;
            NetworkSender leastRecentlyUsed = null;

            foreach (var kvp in this.currentSenderCache)
            {
                if (kvp.Value.LastSendTime < minAccessTime)
                {
                    minAccessTime = kvp.Value.LastSendTime;
                    leastRecentlyUsed = kvp.Value;
                }
            }

            if (leastRecentlyUsed != null)
            {
                this.ReleaseCachedConnection(leastRecentlyUsed);
            }
        }

        sender = this.SenderFactory.Create(address, MaxQueueSize);
        sender.Initialize();

        lock (this.openNetworkSenders)
        {
            // New sender must also be visible to FlushAsync()/CloseTarget().
            this.openNetworkSenders.Add(sender);
        }

        this.currentSenderCache.Add(address, sender);
        return sender;
    }
}
private void ReleaseCachedConnection(NetworkSender sender)
{
lock (this.currentSenderCache)
{
lock (this.openNetworkSenders)
{
if (this.openNetworkSenders.Remove(sender))
{
// only remove it once
sender.Close(ex => { });
}
}
NetworkSender sender2;
// make sure the current sender for this address is the one we want to remove
if (this.currentSenderCache.TryGetValue(sender.Address, out sender2))
{
if (ReferenceEquals(sender, sender2))
{
this.currentSenderCache.Remove(sender.Address);
}
}
}
}
        /// <summary>
        /// Sends <paramref name="buffer"/> via <paramref name="sender"/> in chunks of at most
        /// <c>MaxMessageSize</c> bytes, invoking <paramref name="continuation"/> with null on
        /// success or with the first error encountered.
        /// </summary>
        /// <param name="sender">The network sender that performs the actual writes.</param>
        /// <param name="buffer">The payload bytes to transmit.</param>
        /// <param name="continuation">Callback receiving null (success) or the failing exception.</param>
        /// <remarks>
        /// Progress lives in closure-captured locals mutated by the self-referencing
        /// <c>sendNextChunk</c> lambda. NOTE(review): if the sender completes sends synchronously,
        /// each chunk deepens the call stack by one frame — confirm very large payloads cannot
        /// exhaust the stack.
        /// </remarks>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA2204:Literals should be spelled correctly", Justification = "Using property names in message.")]
        private void ChunkedSend(NetworkSender sender, byte[] buffer, AsyncContinuation continuation)
        {
            int tosend = buffer.Length;   // bytes still to be written
            int pos = 0;                  // offset of the next chunk within buffer
            // Declared before assignment so the lambda can refer to itself to schedule the next chunk.
            AsyncContinuation sendNextChunk = null;
            sendNextChunk = ex =>
            {
                // A failed chunk aborts the whole message.
                if (ex != null)
                {
                    continuation(ex);
                    return;
                }
                // Everything written: signal success.
                if (tosend <= 0)
                {
                    continuation(null);
                    return;
                }
                int chunksize = tosend;
                if (chunksize > this.MaxMessageSize)
                {
                    // Oversized message: Discard drops it silently, Error fails the send;
                    // any other overflow action caps the chunk at MaxMessageSize and keeps sending.
                    if (this.OnOverflow == NetworkTargetOverflowAction.Discard)
                    {
                        continuation(null);
                        return;
                    }
                    if (this.OnOverflow == NetworkTargetOverflowAction.Error)
                    {
                        continuation(new OverflowException("Attempted to send a message larger than MaxMessageSize (" + this.MaxMessageSize + "). Actual size was: " + buffer.Length + ". Adjust OnOverflow and MaxMessageSize parameters accordingly."));
                        return;
                    }
                    chunksize = this.MaxMessageSize;
                }
                int pos0 = pos;
                // Advance the closure state before issuing the send.
                tosend -= chunksize;
                pos += chunksize;
                sender.Send(buffer, pos0, chunksize, sendNextChunk);
            };
            sendNextChunk(null);
        }
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/monitoring/v3/group_service.proto
// Original file comments:
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#region Designer generated code
using System;
using System.Threading;
using System.Threading.Tasks;
using Grpc.Core;
namespace Google.Cloud.Monitoring.V3 {
  /// <summary>
  /// The Group API lets you inspect and manage your
  /// [groups](google.monitoring.v3.Group).
  ///
  /// A group is a named filter that is used to identify
  /// a collection of monitored resources. Groups are typically used to
  /// mirror the physical and/or logical topology of the environment.
  /// Because group membership is computed dynamically, monitored
  /// resources that are started in the future are automatically placed
  /// in matching groups. By using a group to name monitored resources in,
  /// for example, an alert policy, the target of that alert policy is
  /// updated automatically as monitored resources are added and removed
  /// from the infrastructure.
  /// </summary>
  // NOTE: generated by the protocol buffer compiler from google/monitoring/v3/group_service.proto.
  // Do not hand-edit; regenerate from the .proto instead, as manual changes will be overwritten.
  public static class GroupService
  {
    static readonly string __ServiceName = "google.monitoring.v3.GroupService";

    // Serializers/deserializers for each protobuf message type used by the service.
    static readonly Marshaller<global::Google.Cloud.Monitoring.V3.ListGroupsRequest> __Marshaller_ListGroupsRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Monitoring.V3.ListGroupsRequest.Parser.ParseFrom);
    static readonly Marshaller<global::Google.Cloud.Monitoring.V3.ListGroupsResponse> __Marshaller_ListGroupsResponse = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Monitoring.V3.ListGroupsResponse.Parser.ParseFrom);
    static readonly Marshaller<global::Google.Cloud.Monitoring.V3.GetGroupRequest> __Marshaller_GetGroupRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Monitoring.V3.GetGroupRequest.Parser.ParseFrom);
    static readonly Marshaller<global::Google.Cloud.Monitoring.V3.Group> __Marshaller_Group = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Monitoring.V3.Group.Parser.ParseFrom);
    static readonly Marshaller<global::Google.Cloud.Monitoring.V3.CreateGroupRequest> __Marshaller_CreateGroupRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Monitoring.V3.CreateGroupRequest.Parser.ParseFrom);
    static readonly Marshaller<global::Google.Cloud.Monitoring.V3.UpdateGroupRequest> __Marshaller_UpdateGroupRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Monitoring.V3.UpdateGroupRequest.Parser.ParseFrom);
    static readonly Marshaller<global::Google.Cloud.Monitoring.V3.DeleteGroupRequest> __Marshaller_DeleteGroupRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Monitoring.V3.DeleteGroupRequest.Parser.ParseFrom);
    static readonly Marshaller<global::Google.Protobuf.WellKnownTypes.Empty> __Marshaller_Empty = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Protobuf.WellKnownTypes.Empty.Parser.ParseFrom);
    static readonly Marshaller<global::Google.Cloud.Monitoring.V3.ListGroupMembersRequest> __Marshaller_ListGroupMembersRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Monitoring.V3.ListGroupMembersRequest.Parser.ParseFrom);
    static readonly Marshaller<global::Google.Cloud.Monitoring.V3.ListGroupMembersResponse> __Marshaller_ListGroupMembersResponse = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Monitoring.V3.ListGroupMembersResponse.Parser.ParseFrom);

    // Descriptors for the six unary RPCs exposed by GroupService; each pairs the wire
    // method name with the request/response marshallers above.
    static readonly Method<global::Google.Cloud.Monitoring.V3.ListGroupsRequest, global::Google.Cloud.Monitoring.V3.ListGroupsResponse> __Method_ListGroups = new Method<global::Google.Cloud.Monitoring.V3.ListGroupsRequest, global::Google.Cloud.Monitoring.V3.ListGroupsResponse>(
        MethodType.Unary,
        __ServiceName,
        "ListGroups",
        __Marshaller_ListGroupsRequest,
        __Marshaller_ListGroupsResponse);

    static readonly Method<global::Google.Cloud.Monitoring.V3.GetGroupRequest, global::Google.Cloud.Monitoring.V3.Group> __Method_GetGroup = new Method<global::Google.Cloud.Monitoring.V3.GetGroupRequest, global::Google.Cloud.Monitoring.V3.Group>(
        MethodType.Unary,
        __ServiceName,
        "GetGroup",
        __Marshaller_GetGroupRequest,
        __Marshaller_Group);

    static readonly Method<global::Google.Cloud.Monitoring.V3.CreateGroupRequest, global::Google.Cloud.Monitoring.V3.Group> __Method_CreateGroup = new Method<global::Google.Cloud.Monitoring.V3.CreateGroupRequest, global::Google.Cloud.Monitoring.V3.Group>(
        MethodType.Unary,
        __ServiceName,
        "CreateGroup",
        __Marshaller_CreateGroupRequest,
        __Marshaller_Group);

    static readonly Method<global::Google.Cloud.Monitoring.V3.UpdateGroupRequest, global::Google.Cloud.Monitoring.V3.Group> __Method_UpdateGroup = new Method<global::Google.Cloud.Monitoring.V3.UpdateGroupRequest, global::Google.Cloud.Monitoring.V3.Group>(
        MethodType.Unary,
        __ServiceName,
        "UpdateGroup",
        __Marshaller_UpdateGroupRequest,
        __Marshaller_Group);

    static readonly Method<global::Google.Cloud.Monitoring.V3.DeleteGroupRequest, global::Google.Protobuf.WellKnownTypes.Empty> __Method_DeleteGroup = new Method<global::Google.Cloud.Monitoring.V3.DeleteGroupRequest, global::Google.Protobuf.WellKnownTypes.Empty>(
        MethodType.Unary,
        __ServiceName,
        "DeleteGroup",
        __Marshaller_DeleteGroupRequest,
        __Marshaller_Empty);

    static readonly Method<global::Google.Cloud.Monitoring.V3.ListGroupMembersRequest, global::Google.Cloud.Monitoring.V3.ListGroupMembersResponse> __Method_ListGroupMembers = new Method<global::Google.Cloud.Monitoring.V3.ListGroupMembersRequest, global::Google.Cloud.Monitoring.V3.ListGroupMembersResponse>(
        MethodType.Unary,
        __ServiceName,
        "ListGroupMembers",
        __Marshaller_ListGroupMembersRequest,
        __Marshaller_ListGroupMembersResponse);

    /// <summary>Service descriptor</summary>
    public static global::Google.Protobuf.Reflection.ServiceDescriptor Descriptor
    {
      get { return global::Google.Cloud.Monitoring.V3.GroupServiceReflection.Descriptor.Services[0]; }
    }

    /// <summary>Base class for server-side implementations of GroupService</summary>
    // Each virtual defaults to StatusCode.Unimplemented so servers only need to
    // override the RPCs they actually support.
    public abstract class GroupServiceBase
    {
      /// <summary>
      /// Lists the existing groups.
      /// </summary>
      public virtual global::System.Threading.Tasks.Task<global::Google.Cloud.Monitoring.V3.ListGroupsResponse> ListGroups(global::Google.Cloud.Monitoring.V3.ListGroupsRequest request, ServerCallContext context)
      {
        throw new RpcException(new Status(StatusCode.Unimplemented, ""));
      }

      /// <summary>
      /// Gets a single group.
      /// </summary>
      public virtual global::System.Threading.Tasks.Task<global::Google.Cloud.Monitoring.V3.Group> GetGroup(global::Google.Cloud.Monitoring.V3.GetGroupRequest request, ServerCallContext context)
      {
        throw new RpcException(new Status(StatusCode.Unimplemented, ""));
      }

      /// <summary>
      /// Creates a new group.
      /// </summary>
      public virtual global::System.Threading.Tasks.Task<global::Google.Cloud.Monitoring.V3.Group> CreateGroup(global::Google.Cloud.Monitoring.V3.CreateGroupRequest request, ServerCallContext context)
      {
        throw new RpcException(new Status(StatusCode.Unimplemented, ""));
      }

      /// <summary>
      /// Updates an existing group.
      /// You can change any group attributes except `name`.
      /// </summary>
      public virtual global::System.Threading.Tasks.Task<global::Google.Cloud.Monitoring.V3.Group> UpdateGroup(global::Google.Cloud.Monitoring.V3.UpdateGroupRequest request, ServerCallContext context)
      {
        throw new RpcException(new Status(StatusCode.Unimplemented, ""));
      }

      /// <summary>
      /// Deletes an existing group.
      /// </summary>
      public virtual global::System.Threading.Tasks.Task<global::Google.Protobuf.WellKnownTypes.Empty> DeleteGroup(global::Google.Cloud.Monitoring.V3.DeleteGroupRequest request, ServerCallContext context)
      {
        throw new RpcException(new Status(StatusCode.Unimplemented, ""));
      }

      /// <summary>
      /// Lists the monitored resources that are members of a group.
      /// </summary>
      public virtual global::System.Threading.Tasks.Task<global::Google.Cloud.Monitoring.V3.ListGroupMembersResponse> ListGroupMembers(global::Google.Cloud.Monitoring.V3.ListGroupMembersRequest request, ServerCallContext context)
      {
        throw new RpcException(new Status(StatusCode.Unimplemented, ""));
      }
    }

    /// <summary>Client for GroupService</summary>
    // Every RPC is offered as a blocking call and an async call, each with two overloads:
    // one taking individual (headers, deadline, cancellationToken) arguments and one
    // taking a prebuilt CallOptions.
    public class GroupServiceClient : ClientBase<GroupServiceClient>
    {
      /// <summary>Creates a new client for GroupService</summary>
      /// <param name="channel">The channel to use to make remote calls.</param>
      public GroupServiceClient(Channel channel) : base(channel)
      {
      }
      /// <summary>Creates a new client for GroupService that uses a custom <c>CallInvoker</c>.</summary>
      /// <param name="callInvoker">The callInvoker to use to make remote calls.</param>
      public GroupServiceClient(CallInvoker callInvoker) : base(callInvoker)
      {
      }
      /// <summary>Protected parameterless constructor to allow creation of test doubles.</summary>
      protected GroupServiceClient() : base()
      {
      }
      /// <summary>Protected constructor to allow creation of configured clients.</summary>
      /// <param name="configuration">The client configuration.</param>
      protected GroupServiceClient(ClientBaseConfiguration configuration) : base(configuration)
      {
      }

      /// <summary>
      /// Lists the existing groups.
      /// </summary>
      public virtual global::Google.Cloud.Monitoring.V3.ListGroupsResponse ListGroups(global::Google.Cloud.Monitoring.V3.ListGroupsRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return ListGroups(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Lists the existing groups.
      /// </summary>
      public virtual global::Google.Cloud.Monitoring.V3.ListGroupsResponse ListGroups(global::Google.Cloud.Monitoring.V3.ListGroupsRequest request, CallOptions options)
      {
        return CallInvoker.BlockingUnaryCall(__Method_ListGroups, null, options, request);
      }
      /// <summary>
      /// Lists the existing groups.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.Cloud.Monitoring.V3.ListGroupsResponse> ListGroupsAsync(global::Google.Cloud.Monitoring.V3.ListGroupsRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return ListGroupsAsync(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Lists the existing groups.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.Cloud.Monitoring.V3.ListGroupsResponse> ListGroupsAsync(global::Google.Cloud.Monitoring.V3.ListGroupsRequest request, CallOptions options)
      {
        return CallInvoker.AsyncUnaryCall(__Method_ListGroups, null, options, request);
      }
      /// <summary>
      /// Gets a single group.
      /// </summary>
      public virtual global::Google.Cloud.Monitoring.V3.Group GetGroup(global::Google.Cloud.Monitoring.V3.GetGroupRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return GetGroup(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Gets a single group.
      /// </summary>
      public virtual global::Google.Cloud.Monitoring.V3.Group GetGroup(global::Google.Cloud.Monitoring.V3.GetGroupRequest request, CallOptions options)
      {
        return CallInvoker.BlockingUnaryCall(__Method_GetGroup, null, options, request);
      }
      /// <summary>
      /// Gets a single group.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.Cloud.Monitoring.V3.Group> GetGroupAsync(global::Google.Cloud.Monitoring.V3.GetGroupRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return GetGroupAsync(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Gets a single group.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.Cloud.Monitoring.V3.Group> GetGroupAsync(global::Google.Cloud.Monitoring.V3.GetGroupRequest request, CallOptions options)
      {
        return CallInvoker.AsyncUnaryCall(__Method_GetGroup, null, options, request);
      }
      /// <summary>
      /// Creates a new group.
      /// </summary>
      public virtual global::Google.Cloud.Monitoring.V3.Group CreateGroup(global::Google.Cloud.Monitoring.V3.CreateGroupRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return CreateGroup(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Creates a new group.
      /// </summary>
      public virtual global::Google.Cloud.Monitoring.V3.Group CreateGroup(global::Google.Cloud.Monitoring.V3.CreateGroupRequest request, CallOptions options)
      {
        return CallInvoker.BlockingUnaryCall(__Method_CreateGroup, null, options, request);
      }
      /// <summary>
      /// Creates a new group.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.Cloud.Monitoring.V3.Group> CreateGroupAsync(global::Google.Cloud.Monitoring.V3.CreateGroupRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return CreateGroupAsync(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Creates a new group.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.Cloud.Monitoring.V3.Group> CreateGroupAsync(global::Google.Cloud.Monitoring.V3.CreateGroupRequest request, CallOptions options)
      {
        return CallInvoker.AsyncUnaryCall(__Method_CreateGroup, null, options, request);
      }
      /// <summary>
      /// Updates an existing group.
      /// You can change any group attributes except `name`.
      /// </summary>
      public virtual global::Google.Cloud.Monitoring.V3.Group UpdateGroup(global::Google.Cloud.Monitoring.V3.UpdateGroupRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return UpdateGroup(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Updates an existing group.
      /// You can change any group attributes except `name`.
      /// </summary>
      public virtual global::Google.Cloud.Monitoring.V3.Group UpdateGroup(global::Google.Cloud.Monitoring.V3.UpdateGroupRequest request, CallOptions options)
      {
        return CallInvoker.BlockingUnaryCall(__Method_UpdateGroup, null, options, request);
      }
      /// <summary>
      /// Updates an existing group.
      /// You can change any group attributes except `name`.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.Cloud.Monitoring.V3.Group> UpdateGroupAsync(global::Google.Cloud.Monitoring.V3.UpdateGroupRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return UpdateGroupAsync(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Updates an existing group.
      /// You can change any group attributes except `name`.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.Cloud.Monitoring.V3.Group> UpdateGroupAsync(global::Google.Cloud.Monitoring.V3.UpdateGroupRequest request, CallOptions options)
      {
        return CallInvoker.AsyncUnaryCall(__Method_UpdateGroup, null, options, request);
      }
      /// <summary>
      /// Deletes an existing group.
      /// </summary>
      public virtual global::Google.Protobuf.WellKnownTypes.Empty DeleteGroup(global::Google.Cloud.Monitoring.V3.DeleteGroupRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return DeleteGroup(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Deletes an existing group.
      /// </summary>
      public virtual global::Google.Protobuf.WellKnownTypes.Empty DeleteGroup(global::Google.Cloud.Monitoring.V3.DeleteGroupRequest request, CallOptions options)
      {
        return CallInvoker.BlockingUnaryCall(__Method_DeleteGroup, null, options, request);
      }
      /// <summary>
      /// Deletes an existing group.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.Protobuf.WellKnownTypes.Empty> DeleteGroupAsync(global::Google.Cloud.Monitoring.V3.DeleteGroupRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return DeleteGroupAsync(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Deletes an existing group.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.Protobuf.WellKnownTypes.Empty> DeleteGroupAsync(global::Google.Cloud.Monitoring.V3.DeleteGroupRequest request, CallOptions options)
      {
        return CallInvoker.AsyncUnaryCall(__Method_DeleteGroup, null, options, request);
      }
      /// <summary>
      /// Lists the monitored resources that are members of a group.
      /// </summary>
      public virtual global::Google.Cloud.Monitoring.V3.ListGroupMembersResponse ListGroupMembers(global::Google.Cloud.Monitoring.V3.ListGroupMembersRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return ListGroupMembers(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Lists the monitored resources that are members of a group.
      /// </summary>
      public virtual global::Google.Cloud.Monitoring.V3.ListGroupMembersResponse ListGroupMembers(global::Google.Cloud.Monitoring.V3.ListGroupMembersRequest request, CallOptions options)
      {
        return CallInvoker.BlockingUnaryCall(__Method_ListGroupMembers, null, options, request);
      }
      /// <summary>
      /// Lists the monitored resources that are members of a group.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.Cloud.Monitoring.V3.ListGroupMembersResponse> ListGroupMembersAsync(global::Google.Cloud.Monitoring.V3.ListGroupMembersRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return ListGroupMembersAsync(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Lists the monitored resources that are members of a group.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.Cloud.Monitoring.V3.ListGroupMembersResponse> ListGroupMembersAsync(global::Google.Cloud.Monitoring.V3.ListGroupMembersRequest request, CallOptions options)
      {
        return CallInvoker.AsyncUnaryCall(__Method_ListGroupMembers, null, options, request);
      }
      /// <summary>Creates a new instance of client from given <c>ClientBaseConfiguration</c>.</summary>
      protected override GroupServiceClient NewInstance(ClientBaseConfiguration configuration)
      {
        return new GroupServiceClient(configuration);
      }
    }

    /// <summary>Creates service definition that can be registered with a server</summary>
    public static ServerServiceDefinition BindService(GroupServiceBase serviceImpl)
    {
      // Maps each generated method descriptor onto the corresponding handler of the
      // supplied server implementation.
      return ServerServiceDefinition.CreateBuilder()
          .AddMethod(__Method_ListGroups, serviceImpl.ListGroups)
          .AddMethod(__Method_GetGroup, serviceImpl.GetGroup)
          .AddMethod(__Method_CreateGroup, serviceImpl.CreateGroup)
          .AddMethod(__Method_UpdateGroup, serviceImpl.UpdateGroup)
          .AddMethod(__Method_DeleteGroup, serviceImpl.DeleteGroup)
          .AddMethod(__Method_ListGroupMembers, serviceImpl.ListGroupMembers).Build();
    }
  }
}
#endregion
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Microsoft.ApplicationInsights;
using Microsoft.ApplicationInsights.Extensibility;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
namespace Microsoft.Tools.ServiceModel.Svcutil
{
    // Provides the TelemetryClient instance for logging telemetry using AppInsights.
    // Telemetry is opt-out via the DOTNET_SVCUTIL_TELEMETRY_OPTOUT environment variable;
    // when opted out, the wrapped TelemetryClient is null and all Track* calls are no-ops.
    internal class AppInsightsTelemetryClient
    {
        // Application Insights resource key all events are reported against.
        private const string instrumentationKey = "97d0a8a2-1954-4c71-b95d-89df9627dccb";
        internal const string OptOutVariable = "DOTNET_SVCUTIL_TELEMETRY_OPTOUT";
        // Prepended to every event name so events can be filtered by tool.
        private const string eventNamePrefix = "VS/dotnetSvcutil/";
        private const string testModeVariable = "DOTNET_SVCUTIL_TEST_MODE";

        // Tri-state: null until the opt-out variable is first inspected; cached afterwards.
        private static bool? s_isUserOptedIn = null;

        // True unless the opt-out variable is set to a recognized "yes, opt me out" value.
        public static bool IsUserOptedIn
        {
            get
            {
                if (!s_isUserOptedIn.HasValue)
                {
                    string optOut = Environment.GetEnvironmentVariable(OptOutVariable);
                    if (string.IsNullOrEmpty(optOut))
                    {
                        // Unset variable means telemetry stays enabled by default.
                        s_isUserOptedIn = true;
                    }
                    else
                    {
                        // We parse the same values here as the dotnet SDK's opt out.
                        switch (optOut.ToLowerInvariant())
                        {
                            case "true":
                            case "1":
                            case "yes":
                                s_isUserOptedIn = false;
                                break;
                            case "false":
                            case "0":
                            case "no":
                            default:
                                // Unrecognized values also leave telemetry enabled.
                                s_isUserOptedIn = true;
                                break;
                        }
                    }
                }
                return s_isUserOptedIn.Value;
            }
            set
            {
                // NOTE(review): setting this to true after an opted-out instance was created
                // leaves _telemetryClient null, so a later Track* call would throw — confirm
                // callers never flip this after GetInstanceAsync has returned.
                s_isUserOptedIn = value;
            }
        }

        private static readonly object s_lockObj = new object();
        private static AppInsightsTelemetryClient s_instance = null;
        // Null when the user has opted out of telemetry.
        private TelemetryClient _telemetryClient = null;

        private AppInsightsTelemetryClient(TelemetryClient telemetryClient)
        {
            _telemetryClient = telemetryClient;
        }

        // Returns the process-wide singleton, creating and configuring it on first use.
        // On any initialization failure telemetry is disabled for the rest of the process.
        public static async Task<AppInsightsTelemetryClient> GetInstanceAsync(CancellationToken cancellationToken)
        {
            if (s_instance == null)
            {
                try
                {
                    // Test mode puts the channel in developer mode (events sent immediately).
                    if (!bool.TryParse(Environment.GetEnvironmentVariable(testModeVariable), out bool testMode))
                    {
                        testMode = false;
                    }

                    // Double-checked locking. NOTE(review): s_instance is published inside the
                    // lock, but the context below is populated after the lock is released, so a
                    // concurrent caller can observe an instance whose context is not yet fully
                    // initialized — confirm this is acceptable for telemetry accuracy.
                    lock (s_lockObj)
                    {
                        if (s_instance == null)
                        {
                            if (!IsUserOptedIn)
                            {
                                // If the user hasn't opted in return now with a null telemetry client to ensure we don't create any telemetry context.
                                // Note this instance is NOT cached in s_instance, so a later call re-evaluates opt-in.
                                return new AppInsightsTelemetryClient(null);
                            }

                            TelemetryConfiguration config;
                            try
                            {
                                config = TelemetryConfiguration.Active;
                            }
                            catch (InvalidOperationException)
                            {
                                // No ambient configuration available; fall back to a private one.
                                config = new TelemetryConfiguration();
                            }

                            config.TelemetryChannel.DeveloperMode = testMode;
                            s_instance = new AppInsightsTelemetryClient(new TelemetryClient(config));
                        }
                    }

                    var telemetryClient = s_instance._telemetryClient;
                    telemetryClient.InstrumentationKey = instrumentationKey;

                    // Populate context with properties that are common and should be logged for all events.
                    var context = telemetryClient.Context;
                    context.Device.OperatingSystem = GetOperatingSystemString();
#if !NETCORE10
                    // Set the user id to a stable hash of the user's current username. Users with the same username
                    // or those with hash collisions will show up as the same id. So the user id won't be perfectly unique.
                    // However, it will give us some idea of how many different users are using the tool.
                    context.User.Id = GetStableHashCode(Environment.UserName).ToString();
#endif
                    // DebugLogger tracks telemetry when adding exceptions. We pass null for the logger to avoid the possibility of an endless cyclic call if something goes wrong in GetSdkVersionAsync.
                    var sdkVersion = await ProjectPropertyResolver.GetSdkVersionAsync(System.IO.Directory.GetCurrentDirectory(), null /* logger */, cancellationToken).ConfigureAwait(false);
                    context.Properties["SvcUtil.Version"] = Tool.PackageVersion;
                    context.Properties["Dotnet.Version"] = string.IsNullOrEmpty(sdkVersion) ? "unknown" : sdkVersion;
                    context.Properties["TestMode"] = testMode.ToString();
                }
                catch (Exception ex)
                {
#if DEBUG
                    ToolConsole.WriteWarning(ex.Message);
#endif
                    // Any failure (including cancellation) silently disables telemetry; the
                    // returned s_instance may be null in this case.
                    s_isUserOptedIn = false;
                }
            }

            return s_instance;
        }

        // This is copied from the 32 bit implementation from String.GetHashCode.
        // It's a stable string hashing algorithm so it won't change with each run of the tool.
        // Processes the string as pairs of 32-bit ints (4 chars per iteration), with a
        // one-int tail step. NOTE(review): for lengths not a multiple of 4 the int reads
        // cover the string's trailing terminator, as in the framework implementation this
        // was copied from — confirm before reusing on non-string buffers.
        private static int GetStableHashCode(string str)
        {
            unsafe
            {
                fixed (char* src = str)
                {
                    int hash1 = (5381 << 16) + 5381;
                    int hash2 = hash1;

                    int* pint = (int*)src;
                    int len = str.Length;
                    while (len > 2)
                    {
                        hash1 = ((hash1 << 5) + hash1 + (hash1 >> 27)) ^ pint[0];
                        hash2 = ((hash2 << 5) + hash2 + (hash2 >> 27)) ^ pint[1];
                        pint += 2;
                        len -= 4;
                    }

                    if (len > 0)
                    {
                        hash1 = ((hash1 << 5) + hash1 + (hash1 >> 27)) ^ pint[0];
                    }

                    return hash1 + (hash2 * 1566083941);
                }
            }
        }

        // Maps the current OS platform to the label reported in the telemetry context.
        private static string GetOperatingSystemString()
        {
            if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            {
                return "Windows";
            }
            else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
            {
                return "macOS";
            }
            else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
            {
                return "Linux";
            }
            else
            {
                return "Unknown";
            }
        }

        // Reports a named event and flushes immediately; no-op when opted out.
        public void TrackEvent(string eventName)
        {
            if (IsUserOptedIn)
            {
                _telemetryClient.TrackEvent(eventNamePrefix + eventName);
                _telemetryClient.Flush();
            }
        }

        // Reports a named event with extra properties and flushes immediately; no-op when opted out.
        public void TrackEvent(string eventName, Dictionary<string, string> properties)
        {
            if (IsUserOptedIn)
            {
                _telemetryClient.TrackEvent(eventNamePrefix + eventName, properties);
                _telemetryClient.Flush();
            }
        }

        // Reports an exception as an error event.
        public void TrackError(string eventName, Exception exceptionObject)
        {
            this.TrackError(eventName, exceptionObject.ToString());
        }

        // Reports an error event carrying the exception text in the "ExceptionString" property.
        public void TrackError(string eventName, string exceptionString)
        {
            if (IsUserOptedIn)
            {
                var properties = new Dictionary<string, string>();
                properties.Add("ExceptionString", exceptionString);
                _telemetryClient.TrackEvent(eventNamePrefix + eventName, properties);
                _telemetryClient.Flush();
            }
        }
    }
}
| |
/*
* @(#)SpecialFunctions.cs 3.0.0 2016-05-07
*
* You may use this software under the condition of "Simplified BSD License"
*
* Copyright 2010-2016 MARIUSZ GROMADA. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY <MARIUSZ GROMADA> ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are those of the
* authors and should not be interpreted as representing official policies, either expressed
* or implied, of MARIUSZ GROMADA.
*
* Some parts of the SpecialFunctions class were adopted from Math.NET Numerics project
* Copyright (c) 2002-2015 Math.NET http://numerics.mathdotnet.com/
* http://numerics.mathdotnet.com/License.html
*
* If you have any questions/bugs feel free to contact:
*
* Mariusz Gromada
* [email protected]
* http://mathparser.org
* http://mathspace.pl
* http://janetsudoku.mariuszgromada.org
* http://github.com/mariuszgromada/MathParser.org-mXparser
* http://mariuszgromada.github.io/MathParser.org-mXparser
* http://mxparser.sourceforge.net
* http://bitbucket.org/mariuszgromada/mxparser
* http://mxparser.codeplex.com
* http://github.com/mariuszgromada/Janet-Sudoku
* http://janetsudoku.codeplex.com
* http://sourceforge.net/projects/janetsudoku
* http://bitbucket.org/mariuszgromada/janet-sudoku
* http://github.com/mariuszgromada/MathParser.org-mXparser
*
* Asked if he believes in one God, a mathematician answered:
* "Yes, up to isomorphism."
*/
using System;
namespace org.mariuszgromada.math.mxparser.mathcollection {
/**
* SpecialFunctions - special (non-elementary functions).
*
* @author <b>Mariusz Gromada</b><br>
* <a href="mailto:[email protected]">[email protected]</a><br>
* <a href="http://mathspace.pl" target="_blank">MathSpace.pl</a><br>
* <a href="http://mathparser.org" target="_blank">MathParser.org - mXparser project page</a><br>
* <a href="http://github.com/mariuszgromada/MathParser.org-mXparser" target="_blank">mXparser on GitHub</a><br>
* <a href="http://mxparser.sourceforge.net" target="_blank">mXparser on SourceForge</a><br>
* <a href="http://bitbucket.org/mariuszgromada/mxparser" target="_blank">mXparser on Bitbucket</a><br>
* <a href="http://mxparser.codeplex.com" target="_blank">mXparser on CodePlex</a><br>
* <a href="http://janetsudoku.mariuszgromada.org" target="_blank">Janet Sudoku - project web page</a><br>
* <a href="http://github.com/mariuszgromada/Janet-Sudoku" target="_blank">Janet Sudoku on GitHub</a><br>
* <a href="http://janetsudoku.codeplex.com" target="_blank">Janet Sudoku on CodePlex</a><br>
* <a href="http://sourceforge.net/projects/janetsudoku" target="_blank">Janet Sudoku on SourceForge</a><br>
* <a href="http://bitbucket.org/mariuszgromada/janet-sudoku" target="_blank">Janet Sudoku on BitBucket</a><br>
*
* @version 3.0.0
*/
[CLSCompliant(true)]
public sealed class SpecialFunctions {
	/**
	 * Exponential integral function Ei(x).
	 *
	 * Dispatches to one of three evaluation strategies depending on the
	 * argument range: a continued fraction for x &lt; -5 and x &gt;= 50,
	 * a power series near the origin, and an argument-addition series
	 * for the intermediate range.
	 *
	 * @param x Point at which function will be evaluated.
	 * @return Exponential integral function Ei(x)
	 */
	public static double exponentialIntegralEi(double x) {
		if (Double.IsNaN(x))
			return Double.NaN;
		if (x < -5.0)
			return continuedFractionEi(x);
		if (x == 0.0)
			return -Double.MaxValue; // Ei has a logarithmic singularity at 0; approximate -infinity
		if (x < 6.8)
			return powerSeriesEi(x);
		if (x < 50.0)
			return argumentAdditionSeriesEi(x);
		return continuedFractionEi(x);
	}
	/**
	 * Constants for Exponential integral function Ei(x) calculation.
	 * EI_DBL_EPSILON is the spacing of doubles at 1.0 (machine epsilon);
	 * EI_EPSILON is the relative-error target of the convergence loops below.
	 */
	private static readonly double EI_DBL_EPSILON = MathFunctions.ulp(1.0);
	private static readonly double EI_EPSILON = 10.0 * EI_DBL_EPSILON;
	/**
	 * Supporting function while Exponential integral function Ei(x) calculation.
	 * Evaluates the continued-fraction expansion of Ei; the convergents
	 * A/B are rescaled whenever |B| grows above 1 to avoid overflow.
	 */
	private static double continuedFractionEi(double x) {
		double Am1 = 1.0;
		double A0 = 0.0;
		double Bm1 = 0.0;
		double B0 = 1.0;
		double a = Math.Exp(x);
		double b = -x + 1.0;
		double Ap1 = b * A0 + a * Am1;
		double Bp1 = b * B0 + a * Bm1;
		int j = 1;
		a = 1.0;
		// Iterate until two successive convergents agree to relative EI_EPSILON.
		while (Math.Abs(Ap1 * B0 - A0 * Bp1) > EI_EPSILON * Math.Abs(A0 * Bp1)) {
			if (Math.Abs(Bp1) > 1.0) {
				// Rescale by Bp1 to keep the recurrence numerically bounded.
				Am1 = A0 / Bp1;
				A0 = Ap1 / Bp1;
				Bm1 = B0 / Bp1;
				B0 = 1.0;
			} else {
				Am1 = A0;
				A0 = Ap1;
				Bm1 = B0;
				B0 = Bp1;
			}
			a = -j * j;
			b += 2.0;
			Ap1 = b * A0 + a * Am1;
			Bp1 = b * B0 + a * Bm1;
			j += 1;
		}
		return (-Ap1 / Bp1);
	}
	/**
	 * Supporting function while Exponential integral function Ei(x) calculation.
	 * Power-series expansion around 0:
	 * Ei(x) = gamma + ln|x| - exp(x) * sum_n ( H_n * (-x)^n / n! ) form,
	 * where hsum accumulates the harmonic numbers H_n.
	 */
	private static double powerSeriesEi(double x) {
		double xn = -x;
		double Sn = -x;
		double Sm1 = 0.0;
		double hsum = 1.0;
		double g = MathConstants.EULER_MASCHERONI;
		double y = 1.0;
		double factorial = 1.0;
		if (x == 0.0)
			return -Double.MaxValue; // singularity at 0; approximate -infinity
		// Accumulate terms until the partial sums stabilize to relative EI_EPSILON.
		while (Math.Abs(Sn - Sm1) > EI_EPSILON * Math.Abs(Sm1)) {
			Sm1 = Sn;
			y += 1.0;
			xn *= (-x);
			factorial *= y;
			hsum += (1.0 / y);
			Sn += hsum * xn / factorial;
		}
		return (g + Math.Log(Math.Abs(x)) - Math.Exp(x) * Sn);
	}
	/**
	 * Supporting function while Exponential integral function Ei(x) calculation.
	 * Argument-addition series: writes x = k + dx with k = round(x) and expands
	 * around the tabulated value Coefficients.EI[k - 7].
	 * NOTE: only valid for the caller's range 6.8 &lt;= x &lt; 50, which keeps
	 * k - 7 a valid index into the EI table.
	 */
	private static double argumentAdditionSeriesEi(double x) {
		int k = (int)(x + 0.5);
		int j = 0;
		double xx = k;
		double dx = x - xx;
		double xxj = xx;
		double edx = Math.Exp(dx);
		double Sm = 1.0;
		double Sn = (edx - 1.0) / xxj;
		double term = Double.MaxValue;
		double factorial = 1.0;
		double dxj = 1.0;
		while (Math.Abs(term) > EI_EPSILON * Math.Abs(Sn)) {
			j++;
			factorial *= j;
			xxj *= xx;
			dxj *= (-dx);
			Sm += (dxj / factorial);
			term = (factorial * (edx * Sm - 1.0)) / xxj;
			Sn += term;
		}
		return Coefficients.EI[k - 7] + Sn * Math.Exp(xx);
	}
	/**
	 * Logarithmic integral function li(x) = Ei(ln x).
	 * @param x Point at which function will be evaluated.
	 * @return Logarithmic integral function li(x); NaN for x &lt; 0.
	 */
	public static double logarithmicIntegralLi(double x) {
		if (Double.IsNaN(x))
			return Double.NaN;
		if (x < 0)
			return Double.NaN;
		if (x == 0)
			return 0;
		if (x == 2)
			return MathConstants.LI2; // exact tabulated value at the offset point
		return exponentialIntegralEi(MathFunctions.ln(x));
	}
	/**
	 * Offset logarithmic integral function Li(x) = li(x) - li(2).
	 * @param x Point at which function will be evaluated.
	 * @return Offset logarithmic integral function Li(x); NaN for x &lt; 0.
	 */
	public static double offsetLogarithmicIntegralLi(double x) {
		if (Double.IsNaN(x))
			return Double.NaN;
		if (x < 0)
			return Double.NaN;
		if (x == 0)
			return -MathConstants.LI2;
		return logarithmicIntegralLi(x) - MathConstants.LI2;
	}
	/**
	 * Calculates the error function.
	 * @param x Point at which function will be evaluated.
	 * @return Error function erf(x), in [-1, 1].
	 */
	public static double erf(double x) {
		if (Double.IsNaN(x)) return Double.NaN;
		if (x == 0) return 0;
		if (x == Double.PositiveInfinity) return 1.0;
		if (x == Double.NegativeInfinity) return -1.0;
		return erfImp(x, false);
	}
	/**
	 * Calculates the complementary error function erfc(x) = 1 - erf(x).
	 * @param x Point at which function will be evaluated.
	 * @return Complementary error function erfc(x), in [0, 2].
	 */
	public static double erfc(double x) {
		if (Double.IsNaN(x)) return Double.NaN;
		if (x == 0) return 1;
		if (x == Double.PositiveInfinity) return 0.0;
		if (x == Double.NegativeInfinity) return 2.0;
		return erfImp(x, true);
	}
	/**
	 * Calculates the inverse error function evaluated at x.
	 * @param x Point at which function will be evaluated, expected in [-1, 1].
	 * @return Inverse error function erfInv(x); +/-infinity at the endpoints.
	 */
	public static double erfInv(double x) {
		if (x == 0.0) return 0;
		if (x >= 1.0) return Double.PositiveInfinity;
		if (x <= -1.0) return Double.NegativeInfinity;
		double p, q, s;
		// Exploit the odd symmetry erfInv(-x) = -erfInv(x): work with p = |x|
		// and restore the sign via s.
		if (x < 0) {
			p = -x;
			q = 1 - p;
			s = -1;
		} else {
			p = x;
			q = 1 - x;
			s = 1;
		}
		return erfInvImpl(p, q, s);
	}
	/**
	 * Shared implementation of erf(z) (invert == false) and erfc(z)
	 * (invert == true), based on piecewise rational approximations.
	 * @param z Point at which the function is evaluated.
	 * @param invert Whether the complementary value is requested.
	 * @return erf(z) or erfc(z) depending on invert.
	 */
	private static double erfImp(double z, bool invert) {
		if (z < 0) {
			// Reduce negative arguments via erf(-z) = -erf(z) and
			// erfc(-z) = 2 - erfc(z).
			if (!invert) return -erfImp(-z, false);
			if (z < -0.5) return 2 - erfImp(-z, true);
			return 1 + erfImp(-z, false);
		}
		double result;
		if (z < 0.5) {
			// Small arguments: direct rational approximation of erf.
			if (z < 1e-10) result = (z * 1.125) + (z * 0.003379167095512573896158903121545171688);
			else result = (z * 1.125) + (z * Evaluate.polynomial(z, Coefficients.erfImpAn) / Evaluate.polynomial(z, Coefficients.erfImpAd));
		} else if (z < 110) {
			// FIX: the original condition "(z < 110) || ((z < 110) && invert)"
			// is logically identical to "z < 110" (dead subexpression removed).
			// Compute erfc here, then flip via invert at the end.
			invert = !invert;
			double r, b;
			if (z < 0.75) {
				r = Evaluate.polynomial(z - 0.5, Coefficients.erfImpBn) / Evaluate.polynomial(z - 0.5, Coefficients.erfImpBd);
				b = 0.3440242112F;
			} else if (z < 1.25) {
				r = Evaluate.polynomial(z - 0.75, Coefficients.erfImpCn) / Evaluate.polynomial(z - 0.75, Coefficients.erfImpCd);
				b = 0.419990927F;
			} else if (z < 2.25) {
				r = Evaluate.polynomial(z - 1.25, Coefficients.erfImpDn) / Evaluate.polynomial(z - 1.25, Coefficients.erfImpDd);
				b = 0.4898625016F;
			} else if (z < 3.5) {
				r = Evaluate.polynomial(z - 2.25, Coefficients.erfImpEn) / Evaluate.polynomial(z - 2.25, Coefficients.erfImpEd);
				b = 0.5317370892F;
			} else if (z < 5.25) {
				r = Evaluate.polynomial(z - 3.5, Coefficients.erfImpFn) / Evaluate.polynomial(z - 3.5, Coefficients.erfImpFd);
				b = 0.5489973426F;
			} else if (z < 8) {
				r = Evaluate.polynomial(z - 5.25, Coefficients.erfImpGn) / Evaluate.polynomial(z - 5.25, Coefficients.erfImpGd);
				b = 0.5571740866F;
			} else if (z < 11.5) {
				r = Evaluate.polynomial(z - 8, Coefficients.erfImpHn) / Evaluate.polynomial(z - 8, Coefficients.erfImpHd);
				b = 0.5609807968F;
			} else if (z < 17) {
				r = Evaluate.polynomial(z - 11.5, Coefficients.erfImpIn) / Evaluate.polynomial(z - 11.5, Coefficients.erfImpId);
				b = 0.5626493692F;
			} else if (z < 24) {
				r = Evaluate.polynomial(z - 17, Coefficients.erfImpJn) / Evaluate.polynomial(z - 17, Coefficients.erfImpJd);
				b = 0.5634598136F;
			} else if (z < 38) {
				r = Evaluate.polynomial(z - 24, Coefficients.erfImpKn) / Evaluate.polynomial(z - 24, Coefficients.erfImpKd);
				b = 0.5638477802F;
			} else if (z < 60) {
				r = Evaluate.polynomial(z - 38, Coefficients.erfImpLn) / Evaluate.polynomial(z - 38, Coefficients.erfImpLd);
				b = 0.5640528202F;
			} else if (z < 85) {
				r = Evaluate.polynomial(z - 60, Coefficients.erfImpMn) / Evaluate.polynomial(z - 60, Coefficients.erfImpMd);
				b = 0.5641309023F;
			} else {
				r = Evaluate.polynomial(z - 85, Coefficients.erfImpNn) / Evaluate.polynomial(z - 85, Coefficients.erfImpNd);
				b = 0.5641584396F;
			}
			double g = MathFunctions.exp(-z * z) / z;
			result = (g * b) + (g * r);
		} else {
			// z >= 110: erfc underflows to 0; erf saturates to 1 via the flip below.
			result = 0;
			invert = !invert;
		}
		if (invert) result = 1 - result;
		return result;
	}
	/**
	 * Calculates the inverse of the complementary error function evaluated at z.
	 * @param z Point at which function will be evaluated, expected in [0, 2].
	 * @return erfcInv(z); +/-infinity at the endpoints.
	 */
	public static double erfcInv(double z) {
		if (z <= 0.0) return Double.PositiveInfinity;
		if (z >= 2.0) return Double.NegativeInfinity;
		double p, q, s;
		// Map onto the same (p, q, s) parametrization used by erfInv.
		if (z > 1) {
			q = 2 - z;
			p = 1 - q;
			s = -1;
		} else {
			p = 1 - z;
			q = z;
			s = 1;
		}
		return erfInvImpl(p, q, s);
	}
	/**
	 * Shared implementation of the inverse error function, with
	 * p = |erf value|, q = 1 - p, and s = sign of the result.
	 * Uses piecewise rational approximations selected by the magnitude of p
	 * (equivalently by x = sqrt(-ln q) for values near the tails).
	 * @param p Argument magnitude.
	 * @param q Complement 1 - p.
	 * @param s Sign of the result (+1 or -1).
	 * @return s * erfInv(p).
	 */
	private static double erfInvImpl(double p, double q, double s) {
		double result;
		if (p <= 0.5) {
			float y = 0.0891314744949340820313f;
			double g = p * (p + 10);
			double r = Evaluate.polynomial(p, Coefficients.ervInvImpAn) / Evaluate.polynomial(p, Coefficients.ervInvImpAd);
			result = (g * y) + (g * r);
		} else if (q >= 0.25) {
			float y = 2.249481201171875f;
			double g = MathFunctions.sqrt(-2 * MathFunctions.ln(q));
			double xs = q - 0.25;
			double r = Evaluate.polynomial(xs, Coefficients.ervInvImpBn) / Evaluate.polynomial(xs, Coefficients.ervInvImpBd);
			result = g / (y + r);
		} else {
			// Tail region: select the approximation interval by x = sqrt(-ln q).
			double x = MathFunctions.sqrt(-MathFunctions.ln(q));
			if (x < 3) {
				float y = 0.807220458984375f;
				double xs = x - 1.125;
				double r = Evaluate.polynomial(xs, Coefficients.ervInvImpCn) / Evaluate.polynomial(xs, Coefficients.ervInvImpCd);
				result = (y * x) + (r * x);
			} else if (x < 6) {
				float y = 0.93995571136474609375f;
				double xs = x - 3;
				double r = Evaluate.polynomial(xs, Coefficients.ervInvImpDn) / Evaluate.polynomial(xs, Coefficients.ervInvImpDd);
				result = (y * x) + (r * x);
			} else if (x < 18) {
				float y = 0.98362827301025390625f;
				double xs = x - 6;
				double r = Evaluate.polynomial(xs, Coefficients.ervInvImpEn) / Evaluate.polynomial(xs, Coefficients.ervInvImpEd);
				result = (y * x) + (r * x);
			} else if (x < 44) {
				float y = 0.99714565277099609375f;
				double xs = x - 18;
				double r = Evaluate.polynomial(xs, Coefficients.ervInvImpFn) / Evaluate.polynomial(xs, Coefficients.ervInvImpFd);
				result = (y * x) + (r * x);
			} else {
				float y = 0.99941349029541015625f;
				double xs = x - 44;
				double r = Evaluate.polynomial(xs, Coefficients.ervInvImpGn) / Evaluate.polynomial(xs, Coefficients.ervInvImpGd);
				result = (y * x) + (r * x);
			}
		}
		return s * result;
	}
}
}
| |
/*
MIT License
Copyright (c) 2017 Saied Zarrinmehr
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Shapes;
using SpatialAnalysis.Events;
namespace SpatialAnalysis.Data.Visualization
{
/// <summary>
/// Interaction logic for SpatialDataControlPanel.xaml
/// </summary>
public partial class SpatialDataControlPanel : Window
{
    /// <summary>The property editor currently hosted in the right-hand grid cell, if any.</summary>
    SpatialDataPropertySetting _dataPropertySetter;
    /// <summary>The main document to which this window belongs.</summary>
    OSMDocument _host;
    /// <summary>
    /// Keeps the owner window's state (minimized/maximized/normal) in sync with this window.
    /// </summary>
    protected override void OnStateChanged(EventArgs e)
    {
        base.OnStateChanged(e);
        // BUG FIX: Owner is null until the window is shown with an owner assigned;
        // dereferencing it unconditionally throws a NullReferenceException.
        if (this.Owner != null)
        {
            this.Owner.WindowState = this.WindowState;
        }
    }
    /// <summary>
    /// Initializes a new instance of the <see cref="SpatialDataControlPanel"/> class.
    /// </summary>
    /// <param name="host">The main document to which this window belongs.</param>
    /// <param name="types">The types of data to include.</param>
    public SpatialDataControlPanel(OSMDocument host, IncludedDataTypes types)
    {
        InitializeComponent();
        this._host = host;
        switch (types)
        {
            case IncludedDataTypes.SpatialData:
                this.onlySpatialData(this._host.cellularFloor.AllSpatialDataFields);
                break;
            case IncludedDataTypes.SpatialDataAndActivities:
                this.SpatialDataAndActivities();
                break;
            case IncludedDataTypes.All:
                this.anyData();
                break;
        }
        this._grid.SizeChanged += _grid_SizeChanged;
    }
    /// <summary>
    /// Keeps the hosted property editor sized to the grid cell it occupies.
    /// </summary>
    void _grid_SizeChanged(object sender, SizeChangedEventArgs e)
    {
        if (this._dataPropertySetter != null)
        {
            // 5-pixel margin inside the hosting cell.
            this._dataPropertySetter.Width = this._grid.ColumnDefinitions[1].ActualWidth - 5;
            this._dataPropertySetter.Height = this._grid.RowDefinitions[0].ActualHeight - 5;
        }
    }
    /// <summary>
    /// Adds a bold, upper-cased category header to the data-name list.
    /// </summary>
    /// <param name="title">Category title; upper-cased before display.</param>
    private void addCategoryHeader(string title)
    {
        this._dataNames.Items.Add(new TextBlock()
        {
            Text = title.ToUpper(),
            FontSize = 13,
            FontWeight = FontWeights.DemiBold,
        });
    }
    /// <summary>
    /// Adds the "Spatial Data" header followed by the spatial data fields in <paramref name="data"/>.
    /// </summary>
    /// <param name="data">Spatial data fields keyed by name.</param>
    private void addSpatialDataItems(Dictionary<string, SpatialDataField> data)
    {
        this.addCategoryHeader("Spatial Data");
        foreach (var item in data.Values)
        {
            SpatialDataField spatialData = item as SpatialDataField;
            if (spatialData != null)
            {
                this._dataNames.Items.Add(spatialData);
            }
        }
    }
    /// <summary>
    /// Adds the "Activity" header and all activities, if any exist on the host document.
    /// </summary>
    private void addActivityItems()
    {
        if (this._host.AllActivities.Count > 0)
        {
            this.addCategoryHeader("Activity");
            foreach (var item in this._host.AllActivities.Values)
            {
                this._dataNames.Items.Add(item);
            }
        }
    }
    /// <summary>
    /// Populates the list with spatial data fields only.
    /// </summary>
    /// <param name="data">Spatial data fields keyed by name.</param>
    private void onlySpatialData(Dictionary<string, SpatialDataField> data)
    {
        this.addSpatialDataItems(data);
        this._dataNames.SelectionChanged += new SelectionChangedEventHandler(_dataNames_SelectionChanged);
    }
    /// <summary>
    /// Populates the list with spatial data fields, activities, occupancy events,
    /// and simulation results (each group only when non-empty).
    /// </summary>
    private void anyData()
    {
        this.addSpatialDataItems(this._host.cellularFloor.AllSpatialDataFields);
        this.addActivityItems();
        if (this._host.AllOccupancyEvent.Count > 0)
        {
            this.addCategoryHeader("Occupancy Events");
            foreach (var item in this._host.AllOccupancyEvent.Values)
            {
                this._dataNames.Items.Add(item);
            }
        }
        if (this._host.AllSimulationResults.Count > 0)
        {
            this.addCategoryHeader("Simulation Results");
            foreach (var item in this._host.AllSimulationResults.Values)
            {
                this._dataNames.Items.Add(item);
            }
        }
        this._dataNames.SelectionChanged += new SelectionChangedEventHandler(_dataNames_SelectionChanged);
    }
    /// <summary>
    /// Populates the list with spatial data fields and activities.
    /// </summary>
    private void SpatialDataAndActivities()
    {
        this.addSpatialDataItems(this._host.cellularFloor.AllSpatialDataFields);
        this.addActivityItems();
        this._dataNames.SelectionChanged += new SelectionChangedEventHandler(_dataNames_SelectionChanged);
    }
    /// <summary>
    /// Replaces the hosted property editor with one for the newly selected data item.
    /// Header rows (TextBlocks) are not selectable and reset the selection.
    /// </summary>
    void _dataNames_SelectionChanged(object sender, SelectionChangedEventArgs e)
    {
        ISpatialData spatialDataField = ((ListBox)sender).SelectedItem as ISpatialData;
        if (spatialDataField == null)
        {
            // A category header (TextBlock) was clicked; clear the selection.
            ((ListBox)sender).SelectedIndex = -1;
            return;
        }
        if (this._dataPropertySetter != null)
        {
            if (this._grid.Children.Contains(this._dataPropertySetter))
            {
                this._grid.Children.Remove(this._dataPropertySetter);
                this._dataPropertySetter = null;
            }
        }
        this._dataPropertySetter = new SpatialDataPropertySetting(this._host, spatialDataField);
        this._grid.Children.Add(this._dataPropertySetter);
        Grid.SetColumn(this._dataPropertySetter, 1);
        Grid.SetRow(this._dataPropertySetter, 0);
        this._dataPropertySetter.Width = this._grid.ColumnDefinitions[1].ActualWidth - 5;
        this._dataPropertySetter.Height = this._grid.RowDefinitions[0].ActualHeight - 5;
    }
    /// <summary>
    /// Closes the window. Wired to the close button in XAML.
    /// </summary>
    private void _close_Click(object sender, RoutedEventArgs e)
    {
        this.Close();
    }
}
/// <summary>
/// Selects which categories of data the <c>SpatialDataControlPanel</c> lists.
/// </summary>
public enum IncludedDataTypes
{
    /// <summary>
    /// Only spatial data fields are listed.
    /// </summary>
    SpatialData = 0,
    /// <summary>
    /// Spatial data fields and activities are listed.
    /// </summary>
    SpatialDataAndActivities = 1,
    /// <summary>
    /// Everything: spatial data, activities, occupancy events, and simulation results.
    /// </summary>
    All = 2,
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading;
using Microsoft.Extensions.Logging;
using Orleans.Runtime.Scheduler;
namespace Orleans.Runtime.GrainDirectory
{
internal class AdaptiveDirectoryCacheMaintainer<TValue> : DedicatedAsynchAgent
{
    // Period between maintenance passes; longer under a debugger so breakpoints
    // do not race the loop.
    private static readonly TimeSpan SLEEP_TIME_BETWEEN_REFRESHES = Debugger.IsAttached ? TimeSpan.FromMinutes(5) : TimeSpan.FromMinutes(1); // this should be something like minTTL/4
    // The cache this agent maintains.
    private readonly AdaptiveGrainDirectoryCache<TValue> cache;
    // Local directory, used to resolve grain owners and schedule refresh calls.
    private readonly LocalGrainDirectory router;
    // Converts a refreshed activation list into the cache's stored value type.
    private readonly Func<List<ActivationAddress>, TValue> updateFunc;
    private readonly IInternalGrainFactory grainFactory;
    private long lastNumAccesses; // for stats
    private long lastNumHits; // for stats
    // Constructs the maintainer; restarts itself if the background run faults.
    internal AdaptiveDirectoryCacheMaintainer(
        LocalGrainDirectory router,
        AdaptiveGrainDirectoryCache<TValue> cache,
        Func<List<ActivationAddress>, TValue> updateFunc,
        IInternalGrainFactory grainFactory,
        ExecutorService executorService,
        ILoggerFactory loggerFactory)
        :base(executorService, loggerFactory)
    {
        this.updateFunc = updateFunc;
        this.grainFactory = grainFactory;
        this.router = router;
        this.cache = cache;
        lastNumAccesses = 0;
        lastNumHits = 0;
        OnFault = FaultBehavior.RestartOnFault;
    }
    // Background maintenance loop; runs until the router shuts down.
    protected override void Run()
    {
        while (router.Running)
        {
            // Run through all cache entries and do the following:
            // 1. If the entry is not expired, skip it
            // 2. If the entry is expired and was not accessed in the last time interval -- throw it away
            // 3. If the entry is expired and was accessed in the last time interval, put into "fetch-batch-requests" list
            // At the end of the process, fetch batch requests for entries that need to be refreshed
            // Upon receiving refreshing answers, if the entry was not changed, double its expiration timer.
            // If it was changed, update the cache and reset the expiration timer.
            // this dictionary holds a map between a silo address and the list of grains that need to be refreshed
            var fetchInBatchList = new Dictionary<SiloAddress, List<GrainId>>();
            // get the list of cached grains
            // for debug only: cnt1 = self-owned removed, cnt2 = kept (fresh),
            // cnt3 = removed (expired/deleted), cnt4 = queued for refresh
            int cnt1 = 0, cnt2 = 0, cnt3 = 0, cnt4 = 0;
            // run through all cache entries
            var enumerator = cache.GetStoredEntries();
            while (enumerator.MoveNext())
            {
                var pair = enumerator.Current;
                GrainId grain = pair.Key;
                var entry = pair.Value;
                SiloAddress owner = router.CalculateGrainDirectoryPartition(grain);
                if (owner == null) // Null means there's no other silo and we're shutting down, so skip this entry
                {
                    continue;
                }
                if (owner.Equals(router.MyAddress))
                {
                    // we found our owned entry in the cache -- it is not supposed to happen unless there were
                    // changes in the membership
                    // NOTE(review): named placeholders {grain}/{owner} differ from the
                    // {0}-style format strings used elsewhere in this class — confirm the
                    // logger accepts message templates here.
                    Log.Warn(ErrorCode.Runtime_Error_100185, "Grain {grain} owned by {owner} was found in the cache of {owner}", grain, owner, owner);
                    cache.Remove(grain);
                    cnt1++; // for debug
                }
                else
                {
                    if (entry == null)
                    {
                        // 0. If the entry was deleted in parallel, presumably due to cleanup after silo death
                        cache.Remove(grain); // for debug
                        cnt3++;
                    }
                    else if (!entry.IsExpired())
                    {
                        // 1. If the entry is not expired, skip it
                        cnt2++; // for debug
                    }
                    else if (entry.NumAccesses == 0)
                    {
                        // 2. If the entry is expired and was not accessed in the last time interval -- throw it away
                        cache.Remove(grain); // for debug
                        cnt3++;
                    }
                    else
                    {
                        // 3. If the entry is expired and was accessed in the last time interval, put into "fetch-batch-requests" list
                        if (!fetchInBatchList.ContainsKey(owner))
                        {
                            fetchInBatchList[owner] = new List<GrainId>();
                        }
                        fetchInBatchList[owner].Add(grain);
                        // And reset the entry's access count for next time
                        entry.NumAccesses = 0;
                        cnt4++; // for debug
                    }
                }
            }
            if (Log.IsEnabled(LogLevel.Trace)) Log.Trace("Silo {0} self-owned (and removed) {1}, kept {2}, removed {3} and tries to refresh {4} grains", router.MyAddress, cnt1, cnt2, cnt3, cnt4);
            // send batch requests
            SendBatchCacheRefreshRequests(fetchInBatchList);
            ProduceStats();
            // recheck every X seconds (Consider making it a configurable parameter)
            Thread.Sleep(SLEEP_TIME_BETWEEN_REFRESHES);
        }
    }
    // Sends one LookUpMany request per owning silo for the grains whose cached
    // entries expired but were still being accessed. Responses are processed
    // asynchronously on the cache validator's scheduling context.
    private void SendBatchCacheRefreshRequests(Dictionary<SiloAddress, List<GrainId>> refreshRequests)
    {
        foreach (SiloAddress silo in refreshRequests.Keys)
        {
            List<Tuple<GrainId, int>> cachedGrainAndETagList = BuildGrainAndETagList(refreshRequests[silo]);
            // Capture the loop variable for the async continuation below.
            SiloAddress capture = silo;
            router.CacheValidationsSent.Increment();
            // Send all of the items in one large request
            var validator = this.grainFactory.GetSystemTarget<IRemoteGrainDirectory>(Constants.DirectoryCacheValidatorId, capture);
            router.Scheduler.QueueTask(async () =>
            {
                var response = await validator.LookUpMany(cachedGrainAndETagList);
                ProcessCacheRefreshResponse(capture, response);
            }, router.CacheValidator.SchedulingContext).Ignore();
            if (Log.IsEnabled(LogLevel.Trace)) Log.Trace("Silo {0} is sending request to silo {1} with {2} entries", router.MyAddress, silo, cachedGrainAndETagList.Count);
        }
    }
    // Applies a batch refresh response: updates changed entries, removes entries
    // the remote silo no longer owns, and re-freshens unchanged ones.
    private void ProcessCacheRefreshResponse(
        SiloAddress silo,
        IReadOnlyCollection<Tuple<GrainId, int, List<ActivationAddress>>> refreshResponse)
    {
        if (Log.IsEnabled(LogLevel.Trace)) Log.Trace("Silo {0} received ProcessCacheRefreshResponse. #Response entries {1}.", router.MyAddress, refreshResponse.Count);
        // cnt1 = updated, cnt2 = removed, cnt3 = unchanged (marked fresh)
        int cnt1 = 0, cnt2 = 0, cnt3 = 0;
        // pass through returned results and update the cache if needed
        foreach (Tuple<GrainId, int, List<ActivationAddress>> tuple in refreshResponse)
        {
            if (tuple.Item3 != null)
            {
                // the server returned an updated entry
                var updated = updateFunc(tuple.Item3);
                cache.AddOrUpdate(tuple.Item1, updated, tuple.Item2);
                cnt1++;
            }
            else if (tuple.Item2 == -1)
            {
                // The server indicates that it does not own the grain anymore.
                // It could be that by now, the cache has been already updated and contains an entry received from another server (i.e., current owner for the grain).
                // For simplicity, we do not care about this corner case and simply remove the cache entry.
                cache.Remove(tuple.Item1);
                cnt2++;
            }
            else
            {
                // The server returned only a (not -1) generation number, indicating that we hold the most
                // updated copy of the grain's activations list.
                // Validate that the generation number in the request and the response are equal
                // Contract.Assert(tuple.Item2 == refreshRequest.Find(o => o.Item1 == tuple.Item1).Item2);
                // refresh the entry in the cache
                cache.MarkAsFresh(tuple.Item1);
                cnt3++;
            }
        }
        if (Log.IsEnabled(LogLevel.Trace)) Log.Trace("Silo {0} processed refresh response from {1} with {2} updated, {3} removed, {4} unchanged grains", router.MyAddress, silo, cnt1, cnt2, cnt3);
    }
    /// <summary>
    /// Gets the list of grains (all owned by the same silo) and produces a new list
    /// of tuples, where each tuple holds the grain and its generation counter currently stored in the cache
    /// </summary>
    /// <param name="grains">List of grains owned by the same silo</param>
    /// <returns>List of grains in input along with their generation counters stored in the cache </returns>
    private List<Tuple<GrainId, int>> BuildGrainAndETagList(IEnumerable<GrainId> grains)
    {
        var grainAndETagList = new List<Tuple<GrainId, int>>();
        foreach (GrainId grain in grains)
        {
            // NOTE: should this be done with TryGet? Won't Get invoke the LRU getter function?
            AdaptiveGrainDirectoryCache<TValue>.GrainDirectoryCacheEntry entry = cache.Get(grain);
            if (entry != null)
            {
                grainAndETagList.Add(new Tuple<GrainId, int>(grain, entry.ETag));
            }
            else
            {
                // this may happen only if the LRU cache is full and decided to drop this grain
                // while we try to refresh it
                Log.Warn(ErrorCode.Runtime_Error_100199, "Grain {0} disappeared from the cache during maintenance", grain);
            }
        }
        return grainAndETagList;
    }
    // Logs the hit ratio of the cache over the interval since the previous call.
    private void ProduceStats()
    {
        // We do not want to synchronize the access on numAccess and numHits in cache to avoid performance issues.
        // Thus we take the current reading of these fields and calculate the stats. We might miss an access or two,
        // but it should not be matter.
        long curNumAccesses = cache.NumAccesses;
        long curNumHits = cache.NumHits;
        long numAccesses = curNumAccesses - lastNumAccesses;
        long numHits = curNumHits - lastNumHits;
        // Math.Max(..., 0.00001) guards against division by zero when there were no accesses.
        if (Log.IsEnabled(LogLevel.Trace)) Log.Trace("#accesses: {0}, hit-ratio: {1}%", numAccesses, (numHits / Math.Max(numAccesses, 0.00001)) * 100);
        lastNumAccesses = curNumAccesses;
        lastNumHits = curNumHits;
    }
}
}
| |
// Copyright (c) ppy Pty Ltd <[email protected]>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Threading;
using ManagedBass;
using ManagedBass.Fx;
using ManagedBass.Mix;
using NUnit.Framework;
using osu.Framework.Audio.Mixing.Bass;
using osu.Framework.Audio.Sample;
using osu.Framework.Audio.Track;
using osu.Framework.Extensions;
namespace osu.Framework.Tests.Audio
{
[TestFixture]
public class BassAudioMixerTest
{
    // Fresh BASS test harness, track, and sample are created per test in Setup.
    private BassTestComponents bass;
    private TrackBass track;
    private SampleBass sample;
    [SetUp]
    public void Setup()
    {
        bass = new BassTestComponents();
        track = bass.GetTrack();
        sample = bass.GetSample();
        bass.Update();
    }
    [TearDown]
    public void Teardown()
    {
        bass?.Dispose();
    }
    // The mixer must have a valid BASS handle once constructed.
    [Test]
    public void TestMixerInitialised()
    {
        Assert.That(bass.Mixer.Handle, Is.Not.Zero);
    }
    // New channels are routed to the global mixer without explicit Add().
    [Test]
    public void TestAddedToGlobalMixerByDefault()
    {
        Assert.That(BassMix.ChannelGetMixer(getHandle()), Is.EqualTo(bass.Mixer.Handle));
    }
    // Removing a channel from the global mixer is a no-op; it stays attached.
    [Test]
    public void TestCannotBeRemovedFromGlobalMixer()
    {
        bass.Mixer.Remove(track);
        bass.Update();
        Assert.That(BassMix.ChannelGetMixer(getHandle()), Is.EqualTo(bass.Mixer.Handle));
    }
    // Adding a track to another mixer re-routes it; adding back restores it.
    [Test]
    public void TestTrackIsMovedBetweenMixers()
    {
        var secondMixer = bass.CreateMixer();
        secondMixer.Add(track);
        bass.Update();
        Assert.That(BassMix.ChannelGetMixer(getHandle()), Is.EqualTo(secondMixer.Handle));
        bass.Mixer.Add(track);
        bass.Update();
        Assert.That(BassMix.ChannelGetMixer(getHandle()), Is.EqualTo(bass.Mixer.Handle));
    }
    // A track removed from a secondary mixer falls back to the global mixer.
    [Test]
    public void TestMovedToGlobalMixerWhenRemovedFromMixer()
    {
        var secondMixer = bass.CreateMixer();
        secondMixer.Add(track);
        secondMixer.Remove(track);
        bass.Update();
        Assert.That(BassMix.ChannelGetMixer(getHandle()), Is.EqualTo(bass.Mixer.Handle));
    }
    // Virtual tracks (no backing handle) must be safe to add/remove.
    [Test]
    public void TestVirtualTrackCanBeAddedAndRemoved()
    {
        var secondMixer = bass.CreateMixer();
        var virtualTrack = bass.TrackStore.GetVirtual();
        secondMixer.Add(virtualTrack);
        bass.Update();
        secondMixer.Remove(virtualTrack);
        bass.Update();
    }
    // Disposing a track detaches its channel from any mixer.
    [Test]
    public void TestFreedChannelRemovedFromDefault()
    {
        track.Dispose();
        bass.Update();
        Assert.That(BassMix.ChannelGetMixer(getHandle()), Is.Zero);
    }
    // Disposing a secondary mixer returns its channels to the global mixer.
    [Test]
    public void TestChannelMovedToGlobalMixerAfterDispose()
    {
        var secondMixer = bass.CreateMixer();
        secondMixer.Add(track);
        bass.Update();
        secondMixer.Dispose();
        bass.Update();
        Assert.That(BassMix.ChannelGetMixer(getHandle()), Is.EqualTo(bass.Mixer.Handle));
    }
    // Start/stop transitions are reflected in IsRunning, and a track near its
    // end stops on its own once playback completes.
    [Test]
    public void TestPlayPauseStop()
    {
        Assert.That(!track.IsRunning);
        bass.RunOnAudioThread(() => track.Start());
        bass.Update();
        Assert.That(track.IsRunning);
        bass.RunOnAudioThread(() => track.Stop());
        bass.Update();
        Assert.That(!track.IsRunning);
        bass.RunOnAudioThread(() =>
        {
            track.Seek(track.Length - 1000);
            track.Start();
        });
        bass.Update();
        // Poll (with updates) until the track naturally reaches its end.
        Assert.That(() =>
        {
            bass.Update();
            return !track.IsRunning;
        }, Is.True.After(3000));
    }
    // Moving a playing track to another mixer must not interrupt playback.
    [Test]
    public void TestChannelRetainsPlayingStateWhenMovedBetweenMixers()
    {
        var secondMixer = bass.CreateMixer();
        secondMixer.Add(track);
        bass.Update();
        Assert.That(!track.IsRunning);
        bass.RunOnAudioThread(() => track.Start());
        bass.Update();
        Assert.That(track.IsRunning);
        bass.Mixer.Add(track);
        bass.Update();
        Assert.That(track.IsRunning);
    }
    // A disposed track must become garbage-collectable (no lingering references).
    [Test]
    public void TestTrackReferenceLostWhenTrackIsDisposed()
    {
        var trackReference = testDisposeTrackWithoutReference();
        // The first update disposes the track, the second one removes the track from the TrackStore.
        bass.Update();
        bass.Update();
        GC.Collect();
        GC.WaitForPendingFinalizers();
        Assert.That(!trackReference.TryGetTarget(out _));
    }
    // Disposes the track in a separate method so no local variable in the test
    // body keeps it alive during GC.
    private WeakReference<TrackBass> testDisposeTrackWithoutReference()
    {
        var weakRef = new WeakReference<TrackBass>(track);
        track.Dispose();
        track = null;
        return weakRef;
    }
    // A disposed sample channel must become garbage-collectable.
    [Test]
    public void TestSampleChannelReferenceLostWhenSampleChannelIsDisposed()
    {
        var channelReference = runTest(sample);
        // The first update disposes the channel, the second one removes it from the sample store.
        bass.Update();
        bass.Update();
        GC.Collect();
        GC.WaitForPendingFinalizers();
        Assert.That(!channelReference.TryGetTarget(out _));
        // Static local function so the channel is not captured by the test body.
        static WeakReference<SampleChannel> runTest(SampleBass sample)
        {
            var channel = sample.GetChannel();
            channel.Play(); // Creates the handle/adds to mixer.
            channel.Stop();
            channel.Dispose();
            return new WeakReference<SampleChannel>(channel);
        }
    }
    // Effects added to the mixer's list are applied with matching parameters.
    [Test]
    public void TestAddEffect()
    {
        bass.Mixer.Effects.Add(new BQFParameters());
        assertEffectParameters();
        bass.Mixer.Effects.AddRange(new[]
        {
            new BQFParameters(),
            new BQFParameters(),
            new BQFParameters()
        });
        assertEffectParameters();
    }
    // Removing effects (from the front and from the middle) keeps the active
    // effects consistent with the list.
    [Test]
    public void TestRemoveEffect()
    {
        bass.Mixer.Effects.Add(new BQFParameters());
        assertEffectParameters();
        bass.Mixer.Effects.RemoveAt(0);
        assertEffectParameters();
        bass.Mixer.Effects.AddRange(new[]
        {
            new BQFParameters(),
            new BQFParameters(),
            new BQFParameters()
        });
        assertEffectParameters();
        bass.Mixer.Effects.RemoveAt(1);
        assertEffectParameters();
        bass.Mixer.Effects.RemoveAt(1);
        assertEffectParameters();
    }
    // Reordering effects keeps active effects in sync.
    [Test]
    public void TestMoveEffect()
    {
        bass.Mixer.Effects.AddRange(new[]
        {
            new BQFParameters(),
            new BQFParameters(),
            new BQFParameters()
        });
        assertEffectParameters();
        bass.Mixer.Effects.Move(0, 1);
        assertEffectParameters();
        bass.Mixer.Effects.Move(2, 0);
        assertEffectParameters();
    }
    // Replacing an effect in-place keeps active effects in sync.
    [Test]
    public void TestReplaceEffect()
    {
        bass.Mixer.Effects.AddRange(new[]
        {
            new BQFParameters(),
            new BQFParameters(),
            new BQFParameters()
        });
        assertEffectParameters();
        bass.Mixer.Effects[1] = new BQFParameters();
        assertEffectParameters();
    }
    // Inserting effects (middle and end) keeps active effects in sync.
    [Test]
    public void TestInsertEffect()
    {
        bass.Mixer.Effects.AddRange(new[]
        {
            new BQFParameters(),
            new BQFParameters()
        });
        assertEffectParameters();
        bass.Mixer.Effects.Insert(1, new BQFParameters());
        assertEffectParameters();
        bass.Mixer.Effects.Insert(3, new BQFParameters());
        assertEffectParameters();
    }
    // After reaching the end, seeking backwards must not resume playback.
    [Test]
    public void TestChannelDoesNotPlayIfReachedEndAndSeekedBackwards()
    {
        bass.RunOnAudioThread(() =>
        {
            track.Seek(track.Length - 1);
            track.Start();
        });
        Thread.Sleep(50);
        bass.Update();
        Assert.That(bass.Mixer.ChannelIsActive(track), Is.Not.EqualTo(PlaybackState.Playing));
        bass.RunOnAudioThread(() => track.SeekAsync(0).WaitSafely());
        bass.Update();
        Assert.That(bass.Mixer.ChannelIsActive(track), Is.Not.EqualTo(PlaybackState.Playing));
    }
    // After reaching the end, moving the track to another mixer must not resume playback.
    [Test]
    public void TestChannelDoesNotPlayIfReachedEndAndMovedMixers()
    {
        bass.RunOnAudioThread(() =>
        {
            track.Seek(track.Length - 1);
            track.Start();
        });
        Thread.Sleep(50);
        bass.Update();
        Assert.That(bass.Mixer.ChannelIsActive(track), Is.Not.EqualTo(PlaybackState.Playing));
        var secondMixer = bass.CreateMixer();
        secondMixer.Add(track);
        bass.Update();
        Assert.That(secondMixer.ChannelIsActive(track), Is.Not.EqualTo(PlaybackState.Playing));
    }
    // Asserts that the mixer's applied (active) effects mirror the Effects list,
    // with priorities descending in list order (priority == -index).
    private void assertEffectParameters()
    {
        bass.Update();
        Assert.That(bass.Mixer.ActiveEffects.Count, Is.EqualTo(bass.Mixer.Effects.Count));
        Assert.Multiple(() =>
        {
            for (int i = 0; i < bass.Mixer.ActiveEffects.Count; i++)
            {
                Assert.That(bass.Mixer.ActiveEffects[i].Effect, Is.EqualTo(bass.Mixer.Effects[i]));
                Assert.That(bass.Mixer.ActiveEffects[i].Priority, Is.EqualTo(-i));
            }
        });
    }
    // Raw BASS handle of the test track.
    private int getHandle() => ((IBassAudioChannel)track).Handle;
}
}
| |
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Copyright (c) Microsoft Corporation. All rights reserved.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
using System;
using System.IO;
using System.Text;
namespace FileSystemTest
{
public class Open_FM_FA : IMFTestInterface
{
[SetUp]
public InitializeResult Initialize()
{
    // These tests rely on underlying file system so we need to make
    // sure we can format it before we start the tests. If we can't
    // format it, then we assume there is no FS to test on this platform.
    // delete the directory DOTNETMF_FS_EMULATION
    try
    {
        // Format/prepare the test volume, then run the tests from a
        // dedicated working directory so they do not disturb other tests.
        IOTests.IntializeVolume();
        Directory.CreateDirectory(testDir);
        Directory.SetCurrentDirectory(testDir);
    }
    catch (Exception ex)
    {
        // Any failure here means no usable file system on this platform;
        // skip the whole fixture rather than fail it.
        Log.Comment("Skipping: Unable to initialize file system" + ex.Message);
        return InitializeResult.Skip;
    }
    return InitializeResult.ReadyToGo;
}
[TearDown]
public void CleanUp()
{
    // Intentionally empty: TestMethod deletes the file it creates before returning.
}
#region Local vars
// NOTE(review): this const is shadowed by a local `fileName` ("TestFile")
// declared inside TestMethod, so it appears unused there — confirm intent.
private const string fileName = "file1.tmp";
// NOTE(review): not referenced anywhere in this class as far as visible here.
private const string file2Name = "file2.txt";
// Working directory created and entered by Initialize().
private const string testDir = "Open_FM_FA";
#endregion Local vars
#region Helper Methods
/// <summary>
/// Exercises File.Open for a single FileMode/FileAccess combination:
/// first against a non-existent target file, then against an existing one,
/// checking the expected success or exception for null, empty and valid paths.
/// </summary>
/// <returns>MFTestResults.Pass when no sub-test recorded an error; otherwise Fail.</returns>
private MFTestResults TestMethod(FileMode fm, FileAccess fa)
{
    Log.Comment("Starting tests in FileMode: " + fm.ToString() + " with FileAccess: " + fa.ToString());
    int iCountErrors = 0;
    // NOTE(review): this local shadows the class-level const fileName ("file1.tmp").
    String fileName = "TestFile";
    StreamWriter sw2;
    FileStream fs2;
    String str2;
    if (File.Exists(fileName))
        File.Delete(fileName);
    // ---- Phase 1: target file does not exist. ----
    Log.Comment("File does not exist");
    //------------------------------------------------------------------
    // NOTE(review): several catch blocks below assign iCountErrors = 0 when the
    // expected exception is observed; that assignment also erases errors recorded
    // by EARLIER sub-tests within the same case, potentially masking failures.
    // Consider incrementing on failure only and never resetting — confirm before changing.
    switch (fm)
    {
        case FileMode.CreateNew:
        case FileMode.Create:
        case FileMode.OpenOrCreate:
            // A null path must be rejected with ArgumentException.
            try
            {
                Log.Comment( "null path" );
                iCountErrors = 0; // ZeligBUG not resetting the value here leads to uninit iCountErrors value
                fs2 = File.Open( null, fm, fa );
                if (!File.Exists(fileName))
                {
                    iCountErrors++;
                    Log.Exception("File not created, FileMode==" + fm.ToString());
                }
                fs2.Close();
            }
            catch (ArgumentException aexc)
            {
                Log.Comment("Caught expected exception, aexc==" + aexc.Message);
                iCountErrors = 0;
            }
            catch (Exception exc)
            {
                iCountErrors = 1;
                Log.Exception("Incorrect exception thrown, exc==" + exc);
            }
            // An empty path must be rejected with ArgumentException.
            try
            {
                Log.Comment("string empty path");
                fs2 = File.Open("", fm, fa);
                if (!File.Exists(fileName))
                {
                    iCountErrors++;
                    Log.Exception("File not created, FileMode==" + fm.ToString());
                }
                fs2.Close();
            }
            catch (ArgumentException aexc)
            {
                iCountErrors = 0;
                Log.Comment( "Caught expected exception, aexc==" + aexc.Message );
            }
            catch (Exception exc)
            {
                iCountErrors++;
                Log.Exception("Incorrect exception thrown, exc==" + exc);
            }
            // A valid path must create the file; Create/CreateNew combined with
            // FileAccess.Read is treated as an invalid combination.
            try
            {
                Log.Comment("string:" + fileName);
                fs2 = File.Open(fileName, fm, fa);
                if (!File.Exists(fileName))
                {
                    iCountErrors++;
                    Log.Exception("File not created, FileMode==" + fm.ToString());
                }
                fs2.Close();
            }
            catch (ArgumentException aexc)
            {
                if ((fm == FileMode.Create && fa == FileAccess.Read) || (fm == FileMode.CreateNew && fa == FileAccess.Read))
                {
                    iCountErrors = 0;
                    Log.Comment( "Caught expected exception, aexc==" + aexc.Message );
                }
                else
                {
                    iCountErrors++;
                    Log.Exception("Unexpected exception, aexc==" + aexc);
                }
            }
            catch (Exception exc)
            {
                iCountErrors++;
                Log.Exception("Incorrect exception thrown, exc==" + exc);
            }
            break;
        case FileMode.Open:
        case FileMode.Truncate:
            // Opening a missing file must fail: IOException (or ArgumentException
            // for invalid paths/combinations).
            try
            {
                Log.Comment( "null path" );
                iCountErrors = 0; // ZeligBUG not resetting the value here leads to uninit iCountErrors value
                fs2 = File.Open( null, fm, fa );
                iCountErrors++;
                Log.Exception("Expected exception not thrown");
                fs2.Close();
            }
            catch (IOException fexc)
            {
                iCountErrors = 0;
                Log.Comment( "Caught expected exception, fexc==" + fexc.Message );
            }
            catch (ArgumentException aexc)
            {
                iCountErrors = 0;
                Log.Comment( "Caught expected exception, aexc==" + aexc.Message );
            }
            catch (Exception exc)
            {
                iCountErrors = 1;
                Log.Exception("Incorrect exception thrown, exc==" + exc.ToString());
            }
            try
            {
                Log.Comment("string empty path");
                fs2 = File.Open("", fm, fa);
                iCountErrors++;
                Log.Exception("Expected exception not thrown");
                fs2.Close();
            }
            catch (IOException fexc)
            {
                iCountErrors = 0;
                Log.Comment( "Caught expected exception, fexc==" + fexc.Message );
            }
            catch (ArgumentException aexc)
            {
                iCountErrors = 0;
                Log.Comment( "Caught expected exception, aexc==" + aexc.Message );
            }
            catch (Exception exc)
            {
                iCountErrors++;
                Log.Exception("Incorrect exception thrown, exc==" + exc.ToString());
            }
            try
            {
                fs2 = File.Open(fileName, fm, fa);
                iCountErrors++;
                Log.Exception("Expected exception not thrown");
                fs2.Close();
            }
            catch (IOException fexc)
            {
                iCountErrors = 0;
                Log.Comment( "Caught expected exception, fexc==" + fexc.Message );
            }
            catch (ArgumentException aexc)
            {
                // Truncate + Read is itself an invalid combination.
                if(fa == FileAccess.Read)
                {
                    iCountErrors = 0;
                    Log.Comment( "Caught expected exception, aexc==" + aexc.Message );
                }
                else
                {
                    iCountErrors++;
                    Log.Exception( "Unexpected exception thrown, aexc==" + aexc );
                }
            }
            catch (Exception exc)
            {
                iCountErrors++;
                Log.Exception("Incorrect exception thrown, exc==" + exc.ToString());
            }
            break;
        case FileMode.Append:
            // Append is only valid with write access: Write succeeds and creates
            // the file; any other access must raise ArgumentException.
            if (fa == FileAccess.Write)
            {
                Log.Comment( "FileAccess.Write" );
                iCountErrors = 0; // ZeligBUG not resetting the value here leads to uninit iCountErrors value
                fs2 = File.Open( fileName, fm, fa );
                if (!File.Exists(fileName))
                {
                    iCountErrors = 1;
                    Log.Exception("File not created");
                }
                fs2.Close();
            }
            else
            {
                try
                {
                    Log.Comment( "FileAccess != Write" );
                    iCountErrors = 0; // ZeligBUG not resetting the value here leads to uninit iCountErrors value
                    fs2 = File.Open( fileName, fm, fa );
                    iCountErrors++;
                    Log.Exception("Expected exception not thrown");
                    fs2.Close();
                }
                catch (ArgumentException aexc)
                {
                    iCountErrors = 0;
                    Log.Comment( "Caught expected exception, aexc==" + aexc.Message );
                }
                catch (Exception exc)
                {
                    iCountErrors = 1;
                    Log.Exception("Incorrect exception thrown, exc==" + exc.ToString());
                }
            }
            break;
        default:
            iCountErrors = 1;
            Log.Exception("Invalid FileMode.");
            break;
    }
    if (File.Exists(fileName))
        File.Delete(fileName);
    if(iCountErrors > 0) return MFTestResults.Fail;
    //------------------------------------------------------------------
    // ---- Phase 2: target file exists with known content. ----
    Log.Comment("File already exists");
    //------------------------------------------------------------------
    sw2 = new StreamWriter(fileName);
    str2 = "Du er en ape";
    sw2.Write(str2);
    sw2.Close();
    switch (fm)
    {
        case FileMode.CreateNew:
            // CreateNew over an existing file must fail (IOException); invalid
            // paths still raise ArgumentException.
            try
            {
                fs2 = File.Open(null, fm, fa);
                iCountErrors++;
                Log.Exception("Expected exception not thrown");
                fs2.Close();
            }
            catch (ArgumentException aexc)
            {
                iCountErrors = 0;
                Log.Comment( "Caught expected exception, aexc==" + aexc.Message );
            }
            catch (IOException aexc)
            {
                iCountErrors = 0;
                Log.Comment( "Caught expected exception, aexc==" + aexc.Message );
            }
            catch (Exception exc)
            {
                iCountErrors++;
                Log.Exception("Incorrect exception thrown, exc==" + exc.ToString());
            }
            try
            {
                fs2 = File.Open("", fm, fa);
                iCountErrors++;
                Log.Exception("Expected exception not thrown");
                fs2.Close();
            }
            catch (ArgumentException aexc)
            {
                iCountErrors = 0;
                Log.Comment( "Caught expected exception, aexc==" + aexc.Message );
            }
            catch (IOException aexc)
            {
                iCountErrors = 0;
                Log.Comment( "Caught expected exception, aexc==" + aexc.Message );
            }
            catch (Exception exc)
            {
                iCountErrors++;
                Log.Exception("Incorrect exception thrown, exc==" + exc.ToString());
            }
            try
            {
                fs2 = File.Open(fileName, fm, fa);
                iCountErrors++;
                Log.Exception("Expected exception not thrown");
                fs2.Close();
            }
            catch (ArgumentException aexc)
            {
                if (fa == FileAccess.Read)
                {
                    iCountErrors = 0;
                    Log.Comment( "Caught expected exception, aexc==" + aexc.Message );
                }
                else
                {
                    iCountErrors++;
                    Log.Exception("Unexpected exception, aexc==" + aexc);
                }
            }
            catch (IOException aexc)
            {
                iCountErrors = 0;
                Log.Comment( "Caught expected exception, aexc==" + aexc.Message );
            }
            catch (Exception exc)
            {
                iCountErrors++;
                Log.Exception("Incorrect exception thrown, exc==" + exc.ToString());
            }
            break;
        case FileMode.Create:
            // Create over an existing file must truncate it to zero length.
            try
            {
                fs2 = File.Open(fileName, fm, fa);
                if (fs2.Length != 0)
                {
                    iCountErrors++;
                    Log.Exception("Incorrect length of file==" + fs2.Length);
                }
                fs2.Close();
            }
            catch (ArgumentException aexc)
            {
                if (fa == FileAccess.Read)
                {
                    iCountErrors = 0;
                    Log.Comment( "Caught expected exception, aexc==" + aexc.Message );
                }
                else
                {
                    iCountErrors++;
                    Log.Exception("Unexpected exception, aexc==" + aexc);
                }
            }
            catch (Exception exc)
            {
                iCountErrors++;
                Log.Exception("Incorrect exception thrown, exc==" + exc.ToString());
            }
            break;
        case FileMode.OpenOrCreate:
        case FileMode.Open:
            // Both modes must open the existing file with its content intact.
            iCountErrors = 0;
            fs2 = File.Open(fileName, fm, fa);
            if (fs2.Length != str2.Length)
            {
                iCountErrors++;
                Log.Exception("Incorrect length on file==" + fs2.Length);
            }
            fs2.Close();
            break;
        case FileMode.Truncate:
            // Truncate + Read is invalid; otherwise the file must end up empty.
            if (fa == FileAccess.Read)
            {
                try
                {
                    fs2 = File.Open(fileName, fm, fa);
                    iCountErrors++;
                    Log.Exception("Expected exception not thrown");
                }
                catch (ArgumentException iexc)
                {
                    iCountErrors = 0;
                    Log.Comment( "Caught expected exception, iexc==" + iexc.Message );
                }
                catch (Exception exc)
                {
                    iCountErrors++;
                    Log.Exception("Incorrect exception thrown, exc==" + exc.ToString());
                }
            }
            else
            {
                fs2 = File.Open(fileName, fm, fa);
                iCountErrors = 0;
                if(fs2.Length != 0)
                {
                    iCountErrors++;
                    Log.Exception("Incorrect length on file==" + fs2.Length);
                }
                fs2.Close();
            }
            break;
        case FileMode.Append:
            // Same contract as phase 1: only FileAccess.Write is valid.
            if (fa == FileAccess.Write)
            {
                iCountErrors = 0;
                fs2 = File.Open( fileName, fm, fa );
                if (!File.Exists(fileName))
                {
                    iCountErrors++;
                    Log.Exception("File not created");
                }
                fs2.Close();
            }
            else
            {
                try
                {
                    fs2 = File.Open(fileName, fm, fa);
                    iCountErrors++;
                    Log.Exception("Expected exception not thrown");
                    fs2.Close();
                }
                catch (ArgumentException aexc)
                {
                    iCountErrors = 0;
                    Log.Comment( "Caught expected exception, aexc==" + aexc.Message );
                }
                catch (Exception exc)
                {
                    iCountErrors++;
                    Log.Exception("Incorrect exception thrown, exc==" + exc.ToString());
                }
            }
            break;
        default:
            iCountErrors++;
            Log.Exception("Invalid file mode");
            break;
    }
    return iCountErrors == 0 ? MFTestResults.Pass : MFTestResults.Fail;
}
#endregion Helper Methods
#region Test Cases
// One [TestMethod] wrapper per FileMode/FileAccess combination; each delegates
// to the TestMethod helper with the corresponding pair.
[TestMethod]
public MFTestResults FileMode_CreateNew_FileAccess_Read()
{
    return TestMethod(FileMode.CreateNew, FileAccess.Read);
}
[TestMethod]
public MFTestResults FileMode_CreateNew_FileAccess_Write()
{
    return TestMethod(FileMode.CreateNew, FileAccess.Write);
}
[TestMethod]
public MFTestResults FileMode_CreateNew_FileAccess_ReadWrite()
{
    return TestMethod(FileMode.CreateNew, FileAccess.ReadWrite);
}
[TestMethod]
public MFTestResults FileMode_Create_FileAccess_Read()
{
    return TestMethod(FileMode.Create, FileAccess.Read);
}
[TestMethod]
public MFTestResults FileMode_Create_FileAccess_Write()
{
    return TestMethod(FileMode.Create, FileAccess.Write);
}
[TestMethod]
public MFTestResults FileMode_Create_FileAccess_ReadWrite()
{
    return TestMethod(FileMode.Create, FileAccess.ReadWrite);
}
[TestMethod]
public MFTestResults FileMode_Open_FileAccess_Read()
{
    return TestMethod(FileMode.Open, FileAccess.Read);
}
[TestMethod]
public MFTestResults FileMode_Open_FileAccess_Write()
{
    return TestMethod(FileMode.Open, FileAccess.Write);
}
[TestMethod]
public MFTestResults FileMode_Open_FileAccess_ReadWrite()
{
    return TestMethod(FileMode.Open, FileAccess.ReadWrite);
}
[TestMethod]
public MFTestResults FileMode_OpenOrCreate_FileAccess_Read()
{
    return TestMethod(FileMode.OpenOrCreate, FileAccess.Read);
}
[TestMethod]
public MFTestResults FileMode_OpenOrCreate_FileAccess_Write()
{
    return TestMethod(FileMode.OpenOrCreate, FileAccess.Write);
}
[TestMethod]
public MFTestResults FileMode_OpenOrCreate_FileAccess_ReadWrite()
{
    return TestMethod(FileMode.OpenOrCreate, FileAccess.ReadWrite);
}
[TestMethod]
public MFTestResults FileMode_Truncate_FileAccess_Read()
{
    return TestMethod(FileMode.Truncate, FileAccess.Read);
}
[TestMethod]
public MFTestResults FileMode_Truncate_FileAccess_Write()
{
    return TestMethod(FileMode.Truncate, FileAccess.Write);
}
[TestMethod]
public MFTestResults FileMode_Truncate_FileAccess_ReadWrite()
{
    return TestMethod(FileMode.Truncate, FileAccess.ReadWrite);
}
[TestMethod]
public MFTestResults FileMode_Append_FileAccess_Read()
{
    return TestMethod(FileMode.Append, FileAccess.Read);
}
[TestMethod]
public MFTestResults FileMode_Append_FileAccess_Write()
{
    return TestMethod(FileMode.Append, FileAccess.Write);
}
[TestMethod]
public MFTestResults FileMode_Append_FileAccess_ReadWrite()
{
    return TestMethod(FileMode.Append, FileAccess.ReadWrite);
}
#endregion Test Cases
// Enumerates every test wrapper with its display name for the MF test harness.
public MFTestMethod[] Tests
{
    get
    {
        return new MFTestMethod[]
        {
            new MFTestMethod( FileMode_CreateNew_FileAccess_Read, "FileMode_CreateNew_FileAccess_Read" ),
            new MFTestMethod( FileMode_CreateNew_FileAccess_Write, "FileMode_CreateNew_FileAccess_Write" ),
            new MFTestMethod( FileMode_CreateNew_FileAccess_ReadWrite, "FileMode_CreateNew_FileAccess_ReadWrite" ),
            new MFTestMethod( FileMode_Create_FileAccess_Read, "FileMode_Create_FileAccess_Read" ),
            new MFTestMethod( FileMode_Create_FileAccess_Write, "FileMode_Create_FileAccess_Write" ),
            new MFTestMethod( FileMode_Create_FileAccess_ReadWrite, "FileMode_Create_FileAccess_ReadWrite" ),
            new MFTestMethod( FileMode_Open_FileAccess_Read, "FileMode_Open_FileAccess_Read" ),
            new MFTestMethod( FileMode_Open_FileAccess_Write, "FileMode_Open_FileAccess_Write" ),
            new MFTestMethod( FileMode_Open_FileAccess_ReadWrite, "FileMode_Open_FileAccess_ReadWrite" ),
            new MFTestMethod( FileMode_OpenOrCreate_FileAccess_Read, "FileMode_OpenOrCreate_FileAccess_Read" ),
            new MFTestMethod( FileMode_OpenOrCreate_FileAccess_Write, "FileMode_OpenOrCreate_FileAccess_Write" ),
            new MFTestMethod( FileMode_OpenOrCreate_FileAccess_ReadWrite, "FileMode_OpenOrCreate_FileAccess_ReadWrite" ),
            new MFTestMethod( FileMode_Truncate_FileAccess_Read, "FileMode_Truncate_FileAccess_Read" ),
            new MFTestMethod( FileMode_Truncate_FileAccess_Write, "FileMode_Truncate_FileAccess_Write" ),
            new MFTestMethod( FileMode_Truncate_FileAccess_ReadWrite, "FileMode_Truncate_FileAccess_ReadWrite" ),
            new MFTestMethod( FileMode_Append_FileAccess_Read, "FileMode_Append_FileAccess_Read" ),
            new MFTestMethod( FileMode_Append_FileAccess_Write, "FileMode_Append_FileAccess_Write" ),
            new MFTestMethod( FileMode_Append_FileAccess_ReadWrite, "FileMode_Append_FileAccess_ReadWrite" ),
        };
    }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.IO.PortsTests;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Legacy.Support;
using Xunit;
using Xunit.NetCore.Extensions;
namespace System.IO.Ports.Tests
{
public class Write_char_int_int_generic : PortsTest
{
//Set bounds for random timeout values.
//If the min is too low, write will not time out accurately and the testcase will fail
private const int minRandomTimeout = 250;

//If the max is too large then the testcase will take forever to run
private const int maxRandomTimeout = 2000;

//If the percentage difference between the expected timeout and the actual timeout
//found through Stopwatch is greater than this fraction then the timeout value was not
//correctly applied to the write method and the testcase fails.
//NOTE(review): the original comment said 10% but the threshold below is .15 (15%).
private static double s_maxPercentageDifference = .15;

//The char size used when verifying exceptions that write will throw
private const int CHAR_SIZE_EXCEPTION = 4;

//The char size used when verifying timeout
private const int CHAR_SIZE_TIMEOUT = 4;

//The char size used when verifying BytesToWrite
private const int CHAR_SIZE_BYTES_TO_WRITE = 4;

//The char size used when verifying Handshake
private const int CHAR_SIZE_HANDSHAKE = 8;

//Number of timed write attempts averaged by VerifyTimeout
private const int NUM_TRYS = 5;
#region Test Cases
[Fact]
public void WriteWithoutOpen()
{
    // Write on a never-opened port must throw InvalidOperationException.
    using (SerialPort com = new SerialPort())
    {
        Debug.WriteLine("Verifying write method throws exception without a call to Open()");

        VerifyWriteException(com, typeof(InvalidOperationException));
    }
}
[ConditionalFact(nameof(HasOneSerialPort))]
public void WriteAfterFailedOpen()
{
    using (SerialPort com = new SerialPort("BAD_PORT_NAME"))
    {
        Debug.WriteLine("Verifying write method throws exception with a failed call to Open()");

        //Since the PortName is set to a bad port name Open will throw an exception,
        //however we don't care what it is since we are verifying a write method
        Assert.ThrowsAny<Exception>(() => com.Open());

        VerifyWriteException(com, typeof(InvalidOperationException));
    }
}
[ConditionalFact(nameof(HasOneSerialPort))]
public void WriteAfterClose()
{
    // Open and then close the port: Write must throw InvalidOperationException
    // once the port is no longer open.
    using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        // Fixed typo in the diagnostic message: "Cloes()" -> "Close()".
        Debug.WriteLine("Verifying write method throws exception after a call to Close()");
        com.Open();
        com.Close();

        VerifyWriteException(com, typeof(InvalidOperationException));
    }
}
[ConditionalFact(nameof(HasNullModem))]
public void Timeout()
{
    using (SerialPort com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    using (SerialPort com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
    {
        Random rndGen = new Random(-55);
        byte[] XOffBuffer = new byte[1];

        XOffBuffer[0] = 19; // 19 == ASCII DC3 (XOFF)

        com1.WriteTimeout = rndGen.Next(minRandomTimeout, maxRandomTimeout);
        com1.Handshake = Handshake.XOnXOff;

        Debug.WriteLine("Verifying WriteTimeout={0}", com1.WriteTimeout);

        com1.Open();
        com2.Open();

        // Send XOFF from the far end so com1's writes block and hit the timeout.
        com2.Write(XOffBuffer, 0, 1);
        Thread.Sleep(250);
        com2.Close();

        VerifyTimeout(com1);
    }
}
[Trait(XunitConstants.Category, XunitConstants.IgnoreForCI)] // Timing-sensitive
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void SuccessiveReadTimeout()
{
    using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        Random rndGen = new Random(-55);

        com.WriteTimeout = rndGen.Next(minRandomTimeout, maxRandomTimeout);
        com.Handshake = Handshake.RequestToSendXOnXOff;
        com.Encoding = Encoding.Unicode;

        Debug.WriteLine("Verifying WriteTimeout={0} with successive call to write method", com.WriteTimeout);
        com.Open();

        // Prime the (expected) timeout path once before measuring.
        try
        {
            com.Write(new char[CHAR_SIZE_TIMEOUT], 0, CHAR_SIZE_TIMEOUT);
        }
        catch (TimeoutException)
        {
        }

        VerifyTimeout(com);
    }
}
[ConditionalFact(nameof(HasNullModem), nameof(HasHardwareFlowControl))]
public void SuccessiveReadTimeoutWithWriteSucceeding()
{
    using (SerialPort com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        Random rndGen = new Random(-55);
        AsyncEnableRts asyncEnableRts = new AsyncEnableRts();
        var t = new Task(asyncEnableRts.EnableRTS);

        com1.WriteTimeout = rndGen.Next(minRandomTimeout, maxRandomTimeout);
        com1.Handshake = Handshake.RequestToSend;
        com1.Encoding = new UTF8Encoding();

        Debug.WriteLine("Verifying WriteTimeout={0} with successive call to write method with the write succeeding sometime before its timeout", com1.WriteTimeout);
        com1.Open();

        //Call EnableRTS asynchronously this will enable RTS in the middle of the following write call allowing it to succeed
        //before the timeout is reached
        t.Start();
        TCSupport.WaitForTaskToStart(t);

        // The write may either complete (RTS raised in time) or time out; both are fine here.
        try
        {
            com1.Write(new char[CHAR_SIZE_TIMEOUT], 0, CHAR_SIZE_TIMEOUT);
        }
        catch (TimeoutException)
        {
        }

        asyncEnableRts.Stop();

        TCSupport.WaitForTaskCompletion(t);

        // With RTS released again, subsequent writes must honour WriteTimeout.
        VerifyTimeout(com1);
    }
}
[ConditionalFact(nameof(HasNullModem), nameof(HasHardwareFlowControl))]
public void BytesToWrite()
{
    using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        AsyncWriteRndCharArray asyncWriteRndCharArray = new AsyncWriteRndCharArray(com, CHAR_SIZE_BYTES_TO_WRITE);
        var t = new Task(asyncWriteRndCharArray.WriteRndCharArray);

        Debug.WriteLine("Verifying BytesToWrite with one call to Write");

        // RequestToSend with no one raising RTS keeps the data in the write buffer.
        com.Handshake = Handshake.RequestToSend;
        com.Open();
        com.WriteTimeout = 500;

        //Write a random char[] asynchronously so we can verify some things while the write call is blocking
        t.Start();
        TCSupport.WaitForTaskToStart(t);
        TCSupport.WaitForExactWriteBufferLoad(com, CHAR_SIZE_BYTES_TO_WRITE);
        TCSupport.WaitForTaskCompletion(t);
    }
}
[ConditionalFact(nameof(HasNullModem), nameof(HasHardwareFlowControl))]
public void BytesToWriteSuccessive()
{
    using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        AsyncWriteRndCharArray asyncWriteRndCharArray = new AsyncWriteRndCharArray(com, CHAR_SIZE_BYTES_TO_WRITE);
        var t1 = new Task(asyncWriteRndCharArray.WriteRndCharArray);
        var t2 = new Task(asyncWriteRndCharArray.WriteRndCharArray);

        Debug.WriteLine("Verifying BytesToWrite with successive calls to Write");

        // RequestToSend with no one raising RTS keeps the data in the write buffer.
        com.Handshake = Handshake.RequestToSend;
        com.Open();
        com.WriteTimeout = 4000;

        //Write a random char[] asynchronously so we can verify some things while the write call is blocking
        t1.Start();
        TCSupport.WaitForTaskToStart(t1);
        TCSupport.WaitForExactWriteBufferLoad(com, CHAR_SIZE_BYTES_TO_WRITE);

        //Write a random char[] asynchronously so we can verify some things while the write call is blocking
        t2.Start();
        TCSupport.WaitForTaskToStart(t2);
        TCSupport.WaitForExactWriteBufferLoad(com, CHAR_SIZE_BYTES_TO_WRITE * 2);

        //Wait for both write methods to timeout
        TCSupport.WaitForTaskCompletion(t1);
        var aggregatedException = Assert.Throws<AggregateException>(() => TCSupport.WaitForTaskCompletion(t2));
        Assert.IsType<IOException>(aggregatedException.InnerException);
    }
}
[ConditionalFact(nameof(HasNullModem))]
public void Handshake_None()
{
    using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        AsyncWriteRndCharArray asyncWriteRndCharArray = new AsyncWriteRndCharArray(com, CHAR_SIZE_HANDSHAKE);
        var t = new Task(asyncWriteRndCharArray.WriteRndCharArray);

        //Write a random char[] asynchronously so we can verify some things while the write call is blocking
        Debug.WriteLine("Verifying Handshake=None");

        com.Open();
        t.Start();
        TCSupport.WaitForTaskCompletion(t);

        // Without flow control the write must drain completely.
        Assert.Equal(0, com.BytesToWrite);
    }
}
// Wrappers running Verify_Handshake for each flow-control mode.
[ConditionalFact(nameof(HasNullModem))]
public void Handshake_RequestToSend()
{
    Verify_Handshake(Handshake.RequestToSend);
}

[ConditionalFact(nameof(HasNullModem))]
public void Handshake_XOnXOff()
{
    Verify_Handshake(Handshake.XOnXOff);
}

[ConditionalFact(nameof(HasNullModem))]
public void Handshake_RequestToSendXOnXOff()
{
    Verify_Handshake(Handshake.RequestToSendXOnXOff);
}
// Raises RTS on the second port after a random delay and holds it high until
// Stop() is called, letting a blocked RequestToSend write on the first port succeed.
public class AsyncEnableRts
{
    // Set (under the lock) by Stop() to release EnableRTS from its wait.
    private bool _stop;

    // NOTE(review): locking on `this` exposes the monitor publicly; a private
    // lock object would be safer — confirm before changing.
    public void EnableRTS()
    {
        lock (this)
        {
            using (SerialPort com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
            {
                Random rndGen = new Random(-55);
                int sleepPeriod = rndGen.Next(minRandomTimeout, maxRandomTimeout / 2);

                //Sleep some random period with of a maximum duration of half the largest possible timeout value for a write method on COM1
                Thread.Sleep(sleepPeriod);

                com2.Open();
                com2.RtsEnable = true;

                while (!_stop)
                    Monitor.Wait(this);

                com2.RtsEnable = false;
            }
        }
    }

    // Signals EnableRTS to drop RTS and return.
    public void Stop()
    {
        lock (this)
        {
            _stop = true;
            Monitor.Pulse(this);
        }
    }
}
// Task body that pushes a fixed-size random char[] through a SerialPort write.
public class AsyncWriteRndCharArray
{
    private readonly SerialPort _com;
    private readonly int _charLength;

    public AsyncWriteRndCharArray(SerialPort com, int charLength)
    {
        _com = com;
        _charLength = charLength;
    }

    // Writes random chars (surrogates included) to the port, swallowing the
    // TimeoutException that blocked flow-control scenarios are expected to raise.
    public void WriteRndCharArray()
    {
        char[] data = TCSupport.GetRandomChars(_charLength, TCSupport.CharacterOptions.Surrogates);

        try
        {
            _com.Write(data, 0, data.Length);
        }
        catch (TimeoutException)
        {
        }
    }
}
#endregion
#region Verification for Test Cases
// Asserts that a blocking char[] write on `com` throws exactly `expectedException`.
public static void VerifyWriteException(SerialPort com, Type expectedException)
{
    Assert.Throws(expectedException, () => com.Write(new char[CHAR_SIZE_EXCEPTION], 0, CHAR_SIZE_EXCEPTION));
}
// Averages the elapsed time of NUM_TRYS timed-out writes and fails when it
// deviates from com.WriteTimeout by more than s_maxPercentageDifference.
private void VerifyTimeout(SerialPort com)
{
    Stopwatch timer = new Stopwatch();
    int expectedTime = com.WriteTimeout;
    int actualTime = 0;
    double percentageDifference;

    try
    {
        com.Write(new char[CHAR_SIZE_TIMEOUT], 0, CHAR_SIZE_TIMEOUT); //Warm up write method
    }
    catch (TimeoutException) { }

    // Raise thread priority so scheduler noise doesn't skew the measurement.
    Thread.CurrentThread.Priority = ThreadPriority.Highest;

    for (int i = 0; i < NUM_TRYS; i++)
    {
        timer.Start();

        try
        {
            com.Write(new char[CHAR_SIZE_TIMEOUT], 0, CHAR_SIZE_TIMEOUT);
        }
        catch (TimeoutException) { }

        timer.Stop();
        actualTime += (int)timer.ElapsedMilliseconds;
        timer.Reset();
    }

    Thread.CurrentThread.Priority = ThreadPriority.Normal;
    actualTime /= NUM_TRYS; // integer average over the trials
    percentageDifference = Math.Abs((expectedTime - actualTime) / (double)expectedTime);

    //Verify that the percentage difference between the expected and actual timeout is less than maxPercentageDifference
    if (s_maxPercentageDifference < percentageDifference)
    {
        Fail("ERROR!!!: The write method timedout in {0} expected {1} percentage difference: {2}", actualTime, expectedTime, percentageDifference);
    }
}
/// <summary>
/// Blocks a write using the given handshake (RTS de-asserted and/or XOFF sent),
/// verifies the bytes remain buffered and CTS is low, then releases the flow
/// control and verifies the write drains completely and CTS is high.
/// Fixes typos in the failure messages ("Expcted" -> "Expected").
/// </summary>
private void Verify_Handshake(Handshake handshake)
{
    using (SerialPort com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    using (SerialPort com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
    {
        AsyncWriteRndCharArray asyncWriteRndCharArray = new AsyncWriteRndCharArray(com1, CHAR_SIZE_HANDSHAKE);
        var t = new Task(asyncWriteRndCharArray.WriteRndCharArray);

        byte[] XOffBuffer = new byte[1];
        byte[] XOnBuffer = new byte[1];

        XOffBuffer[0] = 19; // ASCII DC3 (XOFF)
        XOnBuffer[0] = 17;  // ASCII DC1 (XON)

        Debug.WriteLine("Verifying Handshake={0}", handshake);

        com1.Handshake = handshake;
        com1.Open();
        com2.Open();

        //Setup to ensure write will block with the type of handshake method being used
        if (Handshake.RequestToSend == handshake || Handshake.RequestToSendXOnXOff == handshake)
        {
            com2.RtsEnable = false;
        }

        if (Handshake.XOnXOff == handshake || Handshake.RequestToSendXOnXOff == handshake)
        {
            com2.Write(XOffBuffer, 0, 1);
            Thread.Sleep(250);
        }

        //Write a random char array asynchronously so we can verify some things while the write call is blocking
        t.Start();
        TCSupport.WaitForTaskToStart(t);
        TCSupport.WaitForExactWriteBufferLoad(com1, CHAR_SIZE_HANDSHAKE);

        //Verify that CtsHolding is false if the RequestToSend or RequestToSendXOnXOff handshake method is used
        if ((Handshake.RequestToSend == handshake || Handshake.RequestToSendXOnXOff == handshake) && com1.CtsHolding)
        {
            Fail("ERROR!!! Expected CtsHolding={0} actual {1}", false, com1.CtsHolding);
        }

        //Setup to ensure write will succeed
        if (Handshake.RequestToSend == handshake || Handshake.RequestToSendXOnXOff == handshake)
        {
            com2.RtsEnable = true;
        }

        if (Handshake.XOnXOff == handshake || Handshake.RequestToSendXOnXOff == handshake)
        {
            com2.Write(XOnBuffer, 0, 1);
        }

        TCSupport.WaitForTaskCompletion(t);

        //Verify that the correct number of bytes are in the buffer
        Assert.Equal(0, com1.BytesToWrite);

        //Verify that CtsHolding is true if the RequestToSend or RequestToSendXOnXOff handshake method is used
        if ((Handshake.RequestToSend == handshake || Handshake.RequestToSendXOnXOff == handshake) &&
            !com1.CtsHolding)
        {
            Fail("ERROR!!! Expected CtsHolding={0} actual {1}", true, com1.CtsHolding);
        }
    }
}
#endregion
}
}
| |
// Copyright (C) 2014 dot42
//
// Original filename: Javax.Xml.Transform.Sax.cs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma warning disable 1717
namespace Javax.Xml.Transform.Sax
{
/// <summary>
/// <para>A TransformerHandler listens for SAX ContentHandler parse events and transforms them to a Result. </para>
/// </summary>
/// <java-name>
/// javax/xml/transform/sax/TransformerHandler
/// </java-name>
// NOTE(review): appears to be a tool-generated dot42 binding (see the DexImport
// attributes and file header); avoid hand-editing — regenerate instead.
[Dot42.DexImport("javax/xml/transform/sax/TransformerHandler", AccessFlags = 1537)]
public partial interface ITransformerHandler : global::Org.Xml.Sax.IContentHandler, global::Org.Xml.Sax.Ext.ILexicalHandler, global::Org.Xml.Sax.IDTDHandler
/* scope: __dot42__ */
{
    /// <summary>
    /// <para>Set the <code>Result</code> associated with this <code>TransformerHandler</code> to be used for the transformation.</para><para></para>
    /// </summary>
    /// <java-name>
    /// setResult
    /// </java-name>
    [Dot42.DexImport("setResult", "(Ljavax/xml/transform/Result;)V", AccessFlags = 1025)]
    void SetResult(global::Javax.Xml.Transform.IResult result) /* MethodBuilder.Create */ ;

    /// <summary>
    /// <para>Set the base ID (URI or system ID) from where relative URLs will be resolved. </para>
    /// </summary>
    /// <java-name>
    /// setSystemId
    /// </java-name>
    [Dot42.DexImport("setSystemId", "(Ljava/lang/String;)V", AccessFlags = 1025)]
    void SetSystemId(string systemID) /* MethodBuilder.Create */ ;

    /// <summary>
    /// <para>Get the base ID (URI or system ID) from where relative URLs will be resolved. </para>
    /// </summary>
    /// <returns>
    /// <para>The systemID that was set with setSystemId. </para>
    /// </returns>
    /// <java-name>
    /// getSystemId
    /// </java-name>
    [Dot42.DexImport("getSystemId", "()Ljava/lang/String;", AccessFlags = 1025)]
    string GetSystemId() /* MethodBuilder.Create */ ;

    /// <summary>
    /// <para>Get the <code>Transformer</code> associated with this handler, which is needed in order to set parameters and output properties.</para><para></para>
    /// </summary>
    /// <returns>
    /// <para><code>Transformer</code> associated with this <code>TransformerHandler</code>. </para>
    /// </returns>
    /// <java-name>
    /// getTransformer
    /// </java-name>
    [Dot42.DexImport("getTransformer", "()Ljavax/xml/transform/Transformer;", AccessFlags = 1025)]
    global::Javax.Xml.Transform.Transformer GetTransformer() /* MethodBuilder.Create */ ;
}
/// <summary>
/// <para>This class extends TransformerFactory to provide SAX-specific factory methods. It provides two types of ContentHandlers, one for creating Transformers, the other for creating Templates objects.</para><para>If an application wants to set the ErrorHandler or EntityResolver for an XMLReader used during a transformation, it should use a URIResolver to return the SAXSource which provides (with getXMLReader) a reference to the XMLReader.</para>
/// </summary>
/// <java-name>
/// javax/xml/transform/sax/SAXTransformerFactory
/// </java-name>
[Dot42.DexImport("javax/xml/transform/sax/SAXTransformerFactory", AccessFlags = 1057)]
public abstract partial class SAXTransformerFactory : global::Javax.Xml.Transform.TransformerFactory
/* scope: __dot42__ */
{
/// <summary>
/// <para>If javax.xml.transform.TransformerFactory#getFeature returns true when passed this value as an argument, the TransformerFactory returned from javax.xml.transform.TransformerFactory#newInstance may be safely cast to a SAXTransformerFactory. </para>
/// </summary>
/// <java-name>
/// FEATURE
/// </java-name>
[Dot42.DexImport("FEATURE", "Ljava/lang/String;", AccessFlags = 25)]
public const string FEATURE = "http://javax.xml.transform.sax.SAXTransformerFactory/feature";
/// <summary>
/// <para>If javax.xml.transform.TransformerFactory#getFeature returns true when passed this value as an argument, the newXMLFilter(Source src) and newXMLFilter(Templates templates) methods are supported. </para>
/// </summary>
/// <java-name>
/// FEATURE_XMLFILTER
/// </java-name>
[Dot42.DexImport("FEATURE_XMLFILTER", "Ljava/lang/String;", AccessFlags = 25)]
public const string FEATURE_XMLFILTER = "http://javax.xml.transform.sax.SAXTransformerFactory/feature/xmlfilter";
/// <summary>
/// <para>The default constructor is protected on purpose. </para>
/// </summary>
[Dot42.DexImport("<init>", "()V", AccessFlags = 4)]
protected internal SAXTransformerFactory() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Get a TransformerHandler object that can process SAX ContentHandler events into a Result, based on the transformation instructions specified by the argument.</para><para></para>
/// </summary>
/// <returns>
/// <para>TransformerHandler ready to transform SAX events.</para>
/// </returns>
/// <java-name>
/// newTransformerHandler
/// </java-name>
[Dot42.DexImport("newTransformerHandler", "(Ljavax/xml/transform/Source;)Ljavax/xml/transform/sax/TransformerHandler;", AccessFlags = 1025)]
public abstract global::Javax.Xml.Transform.Sax.ITransformerHandler NewTransformerHandler(global::Javax.Xml.Transform.ISource src) /* MethodBuilder.Create */ ;
                /// <summary>
                /// <para>Get a TransformerHandler object that can process SAX ContentHandler events into a Result, based on the transformation instructions specified by the argument.</para><para></para>
                /// </summary>
                /// <param name="src">The compiled transformation instructions (Templates).</param>
                /// <returns>
                /// <para>TransformerHandler ready to transform SAX events.</para>
                /// </returns>
                /// <java-name>
                /// newTransformerHandler
                /// </java-name>
                [Dot42.DexImport("newTransformerHandler", "(Ljavax/xml/transform/Templates;)Ljavax/xml/transform/sax/TransformerHandler;", AccessFlags = 1025)]
                public abstract global::Javax.Xml.Transform.Sax.ITransformerHandler NewTransformerHandler(global::Javax.Xml.Transform.ITemplates src) /* MethodBuilder.Create */ ;
                /// <summary>
                /// <para>Get a TransformerHandler object that can process SAX ContentHandler events into a Result. The transformation is defined as an identity (or copy) transformation, for example to copy a series of SAX parse events into a DOM tree.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>A non-null reference to a TransformerHandler, that may be used as a ContentHandler for SAX parse events.</para>
                /// </returns>
                /// <java-name>
                /// newTransformerHandler
                /// </java-name>
                [Dot42.DexImport("newTransformerHandler", "()Ljavax/xml/transform/sax/TransformerHandler;", AccessFlags = 1025)]
                public abstract global::Javax.Xml.Transform.Sax.ITransformerHandler NewTransformerHandler() /* MethodBuilder.Create */ ;
                /// <summary>
                /// <para>Get a TemplatesHandler object that can process SAX ContentHandler events into a Templates object.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>A non-null reference to a TemplatesHandler, that may be used as a ContentHandler for SAX parse events.</para>
                /// </returns>
                /// <java-name>
                /// newTemplatesHandler
                /// </java-name>
                [Dot42.DexImport("newTemplatesHandler", "()Ljavax/xml/transform/sax/TemplatesHandler;", AccessFlags = 1025)]
                public abstract global::Javax.Xml.Transform.Sax.ITemplatesHandler NewTemplatesHandler() /* MethodBuilder.Create */ ;
                /// <summary>
                /// <para>Create an XMLFilter that uses the given Source as the transformation instructions.</para><para></para>
                /// </summary>
                /// <param name="src">The Source of the transformation instructions.</param>
                /// <returns>
                /// <para>An XMLFilter object, or null if this feature is not supported.</para>
                /// </returns>
                /// <java-name>
                /// newXMLFilter
                /// </java-name>
                [Dot42.DexImport("newXMLFilter", "(Ljavax/xml/transform/Source;)Lorg/xml/sax/XMLFilter;", AccessFlags = 1025)]
                public abstract global::Org.Xml.Sax.IXMLFilter NewXMLFilter(global::Javax.Xml.Transform.ISource src) /* MethodBuilder.Create */ ;
                /// <summary>
                /// <para>Create an XMLFilter that uses the given Source as the transformation instructions.</para><para></para>
                /// </summary>
                /// <param name="src">The compiled transformation instructions (Templates).</param>
                /// <returns>
                /// <para>An XMLFilter object, or null if this feature is not supported.</para>
                /// </returns>
                /// <java-name>
                /// newXMLFilter
                /// </java-name>
                [Dot42.DexImport("newXMLFilter", "(Ljavax/xml/transform/Templates;)Lorg/xml/sax/XMLFilter;", AccessFlags = 1025)]
                public abstract global::Org.Xml.Sax.IXMLFilter NewXMLFilter(global::Javax.Xml.Transform.ITemplates src) /* MethodBuilder.Create */ ;
}
        /// <summary>
        /// <para>Acts as an holder for SAX-style Source.</para><para>Note that XSLT requires namespace support. Attempting to transform an input source that is not generated with a namespace-aware parser may result in errors. Parsers can be made namespace aware by calling the javax.xml.parsers.SAXParserFactory#setNamespaceAware(boolean awareness) method.</para><para><para> </para><para></para><title>Revision:</title><para>446598 </para>, <title>Date:</title><para>2006-09-15 05:55:40 -0700 (Fri, 15 Sep 2006) </para></para>
        /// </summary>
        /// <java-name>
        /// javax/xml/transform/sax/SAXSource
        /// </java-name>
        [Dot42.DexImport("javax/xml/transform/sax/SAXSource", AccessFlags = 33)]
        public partial class SAXSource : global::Javax.Xml.Transform.ISource
 /* scope: __dot42__ */ 
        {
                /// <summary>
                /// <para>If javax.xml.transform.TransformerFactory#getFeature returns true when passed this value as an argument, the Transformer supports Source input of this type. </para>
                /// </summary>
                /// <java-name>
                /// FEATURE
                /// </java-name>
                [Dot42.DexImport("FEATURE", "Ljava/lang/String;", AccessFlags = 25)]
                public const string FEATURE = "http://javax.xml.transform.sax.SAXSource/feature";
                /// <summary>
                /// <para>Zero-argument default constructor. If this constructor is used, and no SAX source is set using setInputSource(InputSource inputSource) , then the <code>Transformer</code> will create an empty source org.xml.sax.InputSource using new InputSource().</para><para><para>javax.xml.transform.Transformer::transform(Source xmlSource, Result outputTarget) </para></para>
                /// </summary>
                [Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
                public SAXSource() /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Create a <code>SAXSource</code>, using an org.xml.sax.XMLReader and a SAX InputSource. The javax.xml.transform.Transformer or javax.xml.transform.sax.SAXTransformerFactory will set itself to be the reader's org.xml.sax.ContentHandler, and then will call reader.parse(inputSource).</para><para></para>
                /// </summary>
                /// <param name="reader">Reader to use for the transformation.</param>
                /// <param name="inputSource">SAX input source to be parsed.</param>
                [Dot42.DexImport("<init>", "(Lorg/xml/sax/XMLReader;Lorg/xml/sax/InputSource;)V", AccessFlags = 1)]
                public SAXSource(global::Org.Xml.Sax.IXMLReader reader, global::Org.Xml.Sax.InputSource inputSource) /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Create a <code>SAXSource</code>, using a SAX <code>InputSource</code>. The javax.xml.transform.Transformer or javax.xml.transform.sax.SAXTransformerFactory creates a reader via org.xml.sax.helpers.XMLReaderFactory (if setXMLReader is not used), sets itself as the reader's org.xml.sax.ContentHandler, and calls reader.parse(inputSource).</para><para></para>
                /// </summary>
                /// <param name="inputSource">SAX input source to be parsed.</param>
                [Dot42.DexImport("<init>", "(Lorg/xml/sax/InputSource;)V", AccessFlags = 1)]
                public SAXSource(global::Org.Xml.Sax.InputSource inputSource) /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Set the XMLReader to be used for the Source.</para><para></para>
                /// </summary>
                /// <param name="reader">A valid XMLReader or XMLFilter reference.</param>
                /// <java-name>
                /// setXMLReader
                /// </java-name>
                [Dot42.DexImport("setXMLReader", "(Lorg/xml/sax/XMLReader;)V", AccessFlags = 1)]
                public virtual void SetXMLReader(global::Org.Xml.Sax.IXMLReader reader) /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Get the XMLReader to be used for the Source.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>A valid XMLReader or XMLFilter reference, or null. </para>
                /// </returns>
                /// <java-name>
                /// getXMLReader
                /// </java-name>
                [Dot42.DexImport("getXMLReader", "()Lorg/xml/sax/XMLReader;", AccessFlags = 1)]
                public virtual global::Org.Xml.Sax.IXMLReader GetXMLReader() /* MethodBuilder.Create */ 
                {
                        return default(global::Org.Xml.Sax.IXMLReader);
                }
                /// <summary>
                /// <para>Set the SAX InputSource to be used for the Source.</para><para></para>
                /// </summary>
                /// <param name="inputSource">A valid InputSource reference.</param>
                /// <java-name>
                /// setInputSource
                /// </java-name>
                [Dot42.DexImport("setInputSource", "(Lorg/xml/sax/InputSource;)V", AccessFlags = 1)]
                public virtual void SetInputSource(global::Org.Xml.Sax.InputSource inputSource) /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Get the SAX InputSource to be used for the Source.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>A valid InputSource reference, or null. </para>
                /// </returns>
                /// <java-name>
                /// getInputSource
                /// </java-name>
                [Dot42.DexImport("getInputSource", "()Lorg/xml/sax/InputSource;", AccessFlags = 1)]
                public virtual global::Org.Xml.Sax.InputSource GetInputSource() /* MethodBuilder.Create */ 
                {
                        return default(global::Org.Xml.Sax.InputSource);
                }
                /// <summary>
                /// <para>Set the system identifier for this Source. If an input source has already been set, it will set the system ID or that input source, otherwise it will create a new input source.</para><para>The system identifier is optional if there is a byte stream or a character stream, but it is still useful to provide one, since the application can use it to resolve relative URIs and can include it in error messages and warnings (the parser will attempt to open a connection to the URI only if no byte stream or character stream is specified).</para><para></para>
                /// </summary>
                /// <param name="systemId">The system identifier as a URI string.</param>
                /// <java-name>
                /// setSystemId
                /// </java-name>
                [Dot42.DexImport("setSystemId", "(Ljava/lang/String;)V", AccessFlags = 1)]
                public virtual void SetSystemId(string systemId) /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Get the base ID (URI or system ID) from where URIs will be resolved.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>Base URL for the <code>Source</code>, or <code>null</code>. </para>
                /// </returns>
                /// <java-name>
                /// getSystemId
                /// </java-name>
                [Dot42.DexImport("getSystemId", "()Ljava/lang/String;", AccessFlags = 1)]
                public virtual string GetSystemId() /* MethodBuilder.Create */ 
                {
                        return default(string);
                }
                /// <summary>
                /// <para>Attempt to obtain a SAX InputSource object from a Source object.</para><para></para>
                /// </summary>
                /// <param name="source">The Source to convert.</param>
                /// <returns>
                /// <para>An InputSource, or null if Source can not be converted. </para>
                /// </returns>
                /// <java-name>
                /// sourceToInputSource
                /// </java-name>
                [Dot42.DexImport("sourceToInputSource", "(Ljavax/xml/transform/Source;)Lorg/xml/sax/InputSource;", AccessFlags = 9)]
                public static global::Org.Xml.Sax.InputSource SourceToInputSource(global::Javax.Xml.Transform.ISource source) /* MethodBuilder.Create */ 
                {
                        return default(global::Org.Xml.Sax.InputSource);
                }
                /// <summary>
                /// <para>Gets or sets the XMLReader to be used for the Source. Wraps GetXMLReader/SetXMLReader.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>A valid XMLReader or XMLFilter reference, or null. </para>
                /// </returns>
                /// <java-name>
                /// getXMLReader
                /// </java-name>
                public global::Org.Xml.Sax.IXMLReader XMLReader
                {
                [Dot42.DexImport("getXMLReader", "()Lorg/xml/sax/XMLReader;", AccessFlags = 1)]
                        get{ return GetXMLReader(); }
                [Dot42.DexImport("setXMLReader", "(Lorg/xml/sax/XMLReader;)V", AccessFlags = 1)]
                        set{ SetXMLReader(value); }
                }
                /// <summary>
                /// <para>Gets or sets the SAX InputSource to be used for the Source. Wraps GetInputSource/SetInputSource.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>A valid InputSource reference, or null. </para>
                /// </returns>
                /// <java-name>
                /// getInputSource
                /// </java-name>
                public global::Org.Xml.Sax.InputSource InputSource
                {
                [Dot42.DexImport("getInputSource", "()Lorg/xml/sax/InputSource;", AccessFlags = 1)]
                        get{ return GetInputSource(); }
                [Dot42.DexImport("setInputSource", "(Lorg/xml/sax/InputSource;)V", AccessFlags = 1)]
                        set{ SetInputSource(value); }
                }
                /// <summary>
                /// <para>Gets or sets the base ID (URI or system ID) from where URIs will be resolved. Wraps GetSystemId/SetSystemId.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>Base URL for the <code>Source</code>, or <code>null</code>. </para>
                /// </returns>
                /// <java-name>
                /// getSystemId
                /// </java-name>
                public string SystemId
                {
                [Dot42.DexImport("getSystemId", "()Ljava/lang/String;", AccessFlags = 1)]
                        get{ return GetSystemId(); }
                [Dot42.DexImport("setSystemId", "(Ljava/lang/String;)V", AccessFlags = 1)]
                        set{ SetSystemId(value); }
                }
        }
        /// <summary>
        /// <para>A SAX ContentHandler that may be used to process SAX parse events (parsing transformation instructions) into a Templates object.</para><para>Note that TemplatesHandler does not need to implement LexicalHandler.</para>
        /// </summary>
        /// <java-name>
        /// javax/xml/transform/sax/TemplatesHandler
        /// </java-name>
        [Dot42.DexImport("javax/xml/transform/sax/TemplatesHandler", AccessFlags = 1537)]
        public partial interface ITemplatesHandler : global::Org.Xml.Sax.IContentHandler
 /* scope: __dot42__ */ 
        {
                /// <summary>
                /// <para>When a TemplatesHandler object is used as a ContentHandler for the parsing of transformation instructions, it creates a Templates object, which the caller can get once the SAX events have been completed.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>The Templates object that was created during the SAX event process, or null if no Templates object has been created. </para>
                /// </returns>
                /// <java-name>
                /// getTemplates
                /// </java-name>
                [Dot42.DexImport("getTemplates", "()Ljavax/xml/transform/Templates;", AccessFlags = 1025)]
                global::Javax.Xml.Transform.ITemplates GetTemplates() /* MethodBuilder.Create */ ;
                /// <summary>
                /// <para>Set the base ID (URI or system ID) for the Templates object created by this builder. This must be set in order to resolve relative URIs in the stylesheet. This must be called before the startDocument event.</para><para></para>
                /// </summary>
                /// <param name="systemID">Base URI for this stylesheet.</param>
                /// <java-name>
                /// setSystemId
                /// </java-name>
                [Dot42.DexImport("setSystemId", "(Ljava/lang/String;)V", AccessFlags = 1025)]
                void SetSystemId(string systemID) /* MethodBuilder.Create */ ;
                /// <summary>
                /// <para>Get the base ID (URI or system ID) from where relative URLs will be resolved. </para>
                /// </summary>
                /// <returns>
                /// <para>The systemID that was set with setSystemId. </para>
                /// </returns>
                /// <java-name>
                /// getSystemId
                /// </java-name>
                [Dot42.DexImport("getSystemId", "()Ljava/lang/String;", AccessFlags = 1025)]
                string GetSystemId() /* MethodBuilder.Create */ ;
        }
        /// <summary>
        /// <para>Acts as an holder for a transformation Result.</para><para><para> </para></para>
        /// </summary>
        /// <java-name>
        /// javax/xml/transform/sax/SAXResult
        /// </java-name>
        [Dot42.DexImport("javax/xml/transform/sax/SAXResult", AccessFlags = 33)]
        public partial class SAXResult : global::Javax.Xml.Transform.IResult
 /* scope: __dot42__ */ 
        {
                /// <summary>
                /// <para>If javax.xml.transform.TransformerFactory#getFeature returns true when passed this value as an argument, the Transformer supports Result output of this type. </para>
                /// </summary>
                /// <java-name>
                /// FEATURE
                /// </java-name>
                [Dot42.DexImport("FEATURE", "Ljava/lang/String;", AccessFlags = 25)]
                public const string FEATURE = "http://javax.xml.transform.sax.SAXResult/feature";
                /// <summary>
                /// <para>Zero-argument default constructor. </para>
                /// </summary>
                [Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
                public SAXResult() /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Create a SAXResult that targets a SAX2 org.xml.sax.ContentHandler.</para><para></para>
                /// </summary>
                /// <param name="handler">Must be a non-null ContentHandler reference.</param>
                [Dot42.DexImport("<init>", "(Lorg/xml/sax/ContentHandler;)V", AccessFlags = 1)]
                public SAXResult(global::Org.Xml.Sax.IContentHandler handler) /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Set the target to be a SAX2 org.xml.sax.ContentHandler.</para><para></para>
                /// </summary>
                /// <param name="handler">Must be a non-null ContentHandler reference.</param>
                /// <java-name>
                /// setHandler
                /// </java-name>
                [Dot42.DexImport("setHandler", "(Lorg/xml/sax/ContentHandler;)V", AccessFlags = 1)]
                public virtual void SetHandler(global::Org.Xml.Sax.IContentHandler handler) /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Get the org.xml.sax.ContentHandler that is the Result.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>The ContentHandler that is to be transformation output. </para>
                /// </returns>
                /// <java-name>
                /// getHandler
                /// </java-name>
                [Dot42.DexImport("getHandler", "()Lorg/xml/sax/ContentHandler;", AccessFlags = 1)]
                public virtual global::Org.Xml.Sax.IContentHandler GetHandler() /* MethodBuilder.Create */ 
                {
                        return default(global::Org.Xml.Sax.IContentHandler);
                }
                /// <summary>
                /// <para>Set the SAX2 org.xml.sax.ext.LexicalHandler for the output.</para><para>This is needed to handle XML comments and the like. If the lexical handler is not set, an attempt should be made by the transformer to cast the org.xml.sax.ContentHandler to a <code>LexicalHandler</code>.</para><para></para>
                /// </summary>
                /// <param name="handler">A non-null LexicalHandler for handling lexical parse events.</param>
                /// <java-name>
                /// setLexicalHandler
                /// </java-name>
                [Dot42.DexImport("setLexicalHandler", "(Lorg/xml/sax/ext/LexicalHandler;)V", AccessFlags = 1)]
                public virtual void SetLexicalHandler(global::Org.Xml.Sax.Ext.ILexicalHandler handler) /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Get a SAX2 org.xml.sax.ext.LexicalHandler for the output.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>A <code>LexicalHandler</code>, or null. </para>
                /// </returns>
                /// <java-name>
                /// getLexicalHandler
                /// </java-name>
                [Dot42.DexImport("getLexicalHandler", "()Lorg/xml/sax/ext/LexicalHandler;", AccessFlags = 1)]
                public virtual global::Org.Xml.Sax.Ext.ILexicalHandler GetLexicalHandler() /* MethodBuilder.Create */ 
                {
                        return default(global::Org.Xml.Sax.Ext.ILexicalHandler);
                }
                /// <summary>
                /// <para>Method setSystemId Set the systemID that may be used in association with the org.xml.sax.ContentHandler.</para><para></para>
                /// </summary>
                /// <param name="systemId">The system identifier as a URI string.</param>
                /// <java-name>
                /// setSystemId
                /// </java-name>
                [Dot42.DexImport("setSystemId", "(Ljava/lang/String;)V", AccessFlags = 1)]
                public virtual void SetSystemId(string systemId) /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Get the system identifier that was set with setSystemId.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>The system identifier that was set with setSystemId, or null if setSystemId was not called. </para>
                /// </returns>
                /// <java-name>
                /// getSystemId
                /// </java-name>
                [Dot42.DexImport("getSystemId", "()Ljava/lang/String;", AccessFlags = 1)]
                public virtual string GetSystemId() /* MethodBuilder.Create */ 
                {
                        return default(string);
                }
                /// <summary>
                /// <para>Gets or sets the org.xml.sax.ContentHandler that is the Result. Wraps GetHandler/SetHandler.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>The ContentHandler that is to be transformation output. </para>
                /// </returns>
                /// <java-name>
                /// getHandler
                /// </java-name>
                public global::Org.Xml.Sax.IContentHandler Handler
                {
                [Dot42.DexImport("getHandler", "()Lorg/xml/sax/ContentHandler;", AccessFlags = 1)]
                        get{ return GetHandler(); }
                [Dot42.DexImport("setHandler", "(Lorg/xml/sax/ContentHandler;)V", AccessFlags = 1)]
                        set{ SetHandler(value); }
                }
                /// <summary>
                /// <para>Gets or sets the SAX2 org.xml.sax.ext.LexicalHandler for the output. Wraps GetLexicalHandler/SetLexicalHandler.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>A <code>LexicalHandler</code>, or null. </para>
                /// </returns>
                /// <java-name>
                /// getLexicalHandler
                /// </java-name>
                public global::Org.Xml.Sax.Ext.ILexicalHandler LexicalHandler
                {
                [Dot42.DexImport("getLexicalHandler", "()Lorg/xml/sax/ext/LexicalHandler;", AccessFlags = 1)]
                        get{ return GetLexicalHandler(); }
                [Dot42.DexImport("setLexicalHandler", "(Lorg/xml/sax/ext/LexicalHandler;)V", AccessFlags = 1)]
                        set{ SetLexicalHandler(value); }
                }
                /// <summary>
                /// <para>Gets or sets the system identifier. Wraps GetSystemId/SetSystemId.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>The system identifier that was set with setSystemId, or null if setSystemId was not called. </para>
                /// </returns>
                /// <java-name>
                /// getSystemId
                /// </java-name>
                public string SystemId
                {
                [Dot42.DexImport("getSystemId", "()Ljava/lang/String;", AccessFlags = 1)]
                        get{ return GetSystemId(); }
                [Dot42.DexImport("setSystemId", "(Ljava/lang/String;)V", AccessFlags = 1)]
                        set{ SetSystemId(value); }
                }
        }
}
| |
using ClosedXML.Excel.Drawings;
using System;
using System.Drawing;
using System.IO;
namespace ClosedXML.Excel
{
    // Visibility states for a worksheet tab. VeryHidden sheets cannot be unhidden
    // from the Excel UI (only programmatically) — standard OOXML sheet-state semantics.
    public enum XLWorksheetVisibility { Visible, Hidden, VeryHidden }

    public interface IXLWorksheet : IXLRangeBase, IXLProtectable<IXLSheetProtection, XLSheetProtectionElements>
    {
        /// <summary>
        /// Gets the workbook that contains this worksheet
        /// </summary>
        XLWorkbook Workbook { get; }

        /// <summary>
        /// Gets or sets the default column width for this worksheet.
        /// </summary>
        Double ColumnWidth { get; set; }

        /// <summary>
        /// Gets or sets the default row height for this worksheet.
        /// </summary>
        Double RowHeight { get; set; }

        /// <summary>
        /// Gets or sets the name (caption) of this worksheet.
        /// </summary>
        String Name { get; set; }

        /// <summary>
        /// Gets or sets the position of the sheet.
        /// <para>When setting the Position all other sheets' positions are shifted accordingly.</para>
        /// </summary>
        Int32 Position { get; set; }

        /// <summary>
        /// Gets an object to manipulate the sheet's print options.
        /// </summary>
        IXLPageSetup PageSetup { get; }

        /// <summary>
        /// Gets an object to manipulate the Outline levels.
        /// </summary>
        IXLOutline Outline { get; }

        /// <summary>
        /// Gets the first row of the worksheet.
        /// </summary>
        IXLRow FirstRow();

        /// <summary>
        /// Gets the first row of the worksheet that contains a cell with a value.
        /// <para>Formatted empty cells do not count.</para>
        /// </summary>
        IXLRow FirstRowUsed();

        /// <summary>
        /// Gets the first row of the worksheet that contains a cell with a value.
        /// </summary>
        /// <param name="includeFormats">If set to <c>true</c> formatted empty cells will count as used.</param>
        [Obsolete("Use the overload with XLCellsUsedOptions")]
        IXLRow FirstRowUsed(Boolean includeFormats);

        /// <summary>
        /// Gets the first row of the worksheet that contains a cell matching the specified "used" criteria.
        /// </summary>
        /// <param name="options">The options to determine whether a cell is used.</param>
        IXLRow FirstRowUsed(XLCellsUsedOptions options);

        /// <summary>
        /// Gets the last row of the worksheet.
        /// </summary>
        IXLRow LastRow();

        /// <summary>
        /// Gets the last row of the worksheet that contains a cell with a value.
        /// </summary>
        IXLRow LastRowUsed();

        /// <summary>
        /// Gets the last row of the worksheet that contains a cell with a value.
        /// </summary>
        /// <param name="includeFormats">If set to <c>true</c> formatted empty cells will count as used.</param>
        [Obsolete("Use the overload with XLCellsUsedOptions")]
        IXLRow LastRowUsed(Boolean includeFormats);

        /// <summary>
        /// Gets the last row of the worksheet that contains a cell matching the specified "used" criteria.
        /// </summary>
        /// <param name="options">The options to determine whether a cell is used.</param>
        IXLRow LastRowUsed(XLCellsUsedOptions options);

        /// <summary>
        /// Gets the first column of the worksheet.
        /// </summary>
        IXLColumn FirstColumn();

        /// <summary>
        /// Gets the first column of the worksheet that contains a cell with a value.
        /// </summary>
        IXLColumn FirstColumnUsed();

        /// <summary>
        /// Gets the first column of the worksheet that contains a cell with a value.
        /// </summary>
        /// <param name="includeFormats">If set to <c>true</c> formatted empty cells will count as used.</param>
        [Obsolete("Use the overload with XLCellsUsedOptions")]
        IXLColumn FirstColumnUsed(Boolean includeFormats);

        /// <summary>
        /// Gets the first column of the worksheet that contains a cell matching the specified "used" criteria.
        /// </summary>
        /// <param name="options">The options to determine whether a cell is used.</param>
        IXLColumn FirstColumnUsed(XLCellsUsedOptions options);

        /// <summary>
        /// Gets the last column of the worksheet.
        /// </summary>
        IXLColumn LastColumn();

        /// <summary>
        /// Gets the last column of the worksheet that contains a cell with a value.
        /// </summary>
        IXLColumn LastColumnUsed();

        /// <summary>
        /// Gets the last column of the worksheet that contains a cell with a value.
        /// </summary>
        /// <param name="includeFormats">If set to <c>true</c> formatted empty cells will count as used.</param>
        [Obsolete("Use the overload with XLCellsUsedOptions")]
        IXLColumn LastColumnUsed(Boolean includeFormats);

        /// <summary>
        /// Gets the last column of the worksheet that contains a cell matching the specified "used" criteria.
        /// </summary>
        /// <param name="options">The options to determine whether a cell is used.</param>
        IXLColumn LastColumnUsed(XLCellsUsedOptions options);

        /// <summary>
        /// Gets a collection of all columns in this worksheet.
        /// </summary>
        IXLColumns Columns();

        /// <summary>
        /// Gets a collection of the specified columns in this worksheet, separated by commas.
        /// <para>e.g. Columns("G:H"), Columns("10:11,13:14"), Columns("P:Q,S:T"), Columns("V")</para>
        /// </summary>
        /// <param name="columns">The columns to return.</param>
        IXLColumns Columns(String columns);

        /// <summary>
        /// Gets a collection of the specified columns in this worksheet.
        /// </summary>
        /// <param name="firstColumn">The first column to return.</param>
        /// <param name="lastColumn">The last column to return.</param>
        IXLColumns Columns(String firstColumn, String lastColumn);

        /// <summary>
        /// Gets a collection of the specified columns in this worksheet.
        /// </summary>
        /// <param name="firstColumn">The first column to return.</param>
        /// <param name="lastColumn">The last column to return.</param>
        IXLColumns Columns(Int32 firstColumn, Int32 lastColumn);

        /// <summary>
        /// Gets a collection of all rows in this worksheet.
        /// </summary>
        IXLRows Rows();

        /// <summary>
        /// Gets a collection of the specified rows in this worksheet, separated by commas.
        /// <para>e.g. Rows("4:5"), Rows("7:8,10:11"), Rows("13")</para>
        /// </summary>
        /// <param name="rows">The rows to return.</param>
        IXLRows Rows(String rows);

        /// <summary>
        /// Gets a collection of the specified rows in this worksheet.
        /// </summary>
        /// <param name="firstRow">The first row to return.</param>
        /// <param name="lastRow">The last row to return.</param>
        /// <returns></returns>
        IXLRows Rows(Int32 firstRow, Int32 lastRow);

        /// <summary>
        /// Gets the specified row of the worksheet.
        /// </summary>
        /// <param name="row">The worksheet's row.</param>
        IXLRow Row(Int32 row);

        /// <summary>
        /// Gets the specified column of the worksheet.
        /// </summary>
        /// <param name="column">The worksheet's column.</param>
        IXLColumn Column(Int32 column);

        /// <summary>
        /// Gets the specified column of the worksheet.
        /// </summary>
        /// <param name="column">The worksheet's column.</param>
        IXLColumn Column(String column);

        /// <summary>
        /// Gets the cell at the specified row and column.
        /// </summary>
        /// <param name="row">The cell's row.</param>
        /// <param name="column">The cell's column.</param>
        IXLCell Cell(int row, int column);

        /// <summary>Gets the cell at the specified address.</summary>
        /// <param name="cellAddressInRange">The cell address in the worksheet.</param>
        IXLCell Cell(string cellAddressInRange);

        /// <summary>
        /// Gets the cell at the specified row and column.
        /// </summary>
        /// <param name="row">The cell's row.</param>
        /// <param name="column">The cell's column.</param>
        IXLCell Cell(int row, string column);

        /// <summary>Gets the cell at the specified address.</summary>
        /// <param name="cellAddressInRange">The cell address in the worksheet.</param>
        IXLCell Cell(IXLAddress cellAddressInRange);

        /// <summary>
        /// Returns the specified range.
        /// </summary>
        /// <param name="rangeAddress">The range boundaries.</param>
        IXLRange Range(IXLRangeAddress rangeAddress);

        /// <summary>Returns the specified range.</summary>
        /// <para>e.g. Range("A1"), Range("A1:C2")</para>
        /// <param name="rangeAddress">The range boundaries.</param>
        IXLRange Range(string rangeAddress);

        /// <summary>Returns the specified range.</summary>
        /// <param name="firstCell">The first cell in the range.</param>
        /// <param name="lastCell"> The last cell in the range.</param>
        IXLRange Range(IXLCell firstCell, IXLCell lastCell);

        /// <summary>Returns the specified range.</summary>
        /// <param name="firstCellAddress">The first cell address in the worksheet.</param>
        /// <param name="lastCellAddress"> The last cell address in the worksheet.</param>
        IXLRange Range(string firstCellAddress, string lastCellAddress);

        /// <summary>Returns the specified range.</summary>
        /// <param name="firstCellAddress">The first cell address in the worksheet.</param>
        /// <param name="lastCellAddress"> The last cell address in the worksheet.</param>
        IXLRange Range(IXLAddress firstCellAddress, IXLAddress lastCellAddress);

        /// <summary>Returns a collection of ranges, separated by commas.</summary>
        /// <para>e.g. Ranges("A1"), Ranges("A1:C2"), Ranges("A1:B2,D1:D4")</para>
        /// <param name="ranges">The ranges to return.</param>
        IXLRanges Ranges(string ranges);

        /// <summary>Returns the specified range.</summary>
        /// <param name="firstCellRow">   The first cell's row of the range to return.</param>
        /// <param name="firstCellColumn">The first cell's column of the range to return.</param>
        /// <param name="lastCellRow">    The last cell's row of the range to return.</param>
        /// <param name="lastCellColumn"> The last cell's column of the range to return.</param>
        /// <returns>.</returns>
        IXLRange Range(int firstCellRow, int firstCellColumn, int lastCellRow, int lastCellColumn);

        /// <summary>Gets the number of rows in this worksheet.</summary>
        int RowCount();

        /// <summary>Gets the number of columns in this worksheet.</summary>
        int ColumnCount();

        /// <summary>
        /// Collapses all outlined rows.
        /// </summary>
        IXLWorksheet CollapseRows();

        /// <summary>
        /// Collapses all outlined columns.
        /// </summary>
        IXLWorksheet CollapseColumns();

        /// <summary>
        /// Expands all outlined rows.
        /// </summary>
        IXLWorksheet ExpandRows();

        /// <summary>
        /// Expands all outlined columns.
        /// </summary>
        IXLWorksheet ExpandColumns();

        /// <summary>
        /// Collapses the outlined rows of the specified level.
        /// </summary>
        /// <param name="outlineLevel">The outline level.</param>
        IXLWorksheet CollapseRows(Int32 outlineLevel);

        /// <summary>
        /// Collapses the outlined columns of the specified level.
        /// </summary>
        /// <param name="outlineLevel">The outline level.</param>
        IXLWorksheet CollapseColumns(Int32 outlineLevel);

        /// <summary>
        /// Expands the outlined rows of the specified level.
        /// </summary>
        /// <param name="outlineLevel">The outline level.</param>
        IXLWorksheet ExpandRows(Int32 outlineLevel);

        /// <summary>
        /// Expands the outlined columns of the specified level.
        /// </summary>
        /// <param name="outlineLevel">The outline level.</param>
        IXLWorksheet ExpandColumns(Int32 outlineLevel);

        /// <summary>
        /// Deletes this worksheet.
        /// </summary>
        void Delete();

        /// <summary>
        /// Gets an object to manage this worksheet's named ranges.
        /// </summary>
        IXLNamedRanges NamedRanges { get; }

        /// <summary>
        /// Gets the specified named range.
        /// </summary>
        /// <param name="rangeName">Name of the range.</param>
        IXLNamedRange NamedRange(String rangeName);

        /// <summary>
        /// Gets an object to manage how the worksheet is going to displayed by Excel.
        /// </summary>
        IXLSheetView SheetView { get; }

        /// <summary>
        /// Gets the Excel table of the given index
        /// </summary>
        /// <param name="index">Index of the table to return</param>
        IXLTable Table(Int32 index);

        /// <summary>
        /// Gets the Excel table of the given name
        /// </summary>
        /// <param name="name">Name of the table to return</param>
        IXLTable Table(String name);

        /// <summary>
        /// Gets an object to manage this worksheet's Excel tables
        /// </summary>
        IXLTables Tables { get; }

        /// <summary>
        /// Copies the worksheet into a new worksheet with the specified name.
        /// </summary>
        /// <param name="newSheetName">The name of the new worksheet.</param>
        /// <returns>The new worksheet.</returns>
        IXLWorksheet CopyTo(String newSheetName);

        /// <summary>
        /// Copies the worksheet into a new worksheet with the specified name, inserted at the specified position.
        /// </summary>
        IXLWorksheet CopyTo(String newSheetName, Int32 position);

        /// <summary>
        /// Copies the worksheet into the specified workbook.
        /// </summary>
        IXLWorksheet CopyTo(XLWorkbook workbook);

        /// <summary>
        /// Copies the worksheet into the specified workbook under a new name.
        /// </summary>
        IXLWorksheet CopyTo(XLWorkbook workbook, String newSheetName);

        /// <summary>
        /// Copies the worksheet into the specified workbook under a new name, inserted at the specified position.
        /// </summary>
        IXLWorksheet CopyTo(XLWorkbook workbook, String newSheetName, Int32 position);

        /// <summary>
        /// Returns the range covering the used cells of the worksheet.
        /// </summary>
        IXLRange RangeUsed();

        [Obsolete("Use the overload with XLCellsUsedOptions")]
        IXLRange RangeUsed(bool includeFormats);

        /// <summary>
        /// Returns the range covering the cells considered "used" per the specified options.
        /// </summary>
        IXLRange RangeUsed(XLCellsUsedOptions options);

        /// <summary>
        /// Gets an object to manage this worksheet's data validation rules.
        /// </summary>
        IXLDataValidations DataValidations { get; }

        /// <summary>
        /// Gets or sets the visibility of this worksheet's tab.
        /// </summary>
        XLWorksheetVisibility Visibility { get; set; }

        /// <summary>
        /// Hides this worksheet (fluent; returns this worksheet).
        /// </summary>
        IXLWorksheet Hide();

        /// <summary>
        /// Unhides this worksheet (fluent; returns this worksheet).
        /// </summary>
        IXLWorksheet Unhide();

        // Sort criteria used by Sort()/SortLeftToRight() when called without arguments.
        IXLSortElements SortRows { get; }

        IXLSortElements SortColumns { get; }

        /// <summary>
        /// Sorts the worksheet and returns the sorted range.
        /// </summary>
        IXLRange Sort();

        /// <summary>
        /// Sorts the worksheet by the specified columns (e.g. "A, C DESC") and returns the sorted range.
        /// </summary>
        IXLRange Sort(String columnsToSortBy, XLSortOrder sortOrder = XLSortOrder.Ascending, Boolean matchCase = false, Boolean ignoreBlanks = true);

        /// <summary>
        /// Sorts the worksheet by the specified column number and returns the sorted range.
        /// </summary>
        IXLRange Sort(Int32 columnToSortBy, XLSortOrder sortOrder = XLSortOrder.Ascending, Boolean matchCase = false, Boolean ignoreBlanks = true);

        /// <summary>
        /// Sorts the worksheet left to right (by rows) and returns the sorted range.
        /// </summary>
        IXLRange SortLeftToRight(XLSortOrder sortOrder = XLSortOrder.Ascending, Boolean matchCase = false, Boolean ignoreBlanks = true);

        //IXLCharts Charts { get; }

        // Sheet-view display toggles (mirrored by the fluent Set* methods below).
        Boolean ShowFormulas { get; set; }

        Boolean ShowGridLines { get; set; }

        Boolean ShowOutlineSymbols { get; set; }

        Boolean ShowRowColHeaders { get; set; }

        Boolean ShowRuler { get; set; }

        Boolean ShowWhiteSpace { get; set; }

        Boolean ShowZeros { get; set; }

        // Fluent setters for the display toggles above; each returns this worksheet.
        IXLWorksheet SetShowFormulas(); IXLWorksheet SetShowFormulas(Boolean value);

        IXLWorksheet SetShowGridLines(); IXLWorksheet SetShowGridLines(Boolean value);

        IXLWorksheet SetShowOutlineSymbols(); IXLWorksheet SetShowOutlineSymbols(Boolean value);

        IXLWorksheet SetShowRowColHeaders(); IXLWorksheet SetShowRowColHeaders(Boolean value);

        IXLWorksheet SetShowRuler(); IXLWorksheet SetShowRuler(Boolean value);

        IXLWorksheet SetShowWhiteSpace(); IXLWorksheet SetShowWhiteSpace(Boolean value);

        IXLWorksheet SetShowZeros(); IXLWorksheet SetShowZeros(Boolean value);

        /// <summary>
        /// Gets or sets the color of this worksheet's tab.
        /// </summary>
        XLColor TabColor { get; set; }

        IXLWorksheet SetTabColor(XLColor color);

        /// <summary>
        /// Gets or sets whether this worksheet's tab is selected.
        /// </summary>
        Boolean TabSelected { get; set; }

        /// <summary>
        /// Gets or sets whether this worksheet's tab is the active one.
        /// </summary>
        Boolean TabActive { get; set; }

        IXLWorksheet SetTabSelected(); IXLWorksheet SetTabSelected(Boolean value);

        IXLWorksheet SetTabActive(); IXLWorksheet SetTabActive(Boolean value);

        /// <summary>
        /// Gets the pivot table of the given name.
        /// </summary>
        /// <param name="name">Name of the pivot table to return.</param>
        IXLPivotTable PivotTable(String name);

        /// <summary>
        /// Gets an object to manage this worksheet's pivot tables.
        /// </summary>
        IXLPivotTables PivotTables { get; }

        /// <summary>
        /// Gets or sets whether this worksheet is displayed right-to-left.
        /// </summary>
        Boolean RightToLeft { get; set; }

        IXLWorksheet SetRightToLeft(); IXLWorksheet SetRightToLeft(Boolean value);

        /// <summary>
        /// Gets this worksheet's auto filter.
        /// </summary>
        IXLAutoFilter AutoFilter { get; }

        [Obsolete("Use the overload with XLCellsUsedOptions")]
        IXLRows RowsUsed(Boolean includeFormats, Func<IXLRow, Boolean> predicate = null);

        /// <summary>
        /// Gets the rows considered "used" per the specified options, optionally filtered by a predicate.
        /// </summary>
        IXLRows RowsUsed(XLCellsUsedOptions options = XLCellsUsedOptions.AllContents, Func<IXLRow, Boolean> predicate = null);

        IXLRows RowsUsed(Func<IXLRow, Boolean> predicate);

        [Obsolete("Use the overload with XLCellsUsedOptions")]
        IXLColumns ColumnsUsed(Boolean includeFormats, Func<IXLColumn, Boolean> predicate = null);

        /// <summary>
        /// Gets the columns considered "used" per the specified options, optionally filtered by a predicate.
        /// </summary>
        IXLColumns ColumnsUsed(XLCellsUsedOptions options = XLCellsUsedOptions.AllContents, Func<IXLColumn, Boolean> predicate = null);

        IXLColumns ColumnsUsed(Func<IXLColumn, Boolean> predicate);

        /// <summary>
        /// Gets the merged ranges of this worksheet.
        /// </summary>
        IXLRanges MergedRanges { get; }

        /// <summary>
        /// Gets an object to manage this worksheet's conditional formats.
        /// </summary>
        IXLConditionalFormats ConditionalFormats { get; }

        /// <summary>
        /// Gets an object to manage this worksheet's sparkline groups.
        /// </summary>
        IXLSparklineGroups SparklineGroups { get; }

        /// <summary>
        /// Gets the ranges currently selected in this worksheet.
        /// </summary>
        IXLRanges SelectedRanges { get; }

        /// <summary>
        /// Gets or sets the active cell of this worksheet.
        /// </summary>
        IXLCell ActiveCell { get; set; }

        /// <summary>
        /// Evaluates the given formula expression and returns its result.
        /// </summary>
        /// <param name="expression">The formula expression to evaluate.</param>
        Object Evaluate(String expression);

        /// <summary>
        /// Force recalculation of all cell formulas.
        /// </summary>
        void RecalculateAllFormulas();

        // NOTE(review): presumably the default author used for worksheet annotations
        // such as comments — confirm against ClosedXML documentation.
        String Author { get; set; }

        /// <summary>
        /// Gets an object to manage this worksheet's pictures.
        /// </summary>
        IXLPictures Pictures { get; }

        /// <summary>
        /// Gets the picture of the given name.
        /// </summary>
        /// <param name="pictureName">Name of the picture to return.</param>
        IXLPicture Picture(String pictureName);

        // AddPicture overloads: add a picture to the worksheet from a stream,
        // a bitmap, or an image file path, optionally specifying the picture
        // format and/or name.
        IXLPicture AddPicture(Stream stream);

        IXLPicture AddPicture(Stream stream, String name);

        IXLPicture AddPicture(Stream stream, XLPictureFormat format);

        IXLPicture AddPicture(Stream stream, XLPictureFormat format, String name);

        IXLPicture AddPicture(Bitmap bitmap);

        IXLPicture AddPicture(Bitmap bitmap, String name);

        IXLPicture AddPicture(String imageFile);

        IXLPicture AddPicture(String imageFile, String name);
    }
}
| |
/// This code was generated by
/// \ / _ _ _| _ _
/// | (_)\/(_)(_|\/| |(/_ v1.0.0
/// / /
using System;
using System.Collections.Generic;
using Twilio.Base;
using Twilio.Converters;
namespace Twilio.Rest.Preview.Understand.Assistant.FieldType
{
/// <summary>
/// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
/// currently do not have developer preview access, please contact [email protected].
///
/// FetchFieldValueOptions
/// </summary>
/// <summary>
/// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
/// currently do not have developer preview access, please contact [email protected].
///
/// Options for fetching a single FieldValue resource by its path identifiers.
/// </summary>
public class FetchFieldValueOptions : IOptions<FieldValueResource>
{
    /// <summary>
    /// The assistant_sid identifying the owning Assistant.
    /// </summary>
    public string PathAssistantSid { get; }

    /// <summary>
    /// The field_type_sid identifying the owning FieldType.
    /// </summary>
    public string PathFieldTypeSid { get; }

    /// <summary>
    /// The sid of the FieldValue to fetch.
    /// </summary>
    public string PathSid { get; }

    /// <summary>
    /// Construct a new FetchFieldValueOptions.
    /// </summary>
    /// <param name="pathAssistantSid"> The assistant_sid </param>
    /// <param name="pathFieldTypeSid"> The field_type_sid </param>
    /// <param name="pathSid"> The sid </param>
    public FetchFieldValueOptions(string pathAssistantSid, string pathFieldTypeSid, string pathSid)
    {
        PathAssistantSid = pathAssistantSid;
        PathFieldTypeSid = pathFieldTypeSid;
        PathSid = pathSid;
    }

    /// <summary>
    /// Generate the necessary request parameters. Fetch carries no body
    /// parameters (the identifiers travel in the URL path), so the list is empty.
    /// </summary>
    public List<KeyValuePair<string, string>> GetParams()
    {
        return new List<KeyValuePair<string, string>>();
    }
}
/// <summary>
/// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
/// currently do not have developer preview access, please contact [email protected].
///
/// ReadFieldValueOptions
/// </summary>
/// <summary>
/// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
/// currently do not have developer preview access, please contact [email protected].
///
/// Options for listing FieldValue resources, optionally filtered by language.
/// </summary>
public class ReadFieldValueOptions : ReadOptions<FieldValueResource>
{
    /// <summary>
    /// The assistant_sid identifying the owning Assistant.
    /// </summary>
    public string PathAssistantSid { get; }

    /// <summary>
    /// The field_type_sid identifying the owning FieldType.
    /// </summary>
    public string PathFieldTypeSid { get; }

    /// <summary>
    /// An ISO language-country string of the value. For example: en-US
    /// </summary>
    public string Language { get; set; }

    /// <summary>
    /// Construct a new ReadFieldValueOptions.
    /// </summary>
    /// <param name="pathAssistantSid"> The assistant_sid </param>
    /// <param name="pathFieldTypeSid"> The field_type_sid </param>
    public ReadFieldValueOptions(string pathAssistantSid, string pathFieldTypeSid)
    {
        PathAssistantSid = pathAssistantSid;
        PathFieldTypeSid = pathFieldTypeSid;
    }

    /// <summary>
    /// Generate the necessary request parameters, including only the
    /// optional filters that were actually supplied.
    /// </summary>
    public override List<KeyValuePair<string, string>> GetParams()
    {
        var parameters = new List<KeyValuePair<string, string>>();

        if (Language != null)
        {
            parameters.Add(new KeyValuePair<string, string>("Language", Language));
        }

        // PageSize comes from the ReadOptions<T> base class.
        if (PageSize != null)
        {
            parameters.Add(new KeyValuePair<string, string>("PageSize", PageSize.ToString()));
        }

        return parameters;
    }
}
/// <summary>
/// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
/// currently do not have developer preview access, please contact [email protected].
///
/// CreateFieldValueOptions
/// </summary>
/// <summary>
/// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
/// currently do not have developer preview access, please contact [email protected].
///
/// Options for creating a new FieldValue resource under a FieldType.
/// </summary>
public class CreateFieldValueOptions : IOptions<FieldValueResource>
{
    /// <summary>
    /// The assistant_sid identifying the owning Assistant.
    /// </summary>
    public string PathAssistantSid { get; }

    /// <summary>
    /// The field_type_sid identifying the owning FieldType.
    /// </summary>
    public string PathFieldTypeSid { get; }

    /// <summary>
    /// An ISO language-country string of the value.
    /// </summary>
    public string Language { get; }

    /// <summary>
    /// A user-provided string that uniquely identifies this resource as an alternative to the sid. Unique up to 64 characters long.
    /// </summary>
    public string Value { get; }

    /// <summary>
    /// A value that indicates this field value is a synonym of. Empty if the value is not a synonym.
    /// </summary>
    public string SynonymOf { get; set; }

    /// <summary>
    /// Construct a new CreateFieldValueOptions.
    /// </summary>
    /// <param name="pathAssistantSid"> The assistant_sid </param>
    /// <param name="pathFieldTypeSid"> The field_type_sid </param>
    /// <param name="language"> An ISO language-country string of the value. </param>
    /// <param name="value"> A user-provided string that uniquely identifies this resource as an alternative to the sid.
    ///                      Unique up to 64 characters long. </param>
    public CreateFieldValueOptions(string pathAssistantSid, string pathFieldTypeSid, string language, string value)
    {
        PathAssistantSid = pathAssistantSid;
        PathFieldTypeSid = pathFieldTypeSid;
        Language = language;
        Value = value;
    }

    /// <summary>
    /// Generate the necessary request parameters, emitting only the values
    /// that were actually supplied.
    /// </summary>
    public List<KeyValuePair<string, string>> GetParams()
    {
        var p = new List<KeyValuePair<string, string>>();
        if (Language != null)
        {
            p.Add(new KeyValuePair<string, string>("Language", Language));
        }
        if (Value != null)
        {
            p.Add(new KeyValuePair<string, string>("Value", Value));
        }
        if (SynonymOf != null)
        {
            // SynonymOf is already a string; the redundant ToString() call
            // (inconsistent with Language/Value above) has been removed.
            p.Add(new KeyValuePair<string, string>("SynonymOf", SynonymOf));
        }
        return p;
    }
}
/// <summary>
/// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
/// currently do not have developer preview access, please contact [email protected].
///
/// DeleteFieldValueOptions
/// </summary>
/// <summary>
/// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
/// currently do not have developer preview access, please contact [email protected].
///
/// Options for deleting a single FieldValue resource by its path identifiers.
/// </summary>
public class DeleteFieldValueOptions : IOptions<FieldValueResource>
{
    /// <summary>
    /// The assistant_sid identifying the owning Assistant.
    /// </summary>
    public string PathAssistantSid { get; }

    /// <summary>
    /// The field_type_sid identifying the owning FieldType.
    /// </summary>
    public string PathFieldTypeSid { get; }

    /// <summary>
    /// The sid of the FieldValue to delete.
    /// </summary>
    public string PathSid { get; }

    /// <summary>
    /// Construct a new DeleteFieldValueOptions.
    /// </summary>
    /// <param name="pathAssistantSid"> The assistant_sid </param>
    /// <param name="pathFieldTypeSid"> The field_type_sid </param>
    /// <param name="pathSid"> The sid </param>
    public DeleteFieldValueOptions(string pathAssistantSid, string pathFieldTypeSid, string pathSid)
    {
        PathAssistantSid = pathAssistantSid;
        PathFieldTypeSid = pathFieldTypeSid;
        PathSid = pathSid;
    }

    /// <summary>
    /// Generate the necessary request parameters. Delete carries no body
    /// parameters (the identifiers travel in the URL path), so the list is empty.
    /// </summary>
    public List<KeyValuePair<string, string>> GetParams()
    {
        return new List<KeyValuePair<string, string>>();
    }
}
}
| |
/*===============================================================================================
User Created Sound Example
Copyright (c), Firelight Technologies Pty, Ltd 2004-2011.
This example shows how to create a sound with data filled by the user.
It shows a user created static sample, followed by a user created stream.
The former allocates all memory needed for the sound and is played back as a static sample,
while the latter streams the data in chunks as it plays, using far less memory.
===============================================================================================*/
using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using System.Data;
using System.Runtime.InteropServices;
namespace usercreatedsound
{
/// <summary>
/// WinForms demo that creates an FMOD sound whose PCM data is generated on the
/// fly by user callbacks, playable either as a runtime-decoded stream or as a
/// fully pre-buffered static sample.
/// </summary>
public class UserCreatedSound : System.Windows.Forms.Form
{
    // Core FMOD objects; created lazily and released in Dispose.
    private FMOD.System system = null;
    private FMOD.Sound sound = null;
    private FMOD.Channel channel = null;
    // Describes the user-created sound (format, length, callbacks).
    private FMOD.CREATESOUNDEXINFO createsoundexinfo = new FMOD.CREATESOUNDEXINFO();
    // Guards against creating the sound twice when a play button is clicked again.
    private bool soundcreated = false;
    // OPENUSER = data supplied via callbacks; LOOP_NORMAL = loop forever.
    FMOD.MODE mode = (FMOD.MODE._2D | FMOD.MODE.DEFAULT | FMOD.MODE.OPENUSER | FMOD.MODE.LOOP_NORMAL | FMOD.MODE.HARDWARE);
    private System.Windows.Forms.Label label;
    private System.Windows.Forms.Button button1;
    private System.Windows.Forms.Button button2;
    private System.Windows.Forms.Button exit_button;
    private System.Windows.Forms.Button pause;
    private System.Windows.Forms.Timer timer;
    private System.Windows.Forms.StatusBar statusBar;
    private System.ComponentModel.IContainer components;

    public UserCreatedSound()
    {
        InitializeComponent();
    }

    /// <summary>
    /// Releases FMOD resources (sound before system, as FMOD requires) and
    /// then the designer components.
    /// </summary>
    protected override void Dispose( bool disposing )
    {
        if( disposing )
        {
            FMOD.RESULT result;
            /*
                Shut down
            */
            if (sound != null)
            {
                result = sound.release();
                ERRCHECK(result);
            }
            if (system != null)
            {
                result = system.close();
                ERRCHECK(result);
                result = system.release();
                ERRCHECK(result);
            }
            if (components != null)
            {
                components.Dispose();
            }
        }
        base.Dispose( disposing );
    }

    #region Windows Form Designer generated code
    /// <summary>
    /// Required method for Designer support - do not modify
    /// the contents of this method with the code editor.
    /// </summary>
    private void InitializeComponent()
    {
        this.components = new System.ComponentModel.Container();
        this.label = new System.Windows.Forms.Label();
        this.button1 = new System.Windows.Forms.Button();
        this.button2 = new System.Windows.Forms.Button();
        this.exit_button = new System.Windows.Forms.Button();
        this.pause = new System.Windows.Forms.Button();
        this.timer = new System.Windows.Forms.Timer(this.components);
        this.statusBar = new System.Windows.Forms.StatusBar();
        this.SuspendLayout();
        //
        // label
        //
        this.label.Location = new System.Drawing.Point(8, 8);
        this.label.Name = "label";
        this.label.Size = new System.Drawing.Size(264, 32);
        this.label.TabIndex = 6;
        this.label.Text = "Copyright (c) Firelight Technologies 2004-2011";
        this.label.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
        //
        // button1
        //
        this.button1.Location = new System.Drawing.Point(8, 48);
        this.button1.Name = "button1";
        this.button1.Size = new System.Drawing.Size(264, 40);
        this.button1.TabIndex = 7;
        this.button1.Text = "Play as a runtime decoded stream";
        this.button1.Click += new System.EventHandler(this.button1_Click);
        //
        // button2
        //
        this.button2.Location = new System.Drawing.Point(8, 96);
        this.button2.Name = "button2";
        this.button2.Size = new System.Drawing.Size(264, 40);
        this.button2.TabIndex = 8;
        this.button2.Text = "Play as a static in memory sample";
        this.button2.Click += new System.EventHandler(this.button2_Click);
        //
        // exit_button
        //
        this.exit_button.Location = new System.Drawing.Point(104, 192);
        this.exit_button.Name = "exit_button";
        this.exit_button.Size = new System.Drawing.Size(72, 24);
        this.exit_button.TabIndex = 13;
        this.exit_button.Text = "Exit";
        this.exit_button.Click += new System.EventHandler(this.exit_button_Click);
        //
        // pause
        //
        this.pause.Location = new System.Drawing.Point(8, 144);
        this.pause.Name = "pause";
        this.pause.Size = new System.Drawing.Size(264, 40);
        this.pause.TabIndex = 14;
        this.pause.Text = "Pause / Resume";
        this.pause.Click += new System.EventHandler(this.pause_Click);
        //
        // timer
        //
        this.timer.Enabled = true;
        this.timer.Interval = 10;
        this.timer.Tick += new System.EventHandler(this.timer_Tick);
        //
        // statusBar
        //
        this.statusBar.Location = new System.Drawing.Point(0, 219);
        this.statusBar.Name = "statusBar";
        this.statusBar.Size = new System.Drawing.Size(280, 24);
        this.statusBar.TabIndex = 15;
        //
        // UserCreatedSound
        //
        this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
        this.ClientSize = new System.Drawing.Size(280, 243);
        this.Controls.Add(this.statusBar);
        this.Controls.Add(this.pause);
        this.Controls.Add(this.exit_button);
        this.Controls.Add(this.button2);
        this.Controls.Add(this.button1);
        this.Controls.Add(this.label);
        this.Name = "UserCreatedSound";
        this.Text = "User Created Sound Example";
        this.Load += new System.EventHandler(this.UserCreatedSound_Load);
        this.ResumeLayout(false);
    }
    #endregion

    [STAThread]
    static void Main()
    {
        Application.Run(new UserCreatedSound());
    }

    // Delegates are held in fields so the GC cannot collect them while
    // native FMOD code still holds pointers to them.
    private FMOD.SOUND_PCMREADCALLBACK pcmreadcallback = new FMOD.SOUND_PCMREADCALLBACK(PCMREADCALLBACK);
    private FMOD.SOUND_PCMSETPOSCALLBACK pcmsetposcallback = new FMOD.SOUND_PCMSETPOSCALLBACK(PCMSETPOSCALLBACK);

    /// <summary>
    /// Creates and initializes the FMOD system, verifies the DLL version, and
    /// fills in the CREATESOUNDEXINFO that describes the user-created sound.
    /// </summary>
    private void UserCreatedSound_Load(object sender, System.EventArgs e)
    {
        uint version = 0;
        FMOD.RESULT result;
        uint channels = 2, frequency = 44100;
        /*
            Create a System object and initialize.
        */
        result = FMOD.Factory.System_Create(ref system);
        ERRCHECK(result);
        result = system.getVersion(ref version);
        ERRCHECK(result);
        if (version < FMOD.VERSION.number)
        {
            MessageBox.Show("Error!  You are using an old version of FMOD " + version.ToString("X") + ".  This program requires " + FMOD.VERSION.number.ToString("X") + ".");
            Application.Exit();
        }
        result = system.init(32, FMOD.INITFLAGS.NORMAL, (IntPtr)null);
        ERRCHECK(result);
        createsoundexinfo.cbsize = Marshal.SizeOf(createsoundexinfo);
        createsoundexinfo.fileoffset = 0;
        // 16-bit (2 bytes) stereo; final factor presumably the length in
        // seconds of the generated buffer -- TODO confirm against FMOD docs.
        createsoundexinfo.length = frequency * channels * 2 * 2;
        createsoundexinfo.numchannels = (int)channels;
        createsoundexinfo.defaultfrequency = (int)frequency;
        createsoundexinfo.format = FMOD.SOUND_FORMAT.PCM16;
        createsoundexinfo.pcmreadcallback = pcmreadcallback;
        createsoundexinfo.pcmsetposcallback = pcmsetposcallback;
        createsoundexinfo.dlsname = null;
    }

    /// <summary>
    /// Plays the user sound as a runtime-decoded stream (PCM pulled in
    /// chunks via the callback as playback progresses).
    /// </summary>
    private void button1_Click(object sender, System.EventArgs e)
    {
        FMOD.RESULT result;
        if(!soundcreated)
        {
            result = system.createSound(
                (string)null,
                (mode | FMOD.MODE.CREATESTREAM),
                ref createsoundexinfo,
                ref sound);
            ERRCHECK(result);
            soundcreated = true;
        }
        system.playSound(FMOD.CHANNELINDEX.FREE, sound, false, ref channel);
    }

    /// <summary>
    /// Plays the user sound as a static in-memory sample (all PCM generated
    /// up front at createSound time).
    /// </summary>
    private void button2_Click(object sender, System.EventArgs e)
    {
        FMOD.RESULT result;
        if(!soundcreated)
        {
            result = system.createSound(
                (string)null,
                mode,
                ref createsoundexinfo,
                ref sound);
            ERRCHECK(result);
            soundcreated = true;
        }
        system.playSound(FMOD.CHANNELINDEX.FREE, sound, false, ref channel);
    }

    /// <summary>
    /// Toggles the pause state of the currently playing channel, if any.
    /// </summary>
    private void pause_Click(object sender, System.EventArgs e)
    {
        bool paused = false;
        if (channel != null)
        {
            channel.getPaused(ref paused);
            channel.setPaused(!paused);
        }
    }

    private void exit_button_Click(object sender, System.EventArgs e)
    {
        Application.Exit();
    }

    /// <summary>
    /// Updates the status bar with play position/length and pumps the FMOD
    /// system. Benign "channel gone" errors are tolerated because the channel
    /// handle can be invalidated or stolen between ticks.
    /// </summary>
    private void timer_Tick(object sender, System.EventArgs e)
    {
        if (channel != null)
        {
            FMOD.RESULT result;
            uint ms = 0;
            uint lenms = 0;
            bool playing = false;
            bool paused = false;
            result = channel.isPlaying(ref playing);
            if ((result != FMOD.RESULT.OK) && (result != FMOD.RESULT.ERR_INVALID_HANDLE) && (result != FMOD.RESULT.ERR_CHANNEL_STOLEN))
            {
                ERRCHECK(result);
            }
            result = channel.getPaused(ref paused);
            if ((result != FMOD.RESULT.OK) && (result != FMOD.RESULT.ERR_INVALID_HANDLE) && (result != FMOD.RESULT.ERR_CHANNEL_STOLEN))
            {
                ERRCHECK(result);
            }
            result = channel.getPosition(ref ms, FMOD.TIMEUNIT.MS);
            if ((result != FMOD.RESULT.OK) && (result != FMOD.RESULT.ERR_INVALID_HANDLE) && (result != FMOD.RESULT.ERR_CHANNEL_STOLEN))
            {
                ERRCHECK(result);
            }
            result = sound.getLength( ref lenms, FMOD.TIMEUNIT.MS);
            if ((result != FMOD.RESULT.OK) && (result != FMOD.RESULT.ERR_INVALID_HANDLE) && (result != FMOD.RESULT.ERR_CHANNEL_STOLEN))
            {
                ERRCHECK(result);
            }
            statusBar.Text = "Time " + (ms /1000 / 60) + ":" + (ms / 1000 % 60) + ":" + (ms / 10 % 100) + "/" + (lenms / 1000 / 60) + ":" + (lenms / 1000 % 60) + ":" + (lenms / 10 % 100) + (paused ? " Paused " : playing ? " Playing" : " Stopped");
        }
        if (system != null)
        {
            system.update();
        }
    }

    /// <summary>
    /// Fails fast on any unexpected FMOD error, showing the error string
    /// before terminating the process.
    /// </summary>
    private void ERRCHECK(FMOD.RESULT result)
    {
        if (result != FMOD.RESULT.OK)
        {
            timer.Stop();
            MessageBox.Show("FMOD error! " + result + " - " + FMOD.Error.String(result));
            Environment.Exit(-1);
        }
    }

    // Oscillator state shared across callback invocations; static because the
    // callbacks below must be static to be passed to native code.
    private static float t1 = 0, t2 = 0;    // time
    private static float v1 = 0, v2 = 0;    // velocity

    /// <summary>
    /// PCM read callback: fills the destination buffer with interleaved
    /// 16-bit stereo samples of two drifting sine tones.
    /// </summary>
    private static FMOD.RESULT PCMREADCALLBACK(IntPtr soundraw, IntPtr data, uint datalen)
    {
        unsafe
        {
            uint count;
            short *stereo16bitbuffer = (short *)data.ToPointer();
            for (count=0; count < (datalen>>2); count++)        // >>2 = 16bit stereo (4 bytes per sample)
            {
                *stereo16bitbuffer++ = (short)(Math.Sin(t1) * 32767.0f);    // left channel
                *stereo16bitbuffer++ = (short)(Math.Sin(t2) * 32767.0f);    // right channel
                t1 += 0.01f + v1;
                t2 += 0.0142f + v2;
                v1 += (float)(Math.Sin(t1) * 0.002f);
                v2 += (float)(Math.Sin(t2) * 0.002f);
            }
        }
        return FMOD.RESULT.OK;
    }

    /// <summary>
    /// PCM set-position callback: a no-op here, since the generated signal
    /// has no meaningful seek position.
    /// </summary>
    private static FMOD.RESULT PCMSETPOSCALLBACK(IntPtr soundraw, int subsound, uint pcmoffset, FMOD.TIMEUNIT postype)
    {
        /*
            This is useful if the user calls Sound::setTime or Sound::setPosition and you want to seek your data accordingly.
        */
        return FMOD.RESULT.OK;
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Azure.Management.Dns.Models;
using Microsoft.Azure.Management.Network;
using Microsoft.Azure.Management.Network.Models;
using Microsoft.Azure.Management.Resources.Models;
using Microsoft.Rest.Azure;
using Microsoft.Rest.ClientRuntime.Azure.TestFramework;
using Xunit;
using SubResource = Microsoft.Azure.Management.Dns.Models.SubResource;
namespace Microsoft.Azure.Management.Dns.Testing
{
public class RecordSetScenarioTests
{
/// <summary>
/// Shared state for one record-set scenario run: the zone/record names,
/// the clients and recording handlers used to talk to ARM, and (for
/// private zones) the virtual networks linked to the zone.
/// </summary>
public class SingleRecordSetTestContext
{
    public string ZoneName { get; set; }
    public string RecordSetName { get; set; }
    public string Location { get; set; }
    public ResourceGroup ResourceGroup { get; set; }
    public DnsManagementClient DnsClient { get; set; }
    // Only populated for the private-zone context.
    public NetworkManagementClient NetworkClient { get; set; }
    public RecordedDelegatingHandler DnsHandler { get; set; }
    public RecordedDelegatingHandler ResourcesHandler { get; set; }
    public RecordedDelegatingHandler NetworkHandler { get; set; }
    // NOTE(review): property name carries a typo ("Registation"); renaming
    // would break callers, so it is kept as-is.
    public IList<VirtualNetwork> RegistationVirtualNetworks { get; set; }
    public IList<VirtualNetwork> ResolutionVirtualNetworks { get; set; }
    // Fresh skeleton for the context's default record-set name.
    public RecordSet TestRecordSkeleton
        => this.GetNewTestRecordSkeleton(this.RecordSetName);
    /// <summary>
    /// Builds a minimal RecordSet (no records, null etag) to use as the
    /// starting point of create/update calls.
    /// </summary>
    /// <param name="recordSetName">Name for the record set.</param>
    /// <param name="ttl">TTL in seconds; arbitrary non-default test value.</param>
    public RecordSet GetNewTestRecordSkeleton(
        string recordSetName,
        uint ttl = 42)
    {
        return new RecordSet(name: recordSetName)
        {
            Etag = null,
            TTL = ttl,
        };
    }
}
/// <summary>
/// Builds the two scenario contexts every test runs against: one backed by
/// a public DNS zone and one backed by a private DNS zone.
/// </summary>
private static IEnumerable<SingleRecordSetTestContext> SetupSingleRecordSetTestContexts(
    MockContext context)
{
    var contexts = new List<SingleRecordSetTestContext>
    {
        SetupSingleRecordSetTestForPublicZone(context),
        SetupSingleRecordSetTestForPrivateZone(context),
    };
    return contexts;
}
/// <summary>
/// Creates a resource group and a public DNS zone with generated names,
/// returning a context wired with recording handlers and a DNS client.
/// </summary>
private static SingleRecordSetTestContext SetupSingleRecordSetTestForPublicZone(
    MockContext context)
{
    var testContext = new SingleRecordSetTestContext
    {
        // Recording handlers capture HTTP traffic for playback-mode runs.
        ResourcesHandler = new RecordedDelegatingHandler
        {
            StatusCodeToReturn = System.Net.HttpStatusCode.OK
        },
        DnsHandler = new RecordedDelegatingHandler
        {
            StatusCodeToReturn = System.Net.HttpStatusCode.OK
        },
    };

    testContext.DnsClient = ResourceGroupHelper.GetDnsClient(
        context, testContext.DnsHandler);
    var resourceManagementClient = ResourceGroupHelper.GetResourcesClient(
        context, testContext.ResourcesHandler);

    // Generated names keep concurrent test runs from colliding.
    testContext.ZoneName = TestUtilities.GenerateName("hydratest.dnszone.com");
    testContext.RecordSetName = TestUtilities.GenerateName("hydratestdnsrec");
    testContext.Location = ResourceGroupHelper.GetResourceLocation(
        resourceManagementClient, "microsoft.network/dnszones");
    testContext.ResourceGroup = ResourceGroupHelper.CreateResourceGroup(
        resourceManagementClient);

    ResourceGroupHelper.CreateZone(
        testContext.DnsClient,
        testContext.ZoneName,
        testContext.Location,
        testContext.ResourceGroup);
    return testContext;
}
/// <summary>
/// Creates a resource group, two virtual networks, and a private DNS zone
/// linked to them (one network for registration, one for resolution),
/// returning a fully wired test context.
/// </summary>
private static SingleRecordSetTestContext SetupSingleRecordSetTestForPrivateZone(
    MockContext context)
{
    var testContext = new SingleRecordSetTestContext
    {
        // Recording handlers capture HTTP traffic for playback-mode runs.
        ResourcesHandler = new RecordedDelegatingHandler
        {
            StatusCodeToReturn = System.Net.HttpStatusCode.OK
        },
        DnsHandler = new RecordedDelegatingHandler
        {
            StatusCodeToReturn = System.Net.HttpStatusCode.OK
        },
        NetworkHandler = new RecordedDelegatingHandler
        {
            StatusCodeToReturn = System.Net.HttpStatusCode.OK
        },
    };

    testContext.DnsClient = ResourceGroupHelper.GetDnsClient(
        context, testContext.DnsHandler);
    testContext.NetworkClient = ResourceGroupHelper.GetNetworkClient(
        context, testContext.NetworkHandler);
    var resourceManagementClient = ResourceGroupHelper.GetResourcesClient(
        context, testContext.ResourcesHandler);

    // Generated names keep concurrent test runs from colliding.
    testContext.ZoneName = TestUtilities.GenerateName("hydratest.dnszone.com");
    testContext.RecordSetName = TestUtilities.GenerateName("hydratestdnsrec");
    testContext.Location = ResourceGroupHelper.GetResourceLocation(
        resourceManagementClient, "microsoft.network/dnszones");
    testContext.ResourceGroup = ResourceGroupHelper.CreateResourceGroup(
        resourceManagementClient);

    testContext.RegistationVirtualNetworks = new List<VirtualNetwork>
    {
        ResourceGroupHelper.CreateVirtualNetwork(testContext.ResourceGroup.Name, testContext.NetworkClient)
    };
    testContext.ResolutionVirtualNetworks = new List<VirtualNetwork>
    {
        ResourceGroupHelper.CreateVirtualNetwork(testContext.ResourceGroup.Name, testContext.NetworkClient)
    };

    // Link both sets of networks to the private zone by resource id.
    ResourceGroupHelper.CreatePrivateZone(
        testContext.DnsClient,
        testContext.ZoneName,
        testContext.Location,
        testContext.RegistationVirtualNetworks.Select(vNet => new SubResource { Id = vNet.Id }).ToList(),
        testContext.ResolutionVirtualNetworks.Select(vNet => new SubResource { Id = vNet.Id }).ToList(),
        testContext.ResourceGroup);
    return testContext;
}
[Fact]
public void CrudRecordSetFullCycle()
{
    // Full lifecycle: create -> get -> update (from create response) ->
    // get -> update (from get response) -> delete, for both public and
    // private zone contexts. Call order matters; each step feeds the next.
    using (
        MockContext context = MockContext.Start(this.GetType())
        )
    {
        var testContexts = SetupSingleRecordSetTestContexts(context);
        foreach (var testContext in testContexts)
        {
            var recordSetToBeCreated = testContext.TestRecordSkeleton;
            recordSetToBeCreated.ARecords = new List<ARecord>
            {
                new ARecord {Ipv4Address = "123.32.1.0"}
            };
            recordSetToBeCreated.TTL = 60;
            // Create the records clean, verify response
            var createResponse = testContext.DnsClient.RecordSets
                .CreateOrUpdate(
                    testContext.ResourceGroup.Name,
                    testContext.ZoneName,
                    testContext.RecordSetName,
                    RecordType.A,
                    ifMatch: null,
                    ifNoneMatch: null,
                    parameters: recordSetToBeCreated);
            Assert.True(
                TestHelpers.AreEqual(
                    recordSetToBeCreated,
                    createResponse,
                    ignoreEtag: true),
                "Response body of Create does not match expectations");
            // The service must assign a fresh etag on create.
            Assert.False(string.IsNullOrWhiteSpace(createResponse.Etag));
            // Retrieve the zone after create, verify response
            var getresponse = testContext.DnsClient.RecordSets.Get(
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                testContext.RecordSetName,
                RecordType.A);
            Assert.True(
                TestHelpers.AreEqual(
                    createResponse,
                    getresponse,
                    ignoreEtag: false),
                "Response body of Get does not match expectations");
            // Call Update on the object returned by Create (important distinction from Get below)
            Models.RecordSet createdRecordSet = createResponse;
            createdRecordSet.TTL = 120;
            createdRecordSet.Metadata = new Dictionary<string, string>
            {
                {"tag1", "value1"},
                {"tag2", "value2"}
            };
            createdRecordSet.ARecords = new List<ARecord>
            {
                new ARecord {Ipv4Address = "123.32.1.0"},
                new ARecord {Ipv4Address = "101.10.0.1"}
            };
            var updateResponse = testContext.DnsClient.RecordSets
                .CreateOrUpdate(
                    testContext.ResourceGroup.Name,
                    testContext.ZoneName,
                    testContext.RecordSetName,
                    RecordType.A,
                    ifMatch: null,
                    ifNoneMatch: null,
                    parameters: createdRecordSet);
            Assert.True(
                TestHelpers.AreEqual(
                    createdRecordSet,
                    updateResponse,
                    ignoreEtag: true),
                "Response body of Update does not match expectations");
            Assert.False(string.IsNullOrWhiteSpace(updateResponse.Etag));
            // Retrieve the records after create, verify response
            getresponse = testContext.DnsClient.RecordSets.Get(
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                testContext.RecordSetName,
                RecordType.A);
            Assert.True(
                TestHelpers.AreEqual(updateResponse, getresponse),
                "Response body of Get does not match expectations");
            // Call Update on the object returned by Get (important distinction from Create above)
            Models.RecordSet retrievedRecordSet = getresponse;
            retrievedRecordSet.TTL = 180;
            retrievedRecordSet.ARecords = new List<ARecord>
            {
                new ARecord {Ipv4Address = "123.32.1.0"},
                new ARecord {Ipv4Address = "101.10.0.1"},
                new ARecord {Ipv4Address = "22.33.44.55"},
            };
            updateResponse = testContext.DnsClient.RecordSets.CreateOrUpdate
            (
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                testContext.RecordSetName,
                RecordType.A,
                ifMatch: null,
                ifNoneMatch: null,
                parameters: retrievedRecordSet);
            Assert.True(
                TestHelpers.AreEqual(
                    retrievedRecordSet,
                    updateResponse,
                    ignoreEtag: true),
                "Response body of Update does not match expectations");
            Assert.False(string.IsNullOrWhiteSpace(updateResponse.Etag));
            // Delete the record set
            testContext.DnsClient.RecordSets.Delete(
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                testContext.RecordSetName,
                RecordType.A,
                ifMatch: null);
            // Delete the zone
            testContext.DnsClient.Zones.Delete(
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                ifMatch: null);
        }
    }
}
[Fact]
public void CreateGetA()
{
    // Populate the create request with two A records, then run the shared
    // create/get scenario for both public and private zones.
    Action<RecordSet> populateRecords = recordSet =>
    {
        recordSet.ARecords = new List<ARecord>
        {
            new ARecord {Ipv4Address = "120.63.230.220"},
            new ARecord {Ipv4Address = "4.3.2.1"},
        };
    };
    this.RecordSetCreateGet(RecordType.A, populateRecords, isPrivateZoneEnabled: true);
}
[Fact]
public void CreateGetAliasRecord()
{
    // Alias scenario: the TargetResource assignment is intentionally
    // disabled; the test currently exercises plain A records instead.
    //createParams.TargetResource = new SubResource("/subscriptions/726f8cd6-6459-4db4-8e6d-2cd2716904e2/resourceGroups/test/providers/Microsoft.Network/trafficManagerProfiles/testpp2");
    Action<RecordSet> populateRecords = recordSet =>
    {
        recordSet.ARecords = new List<ARecord>
        {
            new ARecord {Ipv4Address = "120.63.230.220"},
            new ARecord {Ipv4Address = "4.3.2.1"},
        };
    };
    this.RecordSetCreateGetAlias(RecordType.A, populateRecords);
}
[Fact]
public void CreateGetAaaa()
{
    // Populate the create request with two AAAA (IPv6) records.
    Action<RecordSet> populateRecords = recordSet =>
    {
        recordSet.AaaaRecords = new List<AaaaRecord>
        {
            new AaaaRecord {Ipv6Address = "0:0:0:0:0:ffff:783f:e6dc"},
            new AaaaRecord {Ipv6Address = "0:0:0:0:0:ffff:403:201"},
        };
    };
    this.RecordSetCreateGet(RecordType.AAAA, populateRecords, isPrivateZoneEnabled: true);
}
[Fact]
public void CreateGetMx()
{
    // Populate the create request with two MX records at different preferences.
    Action<RecordSet> populateRecords = recordSet =>
    {
        recordSet.MxRecords = new List<MxRecord>
        {
            new MxRecord {Exchange = "mail1.scsfsm.com", Preference = 1},
            new MxRecord {Exchange = "mail2.scsfsm.com", Preference = 2},
        };
    };
    this.RecordSetCreateGet(RecordType.MX, populateRecords, isPrivateZoneEnabled: true);
}
[Fact]
public void CreateGetNs()
{
    // Populate the create request with two NS records. NS record sets are
    // not supported in private zones, hence isPrivateZoneEnabled: false.
    Action<RecordSet> populateRecords = recordSet =>
    {
        recordSet.NsRecords = new List<NsRecord>
        {
            new NsRecord {Nsdname = "ns1.scsfsm.com"},
            new NsRecord {Nsdname = "ns2.scsfsm.com"},
        };
    };
    this.RecordSetCreateGet(RecordType.NS, populateRecords, isPrivateZoneEnabled: false);
}
[Fact]
public void CreateGetPtr()
{
    // Populate the create request with two PTR records.
    Action<RecordSet> populateRecords = recordSet =>
    {
        recordSet.PtrRecords = new List<PtrRecord>
        {
            new PtrRecord {Ptrdname = "www1.scsfsm.com"},
            new PtrRecord {Ptrdname = "www2.scsfsm.com"},
        };
    };
    this.RecordSetCreateGet(RecordType.PTR, populateRecords, isPrivateZoneEnabled: true);
}
[Fact]
public void CreateGetSrv()
{
    // Populate the create request with two SRV records covering distinct
    // priority/weight/port combinations.
    Action<RecordSet> populateRecords = recordSet =>
    {
        recordSet.SrvRecords = new List<SrvRecord>
        {
            new SrvRecord
            {
                Target = "bt2.scsfsm.com",
                Priority = 0,
                Weight = 2,
                Port = 44
            },
            new SrvRecord
            {
                Target = "bt1.scsfsm.com",
                Priority = 1,
                Weight = 1,
                Port = 45
            },
        };
    };
    this.RecordSetCreateGet(RecordType.SRV, populateRecords, isPrivateZoneEnabled: true);
}
[Fact]
public void CreateGetTxt()
{
    // Populate the create request with two single-string TXT records.
    Action<RecordSet> populateRecords = recordSet =>
    {
        recordSet.TxtRecords = new List<TxtRecord>
        {
            new TxtRecord {Value = new[] {"lorem"}.ToList()},
            new TxtRecord {Value = new[] {"ipsum"}.ToList()},
        };
    };
    this.RecordSetCreateGet(RecordType.TXT, populateRecords, isPrivateZoneEnabled: true);
}
[Fact]
public void CreateGetCaa()
{
    // Populate the create request with two CAA "issue" records.
    Action<RecordSet> populateRecords = recordSet =>
    {
        recordSet.CaaRecords = new List<CaaRecord>
        {
            new CaaRecord() { Flags = 0, Tag = "issue", Value = "contoso.com" },
            new CaaRecord() { Flags = 0, Tag = "issue", Value = "fabrikam.com" },
        };
    };
    this.RecordSetCreateGet(RecordType.CAA, populateRecords, isPrivateZoneEnabled: true);
}
[Fact]
public void CreateGetCname()
{
    // Populate the create request with a single CNAME record (a CNAME
    // record set holds exactly one record).
    Action<RecordSet> populateRecords = recordSet =>
    {
        recordSet.CnameRecord = new CnameRecord
        {
            Cname = "www.contoroso.com",
        };
    };
    this.RecordSetCreateGet(RecordType.CNAME, populateRecords, isPrivateZoneEnabled: true);
}
[Fact]
public void UpdateSoa()
{
    // The SOA record set is created implicitly with the zone; this test
    // fetches it, updates its timing fields, and verifies the round trip.
    using (
        MockContext context = MockContext.Start(this.GetType())
        )
    {
        var testContexts = SetupSingleRecordSetTestContexts(context);
        foreach (var testContext in testContexts)
        {
            // SOA for the zone should already exist
            var getresponse = testContext.DnsClient.RecordSets.Get(
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                "@",
                RecordType.SOA);
            RecordSet soaResource = getresponse;
            Assert.NotNull(soaResource);
            Assert.NotNull(soaResource.SoaRecord);
            // Arbitrary distinct values so a missed update is detectable.
            soaResource.SoaRecord.ExpireTime = 123;
            soaResource.SoaRecord.MinimumTtl = 1234;
            soaResource.SoaRecord.RefreshTime = 12345;
            soaResource.SoaRecord.RetryTime = 123456;
            var updateParameters = soaResource;
            var updateResponse = testContext.DnsClient.RecordSets
                .CreateOrUpdate(
                    testContext.ResourceGroup.Name,
                    testContext.ZoneName,
                    "@",
                    RecordType.SOA,
                    ifMatch: null,
                    ifNoneMatch: null,
                    parameters: updateParameters);
            Assert.True(
                TestHelpers.AreEqual(
                    soaResource,
                    updateResponse,
                    ignoreEtag: true),
                "Response body of Update does not match expectations");
            getresponse = testContext.DnsClient.RecordSets.Get(
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                "@",
                RecordType.SOA);
            Assert.True(
                TestHelpers.AreEqual(updateResponse, getresponse),
                "Response body of Get does not match expectations");
            // SOA will get deleted with the zone
            testContext.DnsClient.Zones.Delete(
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                ifMatch: null);
        }
    }
}
[Fact]
public void ListRecordsInZoneOneType()
{
    // Single-type listing (TXT only).
    this.ListRecordsInZone(isCrossType: false);
}
[Fact]
public void ListRecordsInZoneAcrossTypes()
{
    // Cross-type listing over the whole zone.
    this.ListRecordsInZone(isCrossType: true);
}
[Fact]
public void ListRecordsInZoneWithSuffixAcrossTypes()
{
    // Cross-type listing filtered by a record-set name suffix.
    this.ListRecordsInZoneWithSuffixCrossType(isCrossType: true);
}
[Fact]
public void ListRecordsInZoneWithSuffix()
{
    // Single-type (TXT) listing filtered by a record-set name suffix.
    this.ListRecordsInZoneWithSuffixCrossType(isCrossType: false);
}
/// <summary>
/// Creates three record sets in a zone and verifies they appear when
/// listing either across all record types (isCrossType) or for a single
/// record type. CallerMemberName keeps the HTTP recording session named
/// after the public [Fact] that invoked this helper.
/// </summary>
private void ListRecordsInZone(
    bool isCrossType,
    [System.Runtime.CompilerServices.CallerMemberName] string methodName
        = "testframework_failed")
{
    using (
        MockContext context = MockContext.Start(
            this.GetType(),
            methodName))
    {
        var testContexts = SetupSingleRecordSetTestContexts(context);
        foreach (var testContext in testContexts)
        {
            var recordSetNames = new[]
            {
                TestUtilities.GenerateName("hydratestrec"),
                TestUtilities.GenerateName("hydratestrec"),
                TestUtilities.GenerateName("hydratestrec")
            };
            // NOTE(review): presumably creates one record set per name,
            // the first two of type TXT (see the count assertion below) --
            // CreateRecordSets is defined outside this view.
            RecordSetScenarioTests.CreateRecordSets(
                testContext,
                recordSetNames);
            if (isCrossType)
            {
                // Exercise both cross-type listing entry points.
                var listresponse1 = testContext.DnsClient.RecordSets
                    .ListByDnsZone(
                        testContext.ResourceGroup.Name,
                        testContext.ZoneName);
                var listresponse2 = testContext.DnsClient.RecordSets
                    .ListAllByDnsZone(
                        testContext.ResourceGroup.Name,
                        testContext.ZoneName);
                foreach (var listresponse in new[] { listresponse1, listresponse2 })
                {
                    // not checking for the record count as this will return standard SOA and auth NS records as well
                    Assert.NotNull(listresponse);
                    Assert.True(
                        listresponse.Any(
                            recordSetReturned =>
                                string.Equals(
                                    recordSetNames[0],
                                    recordSetReturned.Name))
                        &&
                        listresponse.Any(
                            recordSetReturned =>
                                string.Equals(
                                    recordSetNames[1],
                                    recordSetReturned.Name))
                        &&
                        listresponse.Any(
                            recordSetReturned =>
                                string.Equals(
                                    recordSetNames[2],
                                    recordSetReturned.Name)),
                        "The returned records do not meet expectations");
                }
            }
            else
            {
                // Single-type listing: only the TXT record sets should match.
                var listresponse = testContext.DnsClient.RecordSets
                    .ListByType(
                        testContext.ResourceGroup.Name,
                        testContext.ZoneName,
                        RecordType.TXT);
                Assert.NotNull(listresponse);
                Assert.Equal(2, listresponse.Count());
                Assert.True(
                    listresponse.Any(
                        recordSetReturned =>
                            string.Equals(
                                recordSetNames[0],
                                recordSetReturned.Name))
                    &&
                    listresponse.Any(
                        recordSetReturned =>
                            string.Equals(
                                recordSetNames[1],
                                recordSetReturned.Name)),
                    "The returned records do not meet expectations");
            }
            // Clean up the record sets and the zone itself.
            RecordSetScenarioTests.DeleteRecordSetsAndZone(
                testContext,
                recordSetNames);
        }
    }
}
/// <summary>
/// Creates three record sets under a common name suffix ("contoso") and verifies
/// that listing with the recordsetnamesuffix filter returns only those records,
/// either across all types or for the TXT type only.
/// </summary>
/// <param name="isCrossType">
/// If true, lists record sets of every type under the suffix; otherwise TXT only.
/// </param>
/// <param name="methodName">
/// Supplied by the compiler; used to locate the HTTP recording session for this test.
/// </param>
private void ListRecordsInZoneWithSuffixCrossType(
    bool isCrossType,
    [System.Runtime.CompilerServices.CallerMemberName] string methodName
        = "testframework_failed")
{
    using (
        MockContext context = MockContext.Start(
            this.GetType(),
            methodName))
    {
        var testContexts = SetupSingleRecordSetTestContexts(context);
        foreach (var testContext in testContexts)
        {
            string subzoneName = "contoso";
            // Each generated name gets the shared suffix so the suffix filter matches.
            var recordSetNames = new[]
            {
                TestUtilities.GenerateName("hydratestrec"),
                TestUtilities.GenerateName("hydratestrec"),
                TestUtilities.GenerateName("hydratestrec")
            }.Select(x => x + "." + subzoneName).ToArray();
            RecordSetScenarioTests.CreateRecordSets(
                testContext,
                recordSetNames);
            if (isCrossType)
            {
                var listresponse = testContext.DnsClient.RecordSets
                    .ListByDnsZone(
                        testContext.ResourceGroup.Name,
                        testContext.ZoneName,
                        recordsetnamesuffix: subzoneName);
                Assert.NotNull(listresponse);
                // SOA/NS records live at the zone apex, so with the suffix filter
                // the count should match exactly the created record sets.
                Assert.Equal(listresponse.Count(), recordSetNames.Length);
                Assert.True(
                    listresponse.Any(
                        recordSetReturned =>
                            string.Equals(
                                recordSetNames[0],
                                recordSetReturned.Name))
                    &&
                    listresponse.Any(
                        recordSetReturned =>
                            string.Equals(
                                recordSetNames[1],
                                recordSetReturned.Name))
                    &&
                    listresponse.Any(
                        recordSetReturned =>
                            string.Equals(
                                recordSetNames[2],
                                recordSetReturned.Name)),
                    "The returned records do not meet expectations");
            }
            else
            {
                var listresponse = testContext.DnsClient.RecordSets
                    .ListByType(
                        testContext.ResourceGroup.Name,
                        testContext.ZoneName,
                        RecordType.TXT,
                        recordsetnamesuffix: subzoneName);
                Assert.NotNull(listresponse);
                // Only the two TXT record sets carry the suffix.
                Assert.Equal(2, listresponse.Count());
                Assert.True(
                    listresponse.Any(
                        recordSetReturned =>
                            string.Equals(
                                recordSetNames[0],
                                recordSetReturned.Name))
                    &&
                    listresponse.Any(
                        recordSetReturned =>
                            string.Equals(
                                recordSetNames[1],
                                recordSetReturned.Name)),
                    "The returned records do not meet expectations");
            }
            RecordSetScenarioTests.DeleteRecordSetsAndZone(
                testContext,
                recordSetNames);
        }
    }
}
/// <summary>
/// Verifies that listing record sets of a single type honors the "top" paging parameter.
/// </summary>
[Fact]
public void ListRecordsInZoneOneTypeWithTop()
{
    ListRecordsInZoneWithTop(isCrossType: false);
}
/// <summary>
/// Verifies that listing record sets across all types honors the "top" paging parameter.
/// </summary>
[Fact]
public void ListRecordsInZoneAcrossTypesWithTop()
{
    ListRecordsInZoneWithTop(isCrossType: true);
}
/// <summary>
/// Creates three TXT record sets and verifies that listing with top=3 returns a
/// page whose TXT entries are all among the created record sets. With cross-type
/// listing the page also contains the zone's SOA and NS records.
/// </summary>
/// <param name="isCrossType">
/// If true, lists record sets of every type; otherwise lists TXT records only.
/// </param>
/// <param name="methodName">
/// Supplied by the compiler; used to locate the HTTP recording session for this test.
/// </param>
private void ListRecordsInZoneWithTop(
    bool isCrossType,
    [System.Runtime.CompilerServices.CallerMemberName] string methodName
        = "testframework_failed")
{
    using (
        MockContext context = MockContext.Start(
            this.GetType(),
            methodName))
    {
        var testContexts = SetupSingleRecordSetTestContexts(context);
        foreach (var testContext in testContexts)
        {
            var recordSetNames = new[]
            {
                TestUtilities.GenerateName("hydratestrec") + ".com",
                TestUtilities.GenerateName("hydratestrec") + ".com",
                TestUtilities.GenerateName("hydratestrec") + ".com"
            };
            RecordSetScenarioTests.CreateRecordSets(
                testContext,
                recordSetNames);
            IPage<RecordSet> listResponse;
            if (isCrossType)
            {
                // Using top = 3, it will pick up SOA, NS and the first TXT
                listResponse = testContext.DnsClient.RecordSets
                    .ListByDnsZone(
                        testContext.ResourceGroup.Name,
                        testContext.ZoneName,
                        3);
                // verify if TXT is in the list
                Assert.True(
                    listResponse.Where(rs => rs.Type == "TXT")
                        .All(
                            listedRecordSet =>
                                recordSetNames.Any(
                                    createdName =>
                                        createdName == listedRecordSet.Name)),
                    "The returned records do not meet expectations");
            }
            else
            {
                // Using top = 3, it will pick up SOA, NS and the first TXT, process it and return just the TXT
                listResponse = testContext.DnsClient.RecordSets.ListByType(
                    testContext.ResourceGroup.Name,
                    testContext.ZoneName,
                    RecordType.TXT,
                    3);
                Assert.True(
                    listResponse.All(
                        listedRecordSet =>
                            recordSetNames.Any(
                                createdName =>
                                    createdName == listedRecordSet.Name)),
                    "The returned records do not meet expectations");
            }
            RecordSetScenarioTests.DeleteRecordSetsAndZone(
                testContext,
                recordSetNames);
        }
    }
}
/// <summary>
/// Verifies that update and delete operations with a non-matching ETag (ifMatch)
/// fail with HTTP 412 PreconditionFailed, then cleans up the record set and zone.
/// </summary>
[Fact]
public void UpdateRecordSetPreconditionFailed()
{
    using (
        MockContext context = MockContext.Start(this.GetType())
        )
    {
        var testContexts = SetupSingleRecordSetTestContexts(context);
        foreach (var testContext in testContexts)
        {
            var createParameters = testContext.TestRecordSkeleton;
            createParameters.CnameRecord = new CnameRecord
            {
                Cname = "www.contoso.example.com"
            };
            var createResponse = testContext.DnsClient.RecordSets
                .CreateOrUpdate(
                    testContext.ResourceGroup.Name,
                    testContext.ZoneName,
                    testContext.RecordSetName,
                    RecordType.CNAME,
                    ifMatch: null,
                    ifNoneMatch: null,
                    parameters: createParameters);
            var updateParameters = createResponse;
            // expect Precondition Failed 412
            TestHelpers.AssertThrows<CloudException>(
                () => testContext.DnsClient.RecordSets.CreateOrUpdate(
                    testContext.ResourceGroup.Name,
                    testContext.ZoneName,
                    testContext.RecordSetName,
                    RecordType.CNAME,
                    ifMatch: "somegibberish",
                    ifNoneMatch: null,
                    parameters: updateParameters),
                exceptionAsserts: ex => ex.Body.Code == "PreconditionFailed");
            // expect Precondition Failed 412
            TestHelpers.AssertThrows<CloudException>(
                () => testContext.DnsClient.RecordSets.Delete(
                    testContext.ResourceGroup.Name,
                    testContext.ZoneName,
                    testContext.RecordSetName,
                    RecordType.CNAME,
                    ifMatch: "somegibberish"),
                exceptionAsserts: ex => ex.Body.Code == "PreconditionFailed");
            // Cleanup with no ETag precondition must succeed.
            testContext.DnsClient.RecordSets.Delete(
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                testContext.RecordSetName,
                RecordType.CNAME,
                ifMatch: null);
            testContext.DnsClient.Zones.Delete(
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                ifMatch: null);
        }
    }
}
/// <summary>
/// Shared scenario: creates a record set of the given type (records populated by
/// <paramref name="setRecordsAction"/>), verifies the Create response echoes the
/// request and carries an ETag, verifies Get returns the same body, then deletes
/// the record set and zone.
/// </summary>
/// <param name="recordType">DNS record type under test.</param>
/// <param name="setRecordsAction">Populates the record data on the skeleton record set.</param>
/// <param name="isPrivateZoneEnabled">
/// If true, runs against all configured test contexts; otherwise only the public zone.
/// </param>
/// <param name="methodName">
/// Supplied by the compiler; used to locate the HTTP recording session for this test.
/// </param>
private void RecordSetCreateGet(
    RecordType recordType,
    Action<RecordSet> setRecordsAction,
    bool isPrivateZoneEnabled,
    [System.Runtime.CompilerServices.CallerMemberName] string methodName
        = "testframework_failed")
{
    using (
        MockContext context = MockContext.Start(
            this.GetType(),
            methodName))
    {
        var testContexts = isPrivateZoneEnabled
            ? SetupSingleRecordSetTestContexts(context)
            : new [] { SetupSingleRecordSetTestForPublicZone(context) };
        foreach (var testContext in testContexts)
        {
            var createParameters = testContext.TestRecordSkeleton;
            setRecordsAction(createParameters);
            var createResponse = testContext.DnsClient.RecordSets
                .CreateOrUpdate(
                    testContext.ResourceGroup.Name,
                    testContext.ZoneName,
                    testContext.RecordSetName,
                    recordType,
                    ifMatch: null,
                    ifNoneMatch: null,
                    parameters: createParameters);
            // ETag differs by design; compare everything else.
            Assert.True(
                TestHelpers.AreEqual(
                    createParameters,
                    createResponse,
                    ignoreEtag: true),
                "Response body of Create does not match expectations");
            Assert.False(string.IsNullOrWhiteSpace(createResponse.Etag));
            var getresponse = testContext.DnsClient.RecordSets.Get(
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                testContext.RecordSetName,
                recordType);
            // Get must match Create exactly, ETag included.
            Assert.True(
                TestHelpers.AreEqual(
                    createResponse,
                    getresponse,
                    ignoreEtag: false),
                "Response body of Get does not match expectations");
            // BUG 2364951: should work without specifying ETag
            testContext.DnsClient.RecordSets.Delete(
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                testContext.RecordSetName,
                recordType,
                ifMatch: null);
            testContext.DnsClient.Zones.Delete(
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                ifMatch: null);
        }
    }
}
/// <summary>
/// Shared scenario for alias record sets: creates a referenced record set, creates
/// a second record set whose TargetResource aliases the first, verifies both via
/// Get and the DnsResourceReference lookup, then cleans up.
/// </summary>
/// <param name="recordType">DNS record type under test.</param>
/// <param name="setRecordsAction">Populates the record data on the skeleton record set.</param>
/// <param name="methodName">
/// Supplied by the compiler; used to locate the HTTP recording session for this test.
/// </param>
private void RecordSetCreateGetAlias(
    RecordType recordType,
    Action<RecordSet> setRecordsAction,
    [System.Runtime.CompilerServices.CallerMemberName] string methodName
        = "testframework_failed")
{
    using (
        MockContext context = MockContext.Start(
            this.GetType(),
            methodName))
    {
        var testContext = SetupSingleRecordSetTestForPublicZone(context);
        var createParameters = testContext.TestRecordSkeleton;
        setRecordsAction(createParameters);
        // Create referenced test record
        var createResponse = testContext.DnsClient.RecordSets
            .CreateOrUpdate(
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                testContext.RecordSetName,
                recordType,
                ifMatch: null,
                ifNoneMatch: null,
                parameters: createParameters);
        Assert.True(
            TestHelpers.AreEqual(
                createParameters,
                createResponse,
                ignoreEtag: true),
            "Response body of Create does not match expectations");
        Assert.False(string.IsNullOrWhiteSpace(createResponse.Etag));
        // Create the alias record set pointing at the record set created above.
        var aliasParams = new RecordSet();
        aliasParams.TTL = createParameters.TTL;
        aliasParams.TargetResource = new SubResource(createResponse.Id);
        var aliasResponse = testContext.DnsClient.RecordSets
            .CreateOrUpdate(
                testContext.ResourceGroup.Name,
                testContext.ZoneName,
                testContext.RecordSetName+"alias",
                recordType,
                ifMatch: null,
                ifNoneMatch: null,
                parameters: aliasParams );
        Assert.True(
            TestHelpers.AreEqual(
                aliasParams,
                aliasResponse,
                ignoreEtag: true),
            "Response body of Create does not match expectations");
        var getresponse = testContext.DnsClient.RecordSets.Get(
            testContext.ResourceGroup.Name,
            testContext.ZoneName,
            testContext.RecordSetName,
            recordType);
        Assert.True(
            TestHelpers.AreEqual(
                createResponse,
                getresponse,
                ignoreEtag: false),
            "Response body of Get does not match expectations");
        // The target resource should be discoverable through the reference API.
        var reference = testContext.DnsClient.DnsResourceReference.GetByTargetResources(new[] { aliasParams.TargetResource });
        Assert.Equal(1, reference.DnsResourceReferences.Count);
        // BUG 2364951: should work without specifying ETag
        testContext.DnsClient.RecordSets.Delete(
            testContext.ResourceGroup.Name,
            testContext.ZoneName,
            testContext.RecordSetName,
            recordType,
            ifMatch: null);
        testContext.DnsClient.Zones.Delete(
            testContext.ResourceGroup.Name,
            testContext.ZoneName,
            ifMatch: null);
    }
}
#region Helper methods
/// <summary>
/// Creates three record sets in the test zone: two TXT record sets (value "text1")
/// named after the first two entries of <paramref name="recordSetNames"/>, and one
/// AAAA record set (address "123::45") named after the third.
/// </summary>
/// <param name="testContext">Test context providing the DNS client and target zone.</param>
/// <param name="recordSetNames">Exactly three record set names to create.</param>
internal static void CreateRecordSets(
    SingleRecordSetTestContext testContext,
    string[] recordSetNames)
{
    var txtRecordSet1 = testContext.GetNewTestRecordSkeleton(recordSetNames[0]);
    txtRecordSet1.TxtRecords = new List<TxtRecord>
    {
        new TxtRecord {Value = new[] {"text1"}.ToList()}
    };
    var txtRecordSet2 = testContext.GetNewTestRecordSkeleton(recordSetNames[1]);
    txtRecordSet2.TxtRecords = new List<TxtRecord>
    {
        new TxtRecord {Value = new[] {"text1"}.ToList()}
    };
    var aaaaRecordSet = testContext.GetNewTestRecordSkeleton(recordSetNames[2]);
    aaaaRecordSet.AaaaRecords = new List<AaaaRecord>
    {
        new AaaaRecord {Ipv6Address = "123::45"}
    };
    // Issue the three creates in order: TXT, TXT, AAAA.
    var recordSets = new[] { txtRecordSet1, txtRecordSet2, aaaaRecordSet };
    var recordTypes = new[] { RecordType.TXT, RecordType.TXT, RecordType.AAAA };
    for (int i = 0; i < recordSets.Length; i++)
    {
        testContext.DnsClient.RecordSets.CreateOrUpdate(
            testContext.ResourceGroup.Name,
            testContext.ZoneName,
            recordSets[i].Name,
            recordTypes[i],
            ifMatch: null,
            ifNoneMatch: null,
            parameters: recordSets[i]);
    }
}
/// <summary>
/// Deletes the three record sets created by <see cref="CreateRecordSets"/>
/// (two TXT, one AAAA, in that order) and then deletes the zone itself.
/// </summary>
/// <param name="testContext">Test context providing the DNS client and target zone.</param>
/// <param name="recordSetNames">The three record set names to delete.</param>
internal static void DeleteRecordSetsAndZone(
    SingleRecordSetTestContext testContext,
    string[] recordSetNames)
{
    // Record types parallel the order used when the record sets were created.
    var recordTypes = new[] { RecordType.TXT, RecordType.TXT, RecordType.AAAA };
    for (int i = 0; i < recordTypes.Length; i++)
    {
        testContext.DnsClient.RecordSets.Delete(
            testContext.ResourceGroup.Name,
            testContext.ZoneName,
            recordSetNames[i],
            recordTypes[i],
            ifMatch: null);
    }
    testContext.DnsClient.Zones.Delete(
        testContext.ResourceGroup.Name,
        testContext.ZoneName,
        ifMatch: null);
}
#endregion
}
}
| |
//
// IFDRenderer.cs: Outputs an IFD structure into TIFF IFD bytes.
//
// Author:
// Ruben Vermeersch ([email protected])
// Mike Gemuende ([email protected])
//
// Copyright (C) 2009 Ruben Vermeersch
// Copyright (C) 2009 Mike Gemuende
//
// This library is free software; you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License version
// 2.1 as published by the Free Software Foundation.
//
// This library is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
// USA
//
using System;
using TagLib.IFD.Entries;
namespace TagLib.IFD
{
/// <summary>
/// This class contains all the IFD rendering code.
/// </summary>
/// <summary>
///    This class contains all the IFD rendering code.
/// </summary>
public class IFDRenderer {

	#region Private Fields

	/// <summary>
	///    The IFD structure that will be rendered.
	/// </summary>
	private readonly IFDStructure structure;

	/// <summary>
	///    If IFD should be encoded in BigEndian or not.
	/// </summary>
	private readonly bool is_bigendian;

	/// <summary>
	///    A <see cref="System.UInt32"/> value with the offset of the
	///    current IFD. All offsets inside the IFD must be adjusted
	///    according to this given offset.
	/// </summary>
	private readonly uint ifd_offset;

	#endregion

	#region Constructors

	/// <summary>
	///    Constructor. Will render the given IFD structure.
	/// </summary>
	/// <param name="is_bigendian">
	///    If IFD should be encoded in BigEndian or not.
	/// </param>
	/// <param name="structure">
	///    The IFD structure that will be rendered.
	/// </param>
	/// <param name="ifd_offset">
	///    A <see cref="System.UInt32"/> value with the offset of the
	///    current IFD. All offsets inside the IFD must be adjusted
	///    according to this given offset.
	/// </param>
	public IFDRenderer (bool is_bigendian, IFDStructure structure, uint ifd_offset)
	{
		this.is_bigendian = is_bigendian;
		this.structure = structure;
		this.ifd_offset = ifd_offset;
	}

	#endregion

	#region Public Methods

	/// <summary>
	///    Renders the current instance to a <see cref="ByteVector"/>.
	/// </summary>
	/// <returns>
	///    A <see cref="ByteVector"/> containing the rendered IFD.
	/// </returns>
	public ByteVector Render ()
	{
		ByteVector ifd_data = new ByteVector ();

		uint current_offset = ifd_offset;
		var directories = structure.directories;

		// Render each directory in sequence; every directory except the
		// last links to the one that follows it (see RenderIFD).
		for (int index = 0; index < directories.Count; index++) {
			ByteVector data = RenderIFD (directories [index], current_offset, index == directories.Count - 1);
			current_offset += (uint) data.Count;
			ifd_data.Add (data);
		}

		return ifd_data;
	}

	#endregion

	#region Private Methods

	/// <summary>
	///    Renders the IFD to an ByteVector where the offset of the IFD
	///    itself is <paramref name="ifd_offset"/> and all offsets
	///    contained in the IFD are adjusted accroding it.
	/// </summary>
	/// <param name="directory">
	///    A <see cref="IFDDirectory"/> with the directory to render.
	/// </param>
	/// <param name="ifd_offset">
	///    A <see cref="System.UInt32"/> with the offset of the IFD
	/// </param>
	/// <param name="last">
	///    A <see cref="System.Boolean"/> which is true, if the IFD is
	///    the last one, i.e. the offset to the next IFD, which is
	///    stored inside the IFD, is 0. If the value is false, the
	///    offset to the next IFD is set that it starts directly after
	///    the current one.
	/// </param>
	/// <returns>
	///    A <see cref="ByteVector"/> with the rendered IFD.
	/// </returns>
	/// <exception cref="InvalidOperationException">
	///    The directory contains more entries than fit in the 16-bit
	///    entry count, or the rendered entry data has an unexpected size.
	/// </exception>
	private ByteVector RenderIFD (IFDDirectory directory, uint ifd_offset, bool last)
	{
		// The entry count is stored as a 16-bit value, so the directory
		// must not exceed UInt16.MaxValue entries.
		if (directory.Count > (int)UInt16.MaxValue)
			throw new InvalidOperationException (String.Format ("Directory has too many entries: {0}", directory.Count));

		ushort entry_count = (ushort) directory.Count;

		// ifd_offset + size of entry_count + entries + next ifd offset
		uint data_offset = ifd_offset + 2 + 12 * (uint) entry_count + 4;

		// store the entries itself
		ByteVector entry_data = new ByteVector ();

		// store the data referenced by the entries
		ByteVector offset_data = new ByteVector ();

		entry_data.Add (ByteVector.FromUShort (entry_count, is_bigendian));

		foreach (IFDEntry entry in directory.Values)
			RenderEntryData (entry, entry_data, offset_data, data_offset);

		if (last)
			entry_data.Add ("\0\0\0\0");
		else
			entry_data.Add (ByteVector.FromUInt ((uint) (data_offset + offset_data.Count), is_bigendian));

		// Sanity check: the fixed-size part of the IFD must end exactly
		// where the referenced data was assumed to start.
		if (data_offset - ifd_offset != entry_data.Count)
			throw new InvalidOperationException (String.Format ("Expected IFD data size was {0} but is {1}", data_offset - ifd_offset, entry_data.Count));

		entry_data.Add (offset_data);

		return entry_data;
	}

	#endregion

	#region Protected Methods

	/// <summary>
	///    Adds the data of a single entry to <paramref name="entry_data"/>.
	/// </summary>
	/// <param name="entry_data">
	///    A <see cref="ByteVector"/> to add the entry to.
	/// </param>
	/// <param name="tag">
	///    A <see cref="System.UInt16"/> with the tag of the entry.
	/// </param>
	/// <param name="type">
	///    A <see cref="System.UInt16"/> with the type of the entry.
	/// </param>
	/// <param name="count">
	///    A <see cref="System.UInt32"/> with the data count of the entry,
	/// </param>
	/// <param name="offset">
	///    A <see cref="System.UInt32"/> with the offset field of the entry.
	/// </param>
	protected void RenderEntry (ByteVector entry_data, ushort tag, ushort type, uint count, uint offset)
	{
		// 12-byte entry layout: tag (2), type (2), count (4), offset (4).
		entry_data.Add (ByteVector.FromUShort (tag, is_bigendian));
		entry_data.Add (ByteVector.FromUShort (type, is_bigendian));
		entry_data.Add (ByteVector.FromUInt (count, is_bigendian));
		entry_data.Add (ByteVector.FromUInt (offset, is_bigendian));
	}

	/// <summary>
	///    Renders a complete entry together with the data. The entry itself
	///    is stored in <paramref name="entry_data"/> and the data of the
	///    entry is stored in <paramref name="offset_data"/> if it cannot be
	///    stored in the offset. This method is called for every <see
	///    cref="IFDEntry"/> of this IFD and can be overwritten in subclasses
	///    to provide special behavior.
	/// </summary>
	/// <param name="entry">
	///    A <see cref="IFDEntry"/> with the entry to render.
	/// </param>
	/// <param name="entry_data">
	///    A <see cref="ByteVector"/> to add the entry to.
	/// </param>
	/// <param name="offset_data">
	///    A <see cref="ByteVector"/> to add the entry data to if it cannot be
	///    stored in the offset field.
	/// </param>
	/// <param name="data_offset">
	///    A <see cref="System.UInt32"/> with the offset, were the data of the
	///    entries starts. It is needed to adjust the offsets of the entries
	///    itself.
	/// </param>
	protected virtual void RenderEntryData (IFDEntry entry, ByteVector entry_data, ByteVector offset_data, uint data_offset)
	{
		ushort tag = (ushort) entry.Tag;
		uint offset = (uint) (data_offset + offset_data.Count);

		ushort type;
		uint count;
		ByteVector data = entry.Render (is_bigendian, offset, out type, out count);

		// store data in offset, if it is smaller than 4 byte
		if (data.Count <= 4) {

			while (data.Count < 4)
				data.Add ("\0");

			offset = data.ToUInt (is_bigendian);
			data = null;
		}

		// preserve word boundary of offsets
		if (data != null && data.Count % 2 != 0)
			data.Add ("\0");

		RenderEntry (entry_data, tag, type, count, offset);
		offset_data.Add (data);
	}

	/// <summary>
	///    Constructs a new IFD Renderer used to render a <see cref="SubIFDEntry"/>.
	/// </summary>
	/// <param name="is_bigendian">
	///    If IFD should be encoded in BigEndian or not.
	/// </param>
	/// <param name="structure">
	///    The IFD structure that will be rendered.
	/// </param>
	/// <param name="ifd_offset">
	///    A <see cref="System.UInt32"/> value with the offset of the
	///    current IFD. All offsets inside the IFD must be adjusted
	///    according to this given offset.
	/// </param>
	protected virtual IFDRenderer CreateSubRenderer (bool is_bigendian, IFDStructure structure, uint ifd_offset)
	{
		return new IFDRenderer (is_bigendian, structure, ifd_offset);
	}

	#endregion

}
}
| |
#if !DXCORE3
using DevExpress.Mvvm.UI.Native;
using NUnit.Framework;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading;
namespace DevExpress.Mvvm.UI.Tests {
/// <summary>
/// Multi-process integration tests for JumpActionsManager. Each test registers a
/// jump-list action in one or more simulated processes (TestProcess), activates it
/// via the launcher application, and uses named breakpoints to synchronize with and
/// verify the action's execution, including crash/kill and restart scenarios.
/// </summary>
[TestFixture, Platform("NET")]
public class JumpActionsManagerTests : AsyncTestFixture {
    string applicationID;
    // Registers the "launcher.exe" application, which forwards the jump-action
    // command line to a freshly started instance of the target application.
    [SetUp]
    public void SetUp() {
        applicationID = Guid.NewGuid().ToString();
        TestProcess.AddApplication("launcher.exe", p => {
            using(JumpActionsManagerClient client = new JumpActionsManagerClient(MillisecondsTimeout, p)) {
                GC.SuppressFinalize(client);
                client.Run(p.CommandLineArgs.Skip(1).ToArray(), s => new TestProcess(applicationID, this, s, p.ProcessID + " launcher").Start());
            }
        });
    }
    // Clears registered applications and shared memory-mapped state between tests.
    [TearDown]
    public void TearDown() {
        TestProcess.ClearApplications();
        JumpActionsManagerBase.ClearMemoryMappedFiles();
        applicationID = null;
    }
    // Happy path: register an action in a running instance, activate it from the
    // launcher, and verify the action callback runs in that same instance.
    [Test]
    public void RegisterJumpItem_ActivateIt_CheckActionExecuted() {
        try {
            TestJumpAction jumpAction = new TestJumpAction();
            TestProcess.AddApplication("test.exe", p => {
                using(JumpActionsManager jumpActionsManager = new JumpActionsManager(p, MillisecondsTimeout)) {
                    GC.SuppressFinalize(jumpActionsManager);
                    jumpAction.CommandId = "Run Command!";
                    jumpAction.Action = () => {
                        p.GetBreakpoint("action").Reach();
                    };
                    jumpActionsManager.BeginUpdate();
                    try {
                        jumpActionsManager.RegisterAction(jumpAction, "/DO=", () => "launcher.exe");
                    } finally {
                        jumpActionsManager.EndUpdate();
                    }
                    p.GetBreakpoint("registered").Reach();
                    p.DoEvents();
                }
            });
            TestProcess process = new TestProcess(applicationID, this, new ProcessStartInfo("test.exe"), "1");
            process.Start();
            process.GetBreakpoint("registered").WaitAndContinue();
            new TestProcess(applicationID, this, jumpAction.StartInfo, "2").Start();
            process.GetBreakpoint("action").WaitAndContinue();
        } finally {
            TestProcess.WaitAllProcessesExit();
        }
    }
    // Register an action, close the application gracefully, then activate the item:
    // a new instance should be started and execute the action.
    [Test]
    public void RegisterJumpItem_CloseProgram_ActivateItem_CheckActionExecuted() {
        try {
            TestJumpAction jumpAction = new TestJumpAction();
            TestProcess.AddApplication("test.exe", p => {
                using(JumpActionsManager jumpActionsManager = new JumpActionsManager(p, MillisecondsTimeout)) {
                    GC.SuppressFinalize(jumpActionsManager);
                    jumpAction.CommandId = "Run Command!";
                    jumpAction.Action = () => {
                        p.GetBreakpoint("action").Reach();
                    };
                    jumpActionsManager.BeginUpdate();
                    try {
                        jumpActionsManager.RegisterAction(jumpAction, "/DO=", () => "launcher.exe");
                    } finally {
                        jumpActionsManager.EndUpdate();
                    }
                    p.GetBreakpoint("registered").Reach();
                    p.DoEvents();
                }
            });
            TestProcess process = new TestProcess(applicationID, this, new ProcessStartInfo("test.exe"), "13");
            process.Start();
            process.GetBreakpoint("registered").WaitAndContinue();
            process.SendCloseMessage();
            process.WaitExit();
            new TestProcess(applicationID, this, jumpAction.StartInfo, "14").Start();
            process = TestProcess.WaitProcessStart("test.exe", this);
            process.GetBreakpoint("registered").WaitAndContinue();
            process.GetBreakpoint("action").WaitAndContinue();
        } finally {
            TestProcess.WaitAllProcessesExit();
        }
    }
    // A second instance is killed (EnvironmentExit simulates a process kill) right
    // after registering; closing the surviving first instance must still work.
    [Test]
    public void RegisterJumpItem_StartAnotherInstance_KillAnotherInstance_CloseProgram() {
        try {
            int actionIndex = 0;
            TestJumpAction[] jumpActions = new TestJumpAction[] { new TestJumpAction(), new TestJumpAction() };
            TestProcess.AddApplication("test.exe", p => {
                JumpActionsManager jumpActionsManager = new JumpActionsManager(p, MillisecondsTimeout);
                GC.SuppressFinalize(jumpActionsManager);
                try {
                    TestJumpAction jumpAction = jumpActions[actionIndex++];
                    jumpAction.CommandId = "Run Command!";
                    jumpAction.Action = () => {
                        p.GetBreakpoint("action").Reach();
                    };
                    jumpActionsManager.BeginUpdate();
                    try {
                        jumpActionsManager.RegisterAction(jumpAction, "/DO=", () => "launcher.exe");
                        // Any extra command-line argument marks the instance to be "killed".
                        if(p.CommandLineArgs.Skip(1).Any())
                            p.EnvironmentExit();
                    } finally {
                        if(!p.DoEnvironmentExit)
                            jumpActionsManager.EndUpdate();
                    }
                    p.GetBreakpoint("registered").Reach();
                    p.DoEvents();
                } finally {
                    if(!p.DoEnvironmentExit)
                        jumpActionsManager.Dispose();
                }
            });
            TestProcess process = new TestProcess(applicationID, this, new ProcessStartInfo("test.exe"), "11");
            process.Start();
            process.GetBreakpoint("registered").WaitAndContinue();
            TestProcess processToKill = new TestProcess(applicationID, this, new ProcessStartInfo("test.exe", "kill"), "12");
            processToKill.Start();
            processToKill.WaitExit();
            process.SendCloseMessage();
            process.WaitExit();
        } finally {
            TestProcess.WaitAllProcessesExit();
        }
    }
    // The registering process is killed; activating the item must start a fresh
    // instance that re-registers and executes the action.
    [Test]
    public void RegisterJumpItem_KillProcess_ActivateItem_CheckActionExecuted() {
        try {
            TestJumpAction jumpAction = new TestJumpAction();
            TestProcess.AddApplication("test.exe", p => {
                JumpActionsManager jumpActionsManager = new JumpActionsManager(p, MillisecondsTimeout);
                GC.SuppressFinalize(jumpActionsManager);
                try {
                    jumpAction.CommandId = "Run Command!";
                    jumpAction.Action = () => {
                        p.GetBreakpoint("action").Reach();
                    };
                    jumpActionsManager.BeginUpdate();
                    try {
                        jumpActionsManager.RegisterAction(jumpAction, "/DO=", () => "launcher.exe");
                    } finally {
                        jumpActionsManager.EndUpdate();
                    }
                    p.GetBreakpoint("registered").Reach();
                    // No extra command-line argument: this instance simulates a kill.
                    if(!p.CommandLineArgs.Skip(1).Any())
                        p.EnvironmentExit();
                    p.DoEvents();
                } finally {
                    if(!p.DoEnvironmentExit)
                        jumpActionsManager.Dispose();
                }
            });
            TestProcess process = new TestProcess(applicationID, this, new ProcessStartInfo("test.exe"), "9");
            process.Start();
            process.GetBreakpoint("registered").WaitAndContinue();
            process.WaitExit();
            new TestProcess(applicationID, this, jumpAction.StartInfo, "10").Start();
            process = TestProcess.WaitProcessStart("test.exe", this);
            process.GetBreakpoint("registered").WaitAndContinue();
            process.GetBreakpoint("action").WaitAndContinue();
        } finally {
            TestProcess.WaitAllProcessesExit();
        }
    }
    // Two instances register; the first is killed. Activating the second action
    // must route to the surviving second instance.
    [Test]
    public void StartTwoInstances_RegisterJumpItem_KillFirstInstance_ActivateItem_CheckActionExecuted() {
        try {
            int actionIndex = 0;
            TestJumpAction[] jumpActions = new TestJumpAction[] { new TestJumpAction(), new TestJumpAction() };
            TestProcess.AddApplication("test.exe", p => {
                JumpActionsManager jumpActionsManager = new JumpActionsManager(p, MillisecondsTimeout);
                GC.SuppressFinalize(jumpActionsManager);
                try {
                    TestJumpAction jumpAction = jumpActions[actionIndex++];
                    jumpAction.CommandId = "Run Command!";
                    jumpAction.Action = () => {
                        p.GetBreakpoint("action").Reach();
                    };
                    jumpActionsManager.BeginUpdate();
                    try {
                        jumpActionsManager.RegisterAction(jumpAction, "/DO=", () => "launcher.exe");
                    } finally {
                        jumpActionsManager.EndUpdate();
                    }
                    p.GetBreakpoint("registered").Reach();
                    if(!p.CommandLineArgs.Skip(1).Any())
                        p.EnvironmentExit();
                    p.DoEvents();
                } finally {
                    if(!p.DoEnvironmentExit)
                        jumpActionsManager.Dispose();
                }
            });
            TestProcess process1 = new TestProcess(applicationID, this, new ProcessStartInfo("test.exe"), "3");
            TestProcess process2 = new TestProcess(applicationID, this, new ProcessStartInfo("test.exe", "do_not_kill"), "4");
            process1.Start();
            process2.Start();
            // Wait for both registrations before releasing either process, so the
            // shared registry reflects both instances.
            process1.GetBreakpoint("registered").Wait();
            process2.GetBreakpoint("registered").Wait();
            process1.GetBreakpoint("registered").Continue();
            process2.GetBreakpoint("registered").Continue();
            process1.WaitExit();
            new TestProcess(applicationID, this, jumpActions[1].StartInfo, "8").Start();
            process2.GetBreakpoint("action").WaitAndContinue();
            process2.SendCloseMessage();
            process2.WaitExit();
        } finally {
            TestProcess.WaitAllProcessesExit();
        }
    }
    // Mirror of the previous test: the second instance is killed, and the first
    // surviving instance must execute its action.
    [Test]
    public void StartTwoInstances_RegisterJumpItem_KillSecondInstance_ActivateItem_CheckActionExecuted() {
        try {
            int actionIndex = 0;
            TestJumpAction[] jumpActions = new TestJumpAction[] { new TestJumpAction(), new TestJumpAction() };
            TestProcess.AddApplication("test.exe", p => {
                JumpActionsManager jumpActionsManager = new JumpActionsManager(p, MillisecondsTimeout);
                GC.SuppressFinalize(jumpActionsManager);
                try {
                    TestJumpAction jumpAction = jumpActions[actionIndex++];
                    jumpAction.CommandId = "Run Command!";
                    jumpAction.Action = () => {
                        p.GetBreakpoint("action").Reach();
                    };
                    jumpActionsManager.BeginUpdate();
                    try {
                        jumpActionsManager.RegisterAction(jumpAction, "/DO=", () => "launcher.exe");
                    } finally {
                        jumpActionsManager.EndUpdate();
                    }
                    p.GetBreakpoint("registered").Reach();
                    if(!p.CommandLineArgs.Skip(1).Any())
                        p.EnvironmentExit();
                    p.DoEvents();
                } finally {
                    if(!p.DoEnvironmentExit)
                        jumpActionsManager.Dispose();
                }
            });
            TestProcess process1 = new TestProcess(applicationID, this, new ProcessStartInfo("test.exe", "do_not_kill"), "5");
            TestProcess process2 = new TestProcess(applicationID, this, new ProcessStartInfo("test.exe"), "6");
            process1.Start();
            process2.Start();
            process1.GetBreakpoint("registered").Wait();
            process2.GetBreakpoint("registered").Wait();
            process1.GetBreakpoint("registered").Continue();
            process2.GetBreakpoint("registered").Continue();
            process2.WaitExit();
            new TestProcess(applicationID, this, jumpActions[0].StartInfo, "7").Start();
            process1.GetBreakpoint("action").WaitAndContinue();
            process1.SendCloseMessage();
            process1.WaitExit();
        } finally {
            TestProcess.WaitAllProcessesExit();
        }
    }
}
/// <summary>
/// Test double for <see cref="IJumpAction"/>: captures the start info handed to it
/// and runs an arbitrary delegate when the action is executed.
/// </summary>
public class TestJumpAction : IJumpAction {
    public string CommandId { get; set; }
    public string ApplicationPath { get; set; }
    public string Arguments { get; set; }
    public string WorkingDirectory { get; set; }
    public ProcessStartInfo StartInfo { get; private set; }
    // Delegate invoked by Execute(); may be left null for a no-op action.
    public Action Action { get; set; }
    public void SetStartInfo(string applicationPath, string arguments) {
        StartInfo = new ProcessStartInfo(applicationPath, arguments);
    }
    public void Execute() {
        Action action = Action;
        if(action != null)
            action();
    }
}
/// <summary>
/// Thrown when a test breakpoint times out before the observed process reaches it;
/// the message carries the breakpoint name and the inner exception the timeout.
/// </summary>
public class TestBreakpointNotReachedException : Exception {
    public TestBreakpointNotReachedException(string message, Exception innerException) : base(message, innerException) { }
}
/// <summary>
/// A named rendezvous point between a test and a simulated process. The process
/// calls Reach() and blocks; the test calls Wait() to observe the breakpoint and
/// Continue() to release the process.
/// </summary>
public class TestBreakpoint : AsyncTestObjectBase {
    readonly ManualResetEvent e1 = new ManualResetEvent(false);  // signaled by Reach()
    readonly ManualResetEvent e2 = new ManualResetEvent(false);  // signaled by Continue()
    readonly TestProcess process;
    readonly string name;
    public TestBreakpoint(string name, AsyncTestFixture fixture, TestProcess process)
        : base(fixture) {
        this.name = name;
        this.process = process;
    }
    // Called from the simulated process: announce arrival, then block until released.
    public void Reach() {
        e1.Set();
        Fixture.WaitOne(e2);
    }
    // Called from the test: block until the process reaches this breakpoint.
    // A timeout is wrapped into TestBreakpointNotReachedException; in every case
    // the process is checked for failures, and on any error a close message is
    // sent so the process does not hang forever.
    public void Wait() {
        try {
            try {
                Fixture.WaitOne(e1);
            } catch(TimeoutException e) {
                throw new TestBreakpointNotReachedException(name, e);
            } finally {
                try {
                    process.Check();
                } catch(Exception f) {
                    throw new Exception(string.Format("Exception in TestBreakpoint.Wait() ({0})", name), f);
                }
            }
        } catch {
            process.SendCloseMessage();
            throw;
        }
    }
    // Release the process blocked in Reach().
    public void Continue() {
        e2.Set();
    }
    public void WaitAndContinue() {
        Wait();
        Continue();
    }
}
public class TestProcess : AsyncTestObjectBase, ICurrentProcess {
// Maps an executable file name (e.g. "test.exe") to its entry-point delegate,
// which is run on a dedicated thread when a TestProcess with that name starts.
static readonly Dictionary<string, Action<TestProcess>> registeredApplications = new Dictionary<string, Action<TestProcess>>();
// Removes all registered applications (called from test teardown).
public static void ClearApplications() {
    registeredApplications.Clear();
}
// Registers the entry point executed for processes started with this file name.
public static void AddApplication(string fileName, Action<TestProcess> entryPoint) {
    registeredApplications.Add(fileName, entryPoint);
}
// Control-flow exception thrown by EnvironmentExit() to unwind the entry point,
// simulating an abrupt process kill.
class TestProcessExitException : Exception { }
readonly ProcessStartInfo startInfo;
readonly Action<TestProcess> entryPoint;
// Breakpoints created on demand, keyed by name.
readonly ConcurrentDictionary<string, TestBreakpoint> breakpoints;
// First unhandled exception thrown by the entry point, if any.
Exception exception;
// Signaled when the process thread finishes.
ManualResetEvent waitProcess;
volatile bool stopMainLoop = false;
// All currently running simulated processes (guarded by lock(runningProcesses)).
static Dictionary<TestProcess, bool> runningProcesses = new Dictionary<TestProcess, bool>();
// Per-application handshake events used by WaitProcessStart: Item1 is set when the
// process starts, Item2 releases the starting process to continue.
static Dictionary<string, Tuple<AutoResetEvent, AutoResetEvent>> waitStart = new Dictionary<string, Tuple<AutoResetEvent, AutoResetEvent>>();
Thread thread = null;
readonly string applicationID;
// Creates a simulated process for the given start info. The entry point is looked
// up by the start info's file name, so AddApplication must have been called first.
public TestProcess(string applicationID, AsyncTestFixture fixture, ProcessStartInfo startInfo, string processID)
    : base(fixture) {
    this.applicationID = applicationID;
    ProcessID = processID;
    this.startInfo = startInfo;
    this.entryPoint = registeredApplications[startInfo.FileName];
    breakpoints = new ConcurrentDictionary<string, TestBreakpoint>();
}
// Returns the breakpoint with the given name, creating it on first use.
public TestBreakpoint GetBreakpoint(string name) { return breakpoints.GetOrAdd(name, s => new TestBreakpoint(name, Fixture, this)); }
public string ExecutablePath { get { return this.startInfo.FileName; } }
public string ProcessID { get; private set; }
// Emulates Environment.GetCommandLineArgs(): the executable name first, then the
// arguments split on spaces, with surrounding double quotes stripped.
public IEnumerable<string> CommandLineArgs {
    get {
        yield return startInfo.FileName;
        foreach(string arg in Regex.Matches(startInfo.Arguments, @"[^ ""]+|""[^""]*""").Cast<Match>().Select(m => m.Value).ToArray()) {
            if(arg.Length >= 2 && arg[0] == '\"' && arg[arg.Length - 1] == '\"')
                yield return arg.Substring(1, arg.Length - 2);
            else
                yield return arg;
        }
    }
}
// True once EnvironmentExit() has been called on this process.
public bool DoEnvironmentExit { get; private set; }
// Simulates an abrupt process termination by throwing a control-flow exception
// that the process thread catches (see Start()).
public void EnvironmentExit() {
    DoEnvironmentExit = true;
    throw new TestProcessExitException();
}
/// <summary>
/// Returns the running process for <paramref name="applicationName"/>, or blocks until one
/// starts. When none is running yet, registers a wait/continue event pair that Start()
/// completes as a handshake: Start() signals Item1 once the process is registered, we read
/// the process list, then signal Item2 so Start() may finish.
/// </summary>
public static TestProcess WaitProcessStart(string applicationName, AsyncTestFixture fixture) {
AutoResetEvent wait, cont;
lock(runningProcesses) {
TestProcess runningProcess = runningProcesses.Keys.Where(p => string.Equals(p.startInfo.FileName, applicationName, StringComparison.Ordinal)).FirstOrDefault();
if(runningProcess != null) return runningProcess;
wait = new AutoResetEvent(false);
cont = new AutoResetEvent(false);
waitStart.Add(applicationName, new Tuple<AutoResetEvent, AutoResetEvent>(wait, cont));
}
fixture.WaitOne(wait);
// NOTE(review): this read happens outside the lock. It appears safe only because Start()
// still holds the runningProcesses lock until cont is signalled below, which blocks every
// other mutation of the collection — confirm before changing the locking in Start().
TestProcess process = runningProcesses.Keys.Where(p => string.Equals(p.startInfo.FileName, applicationName, StringComparison.Ordinal)).Single();
cont.Set();
return process;
}
/// <summary>
/// Requests a cooperative stop of every running emulated process, waits for each to exit and
/// asserts that none remain. Any process whose thread is still alive afterwards is aborted
/// in the finally block so a hung test cannot leak threads.
/// </summary>
public static void WaitAllProcessesExit() {
try {
List<TestProcess> runningProcessesList;
lock(runningProcesses) {
runningProcessesList = new List<TestProcess>(runningProcesses.Keys);
}
// Signal all processes first, then wait, so they can shut down in parallel.
foreach(TestProcess process in runningProcessesList)
process.SendCloseMessage();
foreach(TestProcess process in runningProcessesList)
process.WaitExit();
Assert.AreEqual(0, runningProcesses.Count);
} finally {
List<TestProcess> runningProcessesList;
lock(runningProcesses) {
runningProcessesList = new List<TestProcess>(runningProcesses.Keys);
}
// Last-resort cleanup for processes that did not exit cooperatively.
foreach(TestProcess process in runningProcessesList) {
if(process.thread != null)
process.thread.Abort();
}
}
}
/// <summary>
/// Launches the emulated process on a dedicated thread and registers it in runningProcesses.
/// If a WaitProcessStart() caller is pending for this application name, performs the start
/// handshake: signal Item1, then block on Item2 — note this wait happens WHILE holding the
/// runningProcesses lock, which is what makes the waiter's unlocked read safe.
/// </summary>
public void Start() {
lock(runningProcesses) {
if(thread != null)
throw new InvalidOperationException();
waitProcess = new ManualResetEvent(false);
thread = new Thread(() => {
try {
entryPoint(this);
} catch(TestProcessExitException) {
// EnvironmentExit() was called inside the entry point: emulate a process kill.
JumpActionsManagerBase.EmulateProcessKill(this);
} catch(Exception e) {
// Captured here and rethrown later by Check()/WaitExit().
exception = e;
} finally {
lock(runningProcesses)
runningProcesses.Remove(this);
waitProcess.Set();
}
});
thread.Start();
runningProcesses.Add(this, true);
Tuple<AutoResetEvent, AutoResetEvent> onStart;
if(waitStart.TryGetValue(startInfo.FileName, out onStart)) {
onStart.Item1.Set();
Fixture.WaitOne(onStart.Item2);
waitStart.Remove(startInfo.FileName);
}
}
}
/// <summary>Rethrows any failure captured from the emulated process thread, wrapped in an AggregateException so the original stack trace is preserved.</summary>
public void Check() {
    Exception failure = exception;
    if(failure == null)
        return;
    throw new AggregateException(failure.Message, failure);
}
/// <summary>Asks the process main loop (DoEvents) to exit cooperatively; returns immediately.</summary>
public void SendCloseMessage() {
stopMainLoop = true;
}
/// <summary>Blocks until the emulated process thread finishes (if it was ever started), then surfaces any captured failure via Check().</summary>
public void WaitExit() {
    ManualResetEvent exitHandle = waitProcess;
    if(exitHandle != null)
        Fixture.WaitOne(exitHandle);
    Check();
}
/// <summary>
/// Pumps dispatcher messages until SendCloseMessage() is called; throws when the fixture
/// timeout elapses first. Uses Stopwatch for the deadline — the original DateTime.Now
/// subtraction breaks when the wall clock changes (DST, NTP adjustment) mid-test.
/// </summary>
public void DoEvents() {
    var timer = System.Diagnostics.Stopwatch.StartNew();
    while(Fixture.TestTimeout > timer.Elapsed) {
        if(stopMainLoop) return;
        DispatcherHelper.DoEvents();
    }
    throw new Exception(string.Format("Test process timeout ({0})", ProcessID));
}
// Combines the caller-supplied id with the URI-escaped executable path to form a unique application id.
string ICurrentProcess.ApplicationId { get { return applicationID + Uri.EscapeDataString(ExecutablePath); } }
}
}
#endif
| |
namespace Microsoft.Protocols.TestSuites.MS_WWSP
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Xml;
using System.Xml.Schema;
using Microsoft.Protocols.TestSuites.Common;
using Microsoft.Protocols.TestTools;
/// <summary>
/// This class contains common help methods.
/// </summary>
public static class AdapterHelper
{
    #region Variables

    /// <summary>
    /// Used to read configuration property from PTF configuration and capture requirements.
    /// </summary>
    private static ITestSite site;

    #endregion Variables

    #region Adapter help methods

    /// <summary>
    /// Initialize object of "Site".
    /// </summary>
    /// <param name="currentSite">An object provides logging, assertions, and SUT adapters for test code onto its execution context.</param>
    public static void Initialize(ITestSite currentSite)
    {
        site = currentSite;
    }

    /// <summary>
    /// Get attribute value from response data of an operation.
    /// </summary>
    /// <param name="attributeName">The specified attribute name.</param>
    /// <param name="xmlNode">The response data of an operation.</param>
    /// <returns>The attribute value, or null when the attribute or the expected node path is absent.</returns>
    public static string GetAttributeValueFromXml(string attributeName, XmlNode xmlNode)
    {
        string attributeValue = null;

        if (xmlNode.Name.Equals("ToDoData"))
        {
            try
            {
                // The structure of this statement is decided by the template data structure:
                // for ToDoData the attribute lives three levels below the root node.
                attributeValue = xmlNode.FirstChild.FirstChild.FirstChild.Attributes.GetNamedItem(attributeName).Value;
            }
            catch
            {
                // If can't find the expected XML node, will be caught here and set the value to null.
                attributeValue = null;
            }
        }
        else
        {
            try
            {
                attributeValue = xmlNode.Attributes.GetNamedItem(attributeName).Value;
            }
            catch
            {
                // If can't find the expected XML node, will be caught here and set the value to null.
                attributeValue = null;
            }
        }

        return attributeValue;
    }

    /// <summary>
    /// Get the specified XML node from the specified XML fragment.
    /// </summary>
    /// <param name="nodeName">The name of specified XML node.</param>
    /// <param name="xmlNode">The specified XML fragment.</param>
    /// <returns>The matched node; an assertion fails when the node is absent.</returns>
    public static XmlNode GetNodeFromXML(string nodeName, XmlNode xmlNode)
    {
        XmlNamespaceManager nsmgr = new XmlNamespaceManager(xmlNode.OwnerDocument.NameTable);
        nsmgr.AddNamespace("wf", "http://schemas.microsoft.com/sharepoint/soap/workflow/");
        XmlNode elementNode = xmlNode.SelectSingleNode("//wf:" + nodeName, nsmgr);
        site.Assert.IsNotNull(elementNode, "The element Node should not be null.");
        return elementNode;
    }

    /// <summary>
    /// The method is used to verify whether specified elementName is existed in the specified lastRawXml.
    /// </summary>
    /// <param name="xmlElement">The XML element.</param>
    /// <param name="elementName">The element name which need to check whether it is existed.</param>
    /// <returns>If the XML response has contain element, true means include, otherwise false.</returns>
    public static bool HasElement(XmlElement xmlElement, string elementName)
    {
        // If server response XML contains elementName, true is returned, otherwise false.
        return xmlElement.GetElementsByTagName(elementName).Count > 0;
    }

    /// <summary>
    /// Verify the schema definition for the workflow association data, if there is error in validation, method will throw an XmlSchemaValidationException.
    /// </summary>
    /// <param name="workFlowAssociationData">A parameter represents the data of the association</param>
    public static void VerifyWorkflowAssociationSchema(XmlNode workFlowAssociationData)
    {
        if (null == workFlowAssociationData || string.IsNullOrEmpty(workFlowAssociationData.OuterXml))
        {
            throw new ArgumentException("The [workFlowAssociationData] parameter should have an instance and contain valid OuterXml");
        }

        // Validate the WorkFlowAssociation schema for TemplateData.WorkflowTemplates.WorkflowTemplate.AssocationData.
        bool enableworkFlowAssociationValidation = Common.GetConfigurationPropertyValue<bool>("ValidateWorkFlowAssociation", site);
        string contents = string.Empty;
        if (enableworkFlowAssociationValidation)
        {
            // WorkFlowAssociationData element might contain an inner text which is the association content or contain a child element which includes the association content.
            if (!workFlowAssociationData.HasChildNodes)
            {
                contents = workFlowAssociationData.InnerText;
            }
            else
            {
                contents = workFlowAssociationData.FirstChild.InnerText;
            }

            if (!string.IsNullOrEmpty(contents))
            {
                // Get schema definitions from xsd file.
                List<string> schemaDefinitions = LoadWorkflowAssociationSchemas();
                ValidationResult validationResult = XmlValidator.ValidateXml(schemaDefinitions, contents);

                // If have validation error, throw new validation exception
                if (validationResult != ValidationResult.Success)
                {
                    throw new XmlSchemaValidationException(
                        string.Format(
                            "There are schema validation errors or warnings when validating the Workflow association data in response of GetTemplatesForItem operation, the result is {0}",
                            XmlValidator.GenerateValidationResult()));
                }
            }
        }
    }

    /// <summary>
    /// A method used to check if the specified value is zero or more combination of the bitmasks.
    /// </summary>
    /// <param name="value">A parameter represents the value which is used to check.</param>
    /// <param name="bitMasks">A parameter represents the array which is used limit the value specified in value parameter</param>
    /// <returns>Returns true indicating the value is valid.</returns>
    public static bool IsValueValid(long value, long[] bitMasks)
    {
        // Retrieve all the combinations(more bitmasks).
        List<long> combinations = GetCombinationsFromBitMasks(bitMasks);

        // Add zero to the possible values.
        combinations.Add(0);
        return combinations.Contains(value);
    }

    /// <summary>
    /// Get the WorkFlow Template Item By specified Name
    /// </summary>
    /// <param name="templateName">A parameter represents the template name which will be used to find out the template item</param>
    /// <param name="templateData">A parameter represents response of GetTemplatesForItem operation.</param>
    /// <returns>The matched template item, or null when no template has the specified name.</returns>
    public static TemplateDataWorkflowTemplate GetWorkFlowTemplateItemByName(string templateName, TemplateData templateData)
    {
        if (string.IsNullOrEmpty(templateName) || null == templateData)
        {
            string errMsg = string.Format(
                "All Parameters should not be null or empty: templateName[{0}] getTemplatesForItemResult[{1}]",
                string.IsNullOrEmpty(templateName) ? "NullOrEmpty" : "Valid",
                null == templateData ? "Null" : "Valid");
            throw new ArgumentException(errMsg);
        }

        if (null == templateData.WorkflowTemplates)
        {
            site.Assert.Fail("Could not get the valid TemplateData from the response of GetTemplatesForItem operation.");
        }

        TemplateDataWorkflowTemplate[] templates = templateData.WorkflowTemplates;
        var expectedTemplateItems = from templateItem in templates
                                    where templateItem.Name.Equals(templateName, StringComparison.OrdinalIgnoreCase)
                                    select templateItem;

        TemplateDataWorkflowTemplate matchTemplateItem = null;
        int itemsCounter = expectedTemplateItems.Count();
        if (1 < itemsCounter)
        {
            site.Assert.Fail("The response of GetTemplatesForItem operation should contain only one matched TemplateData item.");
        }
        else if (0 == itemsCounter)
        {
            // No match: return null so the caller can decide whether that is an error.
            return matchTemplateItem;
        }
        else
        {
            matchTemplateItem = expectedTemplateItems.ElementAt(0);
        }

        return matchTemplateItem;
    }

    /// <summary>
    /// Get the association data from specified templateItem in response of GetTemplatesForItem operation.
    /// </summary>
    /// <param name="templateName">A parameter represents the template name which will be used to find out the template item</param>
    /// <param name="templateData">A parameter represents response of GetTemplatesForItem operation which contains the association data.</param>
    /// <returns>A return represents the association data.</returns>
    public static XmlNode GetAssociationDataFromTemplateItem(string templateName, TemplateData templateData)
    {
        TemplateDataWorkflowTemplate currentWorkflowTemplateItem = GetWorkFlowTemplateItemByName(templateName, templateData);
        if (null == currentWorkflowTemplateItem)
        {
            site.Assert.Fail(
                "The response of getTemplatesForItem operation should contain template item with expected name[{0}]",
                templateName);
        }

        return currentWorkflowTemplateItem.AssociationData;
    }

    #endregion Adapter help methods

    #region Extend methods

    /// <summary>
    /// It is extend method and used to compare fields' value between two instance of ClaimReleaseTaskResponseClaimReleaseTaskResult type.
    /// </summary>
    /// <param name="currentclaimResultInstance">A parameter represents the current instance of ClaimReleaseTaskResponseClaimReleaseTaskResult type.</param>
    /// <param name="targetclaimResultInstance">A parameter represents the target instance of ClaimReleaseTaskResponseClaimReleaseTaskResult type which will be compared.</param>
    /// <returns>Return true indicating the current claimResultInstance is equal to target claimResultInstance.</returns>
    public static bool AreEquals(this ClaimReleaseTaskResponseClaimReleaseTaskResult currentclaimResultInstance, ClaimReleaseTaskResponseClaimReleaseTaskResult targetclaimResultInstance)
    {
        if (null == currentclaimResultInstance)
        {
            // Guard the extension receiver explicitly instead of failing with a NullReferenceException below.
            throw new ArgumentNullException("currentclaimResultInstance");
        }

        if (null == targetclaimResultInstance)
        {
            return false;
        }

        ClaimReleaseTaskResponseClaimReleaseTaskResultTaskData currentTaskData = currentclaimResultInstance.TaskData;
        ClaimReleaseTaskResponseClaimReleaseTaskResultTaskData targetTaskData = targetclaimResultInstance.TaskData;

        // Compare field by field; short-circuits on the first difference.
        bool compareResult = string.Equals(currentTaskData.AssignedTo, targetTaskData.AssignedTo, StringComparison.OrdinalIgnoreCase);
        compareResult = compareResult && int.Equals(currentTaskData.ItemId, targetTaskData.ItemId);
        compareResult = compareResult && Guid.Equals(currentTaskData.ListId, targetTaskData.ListId);
        compareResult = compareResult && string.Equals(currentTaskData.TaskGroup, targetTaskData.TaskGroup);
        return compareResult;
    }

    #endregion Extend methods

    #region private methods

    /// <summary>
    /// Load the workflow Association Schema definitions
    /// </summary>
    /// <returns>A return represents the schema definitions of workflow association</returns>
    private static List<string> LoadWorkflowAssociationSchemas()
    {
        string workflowAssociationSchemaFile = Common.GetConfigurationPropertyValue("WorkFlowAssociationXsdFile", site);
        if (string.IsNullOrEmpty(workflowAssociationSchemaFile))
        {
            throw new Exception("The workflowAssociationSchemaFile property value should not be empty when enable the Association data schema validation.");
        }

        // Process the workflowAssociation SchemaFile for different SUT in Microsoft Products.
        // BUGFIX: use ">= 0" — the original "> 0" missed the placeholder when it appeared at
        // the very start of the configured file name (IndexOf returns 0 in that case).
        if (workflowAssociationSchemaFile.IndexOf(@"[SUTVersionShortName]", StringComparison.OrdinalIgnoreCase) >= 0)
        {
            workflowAssociationSchemaFile = workflowAssociationSchemaFile.ToLower();
            string expectedSutPlaceHolderValue = string.Empty;
            string currentVersion = Common.GetConfigurationPropertyValue("SUTVersion", site);
            if (currentVersion.Equals("SharePointServer2007", StringComparison.OrdinalIgnoreCase))
            {
                expectedSutPlaceHolderValue = "2007";
            }
            else if (currentVersion.Equals("SharePointServer2010", StringComparison.OrdinalIgnoreCase))
            {
                expectedSutPlaceHolderValue = "2010";
            }
            else if (currentVersion.Equals("SharePointServer2013", StringComparison.OrdinalIgnoreCase))
            {
                expectedSutPlaceHolderValue = "2013";
            }
            else if (currentVersion.Equals("SharePointServer2016", StringComparison.OrdinalIgnoreCase))
            {
                expectedSutPlaceHolderValue = "2016";
            }
            else if (currentVersion.Equals("SharePointServer2019", StringComparison.OrdinalIgnoreCase))
            {
                expectedSutPlaceHolderValue = "2019";
            }
            else if (currentVersion.Equals("SharePointServerSubscriptionEditionPreview", StringComparison.OrdinalIgnoreCase))
            {
                expectedSutPlaceHolderValue = "SubscriptionEditionPreview";
            }
            else
            {
                throw new Exception("Could Not Generate correct workflowAssociation Schema File name.");
            }

            workflowAssociationSchemaFile = workflowAssociationSchemaFile.Replace("[SUTVersionShortName]".ToLower(), expectedSutPlaceHolderValue);
        }

        #region Process multiple schema definitions in one file.
        XmlDocument doc = new XmlDocument();
        doc.Load(workflowAssociationSchemaFile);
        XmlElement rootElement = doc.DocumentElement;
        List<string> schemaDefinitions = new List<string>();

        // if it is single "Schema definition" in this file.
        if (rootElement.LocalName.Equals("schema", StringComparison.OrdinalIgnoreCase))
        {
            schemaDefinitions.Add(rootElement.OuterXml);
            return schemaDefinitions;
        }

        // multiple "Schema definitions" in this file, and test suite will use "SchemaXsds" xml element to contain multiple definitions.
        if (!rootElement.LocalName.Equals("SchemaXsds", StringComparison.OrdinalIgnoreCase))
        {
            throw new Exception("The workflow association schema definition file's root element should be [SchemaXsds] or [schema].");
        }

        if (!rootElement.HasChildNodes)
        {
            throw new Exception("The workflow association schema definition file should contain at least one schema definition under [SchemaXsds] element.");
        }

        var validSchemaDefinitionNode = from XmlNode schemaItem in rootElement.ChildNodes
                                        where schemaItem.LocalName.Equals("schema", StringComparison.OrdinalIgnoreCase)
                                        select schemaItem;

        foreach (XmlNode schemadefinition in validSchemaDefinitionNode)
        {
            schemaDefinitions.Add(schemadefinition.OuterXml);
        }

        return schemaDefinitions;
        #endregion
    }

    /// <summary>
    /// Get all the combinations of the bitmasks.
    /// </summary>
    /// <param name="bitMasks">The bitmask array.</param>
    /// <returns>All the combinations of the bitmasks.</returns>
    private static List<long> GetCombinationsFromBitMasks(long[] bitMasks)
    {
        List<long> combinations = new List<long>();
        int countOfBitMasks = bitMasks.Length;

        // Collect combinations of every size from 1 up to the number of bitmasks.
        for (int index = 1; index <= countOfBitMasks; ++index)
        {
            Combination(bitMasks, index, ref combinations);
        }

        return combinations;
    }

    /// <summary>
    /// Get the combinations from the specified count of bitmasks.
    /// </summary>
    /// <param name="bitMasks">The bitmask array.</param>
    /// <param name="countCom">The count of bitmasks the combinations contains.</param>
    /// <param name="combinations">The combination values.</param>
    /// <returns>"true": success; "false": failed.</returns>
    private static bool Combination(long[] bitMasks, int countCom, ref List<long> combinations)
    {
        int length = bitMasks.Length;
        if (length < countCom)
        {
            return false;
        }

        // array[i] holds the 1-based index of the bitmask chosen for combination slot i.
        long[] array = new long[length];
        long indexFirst = 0;

        // Initialize array.
        for (indexFirst = 0; indexFirst < length; indexFirst++)
        {
            array[indexFirst] = 0;
        }

        // Calculate possible bitmask values and add them into "combinations".
        long indexSecond = 0;
        while (indexSecond >= 0)
        {
            if (array[indexSecond] < (length - countCom + indexSecond + 1))
            {
                indexFirst = indexSecond;
                array[indexSecond]++;
            }
            else
            {
                indexSecond--;
                continue;
            }

            for (; indexFirst < countCom - 1; indexFirst++)
            {
                array[indexFirst + 1] = array[indexFirst] + 1;
            }

            if (indexFirst == countCom - 1)
            {
                // A full selection of countCom bitmasks is ready: sum them into one combination value.
                long result = 0L;
                for (int idxBit = 0; idxBit < countCom; ++idxBit)
                {
                    result += bitMasks[array[idxBit] - 1];
                }

                combinations.Add(result);
            }

            indexSecond = indexFirst;
        }

        return true;
    }

    #endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Diagnostics;
using System.Linq;
using NuGet.VisualStudio.Resources;
namespace NuGet.VisualStudio
{
[PartCreationPolicy(CreationPolicy.Shared)]
[Export(typeof(IVsPackageSourceProvider))]
[Export(typeof(IPackageSourceProvider))]
public class VsPackageSourceProvider : IVsPackageSourceProvider
{
    private static readonly string OfficialFeedName = VsResources.OfficialSourceName;
    private static readonly PackageSource NuGetDefaultSource = new PackageSource(NuGetConstants.DefaultFeedUrl, OfficialFeedName);

    // Curated feed that is shown only when running Visual Studio Express for Windows 8.
    private static readonly PackageSource Windows8Source = new PackageSource(NuGetConstants.VSExpressForWindows8FeedUrl,
                                                                             VsResources.VisualStudioExpressForWindows8SourceName,
                                                                             isEnabled: true,
                                                                             isOfficial: true);

    // Legacy official feed URLs that are silently migrated to the current default feed.
    private static readonly Dictionary<PackageSource, PackageSource> _feedsToMigrate = new Dictionary<PackageSource, PackageSource>
    {
        { new PackageSource(NuGetConstants.V1FeedUrl, OfficialFeedName), NuGetDefaultSource },
        { new PackageSource(NuGetConstants.V2LegacyFeedUrl, OfficialFeedName), NuGetDefaultSource },
    };

    internal const string ActivePackageSourceSectionName = "activePackageSource";

    private readonly IPackageSourceProvider _packageSourceProvider;
    private readonly IVsShellInfo _vsShellInfo;
    private readonly ISettings _settings;
    private readonly ISolutionManager _solutionManager;
    private bool _initialized;
    private List<PackageSource> _packageSources;
    private PackageSource _activePackageSource;

    [ImportingConstructor]
    public VsPackageSourceProvider(
        ISettings settings,
        IVsShellInfo vsShellInfo,
        ISolutionManager solutionManager) :
        this(settings, new PackageSourceProvider(settings, new[] { NuGetDefaultSource }, _feedsToMigrate), vsShellInfo, solutionManager)
    {
    }

    public VsPackageSourceProvider(
        ISettings settings,
        IVsShellInfo vsShellInfo) :
        this(settings, vsShellInfo, null)
    {
    }

    internal VsPackageSourceProvider(
        ISettings settings,
        IPackageSourceProvider packageSourceProvider,
        IVsShellInfo vsShellInfo)
        : this(settings, packageSourceProvider, vsShellInfo, null)
    {
    }

    private VsPackageSourceProvider(
        ISettings settings,
        IPackageSourceProvider packageSourceProvider,
        IVsShellInfo vsShellInfo,
        ISolutionManager solutionManager)
    {
        if (settings == null)
        {
            throw new ArgumentNullException("settings");
        }
        if (packageSourceProvider == null)
        {
            throw new ArgumentNullException("packageSourceProvider");
        }
        if (vsShellInfo == null)
        {
            throw new ArgumentNullException("vsShellInfo");
        }

        _packageSourceProvider = packageSourceProvider;
        _solutionManager = solutionManager;
        _settings = settings;
        _vsShellInfo = vsShellInfo;

        if (null != _solutionManager)
        {
            // Re-read sources whenever a solution opens or closes (settings may differ per solution).
            _solutionManager.SolutionClosed += OnSolutionOpenedOrClosed;
            _solutionManager.SolutionOpened += OnSolutionOpenedOrClosed;
        }
    }

    private void OnSolutionOpenedOrClosed(object sender, EventArgs e)
    {
        // Force EnsureInitialized() to reload package sources on next access.
        _initialized = false;
    }

    /// <summary>
    /// The source used by default for package operations. Null clears the setting; the
    /// aggregate pseudo-source and the official feed name are always accepted.
    /// </summary>
    public PackageSource ActivePackageSource
    {
        get
        {
            EnsureInitialized();
            return _activePackageSource;
        }
        set
        {
            EnsureInitialized();

            if (value != null &&
                !IsAggregateSource(value) &&
                !_packageSources.Contains(value) &&
                !value.Name.Equals(OfficialFeedName, StringComparison.CurrentCultureIgnoreCase))
            {
                throw new ArgumentException(VsResources.PackageSource_Invalid, "value");
            }

            _activePackageSource = value;
            PersistActivePackageSource(_settings, _activePackageSource);
        }
    }

    internal static IEnumerable<PackageSource> DefaultSources
    {
        get { return new[] { NuGetDefaultSource }; }
    }

    internal static Dictionary<PackageSource, PackageSource> FeedsToMigrate
    {
        get { return _feedsToMigrate; }
    }

    public IEnumerable<PackageSource> LoadPackageSources()
    {
        EnsureInitialized();
        // assert that we are not returning aggregate source
        Debug.Assert(_packageSources == null || !_packageSources.Any(IsAggregateSource));
        return _packageSources;
    }

    public void SavePackageSources(IEnumerable<PackageSource> sources)
    {
        if (sources == null)
        {
            throw new ArgumentNullException("sources");
        }

        EnsureInitialized();
        Debug.Assert(!sources.Any(IsAggregateSource));

        // Reset the active source first; callers must re-select one afterwards.
        ActivePackageSource = null;
        _packageSources.Clear();
        _packageSources.AddRange(sources);

        PersistPackageSources(_packageSourceProvider, _vsShellInfo, _packageSources);
    }

    public void DisablePackageSource(PackageSource source)
    {
        // There's no scenario for this method to get called, so do nothing here.
        Debug.Fail("This method shouldn't get called.");
    }

    public bool IsPackageSourceEnabled(PackageSource source)
    {
        EnsureInitialized();
        var sourceInUse = _packageSources.FirstOrDefault(ps => ps.Equals(source));
        return sourceInUse != null && sourceInUse.IsEnabled;
    }

    private void EnsureInitialized()
    {
        // A while loop (rather than a plain if) so initialization restarts if a solution
        // open/close event resets _initialized while this method is running.
        while (!_initialized)
        {
            _initialized = true;
            _packageSources = _packageSourceProvider.LoadPackageSources().ToList();

            // Unlike NuGet Core, Visual Studio has the concept of an official package source.
            // We find the official source, if present, and mark it as official.
            var officialPackageSource = _packageSources.FirstOrDefault(IsOfficialPackageSource);
            if (officialPackageSource == null)
            {
                // if there is no official source, add one, but make it disabled
                officialPackageSource = NuGetDefaultSource.Clone();
                officialPackageSource.IsEnabled = false;
                _packageSources.Add(officialPackageSource);
            }
            officialPackageSource.IsOfficial = true;

            // When running Visual Studio Express for Windows 8, we insert the curated feed at the top
            if (_vsShellInfo.IsVisualStudioExpressForWindows8)
            {
                bool windows8SourceIsEnabled = _packageSourceProvider.IsPackageSourceEnabled(Windows8Source);

                // defensive coding: make sure we don't add duplicated win8 source
                _packageSources.RemoveAll(p => p.Equals(Windows8Source));

                // Windows8Source is a static object which is meant for doing comparison only.
                // To add it to the list of package sources, we make a clone of it first.
                var windows8SourceClone = Windows8Source.Clone();
                windows8SourceClone.IsEnabled = windows8SourceIsEnabled;
                _packageSources.Insert(0, windows8SourceClone);
            }

            InitializeActivePackageSource();
        }
    }

    private void InitializeActivePackageSource()
    {
        _activePackageSource = DeserializeActivePackageSource(_settings);

        PackageSource migratedActiveSource;
        bool activeSourceChanged = false;
        if (_activePackageSource == null)
        {
            // If there are no sources, pick the first source that's enabled.
            activeSourceChanged = true;
            _activePackageSource = NuGetDefaultSource;
        }
        else if (_feedsToMigrate.TryGetValue(_activePackageSource, out migratedActiveSource))
        {
            // Check if we need to migrate the active source.
            activeSourceChanged = true;
            _activePackageSource = migratedActiveSource;
        }

        if (activeSourceChanged)
        {
            PersistActivePackageSource(_settings, _activePackageSource);
        }
    }

    private static void PersistActivePackageSource(ISettings settings, PackageSource activePackageSource)
    {
        settings.DeleteSection(ActivePackageSourceSectionName);

        if (activePackageSource != null)
        {
            settings.SetValue(ActivePackageSourceSectionName, activePackageSource.Name, activePackageSource.Source);
        }
    }

    // NOTE: the original took an unused IVsShellInfo parameter; removed — the shell flavor
    // plays no part in reading the persisted active source (private helper, single caller).
    private static PackageSource DeserializeActivePackageSource(ISettings settings)
    {
        var settingValues = settings.GetValues(ActivePackageSourceSectionName);

        PackageSource packageSource = null;
        if (settingValues != null && settingValues.Any())
        {
            KeyValuePair<string, string> setting = settingValues.First();
            if (IsAggregateSource(setting.Key, setting.Value))
            {
                packageSource = AggregatePackageSource.Instance;
            }
            else
            {
                packageSource = new PackageSource(setting.Value, setting.Key);
            }
        }

        if (packageSource != null)
        {
            // guard against corrupted data if the active package source is not enabled
            packageSource.IsEnabled = true;

            // Unlike NuGet Core, Visual Studio has the concept of an official package source.
            // If the active package source is the official source, mark it as official.
            if (IsOfficialPackageSource(packageSource))
            {
                packageSource.IsOfficial = true;
            }
        }

        return packageSource;
    }

    private static void PersistPackageSources(IPackageSourceProvider packageSourceProvider, IVsShellInfo vsShellInfo, List<PackageSource> packageSources)
    {
        bool windows8SourceIsDisabled = false;

        // When running Visual Studio Express For Windows 8, we will have previously added a curated package source.
        // But we don't want to persist it, so remove it from the list.
        if (vsShellInfo.IsVisualStudioExpressForWindows8)
        {
            PackageSource windows8SourceInUse = packageSources.Find(p => p.Equals(Windows8Source));
            Debug.Assert(windows8SourceInUse != null);
            if (windows8SourceInUse != null)
            {
                packageSources = packageSources.Where(ps => !ps.Equals(Windows8Source)).ToList();
                windows8SourceIsDisabled = !windows8SourceInUse.IsEnabled;
            }
        }

        // Starting from version 1.3, we persist the package sources to the nuget.config file instead of VS registry.
        // assert that we are not saving aggregate source
        Debug.Assert(!packageSources.Any(p => IsAggregateSource(p.Name, p.Source)));
        packageSourceProvider.SavePackageSources(packageSources);

        if (windows8SourceIsDisabled)
        {
            packageSourceProvider.DisablePackageSource(Windows8Source);
        }
    }

    private static bool IsAggregateSource(string name, string source)
    {
        PackageSource aggregate = AggregatePackageSource.Instance;
        return aggregate.Name.Equals(name, StringComparison.CurrentCultureIgnoreCase) ||
               aggregate.Source.Equals(source, StringComparison.InvariantCultureIgnoreCase);
    }

    private static bool IsAggregateSource(PackageSource packageSource)
    {
        return IsAggregateSource(packageSource.Name, packageSource.Source);
    }

    private static bool IsOfficialPackageSource(PackageSource packageSource)
    {
        if (packageSource == null)
        {
            return false;
        }

        return packageSource.Equals(NuGetDefaultSource);
    }
}
}
| |
using System;
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Numerics;
using Nethereum.Hex.HexTypes;
using Nethereum.ABI.FunctionEncoding.Attributes;
using Nethereum.Web3;
using Nethereum.RPC.Eth.DTOs;
using Nethereum.Contracts.CQS;
using Nethereum.Contracts.ContractHandlers;
using Nethereum.Contracts;
using System.Threading;
using Nethereum.ENS.BaseRegistrarImplementation.ContractDefinition;
namespace Nethereum.ENS
{
public partial class BaseRegistrarImplementationService
{
/// <summary>Deploys the contract and waits for the transaction receipt (which carries the deployed address).</summary>
public static Task<TransactionReceipt> DeployContractAndWaitForReceiptAsync(Nethereum.Web3.Web3 web3, BaseRegistrarImplementationDeployment baseRegistrarImplementationDeployment, CancellationTokenSource cancellationTokenSource = null)
{
return web3.Eth.GetContractDeploymentHandler<BaseRegistrarImplementationDeployment>().SendRequestAndWaitForReceiptAsync(baseRegistrarImplementationDeployment, cancellationTokenSource);
}
/// <summary>Deploys the contract and returns the transaction hash without waiting for mining.</summary>
public static Task<string> DeployContractAsync(Nethereum.Web3.Web3 web3, BaseRegistrarImplementationDeployment baseRegistrarImplementationDeployment)
{
return web3.Eth.GetContractDeploymentHandler<BaseRegistrarImplementationDeployment>().SendRequestAsync(baseRegistrarImplementationDeployment);
}
/// <summary>Deploys the contract, waits for the receipt, then wraps the new address in a service instance.</summary>
public static async Task<BaseRegistrarImplementationService> DeployContractAndGetServiceAsync(Nethereum.Web3.Web3 web3, BaseRegistrarImplementationDeployment baseRegistrarImplementationDeployment, CancellationTokenSource cancellationTokenSource = null)
{
var receipt = await DeployContractAndWaitForReceiptAsync(web3, baseRegistrarImplementationDeployment, cancellationTokenSource).ConfigureAwait(false);
return new BaseRegistrarImplementationService(web3, receipt.ContractAddress);
}
// Web3 client used for all RPC calls issued by this service.
protected Nethereum.Web3.Web3 Web3 { get; }
// Handler bound to the deployed contract address; performs queries and transaction requests.
public ContractHandler ContractHandler { get; }
/// <summary>Creates a service bound to an already-deployed BaseRegistrarImplementation contract.</summary>
public BaseRegistrarImplementationService(Nethereum.Web3.Web3 web3, string contractAddress)
{
Web3 = web3;
ContractHandler = web3.Eth.GetContractHandler(contractAddress);
}
/// <summary>Queries the contract's GRACE_PERIOD view using a caller-supplied function message.</summary>
public Task<BigInteger> GRACE_PERIODQueryAsync(GRACE_PERIODFunction gRacePeriodFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<GRACE_PERIODFunction, BigInteger>(gRacePeriodFunction, blockParameter);
}
/// <summary>Queries the contract's GRACE_PERIOD view with default (empty) function parameters.</summary>
public Task<BigInteger> GRACE_PERIODQueryAsync(BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<GRACE_PERIODFunction, BigInteger>(null, blockParameter);
}
/// <summary>Sends an addController transaction using a caller-supplied function message; returns the transaction hash.</summary>
public Task<string> AddControllerRequestAsync(AddControllerFunction addControllerFunction)
{
    return ContractHandler.SendRequestAsync(addControllerFunction);
}

/// <summary>Sends an addController transaction and waits for its receipt.</summary>
public Task<TransactionReceipt> AddControllerRequestAndWaitForReceiptAsync(AddControllerFunction addControllerFunction, CancellationTokenSource cancellationToken = null)
{
    return ContractHandler.SendRequestAndWaitForReceiptAsync(addControllerFunction, cancellationToken);
}

/// <summary>Convenience overload: builds the function message from the controller address.</summary>
public Task<string> AddControllerRequestAsync(string controller)
{
    var message = new AddControllerFunction { Controller = controller };
    return ContractHandler.SendRequestAsync(message);
}

/// <summary>Convenience overload: builds the function message and waits for the receipt.</summary>
public Task<TransactionReceipt> AddControllerRequestAndWaitForReceiptAsync(string controller, CancellationTokenSource cancellationToken = null)
{
    var message = new AddControllerFunction { Controller = controller };
    return ContractHandler.SendRequestAndWaitForReceiptAsync(message, cancellationToken);
}
/// <summary>Sends an approve transaction using a caller-supplied function message; returns the transaction hash.</summary>
public Task<string> ApproveRequestAsync(ApproveFunction approveFunction)
{
    return ContractHandler.SendRequestAsync(approveFunction);
}

/// <summary>Sends an approve transaction and waits for its receipt.</summary>
public Task<TransactionReceipt> ApproveRequestAndWaitForReceiptAsync(ApproveFunction approveFunction, CancellationTokenSource cancellationToken = null)
{
    return ContractHandler.SendRequestAndWaitForReceiptAsync(approveFunction, cancellationToken);
}

/// <summary>Convenience overload: builds the approve message from the target address and token id.</summary>
public Task<string> ApproveRequestAsync(string to, BigInteger tokenId)
{
    var message = new ApproveFunction { To = to, TokenId = tokenId };
    return ContractHandler.SendRequestAsync(message);
}

/// <summary>Convenience overload: builds the approve message and waits for the receipt.</summary>
public Task<TransactionReceipt> ApproveRequestAndWaitForReceiptAsync(string to, BigInteger tokenId, CancellationTokenSource cancellationToken = null)
{
    var message = new ApproveFunction { To = to, TokenId = tokenId };
    return ContractHandler.SendRequestAndWaitForReceiptAsync(message, cancellationToken);
}
/// <summary>Queries the contract's available(id) view using a caller-supplied function message.</summary>
public Task<bool> AvailableQueryAsync(AvailableFunction availableFunction, BlockParameter blockParameter = null)
{
    return ContractHandler.QueryAsync<AvailableFunction, bool>(availableFunction, blockParameter);
}

/// <summary>Convenience overload: queries available(id) for the given raw id.</summary>
public Task<bool> AvailableQueryAsync(BigInteger id, BlockParameter blockParameter = null)
{
    var message = new AvailableFunction { Id = id };
    return ContractHandler.QueryAsync<AvailableFunction, bool>(message, blockParameter);
}

/// <summary>Queries the contract's balanceOf(owner) view using a caller-supplied function message.</summary>
public Task<BigInteger> BalanceOfQueryAsync(BalanceOfFunction balanceOfFunction, BlockParameter blockParameter = null)
{
    return ContractHandler.QueryAsync<BalanceOfFunction, BigInteger>(balanceOfFunction, blockParameter);
}

/// <summary>Convenience overload: queries balanceOf(owner) for the given owner address.</summary>
public Task<BigInteger> BalanceOfQueryAsync(string owner, BlockParameter blockParameter = null)
{
    var message = new BalanceOfFunction { Owner = owner };
    return ContractHandler.QueryAsync<BalanceOfFunction, BigInteger>(message, blockParameter);
}
public Task<byte[]> BaseNodeQueryAsync(BaseNodeFunction baseNodeFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<BaseNodeFunction, byte[]>(baseNodeFunction, blockParameter);
}
public Task<byte[]> BaseNodeQueryAsync(BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<BaseNodeFunction, byte[]>(null, blockParameter);
}
public Task<bool> ControllersQueryAsync(ControllersFunction controllersFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<ControllersFunction, bool>(controllersFunction, blockParameter);
}
public Task<bool> ControllersQueryAsync(string returnValue1, BlockParameter blockParameter = null)
{
var controllersFunction = new ControllersFunction();
controllersFunction.ReturnValue1 = returnValue1;
return ContractHandler.QueryAsync<ControllersFunction, bool>(controllersFunction, blockParameter);
}
public Task<string> EnsQueryAsync(EnsFunction ensFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<EnsFunction, string>(ensFunction, blockParameter);
}
public Task<string> EnsQueryAsync(BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<EnsFunction, string>(null, blockParameter);
}
public Task<string> GetApprovedQueryAsync(GetApprovedFunction getApprovedFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<GetApprovedFunction, string>(getApprovedFunction, blockParameter);
}
public Task<string> GetApprovedQueryAsync(BigInteger tokenId, BlockParameter blockParameter = null)
{
var getApprovedFunction = new GetApprovedFunction();
getApprovedFunction.TokenId = tokenId;
return ContractHandler.QueryAsync<GetApprovedFunction, string>(getApprovedFunction, blockParameter);
}
public Task<bool> IsApprovedForAllQueryAsync(IsApprovedForAllFunction isApprovedForAllFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<IsApprovedForAllFunction, bool>(isApprovedForAllFunction, blockParameter);
}
public Task<bool> IsApprovedForAllQueryAsync(string owner, string operatorx, BlockParameter blockParameter = null)
{
var isApprovedForAllFunction = new IsApprovedForAllFunction();
isApprovedForAllFunction.Owner = owner;
isApprovedForAllFunction.Operator = operatorx;
return ContractHandler.QueryAsync<IsApprovedForAllFunction, bool>(isApprovedForAllFunction, blockParameter);
}
public Task<bool> IsOwnerQueryAsync(IsOwnerFunction isOwnerFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<IsOwnerFunction, bool>(isOwnerFunction, blockParameter);
}
public Task<bool> IsOwnerQueryAsync(BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<IsOwnerFunction, bool>(null, blockParameter);
}
public Task<BigInteger> NameExpiresQueryAsync(NameExpiresFunction nameExpiresFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<NameExpiresFunction, BigInteger>(nameExpiresFunction, blockParameter);
}
public Task<BigInteger> NameExpiresQueryAsync(BigInteger id, BlockParameter blockParameter = null)
{
var nameExpiresFunction = new NameExpiresFunction();
nameExpiresFunction.Id = id;
return ContractHandler.QueryAsync<NameExpiresFunction, BigInteger>(nameExpiresFunction, blockParameter);
}
public Task<string> OwnerQueryAsync(OwnerFunction ownerFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<OwnerFunction, string>(ownerFunction, blockParameter);
}
public Task<string> OwnerQueryAsync(BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<OwnerFunction, string>(null, blockParameter);
}
public Task<string> OwnerOfQueryAsync(OwnerOfFunction ownerOfFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<OwnerOfFunction, string>(ownerOfFunction, blockParameter);
}
public Task<string> OwnerOfQueryAsync(BigInteger tokenId, BlockParameter blockParameter = null)
{
var ownerOfFunction = new OwnerOfFunction();
ownerOfFunction.TokenId = tokenId;
return ContractHandler.QueryAsync<OwnerOfFunction, string>(ownerOfFunction, blockParameter);
}
public Task<string> ReclaimRequestAsync(ReclaimFunction reclaimFunction)
{
return ContractHandler.SendRequestAsync(reclaimFunction);
}
public Task<TransactionReceipt> ReclaimRequestAndWaitForReceiptAsync(ReclaimFunction reclaimFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(reclaimFunction, cancellationToken);
}
public Task<string> ReclaimRequestAsync(BigInteger id, string owner)
{
var reclaimFunction = new ReclaimFunction();
reclaimFunction.Id = id;
reclaimFunction.Owner = owner;
return ContractHandler.SendRequestAsync(reclaimFunction);
}
public Task<TransactionReceipt> ReclaimRequestAndWaitForReceiptAsync(BigInteger id, string owner, CancellationTokenSource cancellationToken = null)
{
var reclaimFunction = new ReclaimFunction();
reclaimFunction.Id = id;
reclaimFunction.Owner = owner;
return ContractHandler.SendRequestAndWaitForReceiptAsync(reclaimFunction, cancellationToken);
}
public Task<string> RegisterRequestAsync(RegisterFunction registerFunction)
{
return ContractHandler.SendRequestAsync(registerFunction);
}
public Task<TransactionReceipt> RegisterRequestAndWaitForReceiptAsync(RegisterFunction registerFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(registerFunction, cancellationToken);
}
public Task<string> RegisterRequestAsync(BigInteger id, string owner, BigInteger duration)
{
var registerFunction = new RegisterFunction();
registerFunction.Id = id;
registerFunction.Owner = owner;
registerFunction.Duration = duration;
return ContractHandler.SendRequestAsync(registerFunction);
}
public Task<TransactionReceipt> RegisterRequestAndWaitForReceiptAsync(BigInteger id, string owner, BigInteger duration, CancellationTokenSource cancellationToken = null)
{
var registerFunction = new RegisterFunction();
registerFunction.Id = id;
registerFunction.Owner = owner;
registerFunction.Duration = duration;
return ContractHandler.SendRequestAndWaitForReceiptAsync(registerFunction, cancellationToken);
}
public Task<string> RegisterOnlyRequestAsync(RegisterOnlyFunction registerOnlyFunction)
{
return ContractHandler.SendRequestAsync(registerOnlyFunction);
}
public Task<TransactionReceipt> RegisterOnlyRequestAndWaitForReceiptAsync(RegisterOnlyFunction registerOnlyFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(registerOnlyFunction, cancellationToken);
}
public Task<string> RegisterOnlyRequestAsync(BigInteger id, string owner, BigInteger duration)
{
var registerOnlyFunction = new RegisterOnlyFunction();
registerOnlyFunction.Id = id;
registerOnlyFunction.Owner = owner;
registerOnlyFunction.Duration = duration;
return ContractHandler.SendRequestAsync(registerOnlyFunction);
}
public Task<TransactionReceipt> RegisterOnlyRequestAndWaitForReceiptAsync(BigInteger id, string owner, BigInteger duration, CancellationTokenSource cancellationToken = null)
{
var registerOnlyFunction = new RegisterOnlyFunction();
registerOnlyFunction.Id = id;
registerOnlyFunction.Owner = owner;
registerOnlyFunction.Duration = duration;
return ContractHandler.SendRequestAndWaitForReceiptAsync(registerOnlyFunction, cancellationToken);
}
public Task<string> RemoveControllerRequestAsync(RemoveControllerFunction removeControllerFunction)
{
return ContractHandler.SendRequestAsync(removeControllerFunction);
}
public Task<TransactionReceipt> RemoveControllerRequestAndWaitForReceiptAsync(RemoveControllerFunction removeControllerFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(removeControllerFunction, cancellationToken);
}
public Task<string> RemoveControllerRequestAsync(string controller)
{
var removeControllerFunction = new RemoveControllerFunction();
removeControllerFunction.Controller = controller;
return ContractHandler.SendRequestAsync(removeControllerFunction);
}
public Task<TransactionReceipt> RemoveControllerRequestAndWaitForReceiptAsync(string controller, CancellationTokenSource cancellationToken = null)
{
var removeControllerFunction = new RemoveControllerFunction();
removeControllerFunction.Controller = controller;
return ContractHandler.SendRequestAndWaitForReceiptAsync(removeControllerFunction, cancellationToken);
}
public Task<string> RenewRequestAsync(RenewFunction renewFunction)
{
return ContractHandler.SendRequestAsync(renewFunction);
}
public Task<TransactionReceipt> RenewRequestAndWaitForReceiptAsync(RenewFunction renewFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(renewFunction, cancellationToken);
}
public Task<string> RenewRequestAsync(BigInteger id, BigInteger duration)
{
var renewFunction = new RenewFunction();
renewFunction.Id = id;
renewFunction.Duration = duration;
return ContractHandler.SendRequestAsync(renewFunction);
}
public Task<TransactionReceipt> RenewRequestAndWaitForReceiptAsync(BigInteger id, BigInteger duration, CancellationTokenSource cancellationToken = null)
{
var renewFunction = new RenewFunction();
renewFunction.Id = id;
renewFunction.Duration = duration;
return ContractHandler.SendRequestAndWaitForReceiptAsync(renewFunction, cancellationToken);
}
public Task<string> RenounceOwnershipRequestAsync(RenounceOwnershipFunction renounceOwnershipFunction)
{
return ContractHandler.SendRequestAsync(renounceOwnershipFunction);
}
public Task<string> RenounceOwnershipRequestAsync()
{
return ContractHandler.SendRequestAsync<RenounceOwnershipFunction>();
}
public Task<TransactionReceipt> RenounceOwnershipRequestAndWaitForReceiptAsync(RenounceOwnershipFunction renounceOwnershipFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(renounceOwnershipFunction, cancellationToken);
}
public Task<TransactionReceipt> RenounceOwnershipRequestAndWaitForReceiptAsync(CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync<RenounceOwnershipFunction>(null, cancellationToken);
}
public Task<string> SafeTransferFromRequestAsync(SafeTransferFromFunction safeTransferFromFunction)
{
return ContractHandler.SendRequestAsync(safeTransferFromFunction);
}
public Task<TransactionReceipt> SafeTransferFromRequestAndWaitForReceiptAsync(SafeTransferFromFunction safeTransferFromFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(safeTransferFromFunction, cancellationToken);
}
public Task<string> SafeTransferFromRequestAsync(string from, string to, BigInteger tokenId)
{
var safeTransferFromFunction = new SafeTransferFromFunction();
safeTransferFromFunction.From = from;
safeTransferFromFunction.To = to;
safeTransferFromFunction.TokenId = tokenId;
return ContractHandler.SendRequestAsync(safeTransferFromFunction);
}
public Task<TransactionReceipt> SafeTransferFromRequestAndWaitForReceiptAsync(string from, string to, BigInteger tokenId, CancellationTokenSource cancellationToken = null)
{
var safeTransferFromFunction = new SafeTransferFromFunction();
safeTransferFromFunction.From = from;
safeTransferFromFunction.To = to;
safeTransferFromFunction.TokenId = tokenId;
return ContractHandler.SendRequestAndWaitForReceiptAsync(safeTransferFromFunction, cancellationToken);
}
public Task<string> SafeTransferFromRequestAsync(SafeTransferFromFunction2 safeTransferFromFunction)
{
return ContractHandler.SendRequestAsync(safeTransferFromFunction);
}
public Task<TransactionReceipt> SafeTransferFromRequestAndWaitForReceiptAsync(SafeTransferFromFunction2 safeTransferFromFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(safeTransferFromFunction, cancellationToken);
}
public Task<string> SafeTransferFromRequestAsync(string from, string to, BigInteger tokenId, byte[] data)
{
var safeTransferFromFunction = new SafeTransferFromFunction2();
safeTransferFromFunction.From = from;
safeTransferFromFunction.To = to;
safeTransferFromFunction.TokenId = tokenId;
safeTransferFromFunction.Data = data;
return ContractHandler.SendRequestAsync(safeTransferFromFunction);
}
public Task<TransactionReceipt> SafeTransferFromRequestAndWaitForReceiptAsync(string from, string to, BigInteger tokenId, byte[] data, CancellationTokenSource cancellationToken = null)
{
var safeTransferFromFunction = new SafeTransferFromFunction2();
safeTransferFromFunction.From = from;
safeTransferFromFunction.To = to;
safeTransferFromFunction.TokenId = tokenId;
safeTransferFromFunction.Data = data;
return ContractHandler.SendRequestAndWaitForReceiptAsync(safeTransferFromFunction, cancellationToken);
}
public Task<string> SetApprovalForAllRequestAsync(SetApprovalForAllFunction setApprovalForAllFunction)
{
return ContractHandler.SendRequestAsync(setApprovalForAllFunction);
}
public Task<TransactionReceipt> SetApprovalForAllRequestAndWaitForReceiptAsync(SetApprovalForAllFunction setApprovalForAllFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(setApprovalForAllFunction, cancellationToken);
}
public Task<string> SetApprovalForAllRequestAsync(string to, bool approved)
{
var setApprovalForAllFunction = new SetApprovalForAllFunction();
setApprovalForAllFunction.To = to;
setApprovalForAllFunction.Approved = approved;
return ContractHandler.SendRequestAsync(setApprovalForAllFunction);
}
public Task<TransactionReceipt> SetApprovalForAllRequestAndWaitForReceiptAsync(string to, bool approved, CancellationTokenSource cancellationToken = null)
{
var setApprovalForAllFunction = new SetApprovalForAllFunction();
setApprovalForAllFunction.To = to;
setApprovalForAllFunction.Approved = approved;
return ContractHandler.SendRequestAndWaitForReceiptAsync(setApprovalForAllFunction, cancellationToken);
}
public Task<string> SetResolverRequestAsync(SetResolverFunction setResolverFunction)
{
return ContractHandler.SendRequestAsync(setResolverFunction);
}
public Task<TransactionReceipt> SetResolverRequestAndWaitForReceiptAsync(SetResolverFunction setResolverFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(setResolverFunction, cancellationToken);
}
public Task<string> SetResolverRequestAsync(string resolver)
{
var setResolverFunction = new SetResolverFunction();
setResolverFunction.Resolver = resolver;
return ContractHandler.SendRequestAsync(setResolverFunction);
}
public Task<TransactionReceipt> SetResolverRequestAndWaitForReceiptAsync(string resolver, CancellationTokenSource cancellationToken = null)
{
var setResolverFunction = new SetResolverFunction();
setResolverFunction.Resolver = resolver;
return ContractHandler.SendRequestAndWaitForReceiptAsync(setResolverFunction, cancellationToken);
}
public Task<bool> SupportsInterfaceQueryAsync(SupportsInterfaceFunction supportsInterfaceFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<SupportsInterfaceFunction, bool>(supportsInterfaceFunction, blockParameter);
}
public Task<bool> SupportsInterfaceQueryAsync(byte[] interfaceID, BlockParameter blockParameter = null)
{
var supportsInterfaceFunction = new SupportsInterfaceFunction();
supportsInterfaceFunction.InterfaceID = interfaceID;
return ContractHandler.QueryAsync<SupportsInterfaceFunction, bool>(supportsInterfaceFunction, blockParameter);
}
public Task<string> TransferFromRequestAsync(TransferFromFunction transferFromFunction)
{
return ContractHandler.SendRequestAsync(transferFromFunction);
}
public Task<TransactionReceipt> TransferFromRequestAndWaitForReceiptAsync(TransferFromFunction transferFromFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(transferFromFunction, cancellationToken);
}
public Task<string> TransferFromRequestAsync(string from, string to, BigInteger tokenId)
{
var transferFromFunction = new TransferFromFunction();
transferFromFunction.From = from;
transferFromFunction.To = to;
transferFromFunction.TokenId = tokenId;
return ContractHandler.SendRequestAsync(transferFromFunction);
}
public Task<TransactionReceipt> TransferFromRequestAndWaitForReceiptAsync(string from, string to, BigInteger tokenId, CancellationTokenSource cancellationToken = null)
{
var transferFromFunction = new TransferFromFunction();
transferFromFunction.From = from;
transferFromFunction.To = to;
transferFromFunction.TokenId = tokenId;
return ContractHandler.SendRequestAndWaitForReceiptAsync(transferFromFunction, cancellationToken);
}
public Task<string> TransferOwnershipRequestAsync(TransferOwnershipFunction transferOwnershipFunction)
{
return ContractHandler.SendRequestAsync(transferOwnershipFunction);
}
public Task<TransactionReceipt> TransferOwnershipRequestAndWaitForReceiptAsync(TransferOwnershipFunction transferOwnershipFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(transferOwnershipFunction, cancellationToken);
}
public Task<string> TransferOwnershipRequestAsync(string newOwner)
{
var transferOwnershipFunction = new TransferOwnershipFunction();
transferOwnershipFunction.NewOwner = newOwner;
return ContractHandler.SendRequestAsync(transferOwnershipFunction);
}
public Task<TransactionReceipt> TransferOwnershipRequestAndWaitForReceiptAsync(string newOwner, CancellationTokenSource cancellationToken = null)
{
var transferOwnershipFunction = new TransferOwnershipFunction();
transferOwnershipFunction.NewOwner = newOwner;
return ContractHandler.SendRequestAndWaitForReceiptAsync(transferOwnershipFunction, cancellationToken);
}
}
}
| |
using UnityEngine;
using UnityEditor;
using System.Collections;
using System.Reflection;
using System.Linq;
using System.Reflection.Emit;
using System.IO;
using System.Collections.Generic;
/// <summary>
/// Editor utility that watches script imports and regenerates a small
/// "ScriptableObjectBuilder" assembly containing one builder class per
/// custom asset type, each exposing a [MenuItem] under Assets/Create.
/// </summary>
public class CustomAssetBuilder : AssetPostprocessor
{
    // Asset paths imported since the last ProcessTypes pass.
    private static List<string> recentlyImportedTypeNames = new List<string>();

    // True while ProcessTypes is already queued on EditorApplication.update,
    // so repeated import callbacks do not subscribe the handler twice.
    private static bool processScheduled;

    /// <summary>
    /// Returns every type in the CustomAsset assembly that either derives
    /// from CustomAsset or carries the [CustomAsset] attribute.
    /// </summary>
    static System.Type[] EnumerateAssetTypes()
    {
        var assembly = System.Reflection.Assembly.GetAssembly(typeof(CustomAsset));
        return (
            from t in assembly.GetTypes()
            where t.IsSubclassOf(typeof(CustomAsset)) || t.GetCustomAttributes(typeof(CustomAssetAttribute), true).Length > 0
            select t
        ).ToArray();
    }

    static void OnPostprocessAllAssets(
        string[] importedAssets,
        string[] deletedAssets,
        string[] movedAssets,
        string[] movedFromAssetPaths)
    {
        recentlyImportedTypeNames.AddRange(importedAssets);
        // Defer the type scan to the next editor tick; queue it only once
        // even when several import batches arrive before it runs.
        if (!processScheduled)
        {
            processScheduled = true;
            EditorApplication.update += ProcessTypes;
        }
    }

    /// <summary>
    /// Runs once on the editor update loop after an import batch. If any
    /// imported script maps to a ScriptableObject subclass, the builder
    /// assembly is regenerated.
    /// </summary>
    static void ProcessTypes()
    {
        EditorApplication.update -= ProcessTypes;
        processScheduled = false;
        foreach (var path in recentlyImportedTypeNames)
        {
            if (path == null)
                continue;
            var extension = Path.GetExtension(path);
            if (extension != ".cs" && extension != ".js")
                continue;
            // Assumes the file name matches the type name, as Unity requires
            // for MonoBehaviour/ScriptableObject scripts.
            var name = Path.GetFileNameWithoutExtension(path);
            var type = typeof(CustomAsset).Assembly.GetType(name, false, true);
            if (type == null || !type.IsSubclassOf(typeof(ScriptableObject)))
                continue;
            Generate();
            break;
        }
        recentlyImportedTypeNames.Clear();
    }

    /// <summary>
    /// Emits and saves ScriptableObjectBuilder.dll. For each asset type it
    /// generates the equivalent of:
    /// <code>
    /// public sealed class AssetTypeBuilder
    /// {
    ///     [MenuItem("Assets/Create/AssetType")]
    ///     public static void Create()
    ///     {
    ///         CustomAssetBuilder.CreateTheAsset&lt;AssetType&gt;();
    ///     }
    /// }
    /// </code>
    /// </summary>
    static void Generate()
    {
        string dirName = "Assets/Plugins/Editor";
        AssemblyName aName = new AssemblyName("ScriptableObjectBuilder");
        string dllName = aName.Name + ".dll";
        // Ensure the output directory exists BEFORE deleting the old dll:
        // File.Delete throws DirectoryNotFoundException when the directory
        // itself is missing (it only tolerates a missing file).
        System.IO.Directory.CreateDirectory(dirName);
        System.IO.File.Delete(Path.Combine(dirName, dllName));
        AssemblyBuilder ab =
            System.AppDomain.CurrentDomain.DefineDynamicAssembly(
                aName,
                AssemblyBuilderAccess.RunAndSave,
                dirName);
        // For a single-module assembly, the module name is usually the
        // assembly name plus an extension.
        ModuleBuilder mb = ab.DefineDynamicModule(aName.Name, dllName);
        foreach (var assetType in EnumerateAssetTypes())
        {
            string className = assetType.Name + "Builder";
            TypeBuilder tb = mb.DefineType(className, TypeAttributes.Public | TypeAttributes.Sealed);
            // Emit a default parameterless constructor that just chains to
            // object's constructor.
            ConstructorBuilder ctor0 = tb.DefineConstructor(MethodAttributes.Public, CallingConventions.Standard, System.Type.EmptyTypes);
            ILGenerator ctor0IL = ctor0.GetILGenerator();
            ctor0IL.Emit(OpCodes.Ldarg_0);
            ctor0IL.Emit(OpCodes.Call, typeof(object).GetConstructor(System.Type.EmptyTypes));
            ctor0IL.Emit(OpCodes.Ret);
            // Emit "public static void Create()" decorated with
            // [MenuItem("Assets/Create/<type name>")].
            MethodBuilder meth = tb.DefineMethod(
                "Create",
                MethodAttributes.Public | MethodAttributes.Static,
                typeof(void),
                System.Type.EmptyTypes);
            var attribParams = new object[] { "Assets/Create/" + assetType.Name };
            var attribCtorInfo = typeof(MenuItem).GetConstructor(new System.Type[] { typeof(string) });
            var methAttrib = new CustomAttributeBuilder(attribCtorInfo, attribParams);
            meth.SetCustomAttribute(methAttrib);
            ILGenerator methIL = meth.GetILGenerator();
            methIL.Emit(OpCodes.Ldstr, "Creating scripted type: " + assetType.Name);
            methIL.Emit(OpCodes.Call, typeof(Debug).GetMethod("Log", new System.Type[] { typeof(string) }));
            // NOTE: CreateTheAsset<T> is resolved by name here, so it must
            // remain public and keep its current name.
            methIL.Emit(OpCodes.Call, typeof(CustomAssetBuilder).GetMethod("CreateTheAsset").MakeGenericMethod(new System.Type[] { assetType }));
            methIL.Emit(OpCodes.Ret);
            tb.CreateType();
        }
        // Persist the assembly so Unity loads the generated builders on the
        // next refresh (requires AssemblyBuilderAccess.RunAndSave).
        ab.Save(dllName);
    }

    /// <summary>
    /// Creates an asset of type T next to the current selection (or under
    /// Assets/ when nothing is selected), then selects it and focuses the
    /// project window. Invoked via reflection by the generated builders.
    /// </summary>
    public static void CreateTheAsset<T>() where T : ScriptableObject
    {
        T asset = ScriptableObject.CreateInstance<T>();
        string path = AssetDatabase.GetAssetPath(Selection.activeObject);
        if (path == "")
        {
            path = "Assets";
        }
        else if (Path.GetExtension(path) != "")
        {
            // Selection is a file: strip the file name to get its directory.
            path = path.Replace(Path.GetFileName(AssetDatabase.GetAssetPath(Selection.activeObject)), "");
        }
        string assetPathAndName = AssetDatabase.GenerateUniqueAssetPath(path + "/New" + typeof(T).ToString() + "." + typeof(T).ToString() + ".asset");
        AssetDatabase.CreateAsset(asset, assetPathAndName);
        AssetDatabase.SaveAssets();
        EditorUtility.FocusProjectWindow();
        Selection.activeObject = asset;
    }
}
| |
//
// ContentDescriptor.cs: Provides a representation of an ASF Content Descriptor
// to be used in combination with ExtendedContentDescriptionObject.
//
// Author:
// Brian Nickel ([email protected])
//
// Copyright (C) 2006-2007 Brian Nickel
//
// This library is free software; you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License version
// 2.1 as published by the Free Software Foundation.
//
// This library is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
// USA
//
using System;
namespace TagLib.Asf {
	/// <summary>
	///    Indicates the type of data stored in a <see
	///    cref="ContentDescriptor" /> or <see cref="DescriptionRecord" />
	///    object. The explicit numeric values are significant: they are
	///    the type codes written to and read from the file.
	/// </summary>
	public enum DataType {
		/// <summary>
		///    The descriptor contains Unicode (UTF-16LE) text.
		/// </summary>
		Unicode = 0,
		/// <summary>
		///    The descriptor contains binary data.
		/// </summary>
		Bytes = 1,
		/// <summary>
		///    The descriptor contains a boolean value.
		/// </summary>
		Bool = 2,
		/// <summary>
		///    The descriptor contains a 4-byte DWORD value.
		/// </summary>
		DWord = 3,
		/// <summary>
		///    The descriptor contains a 8-byte QWORD value.
		/// </summary>
		QWord = 4,
		/// <summary>
		///    The descriptor contains a 2-byte WORD value.
		/// </summary>
		Word = 5,
		/// <summary>
		///    The descriptor contains a 16-byte GUID value.
		/// </summary>
		Guid = 6
	}
/// <summary>
/// This class provides a representation of an ASF Content
/// Descriptor to be used in combination with <see
/// cref="ExtendedContentDescriptionObject" />.
/// </summary>
public class ContentDescriptor
{
		#region Private Fields
		/// <summary>
		///    Contains the data type, which selects which of the
		///    value fields below holds the descriptor's value.
		/// </summary>
		private DataType type = DataType.Unicode;
		/// <summary>
		///    Contains the descriptor name.
		/// </summary>
		private string name = null;
		/// <summary>
		///    Contains the string value (set for <see
		///    cref="DataType.Unicode" /> descriptors).
		/// </summary>
		private string strValue = null;
		/// <summary>
		///    Contains the byte value (set for <see
		///    cref="DataType.Bytes" /> descriptors).
		/// </summary>
		private ByteVector byteValue = null;
		/// <summary>
		///    Contains the numeric value (set for the Bool, Word,
		///    DWord and QWord descriptor types).
		/// </summary>
		private ulong longValue = 0;
		#endregion
		#region Constructors
		/// <summary>
		///    Constructs and initializes a new instance of <see
		///    cref="ContentDescriptor" /> with a specified name and
		///    value, stored as Unicode text.
		/// </summary>
		/// <param name="name">
		///    A <see cref="string" /> object containing the name of the
		///    new instance.
		/// </param>
		/// <param name="value">
		///    A <see cref="string" /> object containing the value for
		///    the new instance.
		/// </param>
		public ContentDescriptor (string name, string value)
		{
			this.name = name;
			this.strValue = value;
		}
		/// <summary>
		///    Constructs and initializes a new instance of <see
		///    cref="ContentDescriptor" /> with a specified name and
		///    value, stored as binary data. The value is copied.
		/// </summary>
		/// <param name="name">
		///    A <see cref="string" /> object containing the name of the
		///    new instance.
		/// </param>
		/// <param name="value">
		///    A <see cref="ByteVector" /> object containing the value
		///    for the new instance.
		/// </param>
		public ContentDescriptor (string name, ByteVector value)
		{
			this.name = name;
			this.type = DataType.Bytes;
			this.byteValue = new ByteVector (value);
		}
		/// <summary>
		///    Constructs and initializes a new instance of <see
		///    cref="ContentDescriptor" /> with a specified name and
		///    value, stored as a DWORD.
		/// </summary>
		/// <param name="name">
		///    A <see cref="string" /> object containing the name of the
		///    new instance.
		/// </param>
		/// <param name="value">
		///    A <see cref="uint" /> value containing the value
		///    for the new instance.
		/// </param>
		public ContentDescriptor (string name, uint value)
		{
			this.name = name;
			this.type = DataType.DWord;
			this.longValue = value;
		}
		/// <summary>
		///    Constructs and initializes a new instance of <see
		///    cref="ContentDescriptor" /> with a specified name and
		///    value, stored as a QWORD.
		/// </summary>
		/// <param name="name">
		///    A <see cref="string" /> object containing the name of the
		///    new instance.
		/// </param>
		/// <param name="value">
		///    A <see cref="ulong" /> value containing the value
		///    for the new instance.
		/// </param>
		public ContentDescriptor (string name, ulong value)
		{
			this.name = name;
			this.type = DataType.QWord;
			this.longValue = value;
		}
		/// <summary>
		///    Constructs and initializes a new instance of <see
		///    cref="ContentDescriptor" /> with a specified name and
		///    value, stored as a WORD.
		/// </summary>
		/// <param name="name">
		///    A <see cref="string" /> object containing the name of the
		///    new instance.
		/// </param>
		/// <param name="value">
		///    A <see cref="ushort" /> value containing the value
		///    for the new instance.
		/// </param>
		public ContentDescriptor (string name, ushort value)
		{
			this.name = name;
			this.type = DataType.Word;
			this.longValue = value;
		}
		/// <summary>
		///    Constructs and initializes a new instance of <see
		///    cref="ContentDescriptor" /> with a specified name and
		///    value, stored as a boolean (1 for true, 0 for false).
		/// </summary>
		/// <param name="name">
		///    A <see cref="string" /> object containing the name of the
		///    new instance.
		/// </param>
		/// <param name="value">
		///    A <see cref="bool" /> value containing the value
		///    for the new instance.
		/// </param>
		public ContentDescriptor (string name, bool value)
		{
			this.name = name;
			this.type = DataType.Bool;
			this.longValue = value ? 1uL : 0;
		}
		/// <summary>
		///    Constructs and initializes a new instance of <see
		///    cref="ContentDescriptor" /> by reading its contents from
		///    a file.
		/// </summary>
		/// <param name="file">
		///    A <see cref="Asf.File" /> object to read the raw ASF
		///    Description Record from.
		/// </param>
		/// <exception cref="ArgumentNullException">
		///    <paramref name="file" /> is <see langword="null" />.
		/// </exception>
		/// <exception cref="CorruptFileException">
		///    A valid descriptor could not be read.
		/// </exception>
		/// <remarks>
		///    <paramref name="file" /> must be at a seek position at
		///    which the descriptor can be read.
		/// </remarks>
		protected internal ContentDescriptor (Asf.File file)
		{
			if (file == null)
				throw new ArgumentNullException ("file");
			if (!Parse (file))
				throw new CorruptFileException (
					"Failed to parse content descriptor.");
		}
		#endregion
		#region Public Properties
		/// <summary>
		///    Gets the name of the current instance.
		/// </summary>
		/// <value>
		///    A <see cref="string" /> object containing the name of the
		///    current instance.
		/// </value>
		public string Name {
			get {return name;}
		}
		/// <summary>
		///    Gets the type of data contained in the current instance.
		/// </summary>
		/// <value>
		///    A <see cref="DataType" /> value indicating the type of
		///    data contained in the current instance.
		/// </value>
		public DataType Type {
			get {return type;}
		}
		#endregion
#region Public Methods
/// <summary>
/// Gets a string representation of the current instance.
/// </summary>
/// <returns>
/// A <see cref="string" /> object containing the value of
/// the current instance.
/// </returns>
public override string ToString ()
{
if (type == DataType.Unicode)
return strValue;
if (type == DataType.Bytes)
return byteValue.ToString (StringType.UTF16LE);
return longValue.ToString ();
}
		/// <summary>
		///    Gets the binary contents of the current instance.
		/// </summary>
		/// <returns>
		///    A <see cref="ByteVector" /> object containing the
		///    contents of the current instance, or <see langword="null"
		///    /> if <see cref="Type" /> is unequal to <see
		///    cref="DataType.Bytes" /> (the byte field is only set by
		///    the binary constructor).
		/// </returns>
		public ByteVector ToByteVector ()
		{
			return byteValue;
		}
/// <summary>
/// Gets the boolean value contained in the current instance.
/// </summary>
/// <returns>
/// A <see cref="bool" /> value containing the value of the
/// current instance.
/// </returns>
public bool ToBool ()
{
return longValue != 0;
}
/// <summary>
/// Gets the DWORD value contained in the current instance.
/// </summary>
/// <returns>
/// A <see cref="uint" /> value containing the value of the
/// current instance.
/// </returns>
public uint ToDWord ()
{
uint value;
if (type == DataType.Unicode && strValue != null &&
uint.TryParse (strValue, out value))
return value;
return (uint) longValue;
}
/// <summary>
/// Gets the QWORD value contained in the current instance.
/// </summary>
/// <returns>
/// A <see cref="ulong" /> value containing the value of the
/// current instance.
/// </returns>
public ulong ToQWord ()
{
ulong value;
if (type == DataType.Unicode && strValue != null &&
ulong.TryParse (strValue, out value))
return value;
return longValue;
}
/// <summary>
/// Gets the WORD value contained in the current instance.
/// </summary>
/// <returns>
/// A <see cref="ushort" /> value containing the value of the
/// current instance.
/// </returns>
public ushort ToWord ()
{
ushort value;
if (type == DataType.Unicode && strValue != null &&
ushort.TryParse (strValue, out value))
return value;
return (ushort) longValue;
}
/// <summary>
/// Renders the current instance as a raw ASF Description
/// Record.
/// </summary>
/// <returns>
/// A <see cref="ByteVector" /> object containing the
/// rendered version of the current instance.
/// </returns>
public ByteVector Render ()
{
ByteVector value = null;
switch (type)
{
case DataType.Unicode:
value = Object.RenderUnicode (strValue);
break;
case DataType.Bytes:
value = byteValue;
break;
case DataType.Bool:
case DataType.DWord:
value = Object.RenderDWord ((uint) longValue);
break;
case DataType.QWord:
value = Object.RenderQWord (longValue);
break;
case DataType.Word:
value = Object.RenderWord ((ushort) longValue);
break;
default:
return null;
}
ByteVector name = Object.RenderUnicode (this.name);
ByteVector output = new ByteVector ();
output.Add (Object.RenderWord ((ushort) name.Count));
output.Add (name);
output.Add (Object.RenderWord ((ushort) type));
output.Add (Object.RenderWord ((ushort) value.Count));
output.Add (value);
return output;
}
#endregion
#region Protected Methods
/// <summary>
/// Populates the current instance by reading in the contents
/// from a file.
/// </summary>
/// <param name="file">
/// A <see cref="Asf.File" /> object to read the raw ASF
/// Content Descriptor from.
/// </param>
/// <returns>
/// <see langword="true" /> if the data was read correctly.
/// Otherwise <see langword="false" />.
/// </returns>
protected bool Parse (Asf.File file)
{
int name_count = file.ReadWord ();
name = file.ReadUnicode (name_count);
type = (DataType) file.ReadWord ();
int value_count = file.ReadWord ();
switch (type)
{
case DataType.Word:
longValue = file.ReadWord ();
break;
case DataType.Bool:
longValue = file.ReadDWord ();
break;
case DataType.DWord:
longValue = file.ReadDWord ();
break;
case DataType.QWord:
longValue = file.ReadQWord ();
break;
case DataType.Unicode:
strValue = file.ReadUnicode (value_count);
break;
case DataType.Bytes:
byteValue = file.ReadBlock (value_count);
break;
default:
return false;
}
return true;
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Data.Common;
using System.Diagnostics.Contracts;
using System.Threading.Tasks;
namespace BusterWood.Mapper
{
    public static partial class Extensions
    {
        // Builds and caches row-mapping delegates, keyed by target type and reader schema.
        static readonly DataReaderMapper _readerMapper = new DataReaderMapper();
        /// <summary>Gets a cached (or newly created) delegate that maps the current row of <paramref name="reader"/> to a <typeparamref name="T"/>.</summary>
        internal static Func<DbDataReader, T> GetMappingFunc<T>(DbDataReader reader)
        {
            Contract.Requires(reader != null);
            Delegate func = _readerMapper.GetOrCreateMappingFunc(typeof(T), reader);
            return (Func<DbDataReader, T>)func;
        }
        /// <summary>Converts the <paramref name="reader"/> into a <see cref="DataSequence{T}"/></summary>
        public static DataSequence<T> Read<T>(this DbDataReader reader, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            Contract.Requires(reader.IsClosed == false);
            return new DataSequence<T>(reader, extraAction);
        }
        /// <summary>Converts the <paramref name="reader"/> into a <see cref="DataSequence{T}"/></summary>
        public static async Task<DataSequence<T>> Read<T>(this Task<DbDataReader> reader, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            return new DataSequence<T>(await reader, extraAction);
        }
        /// <summary>Wraps the <paramref name="reader"/> in a sequence of dynamic data</summary>
        public static DynamicDataSequence ToDynamic(this DbDataReader reader)
        {
            Contract.Requires(reader != null);
            return new DynamicDataSequence(reader);
        }
        /// <summary>Wraps the <paramref name="reader"/> in a sequence of dynamic data</summary>
        public static async Task<DynamicDataSequence> ToDynamicAsync(this Task<DbDataReader> reader)
        {
            Contract.Requires(reader != null);
            Contract.Ensures(Contract.Result<Task<DynamicDataSequence>>() != null);
            return new DynamicDataSequence(await reader);
        }
        /// <summary>Reads exactly one item from the reader</summary>
        /// <exception cref="InvalidOperationException"> when zero values read or more than one value can be read</exception>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static T Single<T>(this DbDataReader reader, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                if (!reader.Read()) throw new InvalidOperationException("Expected one value to be read but reader is empty");
                var item = map(reader);
                extraAction?.Invoke(reader, item);
                if (reader.Read()) throw new InvalidOperationException("Expected one value to be read but more than one value can be read");
                return item;
            }
            finally
            {
                // Only close when there is no further result set; callers may
                // continue consuming the next result set otherwise.
                if (!reader.NextResult())
                    reader.Close();
            }
        }
        /// <summary>Reads exactly one item from the reader</summary>
        /// <exception cref="InvalidOperationException"> when zero values read or more than one value can be read</exception>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static async Task<T> SingleAsync<T>(this DbDataReader reader, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            Contract.Ensures(Contract.Result<T>() != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                if (!await reader.ReadAsync()) throw new InvalidOperationException("Expected one value to be read but reader is empty");
                var single = map(reader);
                extraAction?.Invoke(reader, single);
                if (await reader.ReadAsync()) throw new InvalidOperationException("Expected one value to be read but more than one value can be read");
                return single;
            }
            finally
            {
                // Only close when there is no further result set; callers may
                // continue consuming the next result set otherwise.
                if (!await reader.NextResultAsync())
                    reader.Close();
            }
        }
        /// <summary>Reads zero or one items from the reader</summary>
        /// <remarks>Returns the default value of T if no values be read, i.e may return null</remarks>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static T SingleOrDefault<T>(this DbDataReader reader, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                if (!reader.Read()) return default(T);
                var item = map(reader);
                extraAction?.Invoke(reader, item);
                if (reader.Read()) throw new InvalidOperationException("Expected one value to be read but more than one value can be read");
                return item;
            }
            finally
            {
                if (!reader.NextResult())
                    reader.Close();
            }
        }
        /// <summary>Reads zero or one items from the reader</summary>
        /// <remarks>Returns the default value of T if no values be read, i.e may return null</remarks>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static async Task<T> SingleOrDefaultAsync<T>(this DbDataReader reader, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                if (!await reader.ReadAsync()) return default(T);
                var item = map(reader);
                extraAction?.Invoke(reader, item);
                if (await reader.ReadAsync()) throw new InvalidOperationException("Expected one value to be read but more than one value can be read");
                return item;
            }
            finally
            {
                if (!await reader.NextResultAsync())
                    reader.Close();
            }
        }
        /// <summary>Reads all the records in the reader into a list</summary>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static List<T> ToList<T>(this DbDataReader reader, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            Contract.Ensures(Contract.Result<List<T>>() != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                var list = new List<T>();
                while (reader.Read())
                {
                    var item = map(reader);
                    extraAction?.Invoke(reader, item);
                    list.Add(item);
                }
                return list;
            }
            finally
            {
                if (!reader.NextResult())
                    reader.Close();
            }
        }
        /// <summary>Reads all the records in the reader into a list</summary>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static async Task<List<T>> ToListAsync<T>(this DbDataReader reader, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            Contract.Ensures(Contract.Result<Task<List<T>>>() != null);
            //Contract.Ensures(Contract.Result<Task<List<T>>>().Result != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                var list = new List<T>();
                while (await reader.ReadAsync())
                {
                    var item = map(reader);
                    extraAction?.Invoke(reader, item);
                    list.Add(item);
                }
                return list;
            }
            finally
            {
                if (!await reader.NextResultAsync())
                    reader.Close();
            }
        }
        /// <summary>Reads all the records in the reader into a <see cref="HashSet{T}"/></summary>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static HashSet<T> ToHashSet<T>(this DbDataReader reader, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            Contract.Ensures(Contract.Result<HashSet<T>>() != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                var set = new HashSet<T>();
                while (reader.Read())
                {
                    var item = map(reader);
                    extraAction?.Invoke(reader, item);
                    set.Add(item);
                }
                return set;
            }
            finally
            {
                if (!reader.NextResult())
                    reader.Close();
            }
        }
        /// <summary>Reads all the records in the reader into a <see cref="HashSet{T}"/></summary>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static async Task<HashSet<T>> ToHashSetAsync<T>(this DbDataReader reader, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            Contract.Ensures(Contract.Result<Task<HashSet<T>>>() != null);
            Contract.Ensures(Contract.Result<Task<HashSet<T>>>().Result != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                var set = new HashSet<T>();
                while (await reader.ReadAsync())
                {
                    var item = map(reader);
                    extraAction?.Invoke(reader, item);
                    set.Add(item);
                }
                return set;
            }
            finally
            {
                if (!await reader.NextResultAsync())
                    reader.Close();
            }
        }
        /// <summary>Reads all the records in the reader into a dictionary, using the supplied <paramref name="keyFunc"/> to generate the key</summary>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static Dictionary<TKey, T> ToDictionary<TKey, T>(this DbDataReader reader, Func<T, TKey> keyFunc, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            Contract.Ensures(Contract.Result<Dictionary<TKey, T>>() != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                var dict = new Dictionary<TKey, T>();
                while (reader.Read())
                {
                    T value = map(reader);
                    extraAction?.Invoke(reader, value);
                    TKey key = keyFunc(value);
                    dict.Add(key, value);
                }
                return dict;
            }
            finally
            {
                if (!reader.NextResult())
                    reader.Close();
            }
        }
        /// <summary>Reads all the records in the reader into a dictionary, using the supplied <paramref name="keyFunc"/> to generate the key</summary>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static Dictionary<TKey, TValue> ToDictionary<T, TKey, TValue>(this DbDataReader reader, Func<T, TKey> keyFunc, Func<T, TValue> valueFunc, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            Contract.Requires(keyFunc != null);
            Contract.Requires(valueFunc != null);
            Contract.Ensures(Contract.Result<Dictionary<TKey, TValue>>() != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                var dict = new Dictionary<TKey, TValue>();
                while (reader.Read())
                {
                    T temp = map(reader);
                    extraAction?.Invoke(reader, temp);
                    TKey key = keyFunc(temp);
                    TValue value = valueFunc(temp);
                    dict.Add(key, value);
                }
                return dict;
            }
            finally
            {
                if (!reader.NextResult())
                    reader.Close();
            }
        }
        /// <summary>Reads all the records in the reader into a dictionary, using the supplied <paramref name="keyFunc"/> to generate the key</summary>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static async Task<Dictionary<TKey, T>> ToDictionaryAsync<TKey, T>(this DbDataReader reader, Func<T, TKey> keyFunc, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            Contract.Requires(keyFunc != null);
            Contract.Ensures(Contract.Result<Task<Dictionary<TKey, T>>>() != null);
            Contract.Ensures(Contract.Result<Task<Dictionary<TKey, T>>>().Result != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                var dict = new Dictionary<TKey, T>();
                while (await reader.ReadAsync())
                {
                    T value = map(reader);
                    extraAction?.Invoke(reader, value);
                    TKey key = keyFunc(value);
                    dict.Add(key, value);
                }
                return dict;
            }
            finally
            {
                if (!await reader.NextResultAsync())
                    reader.Close();
            }
        }
        /// <summary>Reads all the records in the reader into a dictionary, using the supplied <paramref name="keyFunc"/> to generate the key</summary>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static async Task<Dictionary<TKey, TValue>> ToDictionaryAsync<T, TKey, TValue>(this DbDataReader reader, Func<T, TKey> keyFunc, Func<T, TValue> valueFunc, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            Contract.Requires(keyFunc != null);
            Contract.Requires(valueFunc != null);
            Contract.Ensures(Contract.Result<Task<Dictionary<TKey, TValue>>>() != null);
            //Contract.Ensures(Contract.Result<Task<Dictionary<TKey, TValue>>>().Result != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                var dict = new Dictionary<TKey, TValue>();
                while (await reader.ReadAsync())
                {
                    T temp = map(reader);
                    extraAction?.Invoke(reader, temp);
                    TKey key = keyFunc(temp);
                    TValue value = valueFunc(temp);
                    dict.Add(key, value);
                }
                return dict;
            }
            finally
            {
                if (!await reader.NextResultAsync())
                    reader.Close();
            }
        }
        /// <summary>Reads all the records in the lookup, group by key, using the supplied <paramref name="keyFunc"/> to generate the key</summary>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static HashLookup<TKey, T> ToLookup<TKey, T>(this DbDataReader reader, Func<T, TKey> keyFunc, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            Contract.Ensures(Contract.Result<HashLookup<TKey, T>>() != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                var lookup = new HashLookup<TKey, T>();
                while (reader.Read())
                {
                    T value = map(reader);
                    extraAction?.Invoke(reader, value);
                    TKey key = keyFunc(value);
                    lookup.Add(key, value);
                }
                return lookup;
            }
            finally
            {
                if (!reader.NextResult())
                    reader.Close();
            }
        }
        /// <summary>Reads all the records in the lookup, group by key, using the supplied <paramref name="keyFunc"/> to generate the key</summary>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static HashLookup<TKey, TValue> ToLookup<T, TKey, TValue>(this DbDataReader reader, Func<T, TKey> keyFunc, Func<T, TValue> valueFunc, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            Contract.Requires(keyFunc != null);
            Contract.Requires(valueFunc != null);
            Contract.Ensures(Contract.Result<HashLookup<TKey, TValue>>() != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                var lookup = new HashLookup<TKey, TValue>();
                while (reader.Read())
                {
                    T temp = map(reader);
                    extraAction?.Invoke(reader, temp);
                    TKey key = keyFunc(temp);
                    TValue value = valueFunc(temp);
                    lookup.Add(key, value);
                }
                return lookup;
            }
            finally
            {
                if (!reader.NextResult())
                    reader.Close();
            }
        }
        /// <summary>Reads all the records in the lookup, group by key, using the supplied <paramref name="keyFunc"/> to generate the key</summary>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static async Task<HashLookup<TKey, T>> ToLookupAsync<TKey, T>(this DbDataReader reader, Func<T, TKey> keyFunc, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            Contract.Ensures(Contract.Result<Task<HashLookup<TKey, T>>>() != null);
            //Contract.Ensures(Contract.Result<Task<HashLookup<TKey, T>>>().Result != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                var lookup = new HashLookup<TKey, T>();
                while (await reader.ReadAsync())
                {
                    T value = map(reader);
                    extraAction?.Invoke(reader, value);
                    TKey key = keyFunc(value);
                    lookup.Add(key, value);
                }
                return lookup;
            }
            finally
            {
                if (!await reader.NextResultAsync())
                    reader.Close();
            }
        }
        /// <summary>Reads all the records in the lookup, group by key, using the supplied <paramref name="keyFunc"/> to generate the key</summary>
        /// <remarks>The underlying <see cref="DbDataReader"/> is disposed after this method has been called</remarks>
        public static async Task<HashLookup<TKey, TValue>> ToLookupAsync<T, TKey, TValue>(this DbDataReader reader, Func<T, TKey> keyFunc, Func<T, TValue> valueFunc, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(reader != null);
            Contract.Requires(keyFunc != null);
            Contract.Requires(valueFunc != null);
            Contract.Ensures(Contract.Result<Task<HashLookup<TKey, TValue>>>() != null);
            //Contract.Ensures(Contract.Result<Task<HashLookup<TKey, TValue>>>().Result != null);
            try
            {
                var map = GetMappingFunc<T>(reader);
                var lookup = new HashLookup<TKey, TValue>();
                while (await reader.ReadAsync())
                {
                    T temp = map(reader);
                    extraAction?.Invoke(reader, temp);
                    TKey key = keyFunc(temp);
                    TValue value = valueFunc(temp);
                    lookup.Add(key, value);
                }
                return lookup;
            }
            finally
            {
                if (!await reader.NextResultAsync())
                    reader.Close();
            }
        }
        /// <summary>Awaits the <paramref name="task"/> then reads zero or one items from the resulting reader</summary>
        public static async Task<T> SingleOrDefaultAsync<T>(this Task<DbDataReader> task, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(task != null);
            Contract.Ensures(Contract.Result<Task<T>>() != null);
            var reader = await task;
            return await reader.SingleOrDefaultAsync(extraAction);
        }
        /// <summary>Awaits the <paramref name="task"/> then reads exactly one item from the resulting reader</summary>
        public static async Task<T> SingleAsync<T>(this Task<DbDataReader> task, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(task != null);
            Contract.Ensures(Contract.Result<Task<T>>() != null);
            var reader = await task;
            return await reader.SingleAsync(extraAction);
        }
        /// <summary>Awaits the <paramref name="task"/> then reads all records from the resulting reader into a list</summary>
        public static async Task<List<T>> ToListAsync<T>(this Task<DbDataReader> task, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(task != null);
            Contract.Ensures(Contract.Result<Task<List<T>>>() != null);
            //Contract.Ensures(Contract.Result<Task<List<T>>>().Result != null);
            var reader = await task;
            return await reader.ToListAsync(extraAction);
        }
        /// <summary>Awaits the <paramref name="task"/> then reads all records from the resulting reader into a <see cref="HashSet{T}"/></summary>
        public static async Task<HashSet<T>> ToHashSetAsync<T>(this Task<DbDataReader> task, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(task != null);
            Contract.Ensures(Contract.Result<Task<HashSet<T>>>() != null);
            Contract.Ensures(Contract.Result<Task<HashSet<T>>>().Result != null);
            var reader = await task;
            return await reader.ToHashSetAsync(extraAction);
        }
        /// <summary>Awaits the <paramref name="task"/> then reads all records from the resulting reader into a dictionary keyed by <paramref name="keyFunc"/></summary>
        public static async Task<Dictionary<TKey, TValue>> ToDictionaryAsync<TKey, TValue>(this Task<DbDataReader> task, Func<TValue, TKey> keyFunc, Action<DbDataReader, TValue> extraAction = null)
        {
            Contract.Requires(keyFunc != null);
            Contract.Requires(task != null);
            Contract.Ensures(Contract.Result<Task<Dictionary<TKey, TValue>>>() != null);
            Contract.Ensures(Contract.Result<Task<Dictionary<TKey, TValue>>>().Result != null);
            var reader = await task;
            return await reader.ToDictionaryAsync(keyFunc, extraAction);
        }
        /// <summary>Awaits the <paramref name="task"/> then groups all records from the resulting reader by the key produced by <paramref name="keyFunc"/></summary>
        public static async Task<HashLookup<TKey, TValue>> ToLookupAsync<TKey, TValue>(this Task<DbDataReader> task, Func<TValue, TKey> keyFunc, Action<DbDataReader, TValue> extraAction = null)
        {
            Contract.Requires(keyFunc != null);
            Contract.Requires(task != null);
            Contract.Ensures(Contract.Result<Task<HashLookup<TKey, TValue>>>() != null);
            Contract.Ensures(Contract.Result<Task<HashLookup<TKey, TValue>>>().Result != null);
            var reader = await task;
            return await reader.ToLookupAsync(keyFunc, extraAction);
        }
        /// <summary>Awaits the <paramref name="task"/> then reads all records from the resulting reader into a dictionary built from <paramref name="keyFunc"/> and <paramref name="valueFunc"/></summary>
        public static async Task<Dictionary<TKey, TValue>> ToDictionaryAsync<T, TKey, TValue>(this Task<DbDataReader> task, Func<T, TKey> keyFunc, Func<T, TValue> valueFunc, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(task != null);
            Contract.Requires(keyFunc != null);
            Contract.Requires(valueFunc != null);
            Contract.Ensures(Contract.Result<Task<Dictionary<TKey, TValue>>>() != null);
            Contract.Ensures(Contract.Result<Task<Dictionary<TKey, TValue>>>().Result != null);
            var reader = await task;
            return await reader.ToDictionaryAsync(keyFunc, valueFunc, extraAction);
        }
        /// <summary>Awaits the <paramref name="task"/> then groups all records from the resulting reader using <paramref name="keyFunc"/> and <paramref name="valueFunc"/></summary>
        public static async Task<HashLookup<TKey, TValue>> ToLookupAsync<T, TKey, TValue>(this Task<DbDataReader> task, Func<T, TKey> keyFunc, Func<T, TValue> valueFunc, Action<DbDataReader, T> extraAction = null)
        {
            Contract.Requires(task != null);
            Contract.Requires(keyFunc != null);
            Contract.Requires(valueFunc != null);
            Contract.Ensures(Contract.Result<Task<HashLookup<TKey, TValue>>>() != null);
            Contract.Ensures(Contract.Result<Task<HashLookup<TKey, TValue>>>().Result != null);
            var reader = await task;
            return await reader.ToLookupAsync(keyFunc, valueFunc, extraAction);
        }
    }
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.Net;
using System.Web;
using System.Text.RegularExpressions;
using System.Security.Cryptography;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Com.Aspose.Email.Model;
namespace Com.Aspose.Email
{
    public struct FileInfo
    {
        // File name. NOTE(review): not referenced by ApiInvoker's multipart
        // rendering (the dictionary key is used as the filename) — confirm
        // external callers rely on it before removing.
        public string Name;
        // MIME content type emitted in the multipart part's Content-Type header.
        public string MimeType;
        // Raw file bytes written into the multipart body.
        public byte[] file;
    }
public class ApiInvoker
{
private static readonly ApiInvoker _instance = new ApiInvoker();
private Dictionary<String, String> defaultHeaderMap = new Dictionary<String, String>();
public string appSid { set; get; }
public string apiKey { set; get; }
public static ApiInvoker GetInstance()
{
return _instance;
}
public void addDefaultHeader(string key, string value)
{
defaultHeaderMap.Add(key, value);
}
public string escapeString(string str)
{
return str;
}
public static object deserialize(string json, Type type)
{
try
{
if (json.StartsWith("{") || json.StartsWith("["))
return JsonConvert.DeserializeObject(json, type);
else
{
System.Xml.XmlDocument xmlDoc = new System.Xml.XmlDocument();
xmlDoc.LoadXml(json);
return JsonConvert.SerializeXmlNode(xmlDoc);
}
}
catch (IOException e)
{
throw new ApiException(500, e.Message);
}
catch (JsonSerializationException jse)
{
throw new ApiException(500, jse.Message);
}
catch (System.Xml.XmlException xmle)
{
throw new ApiException(500, xmle.Message);
}
}
public static object deserialize(byte[] BinaryData, Type type)
{
try
{
return new ResponseMessage(BinaryData);
}
catch (IOException e)
{
throw new ApiException(500, e.Message);
}
}
private static string Sign(string url, string appKey)
{
UriBuilder uriBuilder = new UriBuilder(url);
// Remove final slash here as it can be added automatically.
uriBuilder.Path = uriBuilder.Path.TrimEnd('/');
// Compute the hash.
byte[] privateKey = Encoding.UTF8.GetBytes(appKey);
HMACSHA1 algorithm = new HMACSHA1(privateKey);
byte[] sequence = ASCIIEncoding.ASCII.GetBytes(uriBuilder.Uri.AbsoluteUri);
byte[] hash = algorithm.ComputeHash(sequence);
string signature = Convert.ToBase64String(hash);
// Remove invalid symbols.
signature = signature.TrimEnd('=');
signature = HttpUtility.UrlEncode(signature);
// Convert codes to upper case as they can be updated automatically.
signature = Regex.Replace(signature, "%[0-9a-f]{2}", e => e.Value.ToUpper());
// Add the signature to query string.
return string.Format("{0}&signature={1}", uriBuilder.Uri.AbsoluteUri, signature);
}
public static string serialize(object obj)
{
try
{
return obj != null ? JsonConvert.SerializeObject(obj, Formatting.Indented, new JsonSerializerSettings { NullValueHandling = NullValueHandling.Ignore }) : null;
}
catch (Exception e)
{
throw new ApiException(500, e.Message);
}
}
public string invokeAPI(string host, string path, string method, Dictionary<String, String> queryParams, object body, Dictionary<String, String> headerParams, Dictionary<String, object> formParams)
{
return invokeAPIInternal(host, path, method, false, queryParams, body, headerParams, formParams) as string;
}
public byte[] invokeBinaryAPI(string host, string path, string method, Dictionary<String, String> queryParams, object body, Dictionary<String, String> headerParams, Dictionary<String, object> formParams)
{
return invokeAPIInternal(host, path, method, true, queryParams, body, headerParams, formParams) as byte[];
}
public static void CopyTo(Stream source, Stream destination, int bufferSize = 81920)
{
byte[] array = new byte[bufferSize];
int count;
while ((count = source.Read(array, 0, array.Length)) != 0)
{
destination.Write(array, 0, count);
}
}
private object invokeAPIInternal(string host, string path, string method, bool binaryResponse, Dictionary<String, String> queryParams, object body, Dictionary<String, String> headerParams, Dictionary<String, object> formParams)
{
path = path.Replace("{appSid}", this.appSid);
path = Regex.Replace(path, @"{.+?}", "");
//var b = new StringBuilder();
host = host.EndsWith("/") ? host.Substring(0, host.Length - 1) : host;
path = Sign(host + path, this.apiKey);
var client = WebRequest.Create(path);
client.Method = method;
byte[] formData = null;
if (formParams.Count > 0)
{
if (formParams.Count > 1)
{
string formDataBoundary = String.Format("Somthing");
client.ContentType = "multipart/form-data; boundary=" + formDataBoundary;
formData = GetMultipartFormData(formParams, formDataBoundary);
}
else
{
client.ContentType = "multipart/form-data";
formData = GetMultipartFormData(formParams, "");
}
client.ContentLength = formData.Length;
}
else
{
client.ContentType = "application/json";
}
foreach (var headerParamsItem in headerParams)
{
client.Headers.Add(headerParamsItem.Key, headerParamsItem.Value);
}
foreach (var defaultHeaderMapItem in defaultHeaderMap.Where(defaultHeaderMapItem => !headerParams.ContainsKey(defaultHeaderMapItem.Key)))
{
client.Headers.Add(defaultHeaderMapItem.Key, defaultHeaderMapItem.Value);
}
switch (method)
{
case "GET":
break;
case "POST":
case "PUT":
case "DELETE":
using (Stream requestStream = client.GetRequestStream())
{
if (formData != null)
{
requestStream.Write(formData, 0, formData.Length);
}
if (body != null)
{
var swRequestWriter = new StreamWriter(requestStream);
swRequestWriter.Write(serialize(body));
swRequestWriter.Close();
}
}
break;
default:
throw new ApiException(500, "unknown method type " + method);
}
try
{
var webResponse = (HttpWebResponse)client.GetResponse();
if (webResponse.StatusCode != HttpStatusCode.OK)
{
webResponse.Close();
throw new ApiException((int)webResponse.StatusCode, webResponse.StatusDescription);
}
if (binaryResponse)
{
using (var memoryStream = new MemoryStream())
{
CopyTo(webResponse.GetResponseStream(), memoryStream);
return memoryStream.ToArray();
}
}
else
{
using (var responseReader = new StreamReader(webResponse.GetResponseStream()))
{
var responseData = responseReader.ReadToEnd();
return responseData;
}
}
}
catch (WebException ex)
{
var response = ex.Response as HttpWebResponse;
int statusCode = 0;
if (response != null)
{
statusCode = (int)response.StatusCode;
response.Close();
}
throw new ApiException(statusCode, ex.Message);
}
}
private static byte[] GetMultipartFormData(Dictionary<string, object> postParameters, string boundary)
{
Stream formDataStream = new System.IO.MemoryStream();
bool needsCLRF = false;
if (postParameters.Count > 1)
{
foreach (var param in postParameters)
{
// Thanks to feedback from commenters, add a CRLF to allow multiple parameters to be added.
// Skip it on the first parameter, add it to subsequent parameters.
if (needsCLRF)
formDataStream.Write(Encoding.UTF8.GetBytes("\r\n"), 0, Encoding.UTF8.GetByteCount("\r\n"));
needsCLRF = true;
var fileInfo = (FileInfo)param.Value;
if (param.Value is FileInfo)
{
string postData = string.Format("--{0}\r\nContent-Disposition: form-data; name=\"{1}\"; filename=\"{1}\"\r\nContent-Type: {2}\r\n\r\n",
boundary,
param.Key,
fileInfo.MimeType);
formDataStream.Write(Encoding.UTF8.GetBytes(postData), 0, Encoding.UTF8.GetByteCount(postData));
// Write the file data directly to the Stream, rather than serializing it to a string.
formDataStream.Write((fileInfo.file as byte[]), 0, (fileInfo.file as byte[]).Length);
}
else
{
string postData = string.Format("--{0}\r\nContent-Disposition: form-data; name=\"{1}\"\r\n\r\n{2}",
boundary,
param.Key,
fileInfo.file);
formDataStream.Write(Encoding.UTF8.GetBytes(postData), 0, Encoding.UTF8.GetByteCount(postData));
}
}
// Add the end of the request. Start with a newline
string footer = "\r\n--" + boundary + "--\r\n";
formDataStream.Write(Encoding.UTF8.GetBytes(footer), 0, Encoding.UTF8.GetByteCount(footer));
}
else
{
foreach (var param in postParameters)
{
var fileInfo = (FileInfo)param.Value;
if (param.Value is FileInfo)
{
// Write the file data directly to the Stream, rather than serializing it to a string.
formDataStream.Write((fileInfo.file as byte[]), 0, (fileInfo.file as byte[]).Length);
}
else
{
string postData = (string)param.Value;
formDataStream.Write(Encoding.UTF8.GetBytes(postData), 0, Encoding.UTF8.GetByteCount(postData));
}
}
}
// Dump the Stream into a byte[]
formDataStream.Position = 0;
byte[] formData = new byte[formDataStream.Length];
formDataStream.Read(formData, 0, formData.Length);
formDataStream.Close();
return formData;
}
/**
* Overloaded method for returning the path value
* For a string value an empty value is returned if the value is null
* @param value
* @return
*/
public String ToPathValue(String value)
{
return (value == null) ? "" : value;
}
public String ToPathValue(int value)
{
return value.ToString();
}
public String ToPathValue(int? value)
{
return value.ToString();
}
public String ToPathValue(float value)
{
return value.ToString();
}
public String ToPathValue(float? value)
{
return value.ToString();
}
public String ToPathValue(long value)
{
return value.ToString();
}
public String ToPathValue(long? value)
{
return value.ToString();
}
public String ToPathValue(bool value)
{
return value.ToString();
}
public String ToPathValue(bool? value)
{
return value.ToString();
}
public String ToPathValue(double value)
{
return value.ToString();
}
public String ToPathValue(double? value)
{
return value.ToString();
}
//public String ToPathValue(Com.Aspose.Email.Model.DateTime value)
//{
// //SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
// //return format.format(value);
// return value.ToString();
//}
}
}
| |
// Copyright (c) 2015 Alachisoft
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Diagnostics;
using Alachisoft.NCache.Util;
using System.Threading;
using Alachisoft.NCache.Common.Interop;
namespace Alachisoft.NCache.Web.Statistics
{
/// <summary>
/// Summary description for PerfStatsCollector.
/// </summary>
internal class PerfStatsCollector : IDisposable
{
/// <summary> Instance name. </summary>
private string _instanceName;
/// <summary> Port number. </summary>
private string _port;
/// <summary> performance counter for cache requests per second by the client. </summary>
private PerformanceCounter _pcClientRequestsPerSec = null;
/// <summary> performance counter for cache responses per second by the client. </summary>
private PerformanceCounter _pcClientResponsesPerSec = null;
/// <summary> performance counter for cache requests per second by all the clients. </summary>
private PerformanceCounter _pcTotalClientRequestsPerSec = null;
/// <summary> performance counter for cache responses per second by the all clients. </summary>
private PerformanceCounter _pcTotalClientResponsesPerSec = null;
private bool _isEnabled = false;
/// <summary> Category name of counter performance data.</summary>
private const string PC_CATEGORY = "NCache Express";
/// <summary>
/// Constructor
/// </summary>
/// <param name="instanceName"></param>
/// <param name="port"></param>
public PerfStatsCollector(string instanceName, int port)
{
_port = ":" + port.ToString();
_instanceName = instanceName;
}
/// <summary>
/// Returns true if the current user has the rights to read/write to performance counters
/// under the category of object cache.
/// </summary>
public string InstanceName
{
get { return _instanceName; }
set { _instanceName = value; }
}
/// <summary>
/// Returns true if the current user has the rights to read/write to performance counters
/// under the category of object cache.
/// </summary>
public bool UserHasAccessRights
{
get
{
try
{
PerformanceCounterPermission permissions = new
PerformanceCounterPermission(PerformanceCounterPermissionAccess.Instrument,
".", PC_CATEGORY);
permissions.Demand();
if(!PerformanceCounterCategory.Exists(PC_CATEGORY, "."))
{
return false;
}
}
catch(Exception e)
{
return false;
}
return true;
}
}
#region / --- IDisposable --- /
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or
/// resetting unmanaged resources.
/// </summary>
public void Dispose()
{
lock (this)
{
if (_pcClientRequestsPerSec != null)
{
_pcClientRequestsPerSec.RemoveInstance();
_pcClientRequestsPerSec.Dispose();
_pcClientRequestsPerSec = null;
}
if (_pcClientResponsesPerSec != null)
{
_pcClientResponsesPerSec.RemoveInstance();
_pcClientResponsesPerSec.Dispose();
_pcClientResponsesPerSec = null;
}
if (_pcTotalClientRequestsPerSec != null)
{
_pcTotalClientRequestsPerSec.RemoveInstance();
_pcTotalClientRequestsPerSec.Dispose();
_pcTotalClientRequestsPerSec = null;
}
if (_pcTotalClientResponsesPerSec != null)
{
_pcTotalClientResponsesPerSec.RemoveInstance();
_pcTotalClientResponsesPerSec.Dispose();
_pcTotalClientResponsesPerSec = null;
}
}
}
#endregion
#region / --- Initialization --- /
/// <summary>
/// Initializes the counter instances and category.
/// </summary>
public void InitializePerfCounters()
{
try
{
if (!UserHasAccessRights)
return;
lock (this)
{
_pcClientRequestsPerSec = new PerformanceCounter(PC_CATEGORY, "Client Requests/sec", _instanceName, false);
_pcClientResponsesPerSec = new PerformanceCounter(PC_CATEGORY, "Client Responses/sec", _instanceName, false);
_pcTotalClientRequestsPerSec = new PerformanceCounter(PC_CATEGORY, "Client Requests/sec", "_Total_ client stats", false);
_pcTotalClientResponsesPerSec = new PerformanceCounter(PC_CATEGORY, "Client Responses/sec", "_Total_ client stats", false);
}
_isEnabled = true;
}
catch (Exception e)
{
}
}
#endregion
/// <summary>
/// Gets or Sets the value indicating whether Performance Stats collection is enabled or not.
/// On initialize Performance Colloection is enabled.
/// </summary>
public bool IsEnabled
{
get { return _isEnabled; }
set { _isEnabled = value; }
}
/// <summary>
/// Increment the performance counter for Requests Per second by client.
/// </summary>
public void IncrementClientRequestsPerSecStats(long requests)
{
if (_pcClientRequestsPerSec != null)
{
lock (_pcClientRequestsPerSec)
{
_pcClientRequestsPerSec.IncrementBy(requests);
IncrementTotalClientRequestsPerSecStats(requests);
}
}
}
/// <summary>
/// Increment the performance counter for Responses received Per second by client.
/// </summary>
public void IncrementClientResponsesPerSecStats(long responses)
{
if (_pcClientResponsesPerSec != null)
{
lock (_pcClientResponsesPerSec)
{
_pcClientResponsesPerSec.IncrementBy(responses);
IncrementTotalClientResponsesPerSecStats(responses);
}
}
}
/// <summary>
/// Increment the performance counter for Requests Per second by all the clients.
/// </summary>
internal void IncrementTotalClientRequestsPerSecStats(long requests)
{
if (_pcTotalClientRequestsPerSec != null)
{
lock (_pcTotalClientRequestsPerSec)
{
_pcTotalClientRequestsPerSec.IncrementBy(requests);
}
}
}
/// <summary>
/// Increment the performance counter for Responses received Per second by all the clients.
/// </summary>
internal void IncrementTotalClientResponsesPerSecStats(long responses)
{
if (_pcTotalClientResponsesPerSec != null)
{
lock (_pcTotalClientResponsesPerSec)
{
_pcTotalClientResponsesPerSec.IncrementBy(responses);
}
}
}
}
}
| |
using System;
using UnityEngine;
using UnityStandardAssets.CrossPlatformInput;
/// <summary>
/// Physics-driven first-person controller: samples CrossPlatformInput in
/// Update, applies movement forces to the Rigidbody in FixedUpdate, and uses
/// sphere casts to detect the ground and stick the capsule to slopes.
/// </summary>
[RequireComponent(typeof(Rigidbody))]
[RequireComponent(typeof(CapsuleCollider))]
public class RigidbodyFirstPersonController : MonoBehaviour
{
    /// <summary>Inspector-tunable walk/run/jump parameters.</summary>
    [Serializable]
    public class MovementSettings
    {
        public float ForwardSpeed = 8.0f;   // Speed when walking forward
        public float BackwardSpeed = 4.0f;  // Speed when walking backwards
        public float StrafeSpeed = 4.0f;    // Speed when walking sideways
        public float RunMultiplier = 2.0f;   // Speed when sprinting
        public KeyCode RunKey = KeyCode.LeftShift;
        public float JumpForce = 30f;
        // Movement-force scale by slope angle: full force from -90deg (downhill)
        // through flat ground, fading to zero at +90deg (vertical wall).
        public AnimationCurve SlopeCurveModifier = new AnimationCurve(new Keyframe(-90.0f, 1.0f), new Keyframe(0.0f, 1.0f), new Keyframe(90.0f, 0.0f));
        [HideInInspector]
        public float CurrentTargetSpeed = 8f;
#if !MOBILE_INPUT
        private bool m_Running;
#endif
        // Picks CurrentTargetSpeed for this frame from the raw 2D input.
        // Forward speed is evaluated last so it wins when the player strafes
        // and moves forward simultaneously.
        public void UpdateDesiredTargetSpeed(Vector2 input)
        {
            if (input == Vector2.zero) return;
            if (input.x > 0 || input.x < 0)
            {
                //strafe
                CurrentTargetSpeed = StrafeSpeed;
            }
            if (input.y < 0)
            {
                //backwards
                CurrentTargetSpeed = BackwardSpeed;
            }
            if (input.y > 0)
            {
                //forwards
                //handled last as if strafing and moving forward at the same time forwards speed should take precedence
                CurrentTargetSpeed = ForwardSpeed;
            }
#if !MOBILE_INPUT
            if (Input.GetKey(RunKey))
            {
                CurrentTargetSpeed *= RunMultiplier;
                m_Running = true;
            }
            else
            {
                m_Running = false;
            }
#endif
        }
#if !MOBILE_INPUT
        public bool Running
        {
            get { return m_Running; }
        }
#endif
    }
    /// <summary>Ground-detection and air-control tuning.</summary>
    [Serializable]
    public class AdvancedSettings
    {
        public float groundCheckDistance = 0.01f; // distance for checking if the controller is grounded ( 0.01f seems to work best for this )
        public float stickToGroundHelperDistance = 0.5f; // stops the character
        public float slowDownRate = 20f; // rate at which the controller comes to a stop when there is no input
        public bool airControl; // can the user control the direction that is being moved in the air
    }
    public Camera cam;
    public MovementSettings movementSettings = new MovementSettings();
    public MouseLook mouseLook = new MouseLook();
    public AdvancedSettings advancedSettings = new AdvancedSettings();
    // NOTE(review): monster and player are assigned in Start but never read in
    // this class - presumably consumed elsewhere or leftover; confirm before
    // removing.
    private Monster monster;
    private Player player;
    private Rigidbody m_RigidBody;
    private CapsuleCollider m_Capsule;
    // NOTE(review): m_YRotation is never read or written here - TODO confirm.
    private float m_YRotation;
    // Normal of the surface currently under the capsule (Vector3.up in air).
    private Vector3 m_GroundContactNormal;
    private bool m_Jump, m_PreviouslyGrounded, m_Jumping, m_IsGrounded;
    public Vector3 Velocity
    {
        get { return m_RigidBody.velocity; }
    }
    public bool Grounded
    {
        get { return m_IsGrounded; }
    }
    public bool Jumping
    {
        get { return m_Jumping; }
    }
    public bool Running
    {
        get
        {
#if !MOBILE_INPUT
            return movementSettings.Running;
#else
            return false;
#endif
        }
    }
    // Caches component references and initializes the mouse-look rig.
    private void Start()
    {
        monster = GetComponent<Monster>();
        player = GameObject.FindObjectOfType<Player>();
        m_RigidBody = GetComponent<Rigidbody>();
        m_Capsule = GetComponent<CapsuleCollider>();
        mouseLook.Init(transform, cam.transform);
    }
    // Per-frame: rotate the view and latch the jump request for FixedUpdate.
    private void Update()
    {
        RotateView();
        if (CrossPlatformInputManager.GetButtonDown("Jump") && !m_Jump)
        {
            m_Jump = true;
        }
    }
    // Physics step: apply movement force along the camera direction (projected
    // onto the ground plane), manage drag, and stick to the ground on slopes.
    private void FixedUpdate()
    {
        GroundCheck();
        Vector2 input = GetInput();
        if ((Mathf.Abs(input.x) > float.Epsilon || Mathf.Abs(input.y) > float.Epsilon) && (advancedSettings.airControl || m_IsGrounded))
        {
            // always move along the camera forward as it is the direction that it being aimed at
            Vector3 desiredMove = cam.transform.forward * input.y + cam.transform.right * input.x;
            desiredMove = Vector3.ProjectOnPlane(desiredMove, m_GroundContactNormal).normalized;
            desiredMove.x = desiredMove.x * movementSettings.CurrentTargetSpeed;
            desiredMove.z = desiredMove.z * movementSettings.CurrentTargetSpeed;
            desiredMove.y = desiredMove.y * movementSettings.CurrentTargetSpeed;
            // Only push while below target speed so the impulse does not
            // accelerate the body indefinitely.
            if (m_RigidBody.velocity.sqrMagnitude <
                (movementSettings.CurrentTargetSpeed * movementSettings.CurrentTargetSpeed))
            {
                m_RigidBody.AddForce(desiredMove * SlopeMultiplier(), ForceMode.Impulse);
            }
        }
        if (m_IsGrounded)
        {
            // High drag on the ground gives crisp stops without input.
            m_RigidBody.drag = 5f;
            if (!m_Jumping && Mathf.Abs(input.x) < float.Epsilon && Mathf.Abs(input.y) < float.Epsilon && m_RigidBody.velocity.magnitude < 1f)
            {
                m_RigidBody.Sleep();
            }
        }
        else
        {
            m_RigidBody.drag = 0f;
            if (m_PreviouslyGrounded && !m_Jumping)
            {
                StickToGroundHelper();
            }
        }
        m_Jump = false;
    }
    // Movement-force multiplier derived from the current ground slope angle.
    private float SlopeMultiplier()
    {
        float angle = Vector3.Angle(m_GroundContactNormal, Vector3.up);
        return movementSettings.SlopeCurveModifier.Evaluate(angle);
    }
    // When walking off a small ledge/slope, removes the velocity component
    // pointing away from the surface so the capsule hugs the ground instead
    // of briefly going airborne.
    private void StickToGroundHelper()
    {
        RaycastHit hitInfo;
        if (Physics.SphereCast(transform.position, m_Capsule.radius, Vector3.down, out hitInfo,
                               ((m_Capsule.height / 2f) - m_Capsule.radius) +
                               advancedSettings.stickToGroundHelperDistance))
        {
            if (Mathf.Abs(Vector3.Angle(hitInfo.normal, Vector3.up)) < 85f)
            {
                m_RigidBody.velocity = Vector3.ProjectOnPlane(m_RigidBody.velocity, hitInfo.normal);
            }
        }
    }
    // Reads the 2D movement axes and refreshes the desired target speed.
    private Vector2 GetInput()
    {
        Vector2 input = new Vector2
        {
            x = CrossPlatformInputManager.GetAxis("Horizontal"),
            y = CrossPlatformInputManager.GetAxis("Vertical")
        };
        movementSettings.UpdateDesiredTargetSpeed(input);
        return input;
    }
    private void RotateView()
    {
        //avoids the mouse looking if the game is effectively paused
        if (Mathf.Abs(Time.timeScale) < float.Epsilon) return;
        // get the rotation before it's changed
        float oldYRotation = transform.eulerAngles.y;
        mouseLook.LookRotation(transform, cam.transform);
        if (m_IsGrounded || advancedSettings.airControl)
        {
            // Rotate the rigidbody velocity to match the new direction that the character is looking
            Quaternion velRotation = Quaternion.AngleAxis(transform.eulerAngles.y - oldYRotation, Vector3.up);
            m_RigidBody.velocity = velRotation * m_RigidBody.velocity;
        }
    }
    /// sphere cast down just beyond the bottom of the capsule to see if the capsule is colliding round the bottom
    private void GroundCheck()
    {
        m_PreviouslyGrounded = m_IsGrounded;
        RaycastHit hitInfo;
        if (Physics.SphereCast(transform.position, m_Capsule.radius, Vector3.down, out hitInfo,
                               ((m_Capsule.height / 2f) - m_Capsule.radius) + advancedSettings.groundCheckDistance))
        {
            m_IsGrounded = true;
            m_GroundContactNormal = hitInfo.normal;
        }
        else
        {
            m_IsGrounded = false;
            m_GroundContactNormal = Vector3.up;
        }
        // Landing (not via a jump that is still ascending) clears the jump flag.
        if (!m_PreviouslyGrounded && m_IsGrounded && m_Jumping)
        {
            m_Jumping = false;
        }
    }
}
| |
using System;
using Microsoft.SPOT;
using Grommet.Ext;
namespace Microsoft.Xna.Framework.Graphics
{
public struct Color
{
private uint packedValue;
public uint PackedValue
{
get { return packedValue; }
set { packedValue = value; }
}
private Color(uint value)
{
this.packedValue = value;
}
public Color(byte r, byte g, byte b)
{
this.packedValue = ToUInt32(r, g, b, 255);
}
public Color(byte r, byte g, byte b, byte a)
{
this.packedValue = ToUInt32(r, g, b, a);
}
public byte R
{
get
{
return (byte)(PackedValue >> 16);
}
set
{
PackedValue = (PackedValue & 0xff00ffff) | ((uint)(value << 16));
}
}
public byte G
{
get
{
return (byte)(PackedValue >> 8);
}
set
{
PackedValue = (PackedValue & 0xffff00ff) | ((uint)(value << 8));
}
}
public byte B
{
get
{
return (byte)PackedValue;
}
set
{
PackedValue = (PackedValue & 0xffffff00) | value;
}
}
public byte A
{
get
{
return (byte)(PackedValue >> 24);
}
set
{
PackedValue = (PackedValue & 0xffffff) | ((uint)(value << 024));
}
}
private static uint ToUInt32(byte r, byte g, byte b, byte a)
{
return (uint)((a << 24) | (r << 16) | (g << 8) | b);
}
public override string ToString()
{
return string.Concat("{{R:" + R + " G:" + G + " B:" + B + " A:" + A + "}}");
}
public override int GetHashCode()
{
return PackedValue.GetHashCode();
}
public override bool Equals(object obj)
{
return ((obj is Color) && this.Equals((Color)obj));
}
public bool Equals(Color other)
{
return PackedValue.Equals(other.PackedValue);
}
public static bool operator ==(Color a, Color b)
{
return a.Equals(b);
}
public static bool operator !=(Color a, Color b)
{
return !a.Equals(b);
}
public static Color TransparentBlack
{
get
{
return new Color(0);
}
}
public static Color TransparentWhite
{
get
{
return new Color(0xffffff);
}
}
public static Color AliceBlue
{
get
{
return new Color(0xfff0f8ff);
}
}
public static Color AntiqueWhite
{
get
{
return new Color(0xfffaebd7);
}
}
public static Color Aqua
{
get
{
return new Color(0xff00ffff);
}
}
public static Color Aquamarine
{
get
{
return new Color(0xff7fffd4);
}
}
public static Color Azure
{
get
{
return new Color(0xfff0ffff);
}
}
public static Color Beige
{
get
{
return new Color(0xfff5f5dc);
}
}
public static Color Bisque
{
get
{
return new Color(0xffffe4c4);
}
}
public static Color Black
{
get
{
return new Color(0xff000000);
}
}
public static Color BlanchedAlmond
{
get
{
return new Color(0xffffebcd);
}
}
public static Color Blue
{
get
{
return new Color(0xff0000ff);
}
}
public static Color BlueViolet
{
get
{
return new Color(0xff8a2be2);
}
}
public static Color Brown
{
get
{
return new Color(0xffa52a2a);
}
}
public static Color BurlyWood
{
get
{
return new Color(0xffdeb887);
}
}
public static Color CadetBlue
{
get
{
return new Color(0xff5f9ea0);
}
}
public static Color Chartreuse
{
get
{
return new Color(0xff7fff00);
}
}
public static Color Chocolate
{
get
{
return new Color(0xffd2691e);
}
}
public static Color Coral
{
get
{
return new Color(0xffff7f50);
}
}
public static Color CornflowerBlue
{
get
{
return new Color(0xff6495ed);
}
}
public static Color Cornsilk
{
get
{
return new Color(0xfffff8dc);
}
}
public static Color Crimson
{
get
{
return new Color(0xffdc143c);
}
}
public static Color Cyan
{
get
{
return new Color(0xff00ffff);
}
}
public static Color DarkBlue
{
get
{
return new Color(0xff00008b);
}
}
public static Color DarkCyan
{
get
{
return new Color(0xff008b8b);
}
}
public static Color DarkGoldenrod
{
get
{
return new Color(0xffb8860b);
}
}
public static Color DarkGray
{
get
{
return new Color(0xffa9a9a9);
}
}
public static Color DarkGreen
{
get
{
return new Color(0xff006400);
}
}
public static Color DarkKhaki
{
get
{
return new Color(0xffbdb76b);
}
}
public static Color DarkMagenta
{
get
{
return new Color(0xff8b008b);
}
}
public static Color DarkOliveGreen
{
get
{
return new Color(0xff556b2f);
}
}
public static Color DarkOrange
{
get
{
return new Color(0xffff8c00);
}
}
public static Color DarkOrchid
{
get
{
return new Color(0xff9932cc);
}
}
public static Color DarkRed
{
get
{
return new Color(0xff8b0000);
}
}
public static Color DarkSalmon
{
get
{
return new Color(0xffe9967a);
}
}
public static Color DarkSeaGreen
{
get
{
return new Color(0xff8fbc8b);
}
}
public static Color DarkSlateBlue
{
get
{
return new Color(0xff483d8b);
}
}
public static Color DarkSlateGray
{
get
{
return new Color(0xff2f4f4f);
}
}
public static Color DarkTurquoise
{
get
{
return new Color(0xff00ced1);
}
}
public static Color DarkViolet
{
get
{
return new Color(0xff9400d3);
}
}
public static Color DeepPink
{
get
{
return new Color(0xffff1493);
}
}
public static Color DeepSkyBlue
{
get
{
return new Color(0xff00bfff);
}
}
public static Color DimGray
{
get
{
return new Color(0xff696969);
}
}
public static Color DodgerBlue
{
get
{
return new Color(0xff1e90ff);
}
}
public static Color Firebrick
{
get
{
return new Color(0xffb22222);
}
}
public static Color FloralWhite
{
get
{
return new Color(0xfffffaf0);
}
}
public static Color ForestGreen
{
get
{
return new Color(0xff228b22);
}
}
public static Color Fuchsia
{
get
{
return new Color(0xffff00ff);
}
}
public static Color Gainsboro
{
get
{
return new Color(0xffdcdcdc);
}
}
public static Color GhostWhite
{
get
{
return new Color(0xfff8f8ff);
}
}
public static Color Gold
{
get
{
return new Color(0xffffd700);
}
}
public static Color Goldenrod
{
get
{
return new Color(0xffdaa520);
}
}
public static Color Gray
{
get
{
return new Color(0xff808080);
}
}
public static Color Green
{
get
{
return new Color(0xff008000);
}
}
public static Color GreenYellow
{
get
{
return new Color(0xffadff2f);
}
}
public static Color Honeydew
{
get
{
return new Color(0xfff0fff0);
}
}
public static Color HotPink
{
get
{
return new Color(0xffff69b4);
}
}
public static Color IndianRed
{
get
{
return new Color(0xffcd5c5c);
}
}
public static Color Indigo
{
get
{
return new Color(0xff4b0082);
}
}
public static Color Ivory
{
get
{
return new Color(0xfffffff0);
}
}
public static Color Khaki
{
get
{
return new Color(0xfff0e68c);
}
}
public static Color Lavender
{
get
{
return new Color(0xffe6e6fa);
}
}
public static Color LavenderBlush
{
get
{
return new Color(0xfffff0f5);
}
}
public static Color LawnGreen
{
get
{
return new Color(0xff7cfc00);
}
}
public static Color LemonChiffon
{
get
{
return new Color(0xfffffacd);
}
}
public static Color LightBlue
{
get
{
return new Color(0xffadd8e6);
}
}
public static Color LightCoral
{
get
{
return new Color(0xfff08080);
}
}
public static Color LightCyan
{
get
{
return new Color(0xffe0ffff);
}
}
public static Color LightGoldenrodYellow
{
get
{
return new Color(0xfffafad2);
}
}
public static Color LightGreen
{
get
{
return new Color(0xff90ee90);
}
}
public static Color LightGray
{
get
{
return new Color(0xffd3d3d3);
}
}
public static Color LightPink
{
get
{
return new Color(0xffffb6c1);
}
}
public static Color LightSalmon
{
get
{
return new Color(0xffffa07a);
}
}
public static Color LightSeaGreen
{
get
{
return new Color(0xff20b2aa);
}
}
public static Color LightSkyBlue
{
get
{
return new Color(0xff87cefa);
}
}
public static Color LightSlateGray
{
get
{
return new Color(0xff778899);
}
}
public static Color LightSteelBlue
{
get
{
return new Color(0xffb0c4de);
}
}
public static Color LightYellow
{
get
{
return new Color(0xffffffe0);
}
}
public static Color Lime
{
get
{
return new Color(0xff00ff00);
}
}
public static Color LimeGreen
{
get
{
return new Color(0xff32cd32);
}
}
public static Color Linen
{
get
{
return new Color(0xfffaf0e6);
}
}
public static Color Magenta
{
get
{
return new Color(0xffff00ff);
}
}
public static Color Maroon
{
get
{
return new Color(0xff800000);
}
}
public static Color MediumAquamarine
{
get
{
return new Color(0xff66cdaa);
}
}
public static Color MediumBlue
{
get
{
return new Color(0xff0000cd);
}
}
public static Color MediumOrchid
{
get
{
return new Color(0xffba55d3);
}
}
public static Color MediumPurple
{
get
{
return new Color(0xff9370db);
}
}
public static Color MediumSeaGreen
{
get
{
return new Color(0xff3cb371);
}
}
public static Color MediumSlateBlue
{
get
{
return new Color(0xff7b68ee);
}
}
public static Color MediumSpringGreen
{
get
{
return new Color(0xff00fa9a);
}
}
public static Color MediumTurquoise
{
get
{
return new Color(0xff48d1cc);
}
}
public static Color MediumVioletRed
{
get
{
return new Color(0xffc71585);
}
}
public static Color MidnightBlue
{
get
{
return new Color(0xff191970);
}
}
public static Color MintCream
{
get
{
return new Color(0xfff5fffa);
}
}
public static Color MistyRose
{
get
{
return new Color(0xffffe4e1);
}
}
public static Color Moccasin
{
get
{
return new Color(0xffffe4b5);
}
}
public static Color NavajoWhite
{
get
{
return new Color(0xffffdead);
}
}
public static Color Navy
{
get
{
return new Color(0xff000080);
}
}
public static Color OldLace
{
get
{
return new Color(0xfffdf5e6);
}
}
public static Color Olive
{
get
{
return new Color(0xff808000);
}
}
public static Color OliveDrab
{
get
{
return new Color(0xff6b8e23);
}
}
public static Color Orange
{
get
{
return new Color(0xffffa500);
}
}
public static Color OrangeRed
{
get
{
return new Color(0xffff4500);
}
}
public static Color Orchid
{
get
{
return new Color(0xffda70d6);
}
}
public static Color PaleGoldenrod
{
get
{
return new Color(0xffeee8aa);
}
}
public static Color PaleGreen
{
get
{
return new Color(0xff98fb98);
}
}
public static Color PaleTurquoise
{
get
{
return new Color(0xffafeeee);
}
}
public static Color PaleVioletRed
{
get
{
return new Color(0xffdb7093);
}
}
public static Color PapayaWhip
{
get
{
return new Color(0xffffefd5);
}
}
public static Color PeachPuff
{
get
{
return new Color(0xffffdab9);
}
}
public static Color Peru
{
get
{
return new Color(0xffcd853f);
}
}
public static Color Pink
{
get
{
return new Color(0xffffc0cb);
}
}
public static Color Plum
{
get
{
return new Color(0xffdda0dd);
}
}
public static Color PowderBlue
{
get
{
return new Color(0xffb0e0e6);
}
}
public static Color Purple
{
get
{
return new Color(0xff800080);
}
}
public static Color Red
{
get
{
return new Color(0xffff0000);
}
}
public static Color RosyBrown
{
get
{
return new Color(0xffbc8f8f);
}
}
public static Color RoyalBlue
{
get
{
return new Color(0xff4169e1);
}
}
public static Color SaddleBrown
{
get
{
return new Color(0xff8b4513);
}
}
public static Color Salmon
{
get
{
return new Color(0xfffa8072);
}
}
public static Color SandyBrown
{
get
{
return new Color(0xfff4a460);
}
}
public static Color SeaGreen
{
get
{
return new Color(0xff2e8b57);
}
}
public static Color SeaShell
{
get
{
return new Color(0xfffff5ee);
}
}
public static Color Sienna
{
get
{
return new Color(0xffa0522d);
}
}
public static Color Silver
{
get
{
return new Color(0xffc0c0c0);
}
}
public static Color SkyBlue
{
get
{
return new Color(0xff87ceeb);
}
}
public static Color SlateBlue
{
get
{
return new Color(0xff6a5acd);
}
}
public static Color SlateGray
{
get
{
return new Color(0xff708090);
}
}
public static Color Snow
{
get
{
return new Color(0xfffffafa);
}
}
public static Color SpringGreen
{
get
{
return new Color(0xff00ff7f);
}
}
public static Color SteelBlue
{
get
{
return new Color(0xff4682b4);
}
}
public static Color Tan
{
get
{
return new Color(0xffd2b48c);
}
}
public static Color Teal
{
get
{
return new Color(0xff008080);
}
}
public static Color Thistle
{
get
{
return new Color(0xffd8bfd8);
}
}
public static Color Tomato
{
get
{
return new Color(0xffff6347);
}
}
public static Color Turquoise
{
get
{
return new Color(0xff40e0d0);
}
}
public static Color Violet
{
get
{
return new Color(0xffee82ee);
}
}
public static Color Wheat
{
get
{
return new Color(0xfff5deb3);
}
}
public static Color White
{
get
{
return new Color(uint.MaxValue);
}
}
public static Color WhiteSmoke
{
get
{
return new Color(0xfff5f5f5);
}
}
public static Color Yellow
{
get
{
return new Color(0xffffff00);
}
}
public static Color YellowGreen
{
get
{
return new Color(0xff9acd32);
}
}
}
}
| |
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER
// EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE,
// FITNESS FOR A PARTICULAR PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing permissions and limitations under the License.
namespace Microsoft.Spectrum.Import.Service
{
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Configuration;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using System.ServiceModel;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Timers;
using System.Transactions;
using Microsoft.Spectrum.Common;
using Microsoft.Spectrum.Common.Azure;
using Microsoft.Spectrum.IO.MeasurementStationSettings;
using Microsoft.Spectrum.MeasurementStation.Client;
using Microsoft.Spectrum.Storage.Blob;
using Microsoft.WindowsAzure;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Blob;
using WindowsAzure.Storage.Auth;
internal class ImporterAgent : IDisposable
{
private readonly ILogger logger;
private readonly IConfigurationSource configurationSource;
private HealthReporter healthReport;
private DirectoryWatcherConfiguration configuration;
private MeasurementStationConfigurationEndToEnd measurementStationConfiguration = null;
private SettingsConfigurationSection settingsConfiguration;
private bool continueMonitoring = true;
private Task importerThread;
private Task healthReportThread;
        /// <summary>
        /// Creates the importer agent. If a persisted measurement-station
        /// configuration file exists it is loaded; an unreadable file is
        /// deleted so a fresh copy can be written on the next settings pull.
        /// </summary>
        /// <param name="logger">Sink for diagnostic messages; must not be null.</param>
        /// <param name="configurationSource">Provider of the directory-watcher configuration; must not be null.</param>
        public ImporterAgent(ILogger logger, IConfigurationSource configurationSource)
        {
            if (logger == null)
            {
                throw new ArgumentNullException("logger");
            }
            if (configurationSource == null)
            {
                throw new ArgumentNullException("configurationSource");
            }
            this.logger = logger;
            this.configurationSource = configurationSource;
            this.settingsConfiguration = (SettingsConfigurationSection)ConfigurationManager.GetSection("SettingsConfiguration");
            if (File.Exists(this.settingsConfiguration.MeasurementStationConfigurationFileFullPath))
            {
                try
                {
                    using (Stream input = File.OpenRead(this.settingsConfiguration.MeasurementStationConfigurationFileFullPath))
                    {
                        this.measurementStationConfiguration = MeasurementStationConfigurationEndToEnd.Read(input);
                    }
                }
                catch
                {
                    // There is an issue with the configuration file, so delete it and we can rewrite another one
                    File.Delete(this.settingsConfiguration.MeasurementStationConfigurationFileFullPath);
                    this.measurementStationConfiguration = null;
                }
            }
        }
#region IImporterAgent Implemenation
public void StartMonitoring()
{
// Read the configuration and make sure that it is correct. We should fail fast if there are problems.
this.configuration = this.configurationSource.GetConfiguration();
IList<ValidationResult> validationResults = ValidationHelper.Validate(this.configuration);
if (validationResults.Any())
{
throw new ImporterConfigurationException("There was a problem with the specified configuration file.", validationResults);
}
this.logger.Log(TraceEventType.Information, LoggingMessageId.ImporterAgent, "Configuration validated successfully.");
this.logger.Log(TraceEventType.Information, LoggingMessageId.ImporterAgent, "Importer Starting");
this.healthReport = new HealthReporter(configuration, settingsConfiguration, logger);
Action action = null;
action = this.UploadWatchFiles;
Action healthReportAction = null;
healthReportAction = healthReport.AutoHealthReporterThread;
this.importerThread = Task.Factory.StartNew(action);
this.healthReportThread = Task.Factory.StartNew(healthReportAction);
}
        /// <summary>
        /// Stops monitoring: clears the run flag, waits for the upload loop to
        /// drain, then shuts down and waits for the health reporter. Shutdown
        /// exceptions are logged rather than propagated.
        /// </summary>
        public void StopMonitoring()
        {
            try
            {
                this.continueMonitoring = false;
                // Wait for the current upload pass to finish before tearing
                // down the health reporter it may still be using.
                this.importerThread.Wait();
                this.healthReport.ShutDown();
                this.healthReportThread.Wait();
            }
            catch (Exception ex)
            {
                this.logger.Log(TraceEventType.Error, LoggingMessageId.ImporterAgent, ex.ToString());
            }
            this.logger.Log(TraceEventType.Information, LoggingMessageId.ImporterAgent, "Importer Stopped");
        }
#endregion
private void UploadWatchFiles()
{
while (this.continueMonitoring)
{
try
{
using (MeasurementStationServiceChannelFactory channelFactory = new MeasurementStationServiceChannelFactory())
{
IMeasurementStationServiceChannel channel = channelFactory.CreateChannel(this.configuration.MeasurementStationServiceUri);
int stationAvailability = channel.GetStationAvailability(this.configuration.StationAccessId);
// NOTE: Following is to prevent data uploading for the decommissioned station.
if (Microsoft.Spectrum.Storage.Enums.StationAvailability.Decommissioned == (Microsoft.Spectrum.Storage.Enums.StationAvailability)stationAvailability)
{
string message = string.Format("Station {0} has been decomissioned. With the decomissioned status no data files will be pushed to Cloud.", this.configuration.StationAccessId);
this.logger.Log(TraceEventType.Information, LoggingMessageId.ImporterAgent, message);
continue;
}
// Get the list of pathnames for all existing files that need to be processed
string[] existingWatchFiles = Directory.GetFiles(this.configuration.WatchDirectory, this.configuration.WatchDirectoryFileExtension, SearchOption.TopDirectoryOnly);
bool gotUpdatedSettings;
foreach (string watchFile in existingWatchFiles)
{
bool fileWritten = false;
try
{
// This call will throw an IOException if the file is current being written to
using (var file = File.Open(watchFile, FileMode.Open, FileAccess.Read, FileShare.None))
{
fileWritten = true;
}
}
catch (IOException)
{
// We couldn't write to this file, so it must still be open by someone else
}
if (!fileWritten)
{
// skip this file until file it is completely available
continue;
}
Stream scanFileStream = null;
bool uploadSuccess = true;
bool notifySuccess = false;
string blobUri = string.Empty;
string error = string.Empty;
try
{
string filename = Path.GetFileName(watchFile);
scanFileStream = File.OpenRead(watchFile);
// Check to see if there is any changes to the settings
string storageAccountName = string.Empty;
string storageAccessKey = string.Empty;
byte[] measurementStationConfigurationUpdate = null;
gotUpdatedSettings = false;
while (!gotUpdatedSettings)
{
try
{
channel.GetUpdatedSettings(this.configuration.StationAccessId, out storageAccountName, out storageAccessKey, out measurementStationConfigurationUpdate);
gotUpdatedSettings = true;
}
catch (WebException ex)
{
this.logger.Log(TraceEventType.Error, LoggingMessageId.ImporterAgent, ex.ToString());
}
}
MeasurementStationConfigurationEndToEnd settingsUpdated;
using (MemoryStream stream = new MemoryStream(measurementStationConfigurationUpdate))
{
settingsUpdated = MeasurementStationConfigurationEndToEnd.Read(stream);
}
// if the configuration has been updated, then update the setting file
if (this.measurementStationConfiguration == null ||
this.measurementStationConfiguration.LastModifiedTime < settingsUpdated.LastModifiedTime ||
!File.Exists(this.settingsConfiguration.MeasurementStationConfigurationFileFullPath))
{
this.measurementStationConfiguration = settingsUpdated;
// Write out to a file so that the scanner can get the updated settings as well
using (Stream output = File.OpenWrite(this.settingsConfiguration.MeasurementStationConfigurationFileFullPath))
{
this.measurementStationConfiguration.Write(output);
if (this.healthReport != null)
{
this.healthReport.UsrpScannerConfigurationChanged(this.measurementStationConfiguration);
}
}
}
AzureSpectrumBlobStorage cloudStorage = new AzureSpectrumBlobStorage(null, null, storageAccountName + storageAccessKey);
blobUri = cloudStorage.UploadFile(scanFileStream, filename, this.configuration.UploadRetryCount, this.configuration.ServerUploadTimeout, this.configuration.RetryDeltaBackoff);
}
catch (Exception e)
{
uploadSuccess = false;
this.logger.Log(TraceEventType.Error, LoggingMessageId.ImporterAgent, e.ToString());
}
finally
{
if (scanFileStream != null)
{
scanFileStream.Dispose();
}
}
Exception mostRecentEx = null;
for (int i = 0; (i < this.configuration.UploadRetryCount) && (notifySuccess == false); i++)
{
// Do the notification on success or failure
mostRecentEx = null;
try
{
channel.ScanFileUploaded(this.configuration.StationAccessId, blobUri, uploadSuccess);
notifySuccess = true;
}
catch (ChannelTerminatedException cte)
{
mostRecentEx = cte;
System.Threading.Thread.Sleep(TimeSpan.FromMinutes(this.configuration.RetryDeltaBackoff));
}
catch (EndpointNotFoundException enfe)
{
mostRecentEx = enfe;
System.Threading.Thread.Sleep(TimeSpan.FromMinutes(this.configuration.RetryDeltaBackoff));
}
catch (ServerTooBusyException stbe)
{
mostRecentEx = stbe;
System.Threading.Thread.Sleep(TimeSpan.FromMinutes(this.configuration.RetryDeltaBackoff));
}
catch (Exception ex)
{
mostRecentEx = ex;
System.Threading.Thread.Sleep(TimeSpan.FromMinutes(this.configuration.RetryDeltaBackoff));
}
}
if (mostRecentEx != null)
{
this.logger.Log(TraceEventType.Error, LoggingMessageId.ImporterAgent, mostRecentEx.ToString());
}
if (uploadSuccess && notifySuccess)
{
this.OnFileInJobCompleted(watchFile, blobUri);
}
else
{
this.OnFileInJobFailed(watchFile, error);
}
}
if (channel != null)
{
channel.CloseOrAbort();
}
}
Thread.Sleep(5000);
}
catch (Exception ex)
{
// make sure that an exception doesn't cause us to stop uploading files, we need to keep trying
this.logger.Log(TraceEventType.Error, LoggingMessageId.ImporterAgent, ex.ToString());
}
}
}
/// <summary>
/// Handles a failed upload: logs the error and moves the scan file into the
/// invalid-files directory so it is not retried indefinitely.
/// </summary>
/// <param name="failedFilePath">Full path of the scan file whose upload failed.</param>
/// <param name="error">Failure description included in the log entry.</param>
private void OnFileInJobFailed(string failedFilePath, string error)
{
    // Nothing to quarantine when the path is blank or the file is already gone.
    if (string.IsNullOrWhiteSpace(failedFilePath) || !File.Exists(failedFilePath))
    {
        return;
    }

    string quarantinePath = FileHelper.GetUniqueFileName(this.configuration.InvalidFilesDirectory, failedFilePath);

    string message = string.Format(
        CultureInfo.InvariantCulture,
        "Error occurred while uploading the file {0} {1} Exception Details: {2}",
        quarantinePath,
        Environment.NewLine,
        error);
    this.logger.Log(TraceEventType.Error, LoggingMessageId.MessageBufferEventId, message);

    // Transacted move: the file is never left half-moved between directories.
    using (TransactionScope scope = new TransactionScope())
    {
        TransactedFileHelper.MoveFileTransacted(failedFilePath, quarantinePath);
        scope.Complete();
    }
}
/// <summary>
/// Handles a successful upload: deletes the local scan file once an uploaded
/// location has been recorded.
/// </summary>
/// <param name="scanFilePath">Full path of the local scan file to remove.</param>
/// <param name="uploadedFilePath">Destination path/URI of the uploaded copy.</param>
private void OnFileInJobCompleted(string scanFilePath, string uploadedFilePath)
{
    // Only delete the local copy when an upload destination exists and the file is still present.
    if (string.IsNullOrWhiteSpace(uploadedFilePath) || !File.Exists(scanFilePath))
    {
        return;
    }

    try
    {
        File.Delete(scanFilePath);
    }
    catch (IOException exception)
    {
        // Deletion failure is logged but non-fatal; the file may be retried later.
        this.logger.Log(TraceEventType.Error, LoggingMessageId.ImporterAgent, exception.ToString());
    }
}
/// <summary>
/// Releases resources held by this instance (standard dispose pattern).
/// </summary>
public void Dispose()
{
    // Release managed state, then tell the GC finalization is unnecessary.
    Dispose(isDisposing: true);
    GC.SuppressFinalize(this);
}
/// <summary>
/// Core dispose logic; releases the health report when disposing managed state.
/// </summary>
/// <param name="isDisposing">True when called from <see cref="Dispose()"/>; false from a finalizer.</param>
protected void Dispose(bool isDisposing)
{
    if (!isDisposing)
    {
        return;
    }

    if (this.healthReport != null)
    {
        this.healthReport.Dispose();
    }
}
}
}
| |
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
namespace DiscUtils.Iso9660
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
/// <summary>
/// Class that creates ISO images.
/// </summary>
/// <example>
/// <code>
/// CDBuilder builder = new CDBuilder();
/// builder.VolumeIdentifier = "MYISO";
/// builder.UseJoliet = true;
/// builder.AddFile("Hello.txt", Encoding.ASCII.GetBytes("hello world!"));
/// builder.Build(@"C:\TEMP\myiso.iso");
/// </code>
/// </example>
public sealed class CDBuilder : StreamBuilder
{
    // ISO9660 reserves the first 16 sectors (the "system area"); data starts at 0x8000.
    private const long DiskStart = 0x8000;

    private List<BuildFileInfo> _files;                 // All files added, in insertion order.
    private List<BuildDirectoryInfo> _dirs;             // All directories, root first.
    private BuildDirectoryInfo _rootDirectory;          // Root directory ("\0" per ISO9660 convention).
    private BootInitialEntry _bootEntry;                // El Torito initial boot entry; null when not bootable.
    private Stream _bootImage;                          // Boot image stream supplied by SetBootImage.
    private BuildParameters _buildParams;               // Volume identifier, Joliet flag, etc.

    /// <summary>
    /// Initializes a new instance of the CDBuilder class.
    /// </summary>
    public CDBuilder()
    {
        _files = new List<BuildFileInfo>();
        _dirs = new List<BuildDirectoryInfo>();
        _rootDirectory = new BuildDirectoryInfo("\0", null);
        _dirs.Add(_rootDirectory);
        _buildParams = new BuildParameters();
        _buildParams.UseJoliet = true;
    }

    /// <summary>
    /// Gets or sets the Volume Identifier for the ISO file.
    /// </summary>
    /// <remarks>
    /// Must be a valid identifier, i.e. max 32 characters in the range A-Z, 0-9 or _.
    /// Lower-case characters are not permitted.
    /// NOTE(review): only the length is validated here; the character-set restriction
    /// in the remarks is not enforced by this setter -- confirm whether downstream
    /// code validates it.
    /// </remarks>
    public string VolumeIdentifier
    {
        get
        {
            return _buildParams.VolumeIdentifier;
        }

        set
        {
            if (value.Length > 32)
            {
                throw new ArgumentException("Not a valid volume identifier");
            }
            else
            {
                _buildParams.VolumeIdentifier = value;
            }
        }
    }

    /// <summary>
    /// Gets or sets a value indicating whether Joliet file-system extensions should be used.
    /// </summary>
    public bool UseJoliet
    {
        get { return _buildParams.UseJoliet; }
        set { _buildParams.UseJoliet = value; }
    }

    /// <summary>
    /// Gets or sets a value indicating whether to update the ISOLINUX info table at the
    /// start of the boot image. Use with ISOLINUX only!
    /// </summary>
    /// <remarks>
    /// ISOLINUX has an 'information table' at the start of the boot loader that verifies
    /// the CD has been loaded correctly by the BIOS. This table needs to be updated
    /// to match the actual ISO.
    /// </remarks>
    public bool UpdateIsolinuxBootTable
    {
        get;
        set;
    }

    /// <summary>
    /// Sets the boot image for the ISO image.
    /// </summary>
    /// <param name="image">Stream containing the boot image.</param>
    /// <param name="emulation">The type of emulation requested of the BIOS.</param>
    /// <param name="loadSegment">The memory segment to load the image to (0 for default).</param>
    /// <exception cref="InvalidOperationException">A boot image has already been set.</exception>
    public void SetBootImage(Stream image, BootDeviceEmulation emulation, int loadSegment)
    {
        if (_bootEntry != null)
        {
            throw new InvalidOperationException("Boot image already set");
        }

        _bootEntry = new BootInitialEntry();
        _bootEntry.BootIndicator = 0x88;   // 0x88 = bootable (El Torito initial entry).
        _bootEntry.BootMediaType = emulation;
        _bootEntry.LoadSegment = (ushort)loadSegment;
        _bootEntry.SystemType = 0;
        _bootImage = image;
    }

    /// <summary>
    /// Adds a directory to the ISO image.
    /// </summary>
    /// <param name="name">The name of the directory on the ISO image.</param>
    /// <returns>The object representing this directory</returns>
    /// <remarks>
    /// The name is the full path to the directory, for example:
    /// <example><code>
    /// builder.AddDirectory(@"DIRA\DIRB\DIRC");
    /// </code></example>
    /// </remarks>
    public BuildDirectoryInfo AddDirectory(string name)
    {
        string[] nameElements = name.Split(new char[] { '\\' }, StringSplitOptions.RemoveEmptyEntries);
        return GetDirectory(nameElements, nameElements.Length, true);
    }

    /// <summary>
    /// Adds a byte array to the ISO image as a file.
    /// </summary>
    /// <param name="name">The name of the file on the ISO image.</param>
    /// <param name="content">The contents of the file.</param>
    /// <returns>The object representing this file.</returns>
    /// <exception cref="IOException">A file with the same name already exists.</exception>
    /// <remarks>
    /// The name is the full path to the file, for example:
    /// <example><code>
    /// builder.AddFile(@"DIRA\DIRB\FILE.TXT;1", new byte[]{0,1,2});
    /// </code></example>
    /// <para>Note the version number at the end of the file name is optional, if not
    /// specified the default of 1 will be used.</para>
    /// </remarks>
    public BuildFileInfo AddFile(string name, byte[] content)
    {
        string[] nameElements = name.Split(new char[] { '\\' }, StringSplitOptions.RemoveEmptyEntries);
        // Intermediate path elements are directories; they are created on demand.
        BuildDirectoryInfo dir = GetDirectory(nameElements, nameElements.Length - 1, true);

        BuildDirectoryMember existing;
        if (dir.TryGetMember(nameElements[nameElements.Length - 1], out existing))
        {
            throw new IOException("File already exists");
        }
        else
        {
            BuildFileInfo fi = new BuildFileInfo(nameElements[nameElements.Length - 1], dir, content);
            _files.Add(fi);
            dir.Add(fi);
            return fi;
        }
    }

    /// <summary>
    /// Adds a disk file to the ISO image as a file.
    /// </summary>
    /// <param name="name">The name of the file on the ISO image.</param>
    /// <param name="sourcePath">The name of the file on disk.</param>
    /// <returns>The object representing this file.</returns>
    /// <exception cref="IOException">A file with the same name already exists.</exception>
    /// <remarks>
    /// The name is the full path to the file, for example:
    /// <example><code>
    /// builder.AddFile(@"DIRA\DIRB\FILE.TXT;1", @"C:\temp\tempfile.bin");
    /// </code></example>
    /// <para>Note the version number at the end of the file name is optional, if not
    /// specified the default of 1 will be used.</para>
    /// </remarks>
    public BuildFileInfo AddFile(string name, string sourcePath)
    {
        string[] nameElements = name.Split(new char[] { '\\' }, StringSplitOptions.RemoveEmptyEntries);
        BuildDirectoryInfo dir = GetDirectory(nameElements, nameElements.Length - 1, true);

        BuildDirectoryMember existing;
        if (dir.TryGetMember(nameElements[nameElements.Length - 1], out existing))
        {
            throw new IOException("File already exists");
        }
        else
        {
            BuildFileInfo fi = new BuildFileInfo(nameElements[nameElements.Length - 1], dir, sourcePath);
            _files.Add(fi);
            dir.Add(fi);
            return fi;
        }
    }

    /// <summary>
    /// Adds a stream to the ISO image as a file.
    /// </summary>
    /// <param name="name">The name of the file on the ISO image.</param>
    /// <param name="source">The contents of the file.</param>
    /// <returns>The object representing this file.</returns>
    /// <exception cref="ArgumentException"><paramref name="source"/> does not support seeking.</exception>
    /// <exception cref="IOException">A file with the same name already exists.</exception>
    /// <remarks>
    /// The name is the full path to the file, for example:
    /// <example><code>
    /// builder.AddFile(@"DIRA\DIRB\FILE.TXT;1", stream);
    /// </code></example>
    /// <para>Note the version number at the end of the file name is optional, if not
    /// specified the default of 1 will be used.</para>
    /// </remarks>
    public BuildFileInfo AddFile(string name, Stream source)
    {
        // The stream is read again at build time, so it must be seekable.
        if (!source.CanSeek)
        {
            throw new ArgumentException("source doesn't support seeking", "source");
        }

        string[] nameElements = name.Split(new char[] { '\\' }, StringSplitOptions.RemoveEmptyEntries);
        BuildDirectoryInfo dir = GetDirectory(nameElements, nameElements.Length - 1, true);

        BuildDirectoryMember existing;
        if (dir.TryGetMember(nameElements[nameElements.Length - 1], out existing))
        {
            throw new IOException("File already exists");
        }
        else
        {
            BuildFileInfo fi = new BuildFileInfo(nameElements[nameElements.Length - 1], dir, source);
            _files.Add(fi);
            dir.Add(fi);
            return fi;
        }
    }

    /// <summary>
    /// Lays out every extent of the ISO (boot data, file data, directories, path
    /// tables, volume descriptors) at fixed byte offsets and returns them in order.
    /// </summary>
    /// <param name="totalLength">Receives the total length of the image in bytes.</param>
    /// <returns>The list of fixed extents making up the image.</returns>
    /// <remarks>
    /// Layout is sequential: <c>focus</c> tracks the next free byte and every extent
    /// is placed at the current focus, so the ordering of the steps below is load-bearing.
    /// </remarks>
    internal override List<BuilderExtent> FixExtents(out long totalLength)
    {
        List<BuilderExtent> fixedRegions = new List<BuilderExtent>();

        DateTime buildTime = DateTime.UtcNow;

        // Joliet uses big-endian UTF-16 for its supplementary descriptors; plain ISO9660 uses ASCII.
        Encoding suppEncoding = _buildParams.UseJoliet ? Encoding.BigEndianUnicode : Encoding.ASCII;

        // Sector numbers (LBA) of each member, for the primary and supplementary directory trees.
        Dictionary<BuildDirectoryMember, uint> primaryLocationTable = new Dictionary<BuildDirectoryMember, uint>();
        Dictionary<BuildDirectoryMember, uint> supplementaryLocationTable = new Dictionary<BuildDirectoryMember, uint>();

        long focus = DiskStart + (3 * IsoUtilities.SectorSize); // Primary, Supplementary, End (fixed at end...)
        if (_bootEntry != null)
        {
            // Reserve one more descriptor sector for the El Torito boot volume descriptor.
            focus += IsoUtilities.SectorSize;
        }

        // ####################################################################
        // # 0. Fix boot image location
        // ####################################################################
        long bootCatalogPos = 0;
        if (_bootEntry != null)
        {
            long bootImagePos = focus;
            Stream realBootImage = PatchBootImage(_bootImage, (uint)(DiskStart / IsoUtilities.SectorSize), (uint)(bootImagePos / IsoUtilities.SectorSize));
            BuilderStreamExtent bootImageExtent = new BuilderStreamExtent(focus, realBootImage);
            fixedRegions.Add(bootImageExtent);
            focus += Utilities.RoundUp(bootImageExtent.Length, IsoUtilities.SectorSize);

            // The boot catalog (validation entry + initial entry) occupies the next sector.
            bootCatalogPos = focus;
            byte[] bootCatalog = new byte[IsoUtilities.SectorSize];
            BootValidationEntry bve = new BootValidationEntry();
            bve.WriteTo(bootCatalog, 0x00);
            _bootEntry.ImageStart = (uint)Utilities.Ceil(bootImagePos, IsoUtilities.SectorSize);
            _bootEntry.SectorCount = (ushort)Utilities.Ceil(_bootImage.Length, Sizes.Sector);
            _bootEntry.WriteTo(bootCatalog, 0x20);
            fixedRegions.Add(new BuilderBufferExtent(bootCatalogPos, bootCatalog));
            focus += IsoUtilities.SectorSize;
        }

        // ####################################################################
        // # 1. Fix file locations
        // ####################################################################

        // Find end of the file data, fixing the files in place as we go
        foreach (BuildFileInfo fi in _files)
        {
            // Both directory trees reference the same single copy of the file data.
            primaryLocationTable.Add(fi, (uint)(focus / IsoUtilities.SectorSize));
            supplementaryLocationTable.Add(fi, (uint)(focus / IsoUtilities.SectorSize));
            FileExtent extent = new FileExtent(fi, focus);

            // Only remember files of non-zero length (otherwise we'll stomp on a valid file)
            if (extent.Length != 0)
            {
                fixedRegions.Add(extent);
            }

            focus += Utilities.RoundUp(extent.Length, IsoUtilities.SectorSize);
        }

        // ####################################################################
        // # 2. Fix directory locations
        // ####################################################################

        // There are two directory tables
        //  1. Primary        (std ISO9660)
        //  2. Supplementary  (Joliet)

        // Find start of the second set of directory data, fixing ASCII directories in place.
        long startOfFirstDirData = focus;
        foreach (BuildDirectoryInfo di in _dirs)
        {
            primaryLocationTable.Add(di, (uint)(focus / IsoUtilities.SectorSize));
            DirectoryExtent extent = new DirectoryExtent(di, primaryLocationTable, Encoding.ASCII, focus);
            fixedRegions.Add(extent);
            focus += Utilities.RoundUp(extent.Length, IsoUtilities.SectorSize);
        }

        // Find end of the second directory table, fixing supplementary directories in place.
        long startOfSecondDirData = focus;
        foreach (BuildDirectoryInfo di in _dirs)
        {
            supplementaryLocationTable.Add(di, (uint)(focus / IsoUtilities.SectorSize));
            DirectoryExtent extent = new DirectoryExtent(di, supplementaryLocationTable, suppEncoding, focus);
            fixedRegions.Add(extent);
            focus += Utilities.RoundUp(extent.Length, IsoUtilities.SectorSize);
        }

        // ####################################################################
        // # 3. Fix path tables
        // ####################################################################

        // There are four path tables:
        //  1. LE, ASCII
        //  2. BE, ASCII
        //  3. LE, Supp Encoding (Joliet)
        //  4. BE, Supp Encoding (Joliet)

        // Find end of the path table
        long startOfFirstPathTable = focus;
        PathTable pathTable = new PathTable(false, Encoding.ASCII, _dirs, primaryLocationTable, focus);
        fixedRegions.Add(pathTable);
        focus += Utilities.RoundUp(pathTable.Length, IsoUtilities.SectorSize);
        long primaryPathTableLength = pathTable.Length;

        long startOfSecondPathTable = focus;
        pathTable = new PathTable(true, Encoding.ASCII, _dirs, primaryLocationTable, focus);
        fixedRegions.Add(pathTable);
        focus += Utilities.RoundUp(pathTable.Length, IsoUtilities.SectorSize);

        long startOfThirdPathTable = focus;
        pathTable = new PathTable(false, suppEncoding, _dirs, supplementaryLocationTable, focus);
        fixedRegions.Add(pathTable);
        focus += Utilities.RoundUp(pathTable.Length, IsoUtilities.SectorSize);
        long supplementaryPathTableLength = pathTable.Length;

        long startOfFourthPathTable = focus;
        pathTable = new PathTable(true, suppEncoding, _dirs, supplementaryLocationTable, focus);
        fixedRegions.Add(pathTable);
        focus += Utilities.RoundUp(pathTable.Length, IsoUtilities.SectorSize);

        // Find the end of the disk
        totalLength = focus;

        // ####################################################################
        // # 4. Prepare volume descriptors now other structures are fixed
        // ####################################################################

        // Descriptors are inserted at the front of the region list, in disk order.
        int regionIdx = 0;
        focus = DiskStart;
        PrimaryVolumeDescriptor pvDesc = new PrimaryVolumeDescriptor(
            (uint)(totalLength / IsoUtilities.SectorSize), // VolumeSpaceSize
            (uint)primaryPathTableLength, // PathTableSize
            (uint)(startOfFirstPathTable / IsoUtilities.SectorSize), // TypeLPathTableLocation
            (uint)(startOfSecondPathTable / IsoUtilities.SectorSize), // TypeMPathTableLocation
            (uint)(startOfFirstDirData / IsoUtilities.SectorSize), // RootDirectory.LocationOfExtent
            (uint)_rootDirectory.GetDataSize(Encoding.ASCII), // RootDirectory.DataLength
            buildTime);
        pvDesc.VolumeIdentifier = _buildParams.VolumeIdentifier;
        PrimaryVolumeDescriptorRegion pvdr = new PrimaryVolumeDescriptorRegion(pvDesc, focus);
        fixedRegions.Insert(regionIdx++, pvdr);
        focus += IsoUtilities.SectorSize;

        if (_bootEntry != null)
        {
            BootVolumeDescriptor bvDesc = new BootVolumeDescriptor(
                (uint)(bootCatalogPos / IsoUtilities.SectorSize));
            BootVolumeDescriptorRegion bvdr = new BootVolumeDescriptorRegion(bvDesc, focus);
            fixedRegions.Insert(regionIdx++, bvdr);
            focus += IsoUtilities.SectorSize;
        }

        SupplementaryVolumeDescriptor svDesc = new SupplementaryVolumeDescriptor(
            (uint)(totalLength / IsoUtilities.SectorSize), // VolumeSpaceSize
            (uint)supplementaryPathTableLength, // PathTableSize
            (uint)(startOfThirdPathTable / IsoUtilities.SectorSize), // TypeLPathTableLocation
            (uint)(startOfFourthPathTable / IsoUtilities.SectorSize), // TypeMPathTableLocation
            (uint)(startOfSecondDirData / IsoUtilities.SectorSize), // RootDirectory.LocationOfExtent
            (uint)_rootDirectory.GetDataSize(suppEncoding), // RootDirectory.DataLength
            buildTime,
            suppEncoding);
        svDesc.VolumeIdentifier = _buildParams.VolumeIdentifier;
        SupplementaryVolumeDescriptorRegion svdr = new SupplementaryVolumeDescriptorRegion(svDesc, focus);
        fixedRegions.Insert(regionIdx++, svdr);
        focus += IsoUtilities.SectorSize;

        VolumeDescriptorSetTerminator evDesc = new VolumeDescriptorSetTerminator();
        VolumeDescriptorSetTerminatorRegion evdr = new VolumeDescriptorSetTerminatorRegion(evDesc, focus);
        fixedRegions.Insert(regionIdx++, evdr);

        return fixedRegions;
    }

    /// <summary>
    /// Patches a boot image (esp. for ISOLINUX) before it is written to the disk.
    /// </summary>
    /// <param name="bootImage">The original (master) boot image.</param>
    /// <param name="pvdLba">The logical block address of the primary volume descriptor.</param>
    /// <param name="bootImageLba">The logical block address of the boot image itself.</param>
    /// <returns>A stream containing the patched boot image - does not need to be disposed.</returns>
    private Stream PatchBootImage(Stream bootImage, uint pvdLba, uint bootImageLba)
    {
        // Early-exit if no patching to do...
        if (!UpdateIsolinuxBootTable)
        {
            return bootImage;
        }

        // NOTE(review): reads Length bytes from the current position; assumes the
        // stream is positioned at the start -- confirm with SetBootImage callers.
        byte[] bootData = Utilities.ReadFully(bootImage, (int)bootImage.Length);

        // Zero bytes 8..63 (the boot info table area) before computing the checksum.
        Array.Clear(bootData, 8, 56);

        // Checksum covers everything from byte 64 onward, in 32-bit LE words.
        uint checkSum = 0;
        for (int i = 64; i < bootData.Length; i += 4)
        {
            checkSum += Utilities.ToUInt32LittleEndian(bootData, i);
        }

        // Boot info table: PVD LBA, image LBA, image length, checksum.
        Utilities.WriteBytesLittleEndian(pvdLba, bootData, 8);
        Utilities.WriteBytesLittleEndian(bootImageLba, bootData, 12);
        Utilities.WriteBytesLittleEndian(bootData.Length, bootData, 16);
        Utilities.WriteBytesLittleEndian(checkSum, bootData, 20);

        return new MemoryStream(bootData, false);
    }

    /// <summary>
    /// Resolves a directory path, throwing if it cannot be found (or created).
    /// </summary>
    /// <param name="path">Path elements, outermost first.</param>
    /// <param name="pathLength">Number of leading elements of <paramref name="path"/> to use.</param>
    /// <param name="createMissing">Whether to create directories that don't exist yet.</param>
    /// <returns>The resolved directory.</returns>
    /// <exception cref="DirectoryNotFoundException">The directory does not exist and was not created.</exception>
    private BuildDirectoryInfo GetDirectory(string[] path, int pathLength, bool createMissing)
    {
        BuildDirectoryInfo di = TryGetDirectory(path, pathLength, createMissing);

        if (di == null)
        {
            throw new DirectoryNotFoundException("Directory not found");
        }

        return di;
    }

    /// <summary>
    /// Walks the directory tree from the root, optionally creating missing levels.
    /// Returns null when a level is missing and <paramref name="createMissing"/> is false.
    /// </summary>
    /// <exception cref="IOException">A path element names an existing non-directory member.</exception>
    private BuildDirectoryInfo TryGetDirectory(string[] path, int pathLength, bool createMissing)
    {
        BuildDirectoryInfo focus = _rootDirectory;

        for (int i = 0; i < pathLength; ++i)
        {
            BuildDirectoryMember next;
            if (!focus.TryGetMember(path[i], out next))
            {
                if (createMissing)
                {
                    // This directory doesn't exist, create it...
                    BuildDirectoryInfo di = new BuildDirectoryInfo(path[i], focus);
                    focus.Add(di);
                    _dirs.Add(di);
                    focus = di;
                }
                else
                {
                    return null;
                }
            }
            else
            {
                BuildDirectoryInfo nextAsBuildDirectoryInfo = next as BuildDirectoryInfo;
                if (nextAsBuildDirectoryInfo == null)
                {
                    throw new IOException("File with conflicting name exists");
                }
                else
                {
                    focus = nextAsBuildDirectoryInfo;
                }
            }
        }

        return focus;
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.ServiceModel;
namespace System.Runtime
{
internal class MruCache<TKey, TValue> : IDisposable
    where TKey : class
    where TValue : class
{
    // MRU order: most-recently-used key at the head, least at the tail.
    private LinkedList<TKey> _mruList;
    private Dictionary<TKey, CacheEntry> _items;
    private readonly int _lowWatermark;   // Size the cache is purged down to when full.
    private readonly int _highWatermark;  // Size at which a purge is triggered.
    private CacheEntry _mruEntry;         // One-item fast path: the most recent hit.
    private int _refCount = 1;            // Guarded by _mutex; cache disposes when it hits 0.
    private object _mutex = new object();

    // Convenience overload: low watermark defaults to 80% of the high watermark.
    public MruCache(int watermark)
        : this(watermark * 4 / 5, watermark)
    {
    }

    // Returns whether the cache can be used by the caller after calling AddRef
    public bool AddRef()
    {
        lock(_mutex)
        {
            // Already fully released; the caller must not use this instance.
            if (_refCount == 0)
            {
                return false;
            }

            _refCount++;
            return true;
        }
    }

    //
    // The cache will grow until the high watermark. At which point, the least recently used items
    // will be purge until the cache's size is reduced to low watermark
    //
    public MruCache(int lowWatermark, int highWatermark)
        : this(lowWatermark, highWatermark, null)
    {
    }

    // Core constructor; a null comparer means the default equality comparer for TKey.
    public MruCache(int lowWatermark, int highWatermark, IEqualityComparer<TKey> comparer)
    {
        Fx.Assert(lowWatermark < highWatermark, "");
        Fx.Assert(lowWatermark >= 0, "");
        _lowWatermark = lowWatermark;
        _highWatermark = highWatermark;
        _mruList = new LinkedList<TKey>();
        if (comparer == null)
        {
            _items = new Dictionary<TKey, CacheEntry>();
        }
        else
        {
            _items = new Dictionary<TKey, CacheEntry>(comparer);
        }
    }

    // Number of entries currently cached.
    public int Count
    {
        get
        {
            ThrowIfDisposed();
            return _items.Count;
        }
    }

    public bool IsDisposed { get; private set; }

    // Adds a new entry as the most-recently-used item.
    // NOTE(review): this method does not lock _mutex; callers appear to be
    // responsible for external synchronization -- confirm.
    public void Add(TKey key, TValue value)
    {
        Fx.Assert(null != key, "");
        ThrowIfDisposed();

        // if anything goes wrong (duplicate entry, etc) we should
        // clear our caches so that we don't get out of sync
        bool success = false;
        try
        {
            if (_items.Count == _highWatermark)
            {
                // If the cache is full, purge enough LRU items to shrink the
                // cache down to the low watermark
                int countToPurge = _highWatermark - _lowWatermark;
                for (int i = 0; i < countToPurge; i++)
                {
                    TKey keyRemove = _mruList.Last.Value;
                    _mruList.RemoveLast();
                    TValue item = _items[keyRemove].value;
                    _items.Remove(keyRemove);
                    OnSingleItemRemoved(item);
                    OnItemAgedOutOfCache(item);
                }
            }

            // Add the new entry to the cache and make it the MRU element
            CacheEntry entry;
            entry.node = _mruList.AddFirst(key);
            entry.value = value;
            _items.Add(key, entry);
            _mruEntry = entry;
            success = true;
        }
        finally
        {
            if (!success)
            {
                // Throwing path (e.g. duplicate key): reset everything so the
                // dictionary and MRU list can't disagree.
                Clear();
            }
        }
    }

    public void Clear()
    {
        ThrowIfDisposed();
        Clear(false);
    }

    // Empties the cache; when dispose=true, also disposes IDisposable values.
    private void Clear(bool dispose)
    {
        _mruList.Clear();

        if (dispose)
        {
            foreach (CacheEntry cacheEntry in _items.Values)
            {
                var item = cacheEntry.value as IDisposable;
                if (item != null)
                {
                    try
                    {
                        item.Dispose();
                    }
                    catch (Exception e)
                    {
                        // Best-effort disposal: swallow non-fatal exceptions so one
                        // bad item doesn't prevent clearing the rest.
                        if (Fx.IsFatal(e))
                        {
                            throw;
                        }
                    }
                }
            }
        }

        _items.Clear();

        // Invalidate the single-item fast-path cache too.
        _mruEntry.value = null;
        _mruEntry.node = null;
    }

    public bool Remove(TKey key)
    {
        Fx.Assert(null != key, "");
        ThrowIfDisposed();

        CacheEntry entry;
        if (_items.TryGetValue(key, out entry))
        {
            _items.Remove(key);
            OnSingleItemRemoved(entry.value);
            _mruList.Remove(entry.node);

            // Drop the fast-path reference if it pointed at the removed entry.
            if (object.ReferenceEquals(_mruEntry.node, entry.node))
            {
                _mruEntry.value = null;
                _mruEntry.node = null;
            }

            return true;
        }

        return false;
    }

    // Hook for subclasses: called for every item removed (purge or explicit Remove).
    protected virtual void OnSingleItemRemoved(TValue item)
    {
        ThrowIfDisposed();
    }

    // Hook for subclasses: called only for items purged by the watermark logic.
    protected virtual void OnItemAgedOutOfCache(TValue item)
    {
        ThrowIfDisposed();
    }

    //
    // If found, make the entry most recently used
    //
    public bool TryGetValue(TKey key, out TValue value)
    {
        // first check our MRU item
        if (_mruEntry.node != null && key != null && key.Equals(_mruEntry.node.Value))
        {
            value = _mruEntry.value;
            return true;
        }

        CacheEntry entry;

        bool found = _items.TryGetValue(key, out entry);
        // On a miss, entry is default(CacheEntry) so value comes back null.
        value = entry.value;

        // Move the node to the head of the MRU list if it's not already there
        if (found && _mruList.Count > 1
            && !object.ReferenceEquals(_mruList.First, entry.node))
        {
            _mruList.Remove(entry.node);
            _mruList.AddFirst(entry.node);
            _mruEntry = entry;
        }

        return found;
    }

    // Drops one reference; the last release disposes the cache contents.
    public void Dispose()
    {
        int refCount;
        lock (_mutex)
        {
            refCount = --_refCount;
        }

        Fx.Assert(refCount >= 0, "Ref count shouldn't go below zero");

        if (refCount == 0)
        {
            Dispose(true);
        }
    }

    protected virtual void Dispose(bool disposing)
    {
        if (disposing)
        {
            lock (_mutex)
            {
                if (!IsDisposed)
                {
                    IsDisposed = true;
                    Clear(true);
                }
            }
        }
    }

    private void ThrowIfDisposed()
    {
        if (IsDisposed)
        {
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ObjectDisposedException(GetType().FullName));
        }
    }

    // Value struct pairing a cached value with its node in the MRU list.
    private struct CacheEntry
    {
        internal TValue value;
        internal LinkedListNode<TKey> node;
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Text;
using Microsoft.CodeAnalysis.Text.Shared.Extensions;
using Microsoft.VisualStudio.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Text
{
public static partial class Extensions
{
/// <summary>
/// ITextSnapshot implementation of SourceText
/// </summary>
private class SnapshotSourceText : SourceText
{
/// <summary>
/// Use a separate class for closed files to simplify memory leak investigations
/// </summary>
internal sealed class ClosedSnapshotSourceText : SnapshotSourceText
{
    // containerOpt is null because a closed file no longer has a live buffer container to track.
    public ClosedSnapshotSourceText(ITextSnapshot roslynSnapshot, Encoding encodingOpt)
        : base(roslynSnapshot, encodingOpt, containerOpt: null)
    {
    }
}
private static readonly Func<int, int, string> s_textLog = (v1, v2) => string.Format("FullRange : from {0} to {1}", v1, v2);
/// <summary>
/// The ITextSnapshot backing the SourceText instance
/// </summary>
protected readonly ITextSnapshot RoslynSnapshot;
private readonly Encoding _encodingOpt;
private readonly TextBufferContainer _containerOpt;
private readonly int _reiteratedVersion;
// Constructs a SourceText over an *editor* snapshot; maps it to the corresponding
// Roslyn snapshot and remembers the reiterated version for the creation cache.
private SnapshotSourceText(ITextSnapshot editorSnapshot, Encoding encodingOpt)
{
    Contract.ThrowIfNull(editorSnapshot);

    this.RoslynSnapshot = TextBufferMapper.ToRoslyn(editorSnapshot);
    _containerOpt = TextBufferContainer.From(editorSnapshot.TextBuffer);
    _reiteratedVersion = editorSnapshot.Version.ReiteratedVersionNumber;
    _encodingOpt = encodingOpt;
}
/// <summary>
/// Constructs a SourceText directly over a Roslyn snapshot, with an optional
/// encoding and container supplied by the caller.
/// </summary>
public SnapshotSourceText(ITextSnapshot roslynSnapshot, Encoding encodingOpt, TextBufferContainer containerOpt)
{
    Contract.ThrowIfNull(roslynSnapshot);

    _containerOpt = containerOpt;
    _encodingOpt = encodingOpt;
    this.RoslynSnapshot = roslynSnapshot;
}
/// <summary>
/// A weak map of all Editor ITextSnapshots and their associated SourceText
/// </summary>
private static readonly ConditionalWeakTable<ITextSnapshot, SnapshotSourceText> s_textSnapshotMap = new ConditionalWeakTable<ITextSnapshot, SnapshotSourceText>();
private static readonly ConditionalWeakTable<ITextSnapshot, SnapshotSourceText>.CreateValueCallback s_createTextCallback = CreateText;
/// <summary>
/// Gets (or creates and caches) the <see cref="SourceText"/> corresponding to an editor snapshot.
/// </summary>
/// <param name="editorSnapshot">The editor snapshot to wrap.</param>
/// <returns>The cached or newly created SourceText.</returns>
/// <exception cref="ArgumentNullException"><paramref name="editorSnapshot"/> is null.</exception>
public static SourceText From(ITextSnapshot editorSnapshot)
{
    if (editorSnapshot == null)
    {
        // Fix: previously reported the non-existent parameter name "textSnapshot".
        throw new ArgumentNullException(nameof(editorSnapshot));
    }

    return s_textSnapshotMap.GetValue(editorSnapshot, s_createTextCallback);
}
// Use this as a secondary cache to catch ITextSnapshots that have the same ReiteratedVersionNumber as a previously created SnapshotSourceText
private static readonly ConditionalWeakTable<ITextBuffer, StrongBox<SnapshotSourceText>> s_textBufferLatestSnapshotMap = new ConditionalWeakTable<ITextBuffer, StrongBox<SnapshotSourceText>>();
/// <summary>
/// Creates a SnapshotSourceText for an editor snapshot, reusing the per-buffer
/// cached instance when its reiterated version number matches.
/// </summary>
private static SnapshotSourceText CreateText(ITextSnapshot editorSnapshot)
{
    var cacheSlot = s_textBufferLatestSnapshotMap.GetOrCreateValue(editorSnapshot.TextBuffer);

    // Same reiterated version => same content; hand back the cached text.
    var cached = cacheSlot.Value;
    if (cached != null && cached._reiteratedVersion == editorSnapshot.Version.ReiteratedVersionNumber)
    {
        return cached;
    }

    var created = new SnapshotSourceText(editorSnapshot, editorSnapshot.TextBuffer.GetEncodingOrUTF8());
    cacheSlot.Value = created;
    return created;
}
// The encoding supplied at construction; may be null when unknown.
public override Encoding Encoding
{
    get { return _encodingOpt; }
}
// Maps the stored Roslyn snapshot back to its corresponding editor snapshot.
public ITextSnapshot EditorSnapshot
{
    get { return TextBufferMapper.ToEditor(this.RoslynSnapshot); }
}
/// <summary>
/// Resolves the text-buffer clone service from the primary workspace, or null
/// when no workspace is available. (Simplest way to get the text factory.)
/// </summary>
protected static ITextBufferCloneService TextBufferFactory
{
    get
    {
        var workspace = PrimaryWorkspace.Workspace;
        return workspace == null ? null : workspace.Services.GetService<ITextBufferCloneService>();
    }
}
// Prefer the buffer-backed container when one was supplied; otherwise fall back
// to the base implementation's container.
public override SourceTextContainer Container
{
    get
    {
        return _containerOpt ?? base.Container;
    }
}
/// <summary>
/// Length in characters, taken directly from the underlying Roslyn snapshot.
/// </summary>
public override int Length
{
    get
    {
        return this.RoslynSnapshot.Length;
    }
}
// Character access delegates straight to the snapshot's indexer.
public override char this[int position]
{
    get { return this.RoslynSnapshot[position]; }
}
#region Lines
// Exposes the snapshot's line structure through the SourceText line API.
protected override TextLineCollection GetLinesCore()
{
    return new LineInfo(this);
}
// TextLineCollection view over the snapshot's lines; every member delegates to
// the owning SnapshotSourceText's RoslynSnapshot.
private class LineInfo : TextLineCollection
{
    private readonly SnapshotSourceText _text;

    public LineInfo(SnapshotSourceText text)
    {
        _text = text;
    }

    public override int Count
    {
        get { return _text.RoslynSnapshot.LineCount; }
    }

    public override TextLine this[int index]
    {
        get
        {
            // Span runs Start..End, i.e. the line's content without its line break.
            var line = _text.RoslynSnapshot.GetLineFromLineNumber(index);
            return TextLine.FromSpan(_text, TextSpan.FromBounds(line.Start, line.End));
        }
    }

    public override int IndexOf(int position)
    {
        return _text.RoslynSnapshot.GetLineNumberFromPosition(position);
    }

    public override TextLine GetLineFromPosition(int position)
    {
        return this[this.IndexOf(position)];
    }

    public override LinePosition GetLinePosition(int position)
    {
        // Column is the offset of 'position' from the line's start.
        ITextSnapshotLine textLine = _text.RoslynSnapshot.GetLineFromPosition(position);
        return new LinePosition(textLine.LineNumber, position - textLine.Start);
    }
}
#endregion
// Materializes the entire snapshot as a string.
public override string ToString()
{
    return this.RoslynSnapshot.GetText();
}
/// <summary>
/// Returns the text of the given span, reading directly from the snapshot.
/// </summary>
public override string ToString(TextSpan textSpan)
{
    // Translate the Roslyn span to an editor Span before querying the snapshot.
    return this.RoslynSnapshot.GetText(new Span(textSpan.Start, textSpan.Length));
}
/// <summary>
/// Applies text changes by cloning the underlying buffer and editing the clone,
/// which keeps the result snapshot-backed (enabling fast GetChangeRanges later).
/// Falls back to the base implementation when no clone service is available.
/// </summary>
public override SourceText WithChanges(IEnumerable<TextChange> changes)
{
    if (changes == null)
    {
        throw new ArgumentNullException("changes");
    }

    // No changes: this instance already represents the result.
    if (!changes.Any())
    {
        return this;
    }

    // check whether we can use text buffer factory
    var factory = TextBufferFactory;
    if (factory == null)
    {
        // if we can't get the factory, use the default implementation
        return base.WithChanges(changes);
    }

    // otherwise, create a new cloned snapshot
    var buffer = factory.Clone(RoslynSnapshot.GetFullSpan());
    var baseSnapshot = buffer.CurrentSnapshot;

    // apply the change to the buffer
    // NOTE(review): 'changes' is enumerated twice (Any above, foreach here);
    // assumes the sequence is materialized or cheaply re-enumerable -- confirm.
    using (var edit = buffer.CreateEdit())
    {
        foreach (var change in changes)
        {
            edit.Replace(change.Span.ToSpan(), change.NewText);
        }

        edit.Apply();
    }

    return new ChangedSourceText(this, baseSnapshot, buffer.CurrentSnapshot);
}
/// <summary>
/// Perf: Optimize calls to GetChangeRanges after WithChanges by using editor snapshots
/// </summary>
// SourceText produced by WithChanges: remembers the text it was derived from so
// GetChangeRanges(oldText == base) can be answered from editor snapshot diffs.
private class ChangedSourceText : SnapshotSourceText
{
    private readonly SnapshotSourceText _baseText;     // The text WithChanges was called on.
    private readonly ITextSnapshot _baseSnapshot;      // Cloned buffer's snapshot before edits.

    public ChangedSourceText(SnapshotSourceText baseText, ITextSnapshot baseSnapshot, ITextSnapshot currentSnapshot)
        : base(currentSnapshot, baseText.Encoding, containerOpt: null)
    {
        _baseText = baseText;
        _baseSnapshot = baseSnapshot;
    }

    public override IReadOnlyList<TextChangeRange> GetChangeRanges(SourceText oldText)
    {
        if (oldText == null)
        {
            throw new ArgumentNullException("oldText");
        }

        // if they are the same text there is no change.
        if (oldText == this)
        {
            return TextChangeRange.NoChanges;
        }

        // Unrelated text (not the base we were derived from, by reference):
        // conservatively report "everything changed".
        if (oldText != _baseText)
        {
            return new[] { new TextChangeRange(new TextSpan(0, oldText.Length), this.Length) };
        }

        // Diff the pre-edit and post-edit snapshots of the cloned buffer.
        return GetChangeRanges(_baseSnapshot, _baseSnapshot.Length, this.RoslynSnapshot);
    }
}
// Bulk character copy delegates to the snapshot (avoids materializing a string).
public override void CopyTo(int sourceIndex, char[] destination, int destinationIndex, int count)
{
    this.RoslynSnapshot.CopyTo(sourceIndex, destination, destinationIndex, count);
}
public override void Write(TextWriter textWriter, TextSpan span, CancellationToken cancellationToken)
{
this.RoslynSnapshot.Write(textWriter, span.ToSpan());
}
#region GetChangeRangesImplementation
/// <summary>
/// Computes the changed ranges between <paramref name="oldText"/> and this text,
/// reusing recorded buffer events or editor snapshot history when available.
/// </summary>
/// <param name="oldText">The previous version of the text; must not be null.</param>
/// <returns>
/// <see cref="TextChangeRange.NoChanges"/> when the texts are the same instance;
/// otherwise the ranges that changed (possibly a single full-replacement range).
/// </returns>
public override IReadOnlyList<TextChangeRange> GetChangeRanges(SourceText oldText)
{
    if (oldText == null)
    {
        throw new ArgumentNullException(nameof(oldText));
    }

    // if they are the same text there is no change.
    if (oldText == this)
    {
        return TextChangeRange.NoChanges;
    }

    // First, check whether the live buffer container already recorded this exact
    // old/new transition; if so, reuse the change list from that event.
    var container = this.Container as TextBufferContainer;
    if (container != null)
    {
        var lastEventArgs = container.LastEventArgs;
        if (lastEventArgs != null && lastEventArgs.OldText == oldText && lastEventArgs.NewText == this)
        {
            return lastEventArgs.Changes;
        }
    }

    // Otherwise fall back to diffing via the corresponding editor snapshots (if any).
    var oldSnapshot = oldText.FindCorrespondingEditorTextSnapshot();
    var newSnapshot = this.FindCorrespondingEditorTextSnapshot();
    return GetChangeRanges(oldSnapshot, oldText.Length, newSnapshot);
}
/// <summary>
/// Diffs two editor snapshots, falling back to a full-replacement range when the
/// snapshots cannot be compared incrementally.
/// </summary>
private IReadOnlyList<TextChangeRange> GetChangeRanges(ITextSnapshot oldSnapshot, int oldTextLength, ITextSnapshot newSnapshot)
{
    // Without two snapshots from the same buffer we cannot walk version history;
    // claim the entire old text was replaced.
    bool comparable =
        oldSnapshot != null &&
        newSnapshot != null &&
        oldSnapshot.TextBuffer == newSnapshot.TextBuffer;
    if (!comparable)
    {
        Logger.Log(FunctionId.Workspace_SourceText_GetChangeRanges, "Invalid Snapshots");
        return ImmutableArray.Create<TextChangeRange>(new TextChangeRange(new TextSpan(0, oldTextLength), this.Length));
    }

    // Equal reiterated version numbers guarantee identical content even when the
    // raw version numbers differ.
    if (oldSnapshot.Version.ReiteratedVersionNumber == newSnapshot.Version.ReiteratedVersionNumber)
    {
        return TextChangeRange.NoChanges;
    }

    // Walk the version chain in whichever direction moves forward in time.
    bool forward = oldSnapshot.Version.VersionNumber <= newSnapshot.Version.VersionNumber;
    return GetChangeRanges(oldSnapshot, newSnapshot, forward);
}
// Cached converters from an editor ITextChange to a Roslyn TextChangeRange, one
// per diff direction, to avoid allocating a delegate per call.
private static readonly Func<ITextChange, TextChangeRange> s_forwardTextChangeRange = c => CreateTextChangeRange(c, forward: true);
private static readonly Func<ITextChange, TextChangeRange> s_backwardTextChangeRange = c => CreateTextChangeRange(c, forward: false);
// Walks the version chain between two snapshots of the same buffer. If exactly
// one version in the chain carries changes, those are returned directly;
// otherwise the changes are collapsed into a single accumulated range.
private IReadOnlyList<TextChangeRange> GetChangeRanges(ITextSnapshot snapshot1, ITextSnapshot snapshot2, bool forward)
{
var oldSnapshot = forward ? snapshot1 : snapshot2;
var newSnapshot = forward ? snapshot2 : snapshot1;
INormalizedTextChangeCollection changes = null;
for (var oldVersion = oldSnapshot.Version;
oldVersion != newSnapshot.Version;
oldVersion = oldVersion.Next)
{
if (oldVersion.Changes.Count != 0)
{
if (changes != null)
{
// Oops - more than one "textual" change between these snapshots, bail and try to find smallest changes span
Logger.Log(FunctionId.Workspace_SourceText_GetChangeRanges, s_textLog, snapshot1.Version.VersionNumber, snapshot2.Version.VersionNumber);
return new[] { GetChangeRanges(oldSnapshot.Version, newSnapshot.Version, forward) };
}
else
{
changes = oldVersion.Changes;
}
}
}
if (changes == null)
{
// No version between the two carried any changes: content is identical.
return ImmutableArray.Create<TextChangeRange>();
}
else
{
return ImmutableArray.CreateRange(changes.Select(forward ? s_forwardTextChangeRange : s_backwardTextChangeRange));
}
}
// Collapses all changes between two versions into one accumulated range.
private TextChangeRange GetChangeRanges(ITextVersion oldVersion, ITextVersion newVersion, bool forward)
{
TextChangeRange? range = null;
var iterator = GetMultipleVersionTextChanges(oldVersion, newVersion, forward);
// Accumulate in chronological order; when diffing backward the per-version
// change sets must be visited in reverse.
foreach (var changes in forward ? iterator : iterator.Reverse())
{
range = range.Accumulate(changes);
}
// oldVersion != newVersion with differing content, so at least one change set
// must have been accumulated.
Contract.Requires(range.HasValue);
return range.Value;
}
/// <summary>
/// Yields the change set of every version from <paramref name="oldVersion"/>
/// (inclusive) up to <paramref name="newVersion"/> (exclusive), converted to
/// ranges in the requested direction.
/// </summary>
private static IEnumerable<IEnumerable<TextChangeRange>> GetMultipleVersionTextChanges(ITextVersion oldVersion, ITextVersion newVersion, bool forward)
{
    var selector = forward ? s_forwardTextChangeRange : s_backwardTextChangeRange;
    var version = oldVersion;
    while (version != newVersion)
    {
        yield return version.Changes.Select(selector);
        version = version.Next;
    }
}
/// <summary>
/// Converts an editor text change to a Roslyn range. Forward maps the old span
/// to the new length; backward inverts the direction.
/// </summary>
private static TextChangeRange CreateTextChangeRange(ITextChange change, bool forward)
{
    return forward
        ? new TextChangeRange(new TextSpan(change.OldSpan.Start, change.OldSpan.Length), change.NewLength)
        : new TextChangeRange(new TextSpan(change.NewSpan.Start, change.NewSpan.Length), change.OldLength);
}
#endregion
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text;
namespace Lucene.Net.Search
{
using NUnit.Framework;
using System.IO;
using AllDeletedFilterReader = Lucene.Net.Index.AllDeletedFilterReader;
using AtomicReader = Lucene.Net.Index.AtomicReader;
using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
using IBits = Lucene.Net.Util.IBits;
using Directory = Lucene.Net.Store.Directory;
using DirectoryReader = Lucene.Net.Index.DirectoryReader;
using Document = Documents.Document;
using IndexReader = Lucene.Net.Index.IndexReader;
using IndexWriter = Lucene.Net.Index.IndexWriter;
using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//using Assert = junit.framework.Assert;
using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
using MultiReader = Lucene.Net.Index.MultiReader;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
using SlowCompositeReaderWrapper = Lucene.Net.Index.SlowCompositeReaderWrapper;
using Similarities;
/// <summary>
/// Utility class for sanity-checking queries.
/// </summary>
public class QueryUtils
{
/// <summary>
/// Check the types of things query objects should be able to do. </summary>
public static void Check(Query q)
{
// Without a searcher only the hashCode/equals contract can be verified.
CheckHashEquals(q);
}
/// <summary>
/// Performs very basic <c>GetHashCode()</c>/<c>Equals()</c> sanity checks on a
/// query: clones compare equal, boost changes break equality, foreign query
/// types never compare equal, and comparison with null does not throw.
/// </summary>
public static void CheckHashEquals(Query q)
{
    // An unmodified clone must compare equal to the original.
    Query clone = (Query)q.Clone();
    CheckEqual(q, clone);

    // Changing the boost must break equality.
    Query boosted = (Query)q.Clone();
    boosted.Boost = 7.21792348f;
    CheckUnequal(q, boosted);

    // test that a class check is done so that no exception is thrown
    // in the implementation of equals()
    Query whacky = new QueryAnonymousInnerClassHelper();
    whacky.Boost = q.Boost;
    CheckUnequal(q, whacky);

    // null test
    Assert.IsFalse(q.Equals(null));
}
// A deliberately foreign Query subclass used by CheckHashEquals to verify that
// Equals() performs a type check instead of throwing on unknown types.
private class QueryAnonymousInnerClassHelper : Query
{
public QueryAnonymousInnerClassHelper()
{
}
public override string ToString(string field)
{
return "My Whacky Query";
}
}
// Asserts that two queries compare equal and share a hash code.
public static void CheckEqual(Query q1, Query q2)
{
Assert.IsTrue(q1.Equals(q2));
Assert.AreEqual(q1, q2);
Assert.AreEqual(q1.GetHashCode(), q2.GetHashCode());
}
// Asserts that two queries are unequal in both directions (symmetry) and have
// different hash codes.
public static void CheckUnequal(Query q1, Query q2)
{
Assert.IsFalse(q1.Equals(q2), q1 + " equal to " + q2);
Assert.IsFalse(q2.Equals(q1), q2 + " equal to " + q1);
// possible this test can fail on a hash collision... if that
// happens, please change test to use a different example.
Assert.IsTrue(q1.GetHashCode() != q2.GetHashCode());
}
/// <summary>
/// deep check that explanations of a query 'score' correctly </summary>
public static void CheckExplanations(Query q, IndexSearcher s)
{
CheckHits.CheckExplanations(q, null, s, true);
}
/// <summary>
/// Various query sanity checks on a searcher, some checks are only done for
/// instanceof IndexSearcher.
/// </summary>
/// <param name = "similarity" >
/// LUCENENET specific
/// Removes dependency on <see cref="LuceneTestCase.ClassEnv.Similarity"/>
/// </param>
/// <seealso cref= #check(Query) </seealso>
/// <seealso cref= #checkFirstSkipTo </seealso>
/// <seealso cref= #checkSkipTo </seealso>
/// <seealso cref= #checkExplanations </seealso>
/// <seealso cref= #checkEqual </seealso>
public static void Check(Random random, Query q1, IndexSearcher s, Similarity similarity)
{
Check(random, q1, s, true, similarity);
}
/// <param name = "similarity" >
/// LUCENENET specific
/// Removes dependency on <see cref="LuceneTestCase.ClassEnv.Similarity"/>
/// </param>
public static void Check(Random random, Query q1, IndexSearcher s, bool wrap, Similarity similarity)
{
try
{
Check(q1);
if (s != null)
{
CheckFirstSkipTo(q1, s, similarity);
CheckSkipTo(q1, s, similarity);
if (wrap)
{
// Re-run the checks against searchers whose reader is padded with empty
// readers at the front/middle/back; wrap:false stops the recursion.
Check(random, q1, WrapUnderlyingReader(random, s, -1, similarity), false, similarity);
Check(random, q1, WrapUnderlyingReader(random, s, 0, similarity), false, similarity);
Check(random, q1, WrapUnderlyingReader(random, s, +1, similarity), false, similarity);
}
CheckExplanations(q1, s);
// Rewriting a clone must produce a query equal to rewriting the original.
Query q2 = (Query)q1.Clone();
CheckEqual(s.Rewrite(q1), s.Rewrite(q2));
}
}
catch (IOException e)
{
throw new Exception(e.ToString(), e);
}
}
public static void PurgeFieldCache(IndexReader r)
{
// this is just a hack, to get an atomic reader that contains all subreaders for insanity checks
FieldCache.DEFAULT.PurgeByCacheKey(SlowCompositeReaderWrapper.Wrap(r).CoreCacheKey);
}
/// <summary>
/// this is a MultiReader that can be used for randomly wrapping other readers
/// without creating FieldCache insanity.
/// The trick is to use an opaque/fake cache key.
/// </summary>
public class FCInvisibleMultiReader : MultiReader
{
// Fresh opaque key per instance; both cache-key properties return it so the
// FieldCache never associates this wrapper with its sub-readers.
internal readonly object CacheKey = new object();
public FCInvisibleMultiReader(params IndexReader[] readers)
: base(readers)
{
}
public override object CoreCacheKey
{
get
{
return CacheKey;
}
}
public override object CombinedCoreAndDeletesKey
{
get
{
return CacheKey;
}
}
}
/// <summary>
/// Given an IndexSearcher, returns a new IndexSearcher whose IndexReader
/// is a MultiReader containing the Reader of the original IndexSearcher,
/// as well as several "empty" IndexReaders -- some of which will have
/// deleted documents in them. this new IndexSearcher should
/// behave exactly the same as the original IndexSearcher. </summary>
/// <param name="s"> the searcher to wrap </param>
/// <param name="edge"> if negative, s will be the first sub; if 0, s will be in the middle, if positive s will be the last sub </param>
/// <param name="similarity">
/// LUCENENET specific
/// Removes dependency on <see cref="LuceneTestCase.ClassEnv.Similarity"/>
/// </param>
public static IndexSearcher WrapUnderlyingReader(Random random, IndexSearcher s, int edge, Similarity similarity)
{
IndexReader r = s.IndexReader;
// we can't put deleted docs before the nested reader, because
// it will throw off the docIds
// Depending on edge, r is placed first (edge < 0), in the middle (edge == 0)
// or last (edge > 0); all other slots are empty or all-deleted readers.
IndexReader[] readers = new IndexReader[] { edge < 0 ? r : EmptyReaders[0], EmptyReaders[0], new FCInvisibleMultiReader(edge < 0 ? EmptyReaders[4] : EmptyReaders[0], EmptyReaders[0], 0 == edge ? r : EmptyReaders[0]), 0 < edge ? EmptyReaders[0] : EmptyReaders[7], EmptyReaders[0], new FCInvisibleMultiReader(0 < edge ? EmptyReaders[0] : EmptyReaders[5], EmptyReaders[0], 0 < edge ? r : EmptyReaders[0]) };
IndexSearcher @out = LuceneTestCase.NewSearcher(new FCInvisibleMultiReader(readers), similarity);
@out.Similarity = s.Similarity;
return @out;
}
// Shared readers used by WrapUnderlyingReader: slot 0 is an empty MultiReader;
// slots 4, 5 and 7 hold all-deleted indexes of that many documents.
// The array itself is created by the field initializer (which runs before the
// static constructor body), so the static ctor only has to fill the slots.
internal static readonly IndexReader[] EmptyReaders = new IndexReader[8];

static QueryUtils()
{
    try
    {
        EmptyReaders[0] = new MultiReader();
        EmptyReaders[4] = MakeEmptyIndex(new Random(0), 4);
        EmptyReaders[5] = MakeEmptyIndex(new Random(0), 5);
        EmptyReaders[7] = MakeEmptyIndex(new Random(0), 7);
    }
    catch (IOException ex)
    {
        // Surface index-creation failures as type-initialization errors.
        throw new Exception(ex.ToString(), ex);
    }
}
/// <summary>
/// Builds an in-memory single-segment index of <paramref name="numDocs"/> empty
/// documents and returns a reader over it in which every document is deleted.
/// </summary>
/// <param name="random">source of randomness for the mock directory</param>
/// <param name="numDocs">number of (empty) documents to add; must be positive</param>
private static IndexReader MakeEmptyIndex(Random random, int numDocs)
{
    Debug.Assert(numDocs > 0);
    Directory d = new MockDirectoryWrapper(random, new RAMDirectory());

    // using ensures the writer is disposed even if AddDocument/ForceMerge throws;
    // the original disposed it manually and leaked on exceptions.
    using (IndexWriter w = new IndexWriter(d, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(random))))
    {
        for (int i = 0; i < numDocs; i++)
        {
            w.AddDocument(new Document());
        }

        // Collapse to a single segment so GetOnlySegmentReader succeeds below.
        w.ForceMerge(1);
        w.Commit();
    }

    DirectoryReader reader = DirectoryReader.Open(d);
    return new AllDeletedFilterReader(LuceneTestCase.GetOnlySegmentReader(reader));
}
/// <summary>
/// alternate scorer skipTo(),skipTo(),next(),next(),skipTo(),skipTo(), etc
/// and ensure a hitcollector receives same docs and scores
/// </summary>
/// <param name = "similarity" >
/// LUCENENET specific
/// Removes dependency on <see cref="LuceneTestCase.ClassEnv.Similarity"/>
/// </param>
public static void CheckSkipTo(Query q, IndexSearcher s, Similarity similarity)
{
//System.out.println("Checking "+q);
IList<AtomicReaderContext> readerContextArray = s.TopReaderContext.Leaves;
if (s.CreateNormalizedWeight(q).ScoresDocsOutOfOrder) // in this case order of skipTo() might differ from that of next().
{
return;
}
const int skip_op = 0;
const int next_op = 1;
// Each inner array is a cyclic pattern of advance/next operations to exercise.
int[][] orders = new int[][] { new int[] { next_op }, new int[] { skip_op }, new int[] { skip_op, next_op }, new int[] { next_op, skip_op }, new int[] { skip_op, skip_op, next_op, next_op }, new int[] { next_op, next_op, skip_op, skip_op }, new int[] { skip_op, skip_op, skip_op, next_op, next_op } };
for (int k = 0; k < orders.Length; k++)
{
int[] order = orders[k];
// System.out.print("Order:");for (int i = 0; i < order.Length; i++)
// System.out.print(order[i]==skip_op ? " skip()":" next()");
// System.out.println();
// Single-element arrays are used so the collector (a separate object) can
// mutate this state in place.
int[] opidx = new int[] { 0 };
int[] lastDoc = new int[] { -1 };
// FUTURE: ensure scorer.Doc()==-1
const float maxDiff = 1e-5f;
AtomicReader[] lastReader = new AtomicReader[] { null };
s.Search(q, new CollectorAnonymousInnerClassHelper(q, s, readerContextArray, skip_op, order, opidx, lastDoc, maxDiff, lastReader, similarity));
if (lastReader[0] != null)
{
// confirm that skipping beyond the last doc, on the
// previous reader, hits NO_MORE_DOCS
AtomicReader previousReader = lastReader[0];
IndexSearcher indexSearcher = LuceneTestCase.NewSearcher(previousReader, false, similarity);
indexSearcher.Similarity = s.Similarity;
Weight w = indexSearcher.CreateNormalizedWeight(q);
AtomicReaderContext ctx = (AtomicReaderContext)previousReader.Context;
Scorer scorer = w.GetScorer(ctx, ((AtomicReader)ctx.Reader).LiveDocs);
if (scorer != null)
{
bool more = scorer.Advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
Assert.IsFalse(more, "query's last doc was " + lastDoc[0] + " but skipTo(" + (lastDoc[0] + 1) + ") got to " + scorer.DocID);
}
}
}
}
// Collector used by CheckSkipTo: while the searcher drives normal hit
// collection, this collector advances a second scorer over the same query
// using the configured skip/next pattern and asserts both agree on docs/scores.
private class CollectorAnonymousInnerClassHelper : ICollector
{
private Query q;
private IndexSearcher s;
private IList<AtomicReaderContext> ReaderContextArray;
private int Skip_op;
private int[] Order;
private int[] Opidx;
private int[] LastDoc;
private float MaxDiff;
private AtomicReader[] LastReader;
private readonly Similarity Similarity;
public CollectorAnonymousInnerClassHelper(Query q, IndexSearcher s, IList<AtomicReaderContext> readerContextArray,
int skip_op, int[] order, int[] opidx, int[] lastDoc, float maxDiff, AtomicReader[] lastReader,
Similarity similarity)
{
this.q = q;
this.s = s;
this.ReaderContextArray = readerContextArray;
this.Skip_op = skip_op;
this.Order = order;
this.Opidx = opidx;
this.LastDoc = lastDoc;
this.MaxDiff = maxDiff;
this.LastReader = lastReader;
this.Similarity = similarity;
}
// sc: the scorer supplied by the searcher; scorer: the independent scorer
// this collector advances itself; leafPtr: index into ReaderContextArray.
private Scorer sc;
private Scorer scorer;
private int leafPtr;
public virtual void SetScorer(Scorer scorer)
{
this.sc = scorer;
}
public virtual void Collect(int doc)
{
float score = sc.GetScore();
LastDoc[0] = doc;
try
{
// Lazily create the independent scorer for the current leaf.
if (scorer == null)
{
Weight w = s.CreateNormalizedWeight(q);
AtomicReaderContext context = ReaderContextArray[leafPtr];
scorer = w.GetScorer(context, (context.AtomicReader).LiveDocs);
}
// Pick the next operation from the cyclic skip/next pattern.
int op = Order[(Opidx[0]++) % Order.Length];
// System.out.println(op==skip_op ?
// "skip("+(sdoc[0]+1)+")":"next()");
bool more = op == Skip_op ? scorer.Advance(scorer.DocID + 1) != DocIdSetIterator.NO_MORE_DOCS : scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS;
int scorerDoc = scorer.DocID;
// Score twice to verify the scorer is stable (GetScore is idempotent).
float scorerScore = scorer.GetScore();
float scorerScore2 = scorer.GetScore();
float scoreDiff = Math.Abs(score - scorerScore);
float scorerDiff = Math.Abs(scorerScore2 - scorerScore);
if (!more || doc != scorerDoc || scoreDiff > MaxDiff || scorerDiff > MaxDiff)
{
StringBuilder sbord = new StringBuilder();
for (int i = 0; i < Order.Length; i++)
{
sbord.Append(Order[i] == Skip_op ? " skip()" : " next()");
}
throw new Exception("ERROR matching docs:" + "\n\t" + (doc != scorerDoc ? "--> " : "") + "doc=" + doc + ", scorerDoc=" + scorerDoc + "\n\t" + (!more ? "--> " : "") + "tscorer.more=" + more + "\n\t" + (scoreDiff > MaxDiff ? "--> " : "") + "scorerScore=" + scorerScore + " scoreDiff=" + scoreDiff + " maxDiff=" + MaxDiff + "\n\t" + (scorerDiff > MaxDiff ? "--> " : "") + "scorerScore2=" + scorerScore2 + " scorerDiff=" + scorerDiff + "\n\thitCollector.Doc=" + doc + " score=" + score + "\n\t Scorer=" + scorer + "\n\t Query=" + q + " " + q.GetType().Name + "\n\t Searcher=" + s + "\n\t Order=" + sbord + "\n\t Op=" + (op == Skip_op ? " skip()" : " next()"));
}
}
catch (IOException e)
{
throw new Exception(e.ToString(), e);
}
}
public virtual void SetNextReader(AtomicReaderContext context)
{
// confirm that skipping beyond the last doc, on the
// previous reader, hits NO_MORE_DOCS
if (LastReader[0] != null)
{
AtomicReader previousReader = LastReader[0];
IndexSearcher indexSearcher = LuceneTestCase.NewSearcher(previousReader, Similarity);
indexSearcher.Similarity = s.Similarity;
Weight w = indexSearcher.CreateNormalizedWeight(q);
AtomicReaderContext ctx = (AtomicReaderContext)indexSearcher.TopReaderContext;
Scorer scorer = w.GetScorer(ctx, ((AtomicReader)ctx.Reader).LiveDocs);
if (scorer != null)
{
bool more = scorer.Advance(LastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
Assert.IsFalse(more, "query's last doc was " + LastDoc[0] + " but skipTo(" + (LastDoc[0] + 1) + ") got to " + scorer.DocID);
}
leafPtr++;
}
LastReader[0] = (AtomicReader)context.Reader;
Debug.Assert(ReaderContextArray[leafPtr].Reader == context.Reader);
// Reset per-leaf state: the independent scorer is recreated lazily in Collect.
this.scorer = null;
LastDoc[0] = -1;
}
public virtual bool AcceptsDocsOutOfOrder
{
get { return false; }
}
}
/// <summary>
/// check that first skip on just created scorers always goes to the right doc</summary>
/// <param name = "similarity" >
/// LUCENENET specific
/// Removes dependency on <see cref="LuceneTestCase.ClassEnv.Similarity"/>
/// </param>
public static void CheckFirstSkipTo(Query q, IndexSearcher s, Similarity similarity)
{
//System.out.println("checkFirstSkipTo: "+q);
const float maxDiff = 1e-3f;
// Single-element arrays so the collector can mutate this state in place.
int[] lastDoc = new int[] { -1 };
AtomicReader[] lastReader = new AtomicReader[] { null };
IList<AtomicReaderContext> context = s.TopReaderContext.Leaves;
s.Search(q, new CollectorAnonymousInnerClassHelper2(q, s, maxDiff, lastDoc, lastReader, context, similarity));
if (lastReader[0] != null)
{
// confirm that skipping beyond the last doc, on the
// previous reader, hits NO_MORE_DOCS
AtomicReader previousReader = lastReader[0];
IndexSearcher indexSearcher = LuceneTestCase.NewSearcher(previousReader, similarity);
indexSearcher.Similarity = s.Similarity;
Weight w = indexSearcher.CreateNormalizedWeight(q);
Scorer scorer = w.GetScorer((AtomicReaderContext)indexSearcher.TopReaderContext, previousReader.LiveDocs);
if (scorer != null)
{
bool more = scorer.Advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
Assert.IsFalse(more, "query's last doc was " + lastDoc[0] + " but skipTo(" + (lastDoc[0] + 1) + ") got to " + scorer.DocID);
}
}
}
// Collector used by CheckFirstSkipTo: for every collected doc, it creates a
// fresh scorer and verifies that Advance(i) lands on that doc (with a stable
// score) for every i between the previously collected doc and this one.
private class CollectorAnonymousInnerClassHelper2 : ICollector
{
private Query q;
private IndexSearcher s;
private float MaxDiff;
private int[] LastDoc;
private AtomicReader[] LastReader;
private IList<AtomicReaderContext> Context;
private readonly Similarity Similarity;
public CollectorAnonymousInnerClassHelper2(Query q, IndexSearcher s, float maxDiff, int[] lastDoc, AtomicReader[] lastReader, IList<AtomicReaderContext> context, Similarity similarity)
{
this.q = q;
this.s = s;
this.MaxDiff = maxDiff;
this.LastDoc = lastDoc;
this.LastReader = lastReader;
this.Context = context;
this.Similarity = similarity;
}
// scorer: supplied by the searcher; leafPtr: index into Context;
// liveDocs: live docs of the current leaf, captured in SetNextReader.
private Scorer scorer;
private int leafPtr;
private IBits liveDocs;
public virtual void SetScorer(Scorer scorer)
{
this.scorer = scorer;
}
public virtual void Collect(int doc)
{
float score = scorer.GetScore();
try
{
long startMS = Environment.TickCount;
// For every candidate start position, a brand-new scorer advanced to i
// must land on this doc with the same score.
for (int i = LastDoc[0] + 1; i <= doc; i++)
{
Weight w = s.CreateNormalizedWeight(q);
Scorer scorer_ = w.GetScorer(Context[leafPtr], liveDocs);
Assert.IsTrue(scorer_.Advance(i) != DocIdSetIterator.NO_MORE_DOCS, "query collected " + doc + " but skipTo(" + i + ") says no more docs!");
Assert.AreEqual(doc, scorer_.DocID, "query collected " + doc + " but skipTo(" + i + ") got to " + scorer_.DocID);
float skipToScore = scorer_.GetScore();
Assert.AreEqual(skipToScore, scorer_.GetScore(), MaxDiff, "unstable skipTo(" + i + ") score!");
Assert.AreEqual(score, skipToScore, MaxDiff, "query assigned doc " + doc + " a score of <" + score + "> but skipTo(" + i + ") has <" + skipToScore + ">!");
// Hurry things along if they are going slow (eg
// if you got SimpleText codec this will kick in):
if (i < doc && Environment.TickCount - startMS > 5)
{
i = doc - 1;
}
}
LastDoc[0] = doc;
}
catch (IOException e)
{
throw new Exception(e.ToString(), e);
}
}
public virtual void SetNextReader(AtomicReaderContext context)
{
// confirm that skipping beyond the last doc, on the
// previous reader, hits NO_MORE_DOCS
if (LastReader[0] != null)
{
AtomicReader previousReader = LastReader[0];
IndexSearcher indexSearcher = LuceneTestCase.NewSearcher(previousReader, Similarity);
indexSearcher.Similarity = s.Similarity;
Weight w = indexSearcher.CreateNormalizedWeight(q);
Scorer scorer = w.GetScorer((AtomicReaderContext)indexSearcher.TopReaderContext, previousReader.LiveDocs);
if (scorer != null)
{
bool more = scorer.Advance(LastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
Assert.IsFalse(more, "query's last doc was " + LastDoc[0] + " but skipTo(" + (LastDoc[0] + 1) + ") got to " + scorer.DocID);
}
leafPtr++;
}
// Reset per-leaf state for the next segment.
LastReader[0] = (AtomicReader)context.Reader;
LastDoc[0] = -1;
liveDocs = ((AtomicReader)context.Reader).LiveDocs;
}
public virtual bool AcceptsDocsOutOfOrder
{
get { return false; }
}
}
}
}
| |
#region License
/*---------------------------------------------------------------------------------*\
Distributed under the terms of an MIT-style license:
The MIT License
Copyright (c) 2006-2010 Stephen M. McKamey
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
\*---------------------------------------------------------------------------------*/
#endregion License
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
namespace JsonFx.Json
{
/// <summary>
/// Writes data as full ECMAScript objects, rather than the limited set of JSON objects.
/// </summary>
public class EcmaScriptWriter : JsonWriter
{
#region Constants
// Epoch used to compute millisecond timestamps for the 1-arg Date constructor.
private static readonly DateTime EcmaScriptEpoch = new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc);
// Format strings for the 1-argument (ms since epoch) and 7-argument
// (y,m,d,h,min,s,ms) ECMAScript Date constructors.
private const string EcmaScriptDateCtor1 = "new Date({0})";
private const string EcmaScriptDateCtor7 = "new Date({0:0000},{1},{2},{3},{4},{5},{6})";
// Emitted in place of an empty pattern so "//" is not read as a comment.
private const string EmptyRegExpLiteral = "(?:)";
private const char RegExpLiteralDelim = '/';
private const char OperatorCharEscape = '\\';
private const string NamespaceDelim = ".";
private static readonly char[] NamespaceDelims = { '.' };
// Templates for namespace declaration/guard emission (debug = pretty-printed).
private const string RootDeclarationDebug =
@"
/* namespace {1} */
var {0};";
private const string RootDeclaration = @"var {0};";
private const string NamespaceCheck =
@"if(""undefined""===typeof {0}){{{0}={{}};}}";
private const string NamespaceCheckDebug =
@"
if (""undefined"" === typeof {0}) {{
{0} = {{}};
}}";
// Well-known browser globals that never need a namespace guard.
private static readonly IList<string> BrowserObjects = new List<string>(new string[]
{
"console",
"document",
"event",
"frames",
"history",
"location",
"navigator",
"opera",
"screen",
"window"
});
#endregion Constants
#region Init
/// <summary>
/// Ctor
/// </summary>
/// <param name="output">TextWriter for writing</param>
public EcmaScriptWriter(TextWriter output)
: base(output)
{
}
/// <summary>
/// Ctor
/// </summary>
/// <param name="output">Stream for writing</param>
public EcmaScriptWriter(Stream output)
: base(output)
{
}
/// <summary>
/// Ctor
/// </summary>
/// <param name="outputFileName">File name for writing</param>
public EcmaScriptWriter(string outputFileName)
: base(outputFileName)
{
}
/// <summary>
/// Ctor
/// </summary>
/// <param name="output">StringBuilder for appending</param>
public EcmaScriptWriter(StringBuilder output)
: base(output)
{
}
#endregion Init
#region Static Methods
/// <summary>
/// A helper method for serializing an object to EcmaScript
/// </summary>
/// <param name="value">the value to serialize</param>
/// <returns>the generated EcmaScript source text</returns>
public static new string Serialize(object value)
{
    StringBuilder buffer = new StringBuilder();

    // The writer flushes into the StringBuilder; dispose before reading it back.
    using (EcmaScriptWriter writer = new EcmaScriptWriter(buffer))
    {
        writer.Write(value);
    }

    return buffer.ToString();
}
/// <summary>
/// Returns a block of script for ensuring that a namespace is declared.
/// </summary>
/// <param name="writer">the output writer</param>
/// <param name="ident">the namespace to ensure</param>
/// <param name="namespaces">list of namespaces already emitted</param>
/// <param name="isDebug">determines if should emit pretty-printed</param>
/// <returns>if was a nested identifier</returns>
public static bool WriteNamespaceDeclaration(TextWriter writer, string ident, List<string> namespaces, bool isDebug)
{
if (String.IsNullOrEmpty(ident))
{
return false;
}
// NOTE(review): this reassignment is local only — a null caller argument is
// not updated, so the dedup list is not shared across such calls.
if (namespaces == null)
{
namespaces = new List<string>();
}
string[] nsParts = ident.Split(EcmaScriptWriter.NamespaceDelims, StringSplitOptions.RemoveEmptyEntries);
string ns = nsParts[0];
bool isNested = false;
// Walk every namespace prefix except the final identifier segment.
for (int i=0; i<nsParts.Length-1; i++)
{
isNested = true;
if (i > 0)
{
ns += EcmaScriptWriter.NamespaceDelim;
ns += nsParts[i];
}
if (namespaces.Contains(ns) ||
EcmaScriptWriter.BrowserObjects.Contains(ns))
{
// don't emit multiple checks for same namespace
continue;
}
// make note that we've emitted this namespace before
namespaces.Add(ns);
// Only the root segment needs a var declaration.
if (i == 0)
{
if (isDebug)
{
writer.Write(EcmaScriptWriter.RootDeclarationDebug, ns,
String.Join(NamespaceDelim, nsParts, 0, nsParts.Length-1));
}
else
{
writer.Write(EcmaScriptWriter.RootDeclaration, ns);
}
}
// Guard each prefix with an "if undefined, create empty object" check.
if (isDebug)
{
writer.WriteLine(EcmaScriptWriter.NamespaceCheckDebug, ns);
}
else
{
writer.Write(EcmaScriptWriter.NamespaceCheck, ns);
}
}
if (isDebug && isNested)
{
writer.WriteLine();
}
return isNested;
}
#endregion Static Methods
#region Writer Methods
/// <summary>
/// Writes dates as ECMAScript Date constructors
/// </summary>
/// <param name="value">the date to serialize</param>
public override void Write(DateTime value)
{
EcmaScriptWriter.WriteEcmaScriptDate(this, value);
}
/// <summary>
/// Writes out all Single values including NaN, Infinity, -Infinity
/// </summary>
/// <param name="value">Single</param>
public override void Write(float value)
{
// "r" (round-trip) format preserves the exact value and the special names.
this.TextWriter.Write(value.ToString("r"));
}
/// <summary>
/// Writes out all Double values including NaN, Infinity, -Infinity
/// </summary>
/// <param name="value">Double</param>
public override void Write(double value)
{
// "r" (round-trip) format preserves the exact value and the special names.
this.TextWriter.Write(value.ToString("r"));
}
/// <summary>
/// Intercepts <see cref="Regex"/> values and emits them as ECMAScript RegExp
/// literals; all other values fall through to the base JSON writer.
/// </summary>
protected override void Write(object value, bool isProperty)
{
    Regex regex = value as Regex;
    if (regex == null)
    {
        base.Write(value, isProperty);
        return;
    }

    // Match base-writer spacing for pretty-printed property values.
    if (isProperty && this.Settings.PrettyPrint)
    {
        this.TextWriter.Write(' ');
    }

    EcmaScriptWriter.WriteEcmaScriptRegExp(this, regex);
}
/// <summary>
/// Emits valid ECMAScript identifiers as bare property names; anything else is
/// quoted/escaped by the base JSON implementation.
/// </summary>
protected override void WriteObjectPropertyName(string name)
{
    if (!EcmaScriptIdentifier.IsValidIdentifier(name, false))
    {
        // write out as an escaped string
        base.WriteObjectPropertyName(name);
        return;
    }

    // write out without quoting
    this.TextWriter.Write(name);
}
// Emits a DateTime as an ECMAScript Date constructor call. Unspecified-kind
// values use the 7-argument constructor (interpreted in the browser's local
// zone); Local/Utc values use milliseconds-since-epoch in UTC.
public static void WriteEcmaScriptDate(JsonWriter writer, DateTime value)
{
if (value.Kind == DateTimeKind.Unspecified)
{
// unknown timezones serialize directly to become browser-local
writer.TextWriter.Write(
EcmaScriptWriter.EcmaScriptDateCtor7,
value.Year, // yyyy
value.Month-1, // 0-11 (JS months are zero-based)
value.Day, // 1-31
value.Hour, // 0-23
value.Minute, // 0-60
value.Second, // 0-60
value.Millisecond); // 0-999
return;
}
if (value.Kind == DateTimeKind.Local)
{
// convert server-local to UTC
value = value.ToUniversalTime();
}
// find the time since Jan 1, 1970
TimeSpan duration = value.Subtract(EcmaScriptWriter.EcmaScriptEpoch);
// get the total milliseconds
long ticks = (long)duration.TotalMilliseconds;
// write out as a Date constructor
writer.TextWriter.Write(
EcmaScriptWriter.EcmaScriptDateCtor1,
ticks);
}
/// <summary>
/// Outputs a .NET Regex as an ECMAScript RegExp literal.
/// Defaults to global matching off.
/// </summary>
/// <param name="writer">the JSON writer to emit through</param>
/// <param name="regex">the regex to serialize</param>
/// <remarks>
/// http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-262.pdf
/// </remarks>
public static void WriteEcmaScriptRegExp(JsonWriter writer, Regex regex)
{
EcmaScriptWriter.WriteEcmaScriptRegExp(writer, regex, false);
}
/// <summary>
/// Outputs a .NET Regex as an ECMAScript RegExp literal.
/// </summary>
/// <param name="writer">the JSON writer to emit through</param>
/// <param name="regex">the regex to serialize; null emits the JSON null literal</param>
/// <param name="isGlobal">whether to append the "g" (global) flag</param>
/// <remarks>
/// http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-262.pdf
/// </remarks>
public static void WriteEcmaScriptRegExp(JsonWriter writer, Regex regex, bool isGlobal)
{
if (regex == null)
{
writer.TextWriter.Write(JsonReader.LiteralNull);
return;
}
// Regex.ToString() returns the original pattern
string pattern = regex.ToString();
if (String.IsNullOrEmpty(pattern))
{
// must output something otherwise becomes a code comment
pattern = EcmaScriptWriter.EmptyRegExpLiteral;
}
// Translate the .NET IgnoreCase/Multiline options into JS "i"/"m" flags.
string modifiers = isGlobal ? "g" : "";
switch (regex.Options & (RegexOptions.IgnoreCase|RegexOptions.Multiline))
{
case RegexOptions.IgnoreCase:
{
modifiers += "i";
break;
}
case RegexOptions.Multiline:
{
modifiers += "m";
break;
}
case RegexOptions.IgnoreCase|RegexOptions.Multiline:
{
modifiers += "im";
break;
}
}
writer.TextWriter.Write(EcmaScriptWriter.RegExpLiteralDelim);
// Copy the pattern, backslash-escaping each unescaped-or-not '/' so it cannot
// terminate the literal early.
// NOTE(review): a '/' already escaped in the pattern ("\/") gets a second
// backslash here — presumably harmless in practice, but worth confirming.
int length = pattern.Length;
int start = 0;
for (int i = start; i < length; i++)
{
switch (pattern[i])
{
case EcmaScriptWriter.RegExpLiteralDelim:
{
writer.TextWriter.Write(pattern.Substring(start, i - start));
start = i + 1;
writer.TextWriter.Write(EcmaScriptWriter.OperatorCharEscape);
writer.TextWriter.Write(pattern[i]);
break;
}
}
}
writer.TextWriter.Write(pattern.Substring(start, length - start));
writer.TextWriter.Write(EcmaScriptWriter.RegExpLiteralDelim);
writer.TextWriter.Write(modifiers);
}
#endregion Writer Methods
}
}
| |
//
// PixbufUtils.cs
//
// Author:
// Stephane Delcroix <[email protected]>
// Ruben Vermeersch <[email protected]>
//
// Copyright (C) 2009-2010 Novell, Inc.
// Copyright (C) 2009 Stephane Delcroix
// Copyright (C) 2009-2010 Ruben Vermeersch
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using Gdk;
using TagLib.Image;
namespace FSpot.Utils
{
/// <summary>
/// Helpers for rotating EXIF-style image orientations and for transforming
/// rectangles, points and pixbufs between oriented and raw coordinate spaces.
/// </summary>
public static class PixbufUtils
{
    /// <summary>
    /// Returns the orientation resulting from rotating the image 270 degrees.
    /// Treats None as TopLeft.
    /// </summary>
    static public ImageOrientation Rotate270 (ImageOrientation orientation)
    {
        if (orientation == ImageOrientation.None) {
            orientation = ImageOrientation.TopLeft;
        }
        // Lookup table indexed by (enum value - 1); assumes ImageOrientation
        // values are the standard EXIF codes 1..8 — TODO confirm against TagLib.
        ImageOrientation [] rot = new ImageOrientation [] {
            ImageOrientation.LeftBottom,
            ImageOrientation.LeftTop,
            ImageOrientation.RightTop,
            ImageOrientation.RightBottom,
            ImageOrientation.BottomLeft,
            ImageOrientation.TopLeft,
            ImageOrientation.TopRight,
            ImageOrientation.BottomRight
        };
        orientation = rot [((int)orientation) -1];
        return orientation;
    }

    /// <summary>
    /// Returns the orientation resulting from rotating the image 90 degrees,
    /// implemented as three successive 270-degree rotations.
    /// </summary>
    static public ImageOrientation Rotate90 (ImageOrientation orientation)
    {
        orientation = Rotate270 (orientation);
        orientation = Rotate270 (orientation);
        orientation = Rotate270 (orientation);
        return orientation;
    }

    /// <summary>
    /// Maps a rectangle expressed in the source pixbuf's coordinates into the
    /// coordinate space of the image after applying the given orientation.
    /// </summary>
    public static Rectangle TransformOrientation (Pixbuf src, Rectangle args, ImageOrientation orientation)
    {
        return TransformOrientation (src.Width, src.Height, args, orientation);
    }

    /// <summary>
    /// Maps a rectangle within a total_width x total_height image into the
    /// coordinate space produced by the given orientation. Orientations that
    /// involve a 90-degree rotation swap the rectangle's width and height.
    /// </summary>
    public static Rectangle TransformOrientation (int total_width, int total_height, Rectangle args, ImageOrientation orientation)
    {
        Rectangle area = args;
        switch (orientation) {
        case ImageOrientation.BottomRight:
            // 180-degree rotation: mirror both axes
            area.X = total_width - args.X - args.Width;
            area.Y = total_height - args.Y - args.Height;
            break;
        case ImageOrientation.TopRight:
            // horizontal flip
            area.X = total_width - args.X - args.Width;
            break;
        case ImageOrientation.BottomLeft:
            // vertical flip
            area.Y = total_height - args.Y - args.Height;
            break;
        case ImageOrientation.LeftTop:
            // transpose: swap axes and dimensions
            area.X = args.Y;
            area.Y = args.X;
            area.Width = args.Height;
            area.Height = args.Width;
            break;
        case ImageOrientation.RightBottom:
            area.X = total_height - args.Y - args.Height;
            area.Y = total_width - args.X - args.Width;
            area.Width = args.Height;
            area.Height = args.Width;
            break;
        case ImageOrientation.RightTop:
            area.X = total_height - args.Y - args.Height;
            area.Y = args.X;
            area.Width = args.Height;
            area.Height = args.Width;
            break;
        case ImageOrientation.LeftBottom:
            area.X = args.Y;
            area.Y = total_width - args.X - args.Width;
            area.Width = args.Height;
            area.Height = args.Width;
            break;
        default:
            // TopLeft / None: identity
            break;
        }
        return area;
    }

    /// <summary>
    /// Maps a point within a total_width x total_height image into the
    /// coordinate space produced by the given orientation.
    /// </summary>
    public static Point TransformOrientation (int total_width, int total_height, Point args, ImageOrientation orientation)
    {
        Point p = args;
        switch (orientation) {
        default:
        case ImageOrientation.TopLeft:
            // identity
            break;
        case ImageOrientation.TopRight:
            p.X = total_width - p.X;
            break;
        case ImageOrientation.BottomRight:
            p.X = total_width - p.X;
            p.Y = total_height - p.Y;
            break;
        case ImageOrientation.BottomLeft:
            p.Y = total_height - p.Y;
            break;
        case ImageOrientation.LeftTop:
            // transpose
            p.X = args.Y;
            p.Y = args.X;
            break;
        case ImageOrientation.RightTop:
            p.X = total_height - args.Y;
            p.Y = args.X;
            break;
        case ImageOrientation.RightBottom:
            p.X = total_height - args.Y;
            p.Y = total_width - args.X;
            break;
        case ImageOrientation.LeftBottom:
            p.X = args.Y;
            p.Y = total_width - args.X;
            break;
        }
        return p;
    }

    /// <summary>
    /// Returns the orientation that undoes the given orientation. Flips and the
    /// 180-degree rotation are their own inverse; the transposed orientations
    /// pair up (LeftTop &lt;-&gt; RightBottom, RightTop &lt;-&gt; LeftBottom).
    /// </summary>
    public static ImageOrientation ReverseTransformation (ImageOrientation orientation)
    {
        switch (orientation) {
        default:
        case ImageOrientation.TopLeft:
        case ImageOrientation.TopRight:
        case ImageOrientation.BottomRight:
        case ImageOrientation.BottomLeft:
            return orientation;
        case ImageOrientation.LeftTop:
            return ImageOrientation.RightBottom;
        case ImageOrientation.RightTop:
            return ImageOrientation.LeftBottom;
        case ImageOrientation.RightBottom:
            return ImageOrientation.LeftTop;
        case ImageOrientation.LeftBottom:
            return ImageOrientation.RightTop;
        }
    }

    /// <summary>
    /// Returns a new pixbuf with the given orientation applied as actual pixel
    /// rotations/flips. Always returns a new pixbuf (a shallow copy for the
    /// identity orientation); the caller owns the result.
    /// </summary>
    public static Pixbuf TransformOrientation (Pixbuf src, ImageOrientation orientation)
    {
        Pixbuf dest;

        switch (orientation) {
        default:
        case ImageOrientation.TopLeft:
            dest = PixbufUtils.ShallowCopy (src);
            break;
        case ImageOrientation.TopRight:
            dest = src.Flip (false);
            break;
        case ImageOrientation.BottomRight:
            dest = src.RotateSimple (PixbufRotation.Upsidedown);
            break;
        case ImageOrientation.BottomLeft:
            dest = src.Flip (true);
            break;
        case ImageOrientation.LeftTop:
            // rotate then flip; intermediate pixbuf is disposed promptly
            using (var rotated = src.RotateSimple (PixbufRotation.Clockwise)) {
                dest = rotated.Flip (false);
            }
            break;
        case ImageOrientation.RightTop:
            dest = src.RotateSimple (PixbufRotation.Clockwise);
            break;
        case ImageOrientation.RightBottom:
            using (var rotated = src.RotateSimple (PixbufRotation.Counterclockwise)) {
                dest = rotated.Flip (false);
            }
            break;
        case ImageOrientation.LeftBottom:
            dest = src.RotateSimple (PixbufRotation.Counterclockwise);
            break;
        }

        return dest;
    }

    /// <summary>
    /// Returns a new pixbuf viewing the same full area as the source, or null
    /// if the source is null.
    /// </summary>
    public static Pixbuf ShallowCopy (this Pixbuf pixbuf)
    {
        if (pixbuf == null)
            return null;
        Pixbuf result = new Pixbuf (pixbuf, 0, 0, pixbuf.Width, pixbuf.Height);
        return result;
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using EnvDTE;
using Microsoft.VisualStudio.Shell.Interop;
using NuGet.Resources;
using NuGet.VisualStudio.Resources;
namespace NuGet.VisualStudio
{
public class VsPackageManager : PackageManager, IVsPackageManager
{
// Repository shared by all projects in the solution (the solution 'packages' folder).
private readonly ISharedPackageRepository _sharedRepository;
// Cache of per-project managers keyed by project unique name (case-insensitive).
private readonly IDictionary<string, IProjectManager> _projects;
private readonly ISolutionManager _solutionManager;
private readonly IFileSystemProvider _fileSystemProvider;
private readonly IDeleteOnRestartManager _deleteOnRestartManager;
private readonly VsPackageInstallerEvents _packageEvents;
// Whether binding redirects are applied on install; on by default.
private bool _bindingRedirectEnabled = true;
private readonly IVsFrameworkMultiTargeting _frameworkMultiTargeting;
// NOTE(review): presumably toggled by the Start*Operation helpers while a
// source-repository operation is in flight — not visible in this file section.
private bool _repositoryOperationPending;
/// <summary>
/// Creates a package manager bound to the given solution, source repository and
/// shared (solution-level) repository. The base PackageManager is configured to
/// resolve package paths inside the supplied file system.
/// </summary>
public VsPackageManager(ISolutionManager solutionManager,
        IPackageRepository sourceRepository,
        IFileSystemProvider fileSystemProvider,
        IFileSystem fileSystem,
        ISharedPackageRepository sharedRepository,
        IDeleteOnRestartManager deleteOnRestartManager,
        VsPackageInstallerEvents packageEvents,
        IVsFrameworkMultiTargeting frameworkMultiTargeting = null)
    : base(sourceRepository, new DefaultPackagePathResolver(fileSystem), fileSystem, sharedRepository)
{
    _solutionManager = solutionManager;
    _sharedRepository = sharedRepository;
    _packageEvents = packageEvents;
    _fileSystemProvider = fileSystemProvider;
    _deleteOnRestartManager = deleteOnRestartManager;
    _frameworkMultiTargeting = frameworkMultiTargeting;
    // Project unique names are compared case-insensitively.
    _projects = new Dictionary<string, IProjectManager>(StringComparer.OrdinalIgnoreCase);
}
/// <summary>
/// Gets or sets whether binding redirects are added when installing packages.
/// Defaults to true.
/// </summary>
public bool BindingRedirectEnabled
{
    get { return _bindingRedirectEnabled; }
    set { _bindingRedirectEnabled = value; }
}
/// <summary>
/// Creates and caches a project manager for the project when one is not
/// already cached; otherwise does nothing.
/// </summary>
internal void EnsureCached(Project project)
{
    string uniqueName = project.GetUniqueName();
    if (!_projects.ContainsKey(uniqueName))
    {
        _projects[uniqueName] = CreateProjectManager(project);
    }
}
/// <summary>
/// Returns the cached project manager for the given project, creating and
/// caching it on first use.
/// </summary>
public virtual IProjectManager GetProjectManager(Project project)
{
    EnsureCached(project);

    IProjectManager cachedManager;
    bool found = _projects.TryGetValue(project.GetUniqueName(), out cachedManager);
    // EnsureCached guarantees the entry exists; an absent entry is a programming error.
    Debug.Assert(found, "Unknown project");
    return cachedManager;
}
/// <summary>
/// Builds a ProjectManager for the given project: wraps the project in a
/// project system, backs it with a packages.config-based reference repository,
/// and registers that repository with the shared solution repository.
/// </summary>
private IProjectManager CreateProjectManager(Project project)
{
    // Create the project system
    IProjectSystem projectSystem = VsProjectSystemFactory.CreateProjectSystem(project, _fileSystemProvider);
    var repository = new PackageReferenceRepository(projectSystem, project.GetProperName(), _sharedRepository);

    // Ensure the logger is null while registering the repository
    FileSystem.Logger = null;
    Logger = null;

    // Ensure that this repository is registered with the shared repository if it needs to be
    repository.RegisterIfNecessary();

    // The source repository of the project is an aggregate since it might need to look for all
    // available packages to perform updates on dependent packages
    var sourceRepository = CreateProjectManagerSourceRepository();

    var projectManager = new ProjectManager(sourceRepository, PathResolver, projectSystem, repository);

    // The package reference repository also provides constraints for packages (via the allowedVersions attribute)
    projectManager.ConstraintProvider = repository;
    return projectManager;
}
/// <summary>
/// Installs a package into multiple projects: executes the pre-resolved
/// operations and adds a package reference to each project.
/// </summary>
/// <exception cref="ArgumentNullException">package, operations or projects is null</exception>
public void InstallPackage(
    IEnumerable<Project> projects,
    IPackage package,
    IEnumerable<PackageOperation> operations,
    bool ignoreDependencies,
    bool allowPrereleaseVersions,
    ILogger logger,
    IPackageOperationEventListener packageOperationEventListener)
{
    if (package == null)
    {
        throw new ArgumentNullException("package");
    }

    if (operations == null)
    {
        throw new ArgumentNullException("operations");
    }

    if (projects == null)
    {
        throw new ArgumentNullException("projects");
    }

    // Scope the whole multi-project install under a single install-operation marker.
    using (StartInstallOperation(package.Id, package.Version.ToString()))
    {
        ExecuteOperationsWithPackage(
            projects,
            package,
            operations,
            projectManager => AddPackageReference(projectManager, package.Id, package.Version, ignoreDependencies, allowPrereleaseVersions),
            logger,
            packageOperationEventListener);
    }
}
/// <summary>
/// Installs a package into the given project, including assembly references.
/// </summary>
public virtual void InstallPackage(
    IProjectManager projectManager,
    string packageId,
    SemanticVersion version,
    bool ignoreDependencies,
    bool allowPrereleaseVersions,
    ILogger logger)
{
    // Delegate to the full overload; assembly references are included by default.
    InstallPackage(projectManager, packageId, version, ignoreDependencies, allowPrereleaseVersions,
        skipAssemblyReferences: false, logger: logger);
}
/// <summary>
/// Resolves the requested package from the source (or local) repository and
/// installs it into the packages folder and, unless in what-if mode, into the
/// given project. Optionally skips adding assembly references.
/// </summary>
public void InstallPackage(
    IProjectManager projectManager,
    string packageId,
    SemanticVersion version,
    bool ignoreDependencies,
    bool allowPrereleaseVersions,
    bool skipAssemblyReferences,
    ILogger logger)
{
    try
    {
        InitializeLogger(logger, projectManager);

        IPackage package = PackageRepositoryHelper.ResolvePackage(SourceRepository, LocalRepository, packageId, version, allowPrereleaseVersions);

        using (StartInstallOperation(packageId, package.Version.ToString()))
        {
            if (skipAssemblyReferences)
            {
                // wrap so the project-level install ignores assembly references
                package = new SkipAssemblyReferencesPackage(package);
            }

            RunSolutionAction(() =>
            {
                InstallPackage(
                    package,
                    projectManager != null ? projectManager.Project.TargetFramework : null,
                    ignoreDependencies,
                    allowPrereleaseVersions);

                // In what-if mode only the solution-level operations are simulated;
                // no project reference is added.
                if (!WhatIf)
                {
                    AddPackageReference(projectManager, package, ignoreDependencies, allowPrereleaseVersions);
                }
            });
        }
    }
    finally
    {
        ClearLogger(projectManager);
    }
}
/// <summary>
/// Installs an already-resolved package into a single project by executing the
/// pre-resolved operations and then adding the project's package reference.
/// </summary>
/// <exception cref="ArgumentNullException">package or operations is null</exception>
public void InstallPackage(IProjectManager projectManager, IPackage package, IEnumerable<PackageOperation> operations, bool ignoreDependencies,
        bool allowPrereleaseVersions, ILogger logger)
{
    if (package == null)
    {
        throw new ArgumentNullException("package");
    }

    if (operations == null)
    {
        throw new ArgumentNullException("operations");
    }

    using (StartInstallOperation(package.Id, package.Version.ToString()))
    {
        ExecuteOperationsWithPackage(
            projectManager,
            package,
            operations,
            () => AddPackageReference(projectManager, package.Id, package.Version, ignoreDependencies, allowPrereleaseVersions),
            logger);
    }
}
/// <summary>
/// Uninstalls a package from the given project without logging.
/// </summary>
public void UninstallPackage(IProjectManager projectManager, string packageId, SemanticVersion version, bool forceRemove, bool removeDependencies)
{
    // Delegate with a no-op logger.
    UninstallPackage(projectManager, packageId, version, forceRemove, removeDependencies, NullLogger.Instance);
}
/// <summary>
/// Uninstalls a package either from the given project (project-level package)
/// or from the packages folder (solution-level package), raising the
/// uninstalling/uninstalled notifications while the operation runs.
/// </summary>
public virtual void UninstallPackage(IProjectManager projectManager, string packageId, SemanticVersion version, bool forceRemove, bool removeDependencies, ILogger logger)
{
    EventHandler<PackageOperationEventArgs> uninstallingHandler =
        (sender, e) => _packageEvents.NotifyUninstalling(e);

    EventHandler<PackageOperationEventArgs> uninstalledHandler =
        (sender, e) => _packageEvents.NotifyUninstalled(e);

    try
    {
        InitializeLogger(logger, projectManager);

        bool appliesToProject;
        IPackage package = FindLocalPackage(projectManager,
                                            packageId,
                                            version,
                                            CreateAmbiguousUninstallException,
                                            out appliesToProject);

        PackageUninstalling += uninstallingHandler;
        PackageUninstalled += uninstalledHandler;

        if (appliesToProject)
        {
            RemovePackageReference(projectManager, packageId, forceRemove, removeDependencies);
        }
        else
        {
            UninstallPackage(package, forceRemove, removeDependencies);
        }
    }
    finally
    {
        // Always detach the notification handlers, even on failure.
        PackageUninstalling -= uninstallingHandler;
        PackageUninstalled -= uninstalledHandler;
        ClearLogger(projectManager);
    }
}
/// <summary>
/// Updates the given package in the specified projects by executing the
/// pre-resolved operations and updating each project's package reference.
/// </summary>
/// <exception cref="ArgumentNullException">package, operations or projects is null</exception>
public void UpdatePackage(
    IEnumerable<Project> projects,
    IPackage package,
    IEnumerable<PackageOperation> operations,
    bool updateDependencies,
    bool allowPrereleaseVersions,
    ILogger logger,
    IPackageOperationEventListener packageOperationEventListener)
{
    // Validate 'package' before dereferencing package.Id/package.Version below;
    // mirrors the argument checks performed by InstallPackage(projects, ...).
    if (package == null)
    {
        throw new ArgumentNullException("package");
    }

    if (operations == null)
    {
        throw new ArgumentNullException("operations");
    }

    if (projects == null)
    {
        throw new ArgumentNullException("projects");
    }

    // Scope the whole multi-project update under a single update-operation marker.
    using (StartUpdateOperation(package.Id, package.Version.ToString()))
    {
        ExecuteOperationsWithPackage(
            projects,
            package,
            operations,
            projectManager => UpdatePackageReference(projectManager, package.Id, package.Version, updateDependencies, allowPrereleaseVersions),
            logger,
            packageOperationEventListener);
    }
}
/// <summary>
/// Updates a package in the given project to the specified version, resolving
/// the target package from the source repository.
/// </summary>
public virtual void UpdatePackage(IProjectManager projectManager, string packageId, SemanticVersion version, bool updateDependencies, bool allowPrereleaseVersions, ILogger logger)
{
    UpdatePackage(projectManager,
                    packageId,
                    () => UpdatePackageReference(projectManager, packageId, version, updateDependencies, allowPrereleaseVersions),
                    () => SourceRepository.FindPackage(packageId, version, allowPrereleaseVersions, allowUnlisted: false),
                    updateDependencies,
                    allowPrereleaseVersions,
                    logger);
}
/// <summary>
/// Core single-project update: locates the installed package, resolves the
/// target package, and either runs the project action (project-level package)
/// or updates the solution-level package directly.
/// </summary>
/// <param name="projectAction">performs the project-level reference update</param>
/// <param name="resolvePackage">resolves the package to update to; may return null</param>
private void UpdatePackage(IProjectManager projectManager, string packageId, Action projectAction, Func<IPackage> resolvePackage, bool updateDependencies, bool allowPrereleaseVersions, ILogger logger)
{
    try
    {
        InitializeLogger(logger, projectManager);

        bool appliesToProject;
        IPackage package = FindLocalPackageForUpdate(projectManager, packageId, out appliesToProject);

        // Find the package we're going to update to
        IPackage newPackage = resolvePackage();

        // Note: any version difference triggers the update, not only upgrades.
        if (newPackage != null && package.Version != newPackage.Version)
        {
            using (StartUpdateOperation(packageId, newPackage.Version.ToString()))
            {
                if (appliesToProject)
                {
                    RunSolutionAction(projectAction);
                }
                else
                {
                    // We might be updating a solution only package
                    UpdatePackage(newPackage, updateDependencies, allowPrereleaseVersions);
                }
            }
        }
        else
        {
            Logger.Log(MessageLevel.Info, VsResources.NoUpdatesAvailable, packageId);
        }
    }
    finally
    {
        ClearLogger(projectManager);
    }
}
/// <summary>
/// Updates multiple packages in a single project by executing the pre-resolved
/// operations once and then updating each package reference in turn.
/// </summary>
/// <exception cref="ArgumentNullException">packages or operations is null</exception>
public void UpdatePackages(IProjectManager projectManager, IEnumerable<IPackage> packages, IEnumerable<PackageOperation> operations, bool updateDependencies, bool allowPrereleaseVersions, ILogger logger)
{
    if (packages == null)
    {
        throw new ArgumentNullException("packages");
    }

    if (operations == null)
    {
        throw new ArgumentNullException("operations");
    }

    // No single package drives this operation, so the marker carries no id/version.
    using (StartUpdateOperation(packageId: null, packageVersion: null))
    {
        ExecuteOperationsWithPackage(
            projectManager,
            null,
            operations,
            () =>
            {
                foreach (var package in packages)
                {
                    UpdatePackageReference(projectManager, package, updateDependencies, allowPrereleaseVersions);
                }
            },
            logger);
    }
}
/// <summary>
/// Updates a package across the solution to the best version satisfying the
/// given version constraint.
/// </summary>
public void UpdatePackage(string packageId, IVersionSpec versionSpec, bool updateDependencies, bool allowPrereleaseVersions, ILogger logger, IPackageOperationEventListener eventListener)
{
    UpdatePackage(packageId,
                    projectManager => UpdatePackageReference(projectManager, packageId, versionSpec, updateDependencies, allowPrereleaseVersions),
                    () => SourceRepository.FindPackage(packageId, versionSpec, allowPrereleaseVersions, allowUnlisted: false),
                    updateDependencies,
                    allowPrereleaseVersions,
                    logger,
                    eventListener);
}
/// <summary>
/// Updates a package across the solution to the exact specified version.
/// </summary>
public void UpdatePackage(string packageId, SemanticVersion version, bool updateDependencies, bool allowPrereleaseVersions, ILogger logger, IPackageOperationEventListener eventListener)
{
    UpdatePackage(packageId,
                    projectManager => UpdatePackageReference(projectManager, packageId, version, updateDependencies, allowPrereleaseVersions),
                    () => SourceRepository.FindPackage(packageId, version, allowPrereleaseVersions, allowUnlisted: false),
                    updateDependencies,
                    allowPrereleaseVersions,
                    logger,
                    eventListener);
}
/// <summary>
/// Updates all packages in the solution to their latest versions (non-safe update).
/// </summary>
public void UpdatePackages(bool updateDependencies, bool allowPrereleaseVersions, ILogger logger, IPackageOperationEventListener eventListener)
{
    UpdatePackages(updateDependencies, safeUpdate: false, allowPrereleaseVersions: allowPrereleaseVersions, logger: logger, eventListener: eventListener);
}
/// <summary>
/// Updates all packages in a single project to their latest versions (non-safe update).
/// </summary>
public void UpdatePackages(IProjectManager projectManager, bool updateDependencies, bool allowPrereleaseVersions, ILogger logger)
{
    UpdatePackages(projectManager, updateDependencies, safeUpdate: false, allowPrereleaseVersions: allowPrereleaseVersions, logger: logger);
}
/// <summary>
/// Executes the given operations against the packages folder, then walks every
/// project in the solution and updates each project's reference for any package
/// whose installed version is older than the corresponding entry in
/// <paramref name="packages"/>. Per-project failures are reported through the
/// event listener and do not stop processing of the remaining projects.
/// </summary>
/// <exception cref="ArgumentNullException">packages or operations is null</exception>
public void UpdateSolutionPackages(IEnumerable<IPackage> packages, IEnumerable<PackageOperation> operations, bool updateDependencies, bool allowPrereleaseVersions, ILogger logger, IPackageOperationEventListener eventListener)
{
    if (packages == null)
    {
        throw new ArgumentNullException("packages");
    }

    if (operations == null)
    {
        throw new ArgumentNullException("operations");
    }

    try
    {
        InitializeLogger(logger, null);

        RunSolutionAction(() =>
        {
            // update all packages in the 'packages' folder
            foreach (var operation in operations)
            {
                Execute(operation);
            }

            if (eventListener == null)
            {
                eventListener = NullPackageOperationEventListener.Instance;
            }

            foreach (Project project in _solutionManager.GetProjects())
            {
                try
                {
                    eventListener.OnBeforeAddPackageReference(project);

                    IProjectManager projectManager = GetProjectManager(project);
                    InitializeLogger(logger, projectManager);

                    foreach (var package in packages)
                    {
                        // only perform update when the local package exists and has smaller version than the new version
                        var localPackage = projectManager.LocalRepository.FindPackage(package.Id);
                        if (localPackage != null && localPackage.Version < package.Version)
                        {
                            UpdatePackageReference(projectManager, package, updateDependencies, allowPrereleaseVersions);
                        }
                    }

                    ClearLogger(projectManager);
                }
                catch (Exception ex)
                {
                    // report the failure but continue with the next project
                    eventListener.OnAddPackageReferenceError(project, ex);
                }
                finally
                {
                    eventListener.OnAfterAddPackageReference(project);
                }
            }
        });
    }
    finally
    {
        ClearLogger(null);
    }
}
/// <summary>
/// Safe-updates all packages in a single project (stays within each package's
/// safe version range).
/// </summary>
public void SafeUpdatePackages(IProjectManager projectManager, bool updateDependencies, bool allowPrereleaseVersions, ILogger logger)
{
    UpdatePackages(projectManager, updateDependencies, safeUpdate: true, allowPrereleaseVersions: allowPrereleaseVersions, logger: logger);
}
/// <summary>
/// Safe-updates a package across the solution: the target version is
/// constrained to the package's safe version range; pre-release versions are
/// never selected.
/// </summary>
public void SafeUpdatePackage(string packageId, bool updateDependencies, bool allowPrereleaseVersions, ILogger logger, IPackageOperationEventListener eventListener)
{
    UpdatePackage(packageId,
                    projectManager => UpdatePackageReference(projectManager, packageId, GetSafeRange(projectManager, packageId), updateDependencies, allowPrereleaseVersions),
                    () => SourceRepository.FindPackage(packageId, GetSafeRange(packageId), allowPrereleaseVersions: false, allowUnlisted: false),
                    updateDependencies,
                    allowPrereleaseVersions,
                    logger,
                    eventListener);
}
/// <summary>
/// Safe-updates a package in a single project: the target version is
/// constrained to the package's safe version range; pre-release versions are
/// never selected.
/// </summary>
public void SafeUpdatePackage(IProjectManager projectManager, string packageId, bool updateDependencies, bool allowPrereleaseVersions, ILogger logger)
{
    UpdatePackage(projectManager,
                    packageId,
                    () => UpdatePackageReference(projectManager, packageId, GetSafeRange(projectManager, packageId), updateDependencies, allowPrereleaseVersions),
                    () => SourceRepository.FindPackage(packageId, GetSafeRange(packageId), allowPrereleaseVersions: false, allowUnlisted: false),
                    updateDependencies,
                    allowPrereleaseVersions,
                    logger);
}
/// <summary>
/// Safe-updates all packages in the solution (stays within each package's safe
/// version range).
/// </summary>
public void SafeUpdatePackages(bool updateDependencies, bool allowPrereleaseVersions, ILogger logger, IPackageOperationEventListener eventListener)
{
    UpdatePackages(updateDependencies, safeUpdate: true, allowPrereleaseVersions: allowPrereleaseVersions, logger: logger, eventListener: eventListener);
}
// Reinstall all packages in all projects
/// <summary>
/// Reinstalls every installed package: solution-level packages are reinstalled
/// directly; project-level packages are first uninstalled from every project,
/// then reinstalled in dependency order via a single resolved operation set.
/// </summary>
public void ReinstallPackages(
    bool updateDependencies,
    bool allowPrereleaseVersions,
    ILogger logger,
    IPackageOperationEventListener eventListener)
{
    //1) Reinstall solution packages first
    //2) On Each Project, call UninstallAllPackages(IProjectManager, Dictionary<Tuple<string, SemanticVersion>, bool>, out packagesInSourceRepository). And, create a dictionary <projectManager, packages>
    //3) Append all packagesInSourceRepository into allPackagesInSourceRepository
    //4) Call InstallWalker.ResolveOperations(allPackagesInSourceRepository, out IList<IPackage> packagesByDependencyOrder)
    //5) Call for each entry in Dictionary<projectManager, packages>
    //   InitializeLogger, RunSolutionAction( call projectManager.AddPackageReference(IPackage, ..., ...)

    // Change it to array so that the enumeration is not modified during enumeration to reinstall solution packages
    var packages = LocalRepository.GetPackages().ToArray();
    foreach (var package in packages)
    {
        if (!IsProjectLevel(package))
        {
            ReinstallSolutionPackage(package, updateDependencies, allowPrereleaseVersions, logger);
        }
    }

    // Now, take care of project-level packages
    var packagesInProject = new Dictionary<IProjectManager, HashSet<IPackage>>();
    var verifiedPackagesInSourceRepository = new Dictionary<PackageName, IPackage>();
    HashSet<IPackage> allPackagesToBeReinstalled = new HashSet<IPackage>();

    // first uninstall all the packages from each project
    RunActionOnProjects(
        _solutionManager.GetProjects(),
        project =>
        {
            IProjectManager projectManager = GetProjectManager(project);
            HashSet<IPackage> packagesToBeReinstalled;
            UninstallPackagesForReinstall(projectManager, updateDependencies, logger, verifiedPackagesInSourceRepository, out packagesToBeReinstalled);

            Debug.Assert(!packagesInProject.ContainsKey(projectManager));
            packagesInProject[projectManager] = packagesToBeReinstalled;
            allPackagesToBeReinstalled.AddRange(packagesToBeReinstalled);
        },
        logger,
        eventListener ?? NullPackageOperationEventListener.Instance);

    // NOTE THAT allowPrereleaseVersions should be true for pre-release packages alone, even if the user did not specify it
    // since we are trying to reinstall packages here. However, ResolveOperations below will take care of this problem via allowPrereleaseVersionsBasedOnPackage parameter
    var installWalker = new InstallWalker(LocalRepository, SourceRepository, null, logger ?? NullLogger.Instance,
        ignoreDependencies: !updateDependencies, allowPrereleaseVersions: allowPrereleaseVersions);
    IList<IPackage> packagesUninstalledInDependencyOrder;
    var operations = installWalker.ResolveOperations(allPackagesToBeReinstalled, out packagesUninstalledInDependencyOrder, allowPrereleaseVersionsBasedOnPackage: true);

    // reinstall: add a reference back only for the packages a project originally had
    ExecuteOperationsWithPackage(
        _solutionManager.GetProjects(),
        null,
        operations,
        projectManager =>
        {
            foreach (var package in packagesUninstalledInDependencyOrder)
            {
                HashSet<IPackage> packagesToBeReinstalled;
                if (packagesInProject.TryGetValue(projectManager, out packagesToBeReinstalled) && packagesToBeReinstalled.Contains(package))
                {
                    AddPackageReference(projectManager, package, ignoreDependencies: !updateDependencies, allowPrereleaseVersions: allowPrereleaseVersions || !package.IsReleaseVersion());
                }
            }
        },
        logger,
        eventListener);
}
// Reinstall all packages in the specified project
/// <summary>
/// Reinstalls every package installed in a single project: uninstalls them all
/// first, then re-adds references in the order resolved by the install walker.
/// </summary>
public void ReinstallPackages(
    IProjectManager projectManager,
    bool updateDependencies,
    bool allowPrereleaseVersions,
    ILogger logger)
{
    //1) Call UninstallPackagesForReinstall(IProjectManager, Empty Dictionary, out packagesUninstalledForReinstallation)
    //2) Call InstallWalker.ResolveOperations(packagesInSourceRepository, out IList<IPackage> packagesByDependencyOrder)
    //3) Call ExecuteOperationsWithPackage( call projectManager.AddPackageReference(IPackage, ..., ...)
    HashSet<IPackage> packagesToBeReinstalled;
    UninstallPackagesForReinstall(projectManager, updateDependencies, logger, new Dictionary<PackageName, IPackage>(), out packagesToBeReinstalled);

    // NOTE THAT allowPrereleaseVersions should be true for pre-release packages alone, even if the user did not specify it
    // since we are trying to reinstall packages here. However, ResolveOperations below will take care of this problem via allowPrereleaseVersionsBasedOnPackage parameter
    var installWalker = new InstallWalker(projectManager.LocalRepository, SourceRepository, projectManager.Project.TargetFramework, logger ?? NullLogger.Instance,
        ignoreDependencies: !updateDependencies, allowPrereleaseVersions: allowPrereleaseVersions);
    IList<IPackage> packagesUninstalledInDependencyOrder;
    var operations = installWalker.ResolveOperations(packagesToBeReinstalled, out packagesUninstalledInDependencyOrder, allowPrereleaseVersionsBasedOnPackage: true);
    ExecuteOperationsWithPackage(
        projectManager,
        null,
        operations,
        () =>
        {
            foreach (var package in packagesUninstalledInDependencyOrder)
            {
                AddPackageReference(projectManager, package, ignoreDependencies: !updateDependencies, allowPrereleaseVersions: allowPrereleaseVersions || !package.IsReleaseVersion());
            }
        },
        logger);
}
/// <summary>
/// Reinstall the specified package in all projects.
/// </summary>
public void ReinstallPackage(
    string packageId,
    bool updateDependencies,
    bool allowPrereleaseVersions,
    ILogger logger,
    IPackageOperationEventListener eventListener)
{
    bool appliesToProject;
    IPackage package = FindLocalPackage(packageId, out appliesToProject);

    if (!appliesToProject)
    {
        // solution-level package: reinstall directly in the packages folder
        ReinstallSolutionPackage(package, updateDependencies, allowPrereleaseVersions, logger);
    }
    else
    {
        // project-level package: uninstall from and reinstall into every project
        ReinstallPackageToAllProjects(packageId, updateDependencies, allowPrereleaseVersions, logger, eventListener);
    }
}
/// <summary>
/// Reinstall the specified package in the specified project.
/// </summary>
public void ReinstallPackage(
    IProjectManager projectManager,
    string packageId,
    bool updateDependencies,
    bool allowPrereleaseVersions,
    ILogger logger)
{
    bool appliesToProject;
    IPackage package = FindLocalPackageForUpdate(projectManager, packageId, out appliesToProject);

    if (!appliesToProject)
    {
        // solution-level package: reinstall directly in the packages folder
        ReinstallSolutionPackage(package, updateDependencies, allowPrereleaseVersions, logger);
    }
    else
    {
        // project-level package: uninstall from and reinstall into this project
        ReinstallPackageInProject(projectManager, package, updateDependencies, allowPrereleaseVersions, logger);
    }
}
/// <summary>
/// Reinstall the specified package in the specified project, taking care of logging too.
/// Skips (with a warning) when the package is missing from the source repository,
/// since an uninstall could then not be undone.
/// </summary>
private void ReinstallPackageInProject(
    IProjectManager projectManager,
    IPackage package,
    bool updateDependencies,
    bool allowPrereleaseVersions,
    ILogger logger)
{
    logger = logger ?? NullLogger.Instance;
    IDisposable disposableAction = StartReinstallOperation(package.Id, package.Version.ToString());

    try
    {
        InitializeLogger(logger, projectManager);

        logger.Log(MessageLevel.Info, VsResources.ReinstallProjectPackage, package, projectManager.Project.ProjectName);

        // Before we start reinstalling, need to make sure the package exists in the source repository.
        // Otherwise, the package will be uninstalled and can't be reinstalled.
        if (SourceRepository.Exists(package))
        {
            RunSolutionAction(
                () =>
                {
                    UninstallPackage(
                        projectManager,
                        package.Id,
                        package.Version,
                        forceRemove: true,
                        removeDependencies: updateDependencies,
                        logger: logger);

                    // pre-release versions are always allowed when the package itself is pre-release
                    InstallPackage(
                        projectManager,
                        package.Id,
                        package.Version,
                        ignoreDependencies: !updateDependencies,
                        allowPrereleaseVersions: allowPrereleaseVersions || !package.IsReleaseVersion(),
                        logger: logger);
                });
        }
        else
        {
            logger.Log(
                MessageLevel.Warning,
                VsResources.PackageRestoreSkipForProject,
                package.GetFullName(),
                projectManager.Project.ProjectName);
        }
    }
    finally
    {
        ClearLogger(projectManager);
        disposableAction.Dispose();
    }
}
// Reinstall one package in all projects.
// We need to uninstall the package from all projects BEFORE
// reinstalling it back, so that the package will be refreshed from source repository.
private void ReinstallPackageToAllProjects(
    string packageId,
    bool updateDependencies,
    bool allowPrereleaseVersions,
    ILogger logger,
    IPackageOperationEventListener eventListener)
{
    logger = logger ?? NullLogger.Instance;
    eventListener = eventListener ?? NullPackageOperationEventListener.Instance;

    // project -> version installed in that project, so the same version can be restored
    var projectsHasPackage = new Dictionary<Project, SemanticVersion>();
    // version -> whether it exists in the source repository (memoized per version)
    var versionsChecked = new Dictionary<SemanticVersion, bool>();

    // first uninstall from all projects that has the package installed
    RunActionOnProjects(
        _solutionManager.GetProjects(),
        project =>
        {
            IProjectManager projectManager = GetProjectManager(project);

            // find the package version installed in this project
            IPackage projectPackage = projectManager.LocalRepository.FindPackage(packageId);
            if (projectPackage != null)
            {
                bool packageExistInSource;
                if (!versionsChecked.TryGetValue(projectPackage.Version, out packageExistInSource))
                {
                    // version has not been checked, so check it here
                    packageExistInSource = SourceRepository.Exists(packageId, projectPackage.Version);

                    // mark the version as checked so that we don't have to check again if we
                    // encounter another project with the same version.
                    versionsChecked[projectPackage.Version] = packageExistInSource;
                }

                if (packageExistInSource)
                {
                    // save the version installed in this project so that we can restore the correct version later
                    projectsHasPackage.Add(project, projectPackage.Version);

                    UninstallPackage(
                        projectManager,
                        packageId,
                        version: null,
                        forceRemove: true,
                        removeDependencies: updateDependencies,
                        logger: logger);
                }
                else
                {
                    // don't uninstall if reinstalling would be impossible
                    logger.Log(
                        MessageLevel.Warning,
                        VsResources.PackageRestoreSkipForProject,
                        projectPackage.GetFullName(),
                        project.Name);
                }
            }
        },
        logger,
        eventListener);

    // now reinstall back to all the affected projects
    RunActionOnProjects(
        projectsHasPackage.Keys,
        project =>
        {
            var projectManager = GetProjectManager(project);
            if (!projectManager.LocalRepository.Exists(packageId))
            {
                SemanticVersion oldVersion = projectsHasPackage[project];
                using (StartReinstallOperation(packageId, oldVersion.ToString()))
                {
                    // a non-empty SpecialVersion marks a pre-release version,
                    // which must be allowed to reinstall the same version
                    InstallPackage(
                        projectManager,
                        packageId,
                        version: oldVersion,
                        ignoreDependencies: !updateDependencies,
                        allowPrereleaseVersions: allowPrereleaseVersions || !String.IsNullOrEmpty(oldVersion.SpecialVersion),
                        logger: logger);
                }
            }
        },
        logger,
        eventListener);
}
/// <summary>
/// Uninstalls every package in the project that can be resolved from the source
/// repository, collecting the resolved source packages for later reinstallation.
/// Packages not found in the source are skipped with a warning; per-package
/// failures are logged and processing continues.
/// </summary>
/// <param name="verifiedPackagesInSourceRepository">
/// Cross-project memoization cache of source-repository lookups (null values
/// mean "known absent").
/// </param>
/// <param name="packagesToBeReinstalled">receives the source packages to reinstall</param>
private void UninstallPackagesForReinstall(
    IProjectManager projectManager,
    bool updateDependencies,
    ILogger logger,
    Dictionary<PackageName, IPackage> verifiedPackagesInSourceRepository,
    out HashSet<IPackage> packagesToBeReinstalled)
{
    packagesToBeReinstalled = new HashSet<IPackage>();
    logger = logger ?? NullLogger.Instance;

    try
    {
        InitializeLogger(logger, projectManager);

        // snapshot to avoid modifying the repository while enumerating it
        var packages = projectManager.LocalRepository.GetPackages().ToArray();
        foreach(IPackage package in packages)
        {
            IDisposable disposableAction = StartReinstallOperation(package.Id, package.Version.ToString());

            try
            {
                logger.Log(MessageLevel.Info, VsResources.ReinstallProjectPackage, package, projectManager.Project.ProjectName);
                IPackage packageInSourceRepository;
                PackageName packageName = new PackageName(package.Id, package.Version);
                if (!verifiedPackagesInSourceRepository.TryGetValue(packageName, out packageInSourceRepository))
                {
                    packageInSourceRepository = SourceRepository.FindPackage(package.Id, package.Version);
                    verifiedPackagesInSourceRepository[packageName] = packageInSourceRepository;
                }

                if (packageInSourceRepository != null)
                {
                    packagesToBeReinstalled.Add(packageInSourceRepository);

                    RunSolutionAction(
                        () =>
                        {
                            // We set remove dependencies to false since we will remove all the packages anyways
                            UninstallPackage(
                                projectManager,
                                package.Id,
                                package.Version,
                                forceRemove: true,
                                removeDependencies: false,
                                logger: logger);
                        });
                }
                else
                {
                    logger.Log(
                        MessageLevel.Warning,
                        VsResources.PackageRestoreSkipForProject,
                        package.GetFullName(),
                        projectManager.Project.ProjectName);
                }
            }
            catch (PackageNotInstalledException e)
            {
                logger.Log(MessageLevel.Warning, ExceptionUtility.Unwrap(e).Message);
            }
            catch (Exception e)
            {
                logger.Log(MessageLevel.Error, ExceptionUtility.Unwrap(e).Message);
            }
            finally
            {
                // NOTE(review): InitializeLogger runs once before the loop but
                // ClearLogger runs here on every iteration — later iterations may
                // execute with the logger cleared; verify this is intended.
                ClearLogger(projectManager);
                disposableAction.Dispose();
            }
        }
    }
    finally
    {
        ClearLogger(projectManager);
    }
}
/// <summary>
/// Returns the inverse of a package action: Install becomes Uninstall and
/// Uninstall (or anything else) becomes Install.
/// </summary>
private static PackageAction ReverseAction(PackageAction packageAction)
{
    if (packageAction == PackageAction.Install)
    {
        return PackageAction.Uninstall;
    }
    return PackageAction.Install;
}
/// <summary>
/// Reinstalls a solution-level package: uninstalls it and installs the same version
/// back from the source repository, inside a single solution action so a failure
/// rolls back. Packages that no longer exist in the source are skipped with a
/// warning (uninstalling them would lose them for good).
/// </summary>
private void ReinstallSolutionPackage(
    IPackage package,
    bool updateDependencies,
    bool allowPrereleaseVersions,
    ILogger logger)
{
    logger = logger ?? NullLogger.Instance;
    var disposableAction = StartReinstallOperation(package.Id, package.Version.ToString());
    try
    {
        InitializeLogger(logger);
        logger.Log(MessageLevel.Info, VsResources.ReinstallSolutionPackage, package);
        if (SourceRepository.Exists(package))
        {
            RunSolutionAction(
                () =>
                {
                    UninstallPackage(package, forceRemove: true, removeDependencies: !updateDependencies);
                    // Bug 2883: We must NOT use the overload that accepts 'package' object here,
                    // because after the UninstallPackage() call above, the package no longer exists.
                    // Prerelease is force-allowed when the installed package itself is a
                    // prerelease, so the exact same version can be restored.
                    InstallPackage(package.Id, package.Version, ignoreDependencies: !updateDependencies, allowPrereleaseVersions: allowPrereleaseVersions || !package.IsReleaseVersion());
                });
        }
        else
        {
            logger.Log(
                MessageLevel.Warning,
                VsResources.PackageRestoreSkipForSolution,
                package.GetFullName());
        }
    }
    finally
    {
        ClearLogger();
        disposableAction.Dispose();
    }
}
/// <summary>
/// Physically uninstalls the package from the solution-level repository, but only
/// when no project still references it.
/// </summary>
protected override void ExecuteUninstall(IPackage package)
{
    bool stillReferenced = _sharedRepository.IsReferenced(package.Id, package.Version);
    if (stillReferenced)
    {
        // Some project still uses this package; leave it in place.
        return;
    }
    base.ExecuteUninstall(package);
}
/// <summary>
/// Finds the installed package with the given id (any version), using the
/// "ambiguous update" exception when several versions are installed at solution level.
/// </summary>
private IPackage FindLocalPackageForUpdate(IProjectManager projectManager, string packageId, out bool appliesToProject)
{
    // No specific version requested for an update.
    SemanticVersion anyVersion = null;
    return FindLocalPackage(projectManager, packageId, anyVersion, CreateAmbiguousUpdateException, out appliesToProject);
}
/// <summary>
/// Locates an installed package by id (and optionally version), looking first in the
/// project repository and falling back to the solution-level repository. Sets
/// <paramref name="appliesToProject"/> to indicate whether the operation should be
/// treated as project-level or solution-level.
/// </summary>
/// <param name="getAmbiguousMatchException">Factory for the exception thrown when no
/// version was specified and several versions are installed at solution level.</param>
/// <exception cref="InvalidOperationException">The package is not installed, or a
/// project-level package was requested without a project context.</exception>
private IPackage FindLocalPackage(IProjectManager projectManager,
                                  string packageId,
                                  SemanticVersion version,
                                  Func<IProjectManager, IList<IPackage>, Exception> getAmbiguousMatchException,
                                  out bool appliesToProject)
{
    IPackage package = null;
    bool existsInProject = false;
    appliesToProject = false;
    if (projectManager != null)
    {
        // Try the project repository first
        package = projectManager.LocalRepository.FindPackage(packageId, version);
        existsInProject = package != null;
    }
    // Fallback to the solution repository (it might be a solution only package)
    if (package == null)
    {
        if (version != null)
        {
            // Get the exact package
            package = LocalRepository.FindPackage(packageId, version);
        }
        else
        {
            // Get all packages by this name to see if we find an ambiguous match
            var packages = LocalRepository.FindPackagesById(packageId).ToList();
            if (packages.Count > 1)
            {
                throw getAmbiguousMatchException(projectManager, packages);
            }
            // Pick the only one of default if none match
            package = packages.SingleOrDefault();
        }
    }
    // Can't find the package in the solution or in the project then fail
    if (package == null)
    {
        throw new InvalidOperationException(
            String.Format(CultureInfo.CurrentCulture,
            VsResources.UnknownPackage, packageId));
    }
    appliesToProject = IsProjectLevel(package);
    if (appliesToProject)
    {
        if (!existsInProject)
        {
            if (_sharedRepository.IsReferenced(package.Id, package.Version))
            {
                // If the package doesn't exist in the project and is referenced by other projects
                // then fail.
                if (projectManager != null)
                {
                    if (version == null)
                    {
                        throw new InvalidOperationException(
                            String.Format(CultureInfo.CurrentCulture,
                            VsResources.UnknownPackageInProject,
                            package.Id,
                            projectManager.Project.ProjectName));
                    }
                    // Version was specified: include it in the error via GetFullName().
                    throw new InvalidOperationException(
                        String.Format(CultureInfo.CurrentCulture,
                        VsResources.UnknownPackageInProject,
                        package.GetFullName(),
                        projectManager.Project.ProjectName));
                }
            }
            else
            {
                // Not installed in this project and not referenced by any other project,
                // so treat the operation as solution-level.
                appliesToProject = false;
            }
        }
    }
    // Can't have a project level operation if no project was specified
    if (appliesToProject && projectManager == null)
    {
        throw new InvalidOperationException(VsResources.ProjectNotSpecified);
    }
    return package;
}
/// <summary>
/// Finds an installed package by id only, preferring the highest installed version
/// that is referenced by at least one project. Throws when the package is not
/// installed, when multiple solution-level versions make the request ambiguous, or
/// when no installed copy is referenced by any project.
/// </summary>
internal IPackage FindLocalPackage(string packageId, out bool appliesToProject)
{
    // It doesn't matter if there are multiple versions of the package installed at solution level,
    // we just want to know that one exists.
    var packages = LocalRepository.FindPackagesById(packageId).OrderByDescending(p => p.Version).ToList();
    // Can't find the package in the solution.
    if (!packages.Any())
    {
        throw new InvalidOperationException(
            String.Format(CultureInfo.CurrentCulture,
            VsResources.UnknownPackage, packageId));
    }
    // Walk versions from highest to lowest (see OrderByDescending above).
    foreach (IPackage package in packages)
    {
        appliesToProject = IsProjectLevel(package);
        if (!appliesToProject)
        {
            // Solution-level package: multiple installed versions are ambiguous.
            if (packages.Count > 1)
            {
                throw CreateAmbiguousUpdateException(projectManager: null, packages: packages);
            }
        }
        else if (!_sharedRepository.IsReferenced(package.Id, package.Version))
        {
            Logger.Log(MessageLevel.Warning, String.Format(CultureInfo.CurrentCulture,
                VsResources.Warning_PackageNotReferencedByAnyProject, package.Id, package.Version));
            // Try next package
            continue;
        }
        // Found a package with package Id as 'packageId' which is installed in at least 1 project
        return package;
    }
    // There are one or more packages with package Id as 'packageId'
    // BUT, none of them is installed in a project
    // it's probably a borked install.
    throw new PackageNotInstalledException(
        String.Format(CultureInfo.CurrentCulture,
        VsResources.PackageNotInstalledInAnyProject, packageId));
}
/// <summary>
/// Check to see if this package applies to a project based on three criteria:
/// 1. The package has project content (i.e. content that can be applied to a project lib or content files)
/// 2. The package is referenced by any other project
/// 3. The package has at least one dependency
///
/// This logic will probably fail in one edge case. If there is a meta package that applies to a project
/// that ended up not being installed in any of the projects and it only exists at solution level.
/// If this happens, then we think that the following operation applies to the solution instead of showing an error.
/// To solve that edge case we'd have to walk the graph to find out what the package applies to.
///
/// Technically, the third condition is not totally accurate because a solution-level package can depend on another
/// solution-level package. However, doing that check here is expensive and we haven't seen such a package.
/// This condition here is more geared towards guarding against metadata packages, i.e. we shouldn't treat metadata packages
/// as solution-level ones.
/// </summary>
public bool IsProjectLevel(IPackage package)
{
    // Any single condition is sufficient (short-circuiting OR); the repository
    // reference check is last because it is the most expensive.
    return package.HasProjectContent() ||
           package.DependencySets.SelectMany(p => p.Dependencies).Any() ||
           _sharedRepository.IsReferenced(package.Id, package.Version);
}
/// <summary>
/// Builds the exception for an ambiguous update: "unknown package in project" when a
/// project context exists and any candidate is project-level, otherwise a generic
/// "ambiguous update" error.
/// </summary>
private Exception CreateAmbiguousUpdateException(IProjectManager projectManager, IList<IPackage> packages)
{
    string firstPackageId = packages[0].Id;
    bool projectLevelConflict = projectManager != null && packages.Any(IsProjectLevel);
    if (projectLevelConflict)
    {
        return new InvalidOperationException(
            String.Format(CultureInfo.CurrentCulture,
                VsResources.UnknownPackageInProject,
                firstPackageId,
                projectManager.Project.ProjectName));
    }
    return new InvalidOperationException(
        String.Format(CultureInfo.CurrentCulture,
            VsResources.AmbiguousUpdate,
            firstPackageId));
}
/// <summary>
/// Builds the exception for an ambiguous uninstall: a project-level message when a
/// project context exists and any candidate is project-level, otherwise a generic
/// "ambiguous uninstall" error.
/// </summary>
private Exception CreateAmbiguousUninstallException(IProjectManager projectManager, IList<IPackage> packages)
{
    string firstPackageId = packages[0].Id;
    bool projectLevelConflict = projectManager != null && packages.Any(IsProjectLevel);
    if (projectLevelConflict)
    {
        return new InvalidOperationException(
            String.Format(CultureInfo.CurrentCulture,
                VsResources.AmbiguousProjectLevelUninstal,
                firstPackageId,
                projectManager.Project.ProjectName));
    }
    return new InvalidOperationException(
        String.Format(CultureInfo.CurrentCulture,
            VsResources.AmbiguousUninstall,
            firstPackageId));
}
/// <summary>
/// Removes a package reference from the project, wrapped in a project action so any
/// resulting package installs are rolled back on failure.
/// </summary>
private void RemovePackageReference(IProjectManager projectManager, string packageId, bool forceRemove, bool removeDependencies)
{
    Action removeAction = () => projectManager.RemovePackageReference(packageId, forceRemove, removeDependencies);
    RunProjectAction(projectManager, removeAction);
}
// If the remote package is already determined, consider using the overload which
// directly takes in the remote package — it avoids extra FindPackage calls against
// the source repository.
/// <summary>
/// Updates the project's reference to the package (optionally to an exact version),
/// recording the work as an 'Update' repository operation.
/// </summary>
private void UpdatePackageReference(IProjectManager projectManager, string packageId, SemanticVersion version, bool updateDependencies, bool allowPrereleaseVersions)
{
    string versionString = null;
    if (version != null)
    {
        versionString = version.ToString();
    }
    using (StartUpdateOperation(packageId, versionString))
    {
        RunProjectAction(projectManager, () => projectManager.UpdatePackageReference(packageId, version, updateDependencies, allowPrereleaseVersions));
    }
}
/// <summary>
/// Updates the project's reference to the package within a version range,
/// recording the work as an 'Update' repository operation.
/// </summary>
private void UpdatePackageReference(IProjectManager projectManager, string packageId, IVersionSpec versionSpec, bool updateDependencies, bool allowPrereleaseVersions)
{
    IDisposable operation = StartUpdateOperation(packageId, packageVersion: null);
    try
    {
        RunProjectAction(projectManager, () => projectManager.UpdatePackageReference(packageId, versionSpec, updateDependencies, allowPrereleaseVersions));
    }
    finally
    {
        operation.Dispose();
    }
}
/// <summary>
/// Updates the project's reference to an already-resolved remote package,
/// recording the work as an 'Update' repository operation.
/// </summary>
private void UpdatePackageReference(IProjectManager projectManager, IPackage package, bool updateDependencies, bool allowPrereleaseVersions)
{
    IDisposable operation = StartUpdateOperation(package.Id, package.Version.ToString());
    try
    {
        RunProjectAction(projectManager, () => projectManager.UpdatePackageReference(package, updateDependencies, allowPrereleaseVersions));
    }
    finally
    {
        operation.Dispose();
    }
}
/// <summary>
/// Adds a package reference (by id/version) to the project, wrapped in a project
/// action so any resulting installs are rolled back on failure.
/// </summary>
private void AddPackageReference(IProjectManager projectManager, string packageId, SemanticVersion version, bool ignoreDependencies, bool allowPrereleaseVersions)
{
    Action addAction = () => projectManager.AddPackageReference(packageId, version, ignoreDependencies, allowPrereleaseVersions);
    RunProjectAction(projectManager, addAction);
}
/// <summary>
/// Adds a reference to an already-resolved package to the project, wrapped in a
/// project action so any resulting installs are rolled back on failure.
/// </summary>
private void AddPackageReference(IProjectManager projectManager, IPackage package, bool ignoreDependencies, bool allowPrereleaseVersions)
{
    Action addAction = () => projectManager.AddPackageReference(package, ignoreDependencies, allowPrereleaseVersions);
    RunProjectAction(projectManager, addAction);
}
/// <summary>
/// Executes the resolved package operations at solution level, then applies
/// <paramref name="projectAction"/> to every project, notifying the event listener
/// around each one. Per-project failures are reported through the listener; the
/// whole call only fails (rolling back the solution-level work) when the action
/// succeeded for no project at all.
/// </summary>
private void ExecuteOperationsWithPackage(IEnumerable<Project> projects, IPackage package, IEnumerable<PackageOperation> operations, Action<IProjectManager> projectAction, ILogger logger, IPackageOperationEventListener eventListener)
{
    if (eventListener == null)
    {
        eventListener = NullPackageOperationEventListener.Instance;
    }
    ExecuteOperationsWithPackage(
        null,
        package,
        operations,
        () =>
        {
            bool successfulAtLeastOnce = false;
            foreach (var project in projects)
            {
                try
                {
                    eventListener.OnBeforeAddPackageReference(project);
                    IProjectManager projectManager = GetProjectManager(project);
                    InitializeLogger(logger, projectManager);
                    projectAction(projectManager);
                    successfulAtLeastOnce = true;
                    ClearLogger(projectManager);
                }
                catch (Exception ex)
                {
                    // Report but continue with the remaining projects.
                    eventListener.OnAddPackageReferenceError(project, ex);
                }
                finally
                {
                    eventListener.OnAfterAddPackageReference(project);
                }
            }
            // Throw an exception only if all the update failed for all projects
            // so we rollback any solution level operations that might have happened
            if (projects.Any() && !successfulAtLeastOnce)
            {
                throw new InvalidOperationException(VsResources.OperationFailed);
            }
        },
        logger);
}
/// <summary>
/// Runs the given package operations and then <paramref name="action"/> inside a
/// single solution action (so failures roll back installed packages), with logging
/// attached for the duration of the call. When there are no operations and the
/// package already exists at solution level, an informational message is logged
/// instead.
/// </summary>
private void ExecuteOperationsWithPackage(IProjectManager projectManager, IPackage package, IEnumerable<PackageOperation> operations, Action action, ILogger logger)
{
    try
    {
        InitializeLogger(logger, projectManager);
        RunSolutionAction(() =>
        {
            if (operations.Any())
            {
                foreach (var operation in operations)
                {
                    Execute(operation);
                }
            }
            else if (package != null && LocalRepository.Exists(package))
            {
                // Nothing to do; tell the user the package is already there.
                Logger.Log(MessageLevel.Info, VsResources.Log_PackageAlreadyInstalled, package.GetFullName());
            }
            action();
        });
    }
    finally
    {
        ClearLogger(projectManager);
    }
}
/// <summary>
/// Maps a project manager back to its DTE Project via the solution manager, or
/// returns null when the project system does not implement IVsProjectSystem.
/// </summary>
private Project GetProject(IProjectManager projectManager)
{
    // We only support project systems that implement IVsProjectSystem.
    var vsProjectSystem = projectManager.Project as IVsProjectSystem;
    return vsProjectSystem == null
        ? null
        : _solutionManager.GetProject(vsProjectSystem.UniqueName);
}
/// <summary>
/// Adds assembly binding redirects for the given project after an install/update.
/// Any failure is logged as a warning and never fails the surrounding operation.
/// </summary>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "If we failed to add binding redirects we don't want it to stop the install/update.")]
private void AddBindingRedirects(IProjectManager projectManager)
{
    // Find the project by its unique name
    Project project = GetProject(projectManager);
    // If we can't find the project or it doesn't support binding redirects then don't add any redirects
    if (project == null || !project.SupportsBindingRedirects())
    {
        return;
    }
    try
    {
        RuntimeHelpers.AddBindingRedirects(_solutionManager, project, _fileSystemProvider, _frameworkMultiTargeting);
    }
    catch (Exception e)
    {
        // If there was an error adding binding redirects then print a warning and continue
        Logger.Log(MessageLevel.Warning, String.Format(CultureInfo.CurrentCulture, VsResources.Warning_FailedToAddBindingRedirects, projectManager.Project.ProjectName, e.Message));
    }
}
/// <summary>
/// Attaches the given logger to this manager, its file system and (optionally) the
/// project manager and its project system. Counterpart of ClearLogger.
/// </summary>
private void InitializeLogger(ILogger logger, IProjectManager projectManager = null)
{
    Logger = logger;
    FileSystem.Logger = logger;
    if (projectManager == null)
    {
        return;
    }
    projectManager.Logger = logger;
    projectManager.Project.Logger = logger;
}
/// <summary>
/// Detaches logging from this manager, its file system and (optionally) the project
/// manager and its project system. Counterpart of InitializeLogger.
/// </summary>
private void ClearLogger(IProjectManager projectManager = null)
{
    Logger = null;
    FileSystem.Logger = null;
    if (projectManager == null)
    {
        return;
    }
    projectManager.Logger = null;
    projectManager.Project.Logger = null;
}
/// <summary>
/// Runs the specified action and rolls back any installed packages on failure.
/// Packages installed while the action runs are recorded via the PackageInstalling
/// event; when the action throws, those packages are uninstalled (with logging
/// suppressed) before the exception is rethrown.
/// </summary>
private void RunSolutionAction(Action action)
{
    var packagesAdded = new List<IPackage>();
    EventHandler<PackageOperationEventArgs> installHandler = (sender, e) =>
    {
        // Record packages that we are installing so that if one fails, we can rollback
        packagesAdded.Add(e.Package);
        _packageEvents.NotifyInstalling(e);
    };
    EventHandler<PackageOperationEventArgs> installedHandler = (sender, e) =>
    {
        _packageEvents.NotifyInstalled(e);
    };
    PackageInstalling += installHandler;
    PackageInstalled += installedHandler;
    try
    {
        // Execute the action
        action();
    }
    catch
    {
        if (packagesAdded.Any())
        {
            // Only print the rollback warning if we have something to rollback
            Logger.Log(MessageLevel.Warning, VsResources.Warning_RollingBack);
        }
        // Don't log anything during the rollback
        Logger = null;
        // Rollback the install if it fails
        Uninstall(packagesAdded);
        throw;
    }
    finally
    {
        // Remove the event handler
        PackageInstalling -= installHandler;
        PackageInstalled -= installedHandler;
    }
}
/// <summary>
/// Applies <paramref name="action"/> to each project, notifying the event listener
/// around every one and logging (rather than propagating) per-project failures.
/// </summary>
private void RunActionOnProjects(
    IEnumerable<Project> projects,
    Action<Project> action,
    ILogger logger,
    IPackageOperationEventListener eventListener)
{
    foreach (var currentProject in projects)
    {
        try
        {
            eventListener.OnBeforeAddPackageReference(currentProject);
            action(currentProject);
        }
        catch (Exception failure)
        {
            // A failure in one project must not stop the others.
            logger.Log(MessageLevel.Error, ExceptionUtility.Unwrap(failure).Message);
            eventListener.OnAddPackageReferenceError(currentProject, failure);
        }
        finally
        {
            eventListener.OnAfterAddPackageReference(currentProject);
        }
    }
}
/// <summary>
/// Runs action on the project manager and rolls back any package installs if it fails.
/// While the action runs, references added to the project are tracked (and the
/// corresponding packages installed at solution level on demand) and references
/// removed are tracked, so that on failure the project can be restored and
/// solution-level installs undone. The whole action runs inside a VS batch edit
/// when the project system supports it.
/// </summary>
private void RunProjectAction(IProjectManager projectManager, Action action)
{
    if (projectManager == null)
    {
        return;
    }
    // Keep track of what was added and removed
    var packagesAdded = new Stack<IPackage>();
    var packagesRemoved = new List<IPackage>();
    EventHandler<PackageOperationEventArgs> removeHandler = (sender, e) =>
    {
        packagesRemoved.Add(e.Package);
        _packageEvents.NotifyReferenceRemoved(e);
    };
    EventHandler<PackageOperationEventArgs> addingHandler = (sender, e) =>
    {
        packagesAdded.Push(e.Package);
        _packageEvents.NotifyReferenceAdded(e);
        // If this package doesn't exist at solution level (it might not be because of leveling)
        // then we need to install it.
        if (!LocalRepository.Exists(e.Package))
        {
            ExecuteInstall(e.Package);
        }
    };
    // Try to get the project for this project manager
    Project project = GetProject(projectManager);
    IVsProjectBuildSystem build = null;
    if (project != null)
    {
        build = project.ToVsProjectBuildSystem();
    }
    // Add the handlers
    projectManager.PackageReferenceRemoved += removeHandler;
    projectManager.PackageReferenceAdding += addingHandler;
    try
    {
        if (build != null)
        {
            // Start a batch edit so there is no background compilation until we're done
            // processing project actions
            build.StartBatchEdit();
        }
        action();
        if (!WhatIf && BindingRedirectEnabled && projectManager.Project.IsBindingRedirectSupported)
        {
            // Only add binding redirects if install was successful
            AddBindingRedirects(projectManager);
        }
    }
    catch
    {
        // We need to Remove the handlers here since we're going to attempt
        // a rollback and we don't want modify the collections while rolling back.
        // NOTE(review): they are detached again in 'finally' below; unsubscribing a
        // handler twice is harmless.
        projectManager.PackageReferenceRemoved -= removeHandler;
        projectManager.PackageReferenceAdding -= addingHandler;
        // When things fail attempt a rollback
        RollbackProjectActions(projectManager, packagesAdded, packagesRemoved);
        // Rollback solution packages
        Uninstall(packagesAdded);
        // Clear removed packages so we don't try to remove them again (small optimization)
        packagesRemoved.Clear();
        throw;
    }
    finally
    {
        if (build != null)
        {
            // End the batch edit when we are done.
            build.EndBatchEdit();
        }
        // Remove the handlers
        projectManager.PackageReferenceRemoved -= removeHandler;
        projectManager.PackageReferenceAdding -= addingHandler;
        // Remove any packages that would be removed as a result of updating a dependency or the package itself
        // We can execute the uninstall directly since we don't need to resolve dependencies again.
        Uninstall(packagesRemoved);
    }
}
/// <summary>
/// Restores a project after a failed action: removes every reference that was added
/// during the action, then re-adds every reference that was removed. Logging is
/// suppressed for the duration of the rollback.
/// </summary>
private static void RollbackProjectActions(IProjectManager projectManager, IEnumerable<IPackage> packagesAdded, IEnumerable<IPackage> packagesRemoved)
{
    // Rollback progress is noise; silence the project manager's logger.
    projectManager.Logger = null;
    // First undo the additions...
    foreach (var addedPackage in packagesAdded)
    {
        projectManager.RemovePackageReference(addedPackage, forceRemove: false, removeDependencies: false);
    }
    // ...then restore what was removed.
    foreach (var removedPackage in packagesRemoved)
    {
        projectManager.AddPackageReference(removedPackage, ignoreDependencies: true, allowPrereleaseVersions: true);
    }
}
/// <summary>
/// Uninstalls the given packages from the solution-level repository, or — in
/// what-if mode — only logs the operations that would run.
/// </summary>
private void Uninstall(IEnumerable<IPackage> packages)
{
    // Packages added to the sequence are added in the order in which they were visited. However for operations on satellite packages to work correctly,
    // we need to ensure they are always uninstalled prior to the corresponding core package. To address this, we run it by Reduce which reorders it for us and ensures it
    // returns the minimal set of operations required.
    var packageOperations = packages.Select(p => new PackageOperation(p, PackageAction.Uninstall))
                                    .Reduce();
    foreach (var operation in packageOperations)
    {
        if (WhatIf)
        {
            // Dry run: report the operation without executing it.
            Logger.Log(MessageLevel.Info, NuGet.Resources.NuGetResources.Log_PackageOperation,
                operation.Action,
                operation.Package);
        }
        else
        {
            ExecuteUninstall(operation.Package);
        }
    }
}
/// <summary>
/// Updates the package with the given id. For project-level packages,
/// <paramref name="projectAction"/> is run (inside a solution action) against every
/// project that currently has the package installed; per-project failures are logged
/// and reported through <paramref name="eventListener"/>. For solution-level
/// packages, the package returned by <paramref name="resolvePackage"/> is installed,
/// unless its version equals the installed one, in which case "no updates available"
/// is logged.
/// </summary>
private void UpdatePackage(
    string packageId,
    Action<IProjectManager> projectAction,
    Func<IPackage> resolvePackage,
    bool updateDependencies,
    bool allowPrereleaseVersions,
    ILogger logger,
    IPackageOperationEventListener eventListener)
{
    bool appliesToProject;
    IPackage package = FindLocalPackage(packageId, out appliesToProject);
    if (appliesToProject)
    {
        eventListener = eventListener ?? NullPackageOperationEventListener.Instance;
        foreach (var project in _solutionManager.GetProjects())
        {
            IProjectManager projectManager = GetProjectManager(project);
            try
            {
                InitializeLogger(logger, projectManager);
                // Only touch projects that actually have the package installed.
                if (projectManager.LocalRepository.Exists(packageId))
                {
                    eventListener.OnBeforeAddPackageReference(project);
                    try
                    {
                        RunSolutionAction(() => projectAction(projectManager));
                    }
                    catch (Exception e)
                    {
                        // Log and report, but continue with the remaining projects.
                        logger.Log(MessageLevel.Error, ExceptionUtility.Unwrap(e).Message);
                        eventListener.OnAddPackageReferenceError(project, e);
                    }
                    finally
                    {
                        eventListener.OnAfterAddPackageReference(project);
                    }
                }
            }
            finally
            {
                ClearLogger(projectManager);
            }
        }
    }
    else
    {
        // Find the package we're going to update to
        IPackage newPackage = resolvePackage();
        if (newPackage != null && package.Version != newPackage.Version)
        {
            IDisposable operationDisposable = StartUpdateOperation(newPackage.Id, newPackage.Version.ToString());
            try
            {
                InitializeLogger(logger, projectManager: null);
                // We might be updating a solution only package
                UpdatePackage(newPackage, updateDependencies, allowPrereleaseVersions);
            }
            finally
            {
                ClearLogger(projectManager: null);
                operationDisposable.Dispose();
            }
        }
        else
        {
            logger.Log(MessageLevel.Info, VsResources.NoUpdatesAvailable, packageId);
        }
    }
}
/// <summary>
/// Updates every package installed in the given project, using either safe
/// (version-range constrained) or unconstrained updates.
/// </summary>
private void UpdatePackages(IProjectManager projectManager, bool updateDependencies, bool safeUpdate, bool allowPrereleaseVersions, ILogger logger)
{
    // Decide the update flavor once, then apply it to every installed package.
    Action<IPackage> updateAction;
    if (safeUpdate)
    {
        updateAction = package => SafeUpdatePackage(projectManager, package.Id, updateDependencies, allowPrereleaseVersions, logger);
    }
    else
    {
        updateAction = package => UpdatePackage(projectManager, package.Id, version: null, updateDependencies: updateDependencies,
            allowPrereleaseVersions: allowPrereleaseVersions, logger: logger);
    }
    UpdatePackages(projectManager.LocalRepository, updateAction, logger);
}
/// <summary>
/// Updates every package in the solution-level repository, using either safe
/// (version-range constrained) or unconstrained updates.
/// </summary>
private void UpdatePackages(bool updateDependencies, bool safeUpdate, bool allowPrereleaseVersions, ILogger logger, IPackageOperationEventListener eventListener)
{
    // Decide the update flavor once, then apply it to every installed package.
    Action<IPackage> updateAction;
    if (safeUpdate)
    {
        updateAction = package => SafeUpdatePackage(package.Id, updateDependencies, allowPrereleaseVersions, logger, eventListener);
    }
    else
    {
        updateAction = package => UpdatePackage(package.Id, version: null, updateDependencies: updateDependencies, allowPrereleaseVersions: allowPrereleaseVersions, logger: logger, eventListener: eventListener);
    }
    UpdatePackages(LocalRepository, updateAction, logger);
}
/// <summary>
/// Applies <paramref name="updateAction"/> to every package in the repository in
/// reverse dependency order, logging (rather than propagating) failures so one bad
/// package does not stop the rest.
/// </summary>
private void UpdatePackages(IPackageRepository localRepository, Action<IPackage> updateAction, ILogger logger)
{
    // BUGBUG: TargetFramework should be passed for more efficient package walking
    var packageSorter = new PackageSorter(targetFramework: null);
    // Get the packages in reverse dependency order then run update on each one i.e. if A -> B run Update(A) then Update(B)
    var packages = packageSorter.GetPackagesByDependencyOrder(localRepository).Reverse();
    foreach (var package in packages)
    {
        // While updating we might remove packages that were initially in the list. e.g.
        // A 1.0 -> B 2.0, A 2.0 -> [], since updating to A 2.0 removes B, we end up skipping it.
        if (localRepository.Exists(package.Id))
        {
            try
            {
                updateAction(package);
            }
            catch (PackageNotInstalledException e)
            {
                // Warning only — the package may have been removed by an earlier
                // update in this same loop.
                logger.Log(MessageLevel.Warning, ExceptionUtility.Unwrap(e).Message);
            }
            catch (Exception e)
            {
                logger.Log(MessageLevel.Error, ExceptionUtility.Unwrap(e).Message);
            }
        }
    }
}
/// <summary>
/// Builds the source repository used by project managers: the shared (solution)
/// repository aggregated with the active source, preserving fallback dependency
/// resolution when the active source is a FallbackRepository.
/// </summary>
private IPackageRepository CreateProjectManagerSourceRepository()
{
    // The source repo for the project manager is the aggregate of the shared repo and the selected repo.
    // For dependency resolution, we want VS to look for packages in the selected source and then use the fallback logic
    var fallbackRepository = SourceRepository as FallbackRepository;
    if (fallbackRepository != null)
    {
        var primaryRepositories = new[] { _sharedRepository, fallbackRepository.SourceRepository.Clone() };
        return new FallbackRepository(new AggregateRepository(primaryRepositories), fallbackRepository.DependencyResolver);
    }
    return new AggregateRepository(new[] { _sharedRepository, SourceRepository.Clone() });
}
/// <summary>
/// Computes the "safe" version range (per VersionUtility.GetSafeRange) for the
/// installed solution-level package with the given id.
/// </summary>
private IVersionSpec GetSafeRange(string packageId)
{
    bool appliesToProject;
    var installedPackage = FindLocalPackage(packageId, out appliesToProject);
    return VersionUtility.GetSafeRange(installedPackage.Version);
}
/// <summary>
/// Computes the "safe" version range (per VersionUtility.GetSafeRange) for the
/// package installed in the given project.
/// </summary>
private IVersionSpec GetSafeRange(IProjectManager projectManager, string packageId)
{
    bool appliesToProject;
    var installedPackage = FindLocalPackageForUpdate(projectManager, packageId, out appliesToProject);
    return VersionUtility.GetSafeRange(installedPackage.Version);
}
/// <summary>
/// Raises the base Uninstalled event, then hands the package directory to the
/// delete-on-restart manager (presumably so directories that cannot be deleted
/// now — e.g. locked files — are removed after a restart; behavior lives in
/// _deleteOnRestartManager, not visible here).
/// </summary>
protected override void OnUninstalled(PackageOperationEventArgs e)
{
    base.OnUninstalled(e);
    _deleteOnRestartManager.MarkPackageDirectoryForDeletion(e.Package);
}
/// <summary>
/// Starts an 'Install' repository operation for the given package (see StartOperation).
/// </summary>
private IDisposable StartInstallOperation(string packageId, string packageVersion)
{
    return StartOperation(RepositoryOperationNames.Install, packageId, packageVersion);
}
/// <summary>
/// Starts an 'Update' repository operation for the given package (see StartOperation).
/// </summary>
private IDisposable StartUpdateOperation(string packageId, string packageVersion)
{
    return StartOperation(RepositoryOperationNames.Update, packageId, packageVersion);
}
/// <summary>
/// Starts a 'Reinstall' repository operation for the given package (see StartOperation).
/// </summary>
private IDisposable StartReinstallOperation(string packageId, string packageVersion)
{
    return StartOperation(RepositoryOperationNames.Reinstall, packageId, packageVersion);
}
/// <summary>
/// Marks the start of a named repository operation against the source repository
/// and returns a disposable that ends it. While an operation is pending, nested
/// calls return a no-op so a child operation cannot override the outer one.
/// </summary>
private IDisposable StartOperation(string operation, string packageId, string mainPackageVersion)
{
    // If there's a pending operation, don't allow another one to start.
    // This is for the Reinstall case. Because Reinstall just means
    // uninstalling and installing, we don't want the child install operation
    // to override Reinstall value.
    if (_repositoryOperationPending)
    {
        return DisposableAction.NoOp;
    }
    _repositoryOperationPending = true;
    // Dispose both the repository's operation scope and the pending flag together.
    return DisposableAction.All(
        SourceRepository.StartOperation(operation, packageId, mainPackageVersion),
        new DisposableAction(() => _repositoryOperationPending = false));
}
}
}
| |
using System.Text.RegularExpressions;
using System.Diagnostics;
using System;
using System.Drawing.Drawing2D;
using System.Windows.Forms;
using System.Collections;
using System.Drawing;
using Microsoft.VisualBasic;
using System.Data.SqlClient;
using System.Data;
using System.Collections.Generic;
using WeifenLuo.WinFormsUI;
using Microsoft.Win32;
using WeifenLuo;
using System.ComponentModel;
namespace SoftLogik.Win
{
namespace UI
{
/// <summary>
/// Wrap the TabOrderManager class and supply extendee controls with a custom tab scheme.
/// The provider remembers a tab scheme per extendee control and, when the hosting
/// form's Load event fires, hands all schemes to a VisualTabOrderManager which
/// applies the resulting tab order.
/// </summary>
[ProvideProperty("TabScheme", typeof(Control)), Description("Wrap the TabOrderManager class and supply extendee controls with a custom tab scheme"), ToolboxBitmap(typeof(TabSchemeProvider), "TabSchemeProvider")]public class TabSchemeProvider : Component, IExtenderProvider
{
    #region MEMBER VARIABLES
    /// <summary>
    /// Hashtable to store the controls that use our extender property.
    /// Key: extendee Control; value: its VisualTabOrderManager.TabScheme.
    /// </summary>
    private Hashtable extendees = new Hashtable();
    /// <summary>
    /// The form we're hosted on, which will be calculated by watching the extendees entering the control hierarchy.
    /// </summary>
    private System.Windows.Forms.Form topLevelForm = null;
    #endregion

    #region PUBLIC PROPERTIES
    #endregion

    public TabSchemeProvider()
    {
        InitializeComponent();
    }

    private void InitializeComponent()
    {
    }

    /// <summary>
    /// Get the tab scheme assigned to a given control, or TabScheme.None when the
    /// control is not managed by this provider.
    /// </summary>
    /// <param name="c"></param>
    /// <returns></returns>
    [DefaultValue(VisualTabOrderManager.TabScheme.None)]public VisualTabOrderManager.TabScheme GetTabScheme(Control c)
    {
        if (! extendees.Contains(c))
        {
            return VisualTabOrderManager.TabScheme.None;
        }
        return ((VisualTabOrderManager.TabScheme) (extendees[c]));
    }

    /// <summary>
    /// Hook up to the form load event and indicate that we've done so.
    /// </summary>
    private void HookFormLoad()
    {
        if (topLevelForm != null)
        {
            topLevelForm.Load += new System.EventHandler(TopLevelForm_Load);
        }
    }

    /// <summary>
    /// Unhook from the form load event and indicate that we need to do so again before applying tab schemes.
    /// </summary>
    private void UnhookFormLoad()
    {
        if (topLevelForm != null)
        {
            topLevelForm.Load -= new System.EventHandler(TopLevelForm_Load);
        }
    }

    /// <summary>
    /// Hook up to all of the parent changed events for this control and its ancestors so that we are informed
    /// if and when they are added to the top-level form (whose load event we need).
    /// It's not adequate to look at just the control, because it may have been added to its parent, but the parent
    /// may not be descendent of the form -yet-.
    /// </summary>
    /// <param name="c"></param>
    private void HookParentChangedEvents(Control c)
    {
        while (c != null)
        {
            c.ParentChanged += new System.EventHandler(Extendee_ParentChanged);
            c = c.Parent;
        }
    }

    /// <summary>
    /// Set the tab scheme to use on a given control. Passing TabScheme.None removes
    /// the control from this provider.
    /// </summary>
    /// <param name="c"></param>
    public void SetTabScheme(Control c, VisualTabOrderManager.TabScheme val)
    {
        if (val != VisualTabOrderManager.TabScheme.None)
        {
            extendees[c] = val;
            if (topLevelForm == null)
            {
                // FIX: the previous code only checked TopLevelControl for null and then
                // cast it to Form unconditionally, throwing InvalidCastException when the
                // top-level control is not a Form. Guard with 'is Form' for consistency
                // with Extendee_ParentChanged below.
                if (c.TopLevelControl is Form)
                {
                    // We're in luck.
                    // This control already knows about the form, so take the opportunity
                    // to grab it and wire up to its Load event.
                    topLevelForm = (Form) c.TopLevelControl;
                    HookFormLoad();
                }
                else
                {
                    // Set up to wait around until this control or one of its ancestors is added to the form's control hierarchy.
                    HookParentChangedEvents(c);
                }
            }
        }
        else if (extendees.Contains(c))
        {
            extendees.Remove(c);
            // If we no longer have any extendees, we don't need to be wired up to the form load event.
            if (extendees.Count == 0)
            {
                UnhookFormLoad();
            }
        }
    }

    #region IExtenderProvider Members
    public bool CanExtend(object extendee)
    {
        return ((extendee) is Form|| (extendee) is Panel|| (extendee) is GroupBox|| (extendee) is UserControl);
    }
    #endregion

    /// <summary>
    /// Applies all recorded tab schemes once the form has loaded. The scheme assigned
    /// to the form itself (if any) becomes the root scheme; every other extendee is
    /// registered as an override with the VisualTabOrderManager.
    /// </summary>
    public void TopLevelForm_Load(object sender, EventArgs e)
    {
        Form f = (Form) sender;
        VisualTabOrderManager tom = new VisualTabOrderManager(f);
        // Add an override for everything with a tab scheme set EXCEPT for the form, which
        // serves as the root of the whole process.
        VisualTabOrderManager.TabScheme formScheme = VisualTabOrderManager.TabScheme.None;
        IDictionaryEnumerator extendeeEnumerator = extendees.GetEnumerator();
        while (extendeeEnumerator.MoveNext())
        {
            Control c = (Control) extendeeEnumerator.Key;
            VisualTabOrderManager.TabScheme scheme = (VisualTabOrderManager.TabScheme) extendeeEnumerator.Value;
            if (c == f)
            {
                formScheme = scheme;
            }
            else
            {
                tom.SetSchemeForControl(c, scheme);
            }
        }
        tom.SetTabOrder(formScheme);
    }

    /// <summary>
    /// We track when each extendee's parent is changed, and also when their parents are changed, until
    /// SOMEBODY finally changes their parent to the form, at which point we can hook the load to apply
    /// the tab schemes.
    /// </summary>
    /// <param name="sender"></param>
    /// <param name="e"></param>
    private void Extendee_ParentChanged(object sender, EventArgs e)
    {
        if (topLevelForm != null)
        {
            // We've already found the form and attached a load event handler, so there's nothing left to do.
            return;
        }
        Control c = (Control) sender;
        if (c.TopLevelControl != null&& c.TopLevelControl is Form)
        {
            // We found the form, so we're done.
            topLevelForm = (Form) c.TopLevelControl;
            HookFormLoad();
        }
    }
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.AcceptanceTestsBodyFormData
{
using Microsoft.Rest;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Formdata operations.
/// </summary>
public partial class Formdata : IServiceOperations<AutoRestSwaggerBATFormDataService>, IFormdata
{
/// <summary>
/// Initializes a new instance of the Formdata class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public Formdata(AutoRestSwaggerBATFormDataService client)
{
    if (client == null)
    {
        throw new System.ArgumentNullException("client");
    }
    Client = client;
}
/// <summary>
/// Gets a reference to the AutoRestSwaggerBATFormDataService.
/// Assigned once in the constructor and never null afterwards.
/// </summary>
public AutoRestSwaggerBATFormDataService Client { get; private set; }
/// <summary>
/// Upload file
/// </summary>
/// <param name='fileContent'>
/// File to upload.
/// </param>
/// <param name='fileName'>
/// File name to upload. Name has to be spelled exactly as written here.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<HttpOperationResponse<Stream>> UploadFileWithHttpMessagesAsync(Stream fileContent, string fileName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (fileContent == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "fileContent");
}
if (fileName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "fileName");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("fileContent", fileContent);
tracingParameters.Add("fileName", fileName);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "UploadFile", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "formdata/stream/uploadfile").ToString();
// Create HTTP transport objects
var _httpRequest = new System.Net.Http.HttpRequestMessage();
System.Net.Http.HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new System.Net.Http.HttpMethod("POST");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
System.Net.Http.MultipartFormDataContent _multiPartContent = new System.Net.Http.MultipartFormDataContent();
if (fileContent != null)
{
System.Net.Http.StreamContent _fileContent = new System.Net.Http.StreamContent(fileContent);
_fileContent.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream");
FileStream _fileContentAsFileStream = fileContent as FileStream;
if (_fileContentAsFileStream != null)
{
System.Net.Http.Headers.ContentDispositionHeaderValue _contentDispositionHeaderValue = new System.Net.Http.Headers.ContentDispositionHeaderValue("form-data");
_contentDispositionHeaderValue.Name = "fileContent";
_contentDispositionHeaderValue.FileName = _fileContentAsFileStream.Name;
_fileContent.Headers.ContentDisposition = _contentDispositionHeaderValue;
}
_multiPartContent.Add(_fileContent, "fileContent");
}
if (fileName != null)
{
System.Net.Http.StringContent _fileName = new System.Net.Http.StringContent(fileName, System.Text.Encoding.UTF8);
_multiPartContent.Add(_fileName, "fileName");
}
_httpRequest.Content = _multiPartContent;
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, System.Net.Http.HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (Newtonsoft.Json.JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<Stream>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_result.Body = await _httpResponse.Content.ReadAsStreamAsync().ConfigureAwait(false);
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Upload file
/// </summary>
/// <param name='fileContent'>
/// File to upload.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<HttpOperationResponse<Stream>> UploadFileViaBodyWithHttpMessagesAsync(Stream fileContent, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (fileContent == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "fileContent");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("fileContent", fileContent);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "UploadFileViaBody", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "formdata/stream/uploadfile").ToString();
// Create HTTP transport objects
var _httpRequest = new System.Net.Http.HttpRequestMessage();
System.Net.Http.HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new System.Net.Http.HttpMethod("PUT");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
if(fileContent == null)
{
throw new System.ArgumentNullException("fileContent");
}
if (fileContent != null && fileContent != Stream.Null)
{
_httpRequest.Content = new System.Net.Http.StreamContent(fileContent);
_httpRequest.Content.Headers.ContentType =System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/octet-stream");
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, System.Net.Http.HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (Newtonsoft.Json.JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<Stream>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_result.Body = await _httpResponse.Content.ReadAsStreamAsync().ConfigureAwait(false);
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
}
}
| |
using System;
using UnityEngine;
using UnityStandardAssets.CrossPlatformInput;
using UnityStandardAssets.Utility;
using Random = UnityEngine.Random;
namespace UnityStandardAssets.Characters.FirstPerson
{
    /// <summary>
    /// First-person character controller: reads cross-platform input, moves a
    /// CharacterController (walk/run/jump with gravity), applies mouse look,
    /// head bob, FOV kick and footstep/jump/land audio. Also lerps this
    /// transform toward a follow target each physics step (see FixedUpdate).
    /// </summary>
    [RequireComponent(typeof (CharacterController))]
    [RequireComponent(typeof (AudioSource))]
    public class FirstPersonController : MonoBehaviour
    {
        [SerializeField] private bool m_IsWalking;                 // true = walk speed, false = run speed
        [SerializeField] private float m_WalkSpeed;
        [SerializeField] private float m_RunSpeed;
        [SerializeField] [Range(0f, 1f)] private float m_RunstepLenghten;  // lengthens step interval while running
        [SerializeField] private float m_JumpSpeed;
        [SerializeField] private float m_StickToGroundForce;       // constant downward push while grounded
        [SerializeField] private float m_GravityMultiplier;
        [SerializeField] private MouseLook m_MouseLook;
        [SerializeField] private bool m_UseFovKick;
        [SerializeField] private FOVKick m_FovKick = new FOVKick();
        [SerializeField] private bool m_UseHeadBob;
        [SerializeField] private CurveControlledBob m_HeadBob = new CurveControlledBob();
        [SerializeField] private LerpControlledBob m_JumpBob = new LerpControlledBob();
        [SerializeField] private float m_StepInterval;
        [SerializeField] private AudioClip[] m_FootstepSounds;    // an array of footstep sounds that will be randomly selected from.
        [SerializeField] private AudioClip m_JumpSound;           // the sound played when character leaves the ground.
        [SerializeField] private AudioClip m_LandSound;           // the sound played when character touches back on ground.
        public Transform target;                    // The position that that camera will be following.
        public float smoothing = 5f;                // follow-lerp speed factor (higher = snappier)
        Vector3 offset;                             // captured (this - target) offset at Start; kept constant while following
        private Camera m_Camera;
        private bool m_Jump;                        // latched jump request, consumed in FixedUpdate
        private float m_YRotation;                  // NOTE(review): appears unused in this class — confirm before removing
        private Vector2 m_Input;
        private Vector3 m_MoveDir = Vector3.zero;
        private CharacterController m_CharacterController;
        private CollisionFlags m_CollisionFlags;    // result of the last CharacterController.Move
        private bool m_PreviouslyGrounded;
        private Vector3 m_OriginalCameraPosition;
        private float m_StepCycle;
        private float m_NextStep;
        private bool m_Jumping;
        private AudioSource m_AudioSource;
        // Use this for initialization
        private void Start()
        {
            // Cache components once; Camera.main is looked up here rather than per frame.
            m_CharacterController = GetComponent<CharacterController>();
            m_Camera = Camera.main;
            m_OriginalCameraPosition = m_Camera.transform.localPosition;
            m_FovKick.Setup(m_Camera);
            m_HeadBob.Setup(m_Camera, m_StepInterval);
            m_StepCycle = 0f;
            m_NextStep = m_StepCycle/2f;
            m_Jumping = false;
            m_AudioSource = GetComponent<AudioSource>();
            m_MouseLook.Init(transform , m_Camera.transform);
            // Record the initial displacement from the follow target.
            offset = transform.position - target.position;
        }
        // Update is called once per frame
        private void Update()
        {
            RotateView();
            // the jump state needs to read here to make sure it is not missed
            if (!m_Jump)
            {
                m_Jump = CrossPlatformInputManager.GetButtonDown("Jump");
            }
            // Landing transition: just became grounded after being airborne.
            if (!m_PreviouslyGrounded && m_CharacterController.isGrounded)
            {
                StartCoroutine(m_JumpBob.DoBobCycle());
                PlayLandingSound();
                m_MoveDir.y = 0f;
                m_Jumping = false;
            }
            // Walked off a ledge (airborne without having jumped): cancel vertical velocity.
            if (!m_CharacterController.isGrounded && !m_Jumping && m_PreviouslyGrounded)
            {
                m_MoveDir.y = 0f;
            }
            m_PreviouslyGrounded = m_CharacterController.isGrounded;
        }
        private void PlayLandingSound()
        {
            m_AudioSource.clip = m_LandSound;
            m_AudioSource.Play();
            // Delay the next footstep so it doesn't overlap the landing sound.
            m_NextStep = m_StepCycle + .5f;
        }
        private void FixedUpdate()
        {
            float speed;
            GetInput(out speed);
            // always move along the camera forward as it is the direction that it being aimed at
            Vector3 desiredMove = transform.forward*m_Input.y + transform.right*m_Input.x;
            // get a normal for the surface that is being touched to move along it
            RaycastHit hitInfo;
            Physics.SphereCast(transform.position, m_CharacterController.radius, Vector3.down, out hitInfo,
                               m_CharacterController.height/2f);
            desiredMove = Vector3.ProjectOnPlane(desiredMove, hitInfo.normal).normalized;
            m_MoveDir.x = desiredMove.x*speed;
            m_MoveDir.z = desiredMove.z*speed;
            if (m_CharacterController.isGrounded)
            {
                // Keep the controller pressed to the ground so isGrounded stays stable on slopes.
                m_MoveDir.y = -m_StickToGroundForce;
                if (m_Jump)
                {
                    m_MoveDir.y = m_JumpSpeed;
                    PlayJumpSound();
                    m_Jump = false;
                    m_Jumping = true;
                }
            }
            else
            {
                m_MoveDir += Physics.gravity*m_GravityMultiplier*Time.fixedDeltaTime;
            }
            m_CollisionFlags = m_CharacterController.Move(m_MoveDir*Time.fixedDeltaTime);
            ProgressStepCycle(speed);
            UpdateCameraPosition(speed);
            // Follow-target behaviour: pull this transform toward target + initial offset.
            // NOTE(review): this runs after CharacterController.Move and can fight the
            // movement above — confirm this grafted follow logic is intentional here.
            Vector3 targetCamPos = target.position + offset;
            // Smoothly interpolate between the camera's current position and it's target position.
            // (Inside FixedUpdate, Unity reports Time.deltaTime as the fixed timestep.)
            transform.position = Vector3.Lerp (transform.position, targetCamPos, smoothing * Time.deltaTime);
        }
        private void PlayJumpSound()
        {
            m_AudioSource.clip = m_JumpSound;
            m_AudioSource.Play();
        }
        // Advances the step-cycle accumulator with horizontal speed and plays a
        // footstep each time it crosses the next threshold.
        private void ProgressStepCycle(float speed)
        {
            if (m_CharacterController.velocity.sqrMagnitude > 0 && (m_Input.x != 0 || m_Input.y != 0))
            {
                m_StepCycle += (m_CharacterController.velocity.magnitude + (speed*(m_IsWalking ? 1f : m_RunstepLenghten)))*
                             Time.fixedDeltaTime;
            }
            if (!(m_StepCycle > m_NextStep))
            {
                return;
            }
            m_NextStep = m_StepCycle + m_StepInterval;
            PlayFootStepAudio();
        }
        private void PlayFootStepAudio()
        {
            if (!m_CharacterController.isGrounded)
            {
                return;
            }
            // pick & play a random footstep sound from the array,
            // excluding sound at index 0
            int n = Random.Range(1, m_FootstepSounds.Length);
            m_AudioSource.clip = m_FootstepSounds[n];
            m_AudioSource.PlayOneShot(m_AudioSource.clip);
            // move picked sound to index 0 so it's not picked next time
            m_FootstepSounds[n] = m_FootstepSounds[0];
            m_FootstepSounds[0] = m_AudioSource.clip;
        }
        // Applies head bob while moving on the ground, otherwise rests the camera at
        // its original local height; jump bob offset is subtracted in both cases.
        private void UpdateCameraPosition(float speed)
        {
            Vector3 newCameraPosition;
            if (!m_UseHeadBob)
            {
                return;
            }
            if (m_CharacterController.velocity.magnitude > 0 && m_CharacterController.isGrounded)
            {
                m_Camera.transform.localPosition =
                    m_HeadBob.DoHeadBob(m_CharacterController.velocity.magnitude +
                                      (speed*(m_IsWalking ? 1f : m_RunstepLenghten)));
                newCameraPosition = m_Camera.transform.localPosition;
                newCameraPosition.y = m_Camera.transform.localPosition.y - m_JumpBob.Offset();
            }
            else
            {
                newCameraPosition = m_Camera.transform.localPosition;
                newCameraPosition.y = m_OriginalCameraPosition.y - m_JumpBob.Offset();
            }
            m_Camera.transform.localPosition = newCameraPosition;
        }
        // Reads movement axes, resolves walk/run speed and triggers the FOV kick
        // on walk<->run transitions while moving.
        private void GetInput(out float speed)
        {
            // Read input
            float horizontal = CrossPlatformInputManager.GetAxis("Horizontal");
            float vertical = CrossPlatformInputManager.GetAxis("Vertical");
            bool waswalking = m_IsWalking;
#if !MOBILE_INPUT
            // On standalone builds, walk/run speed is modified by a key press.
            // keep track of whether or not the character is walking or running
            m_IsWalking = !Input.GetKey(KeyCode.LeftShift);
#endif
            // set the desired speed to be walking or running
            speed = m_IsWalking ? m_WalkSpeed : m_RunSpeed;
            m_Input = new Vector2(horizontal, vertical);
            // normalize input if it exceeds 1 in combined length:
            if (m_Input.sqrMagnitude > 1)
            {
                m_Input.Normalize();
            }
            // handle speed change to give an fov kick
            // only if the player is going to a run, is running and the fovkick is to be used
            if (m_IsWalking != waswalking && m_UseFovKick && m_CharacterController.velocity.sqrMagnitude > 0)
            {
                StopAllCoroutines();
                StartCoroutine(!m_IsWalking ? m_FovKick.FOVKickUp() : m_FovKick.FOVKickDown());
            }
        }
        private void RotateView()
        {
            m_MouseLook.LookRotation (transform, m_Camera.transform);
        }
        // Pushes non-kinematic rigidbodies the controller walks into.
        private void OnControllerColliderHit(ControllerColliderHit hit)
        {
            Rigidbody body = hit.collider.attachedRigidbody;
            //dont move the rigidbody if the character is on top of it
            if (m_CollisionFlags == CollisionFlags.Below)
            {
                return;
            }
            if (body == null || body.isKinematic)
            {
                return;
            }
            body.AddForceAtPosition(m_CharacterController.velocity*0.1f, hit.point, ForceMode.Impulse);
        }
    }
| |
/*
* Copyright 2011 The Poderosa Project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* $Id: SFTPForm.cs,v 1.4 2012/05/05 12:42:45 kzmi Exp $
*/
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using Granados.Poderosa.SFTP;
using Granados.Poderosa.FileTransfer;
using Granados.SSH2;
using System.Threading;
using System.Collections;
using System.IO;
using System.Diagnostics;
namespace Poderosa.SFTP {
/// <summary>
/// SFTP interface
/// </summary>
public partial class SFTPForm : Form {
        #region Private fields
        private Form _ownerForm;                        // form whose lifetime this SFTP window follows
        private SFTPClient _sftp;                       // SFTP session used by all operations
        private readonly String _remoteName;            // connection name shown in the title bar
        private Thread _sftpThread = null;              // background thread running the current SFTP operation
        private bool _sftpExecuting = false;            // true while an SFTP operation is in progress (see CanExecuteSFTP)
        private bool _treeConstructing = false;         // true while the remote tree is being (re)built
        private bool _closedByOwner = false;
        private bool _formClosed = false;               // guards UI updates after the form is gone
        private string _saveFolderPath = null;          // last local folder chosen for downloads
        // keep focus before and after controls are disabled temporarily
        private Control _prevActiveControl = null;
        private Cancellation _fileTransferCancellation = null;  // non-null only while a cancelable transfer runs
        #endregion
        #region Private constants
        private const int PROGRESSBAR_MAX = Int32.MaxValue;     // progress bar range upper bound
        // Indices into treeViewImageList; must match the order in PrepareTreeIcons().
        private const int IMAGE_INDEX_ROOT = 0;
        private const int IMAGE_INDEX_FOLDER_CLOSE = 1;
        private const int IMAGE_INDEX_FOLDER_OPEN = 2;
        private const int IMAGE_INDEX_FILE = 3;
        private const int IMAGE_INDEX_SYMBOLICLINK = 4;
        #endregion
        #region NodeType
        /// <summary>
        /// Kind of entry a tree node represents.
        /// </summary>
        private enum NodeType {
            Root,
            File,
            Directory,
            SymbolicLink,
        }
        #endregion
#region NodeTag
private class NodeTag {
public readonly NodeType Type;
public readonly string SortKey;
public readonly SFTPFileInfo FileInfo;
private NodeTag(NodeType nodeType, string sortKey, SFTPFileInfo fileInfo) {
this.Type = nodeType;
this.SortKey = sortKey;
this.FileInfo = fileInfo;
}
public static NodeTag CreateForRoot() {
return new NodeTag(NodeType.Root, String.Empty, null);
}
public static NodeTag CreateForDirectory(string name) {
return new NodeTag(NodeType.Directory, "D:" + name, null);
}
public static NodeTag CreateForFileOrDirectory(SFTPFileInfo fileInfo) {
NodeType nodeType;
string prefix;
if (UnixPermissions.IsDirectory(fileInfo.Permissions)) {
nodeType = NodeType.Directory;
prefix = "D:";
}
else if (UnixPermissions.IsSymbolicLink(fileInfo.Permissions)) {
nodeType = NodeType.SymbolicLink;
prefix = "F:";
}
else {
nodeType = NodeType.File;
prefix = "F:";
}
return new NodeTag(nodeType, prefix + fileInfo.FileName, fileInfo);
}
}
#endregion
#region NodeSorter
private class NodeSorter : IComparer {
public int Compare(object x, object y) {
NodeTag tagX = (NodeTag)((TreeNode)x).Tag;
NodeTag tagY = (NodeTag)((TreeNode)y).Tag;
return String.Compare(tagX.SortKey, tagY.SortKey);
}
}
#endregion
        #region Constructors
        /// <summary>
        /// Constructor
        /// </summary>
        // Parameterless constructor for the WinForms designer; delegates to the main constructor.
        public SFTPForm()
            : this(null, null, String.Empty) {
        }
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="ownerForm">form that owns this window</param>
        /// <param name="sftp">SFTP client session to operate on</param>
        /// <param name="connectionName">connection name shown in the title bar</param>
        public SFTPForm(Form ownerForm, SFTPClient sftp, string connectionName) {
            InitializeComponent();
            // Skip runtime initialization when instantiated by the designer.
            if (!this.DesignMode) {
                this._sftp = sftp;
                this._ownerForm = ownerForm;
                this._remoteName = connectionName;
                this.Text = "SFTP - " + connectionName;
                this.progressBar.Maximum = PROGRESSBAR_MAX;
                PrepareTreeIcons();
                SetIcon();
                SetText();
                ClearProgressBar();
                // Note: Setting TreeViewNodeSorter property enables sorting.
                treeViewRemote.TreeViewNodeSorter = new NodeSorter();
            }
        }
private void PrepareTreeIcons() {
treeViewImageList.Images.Clear();
treeViewImageList.ColorDepth = ColorDepth.Depth32Bit;
treeViewImageList.Images.Add(Properties.Resources.Host16x16);
treeViewImageList.Images.Add(Properties.Resources.FolderClose16x16);
treeViewImageList.Images.Add(Properties.Resources.FolderOpen16x16);
treeViewImageList.Images.Add(Properties.Resources.File16x16);
treeViewImageList.Images.Add(Properties.Resources.Link16x16);
treeViewRemote.ImageList = treeViewImageList;
}
        // Applies the SFTP window icon from the embedded resources.
        private void SetIcon() {
            this.Icon = Properties.Resources.FormIconSFTP;
        }
private void SetText() {
StringResource res = SFTPPlugin.Instance.StringResource;
this.labelDropHere.Text = res.GetString("SFTPForm.labelDropHere");
this.buttonDownload.Text = res.GetString("SFTPForm.buttonDownload");
this.buttonCancel.Text = res.GetString("SFTPForm.buttonCancel");
}
#endregion
        #region Thread control
        // Returns true when no SFTP operation is currently running on the background thread.
        private bool CanExecuteSFTP() {
            return !_sftpExecuting;
        }
private void BeginSFTPThread(MethodInvoker threadMethod, bool modifyTree, bool isCancelable) {
_sftpExecuting = true;
DisableControls(isCancelable ? true : false);
Cursor.Current = Cursors.WaitCursor;
_sftpThread = new Thread(
delegate() {
try {
if (modifyTree)
_treeConstructing = true;
try {
threadMethod();
}
finally {
if (modifyTree)
_treeConstructing = false;
_sftpExecuting = false;
Invoke((MethodInvoker)delegate() {
EnableControls();
Cursor.Current = Cursors.Default;
});
}
}
catch (SFTPClientException e) {
Log("*** Error: " + e.Message);
}
catch (IOException e) {
Log("*** I/O Error: " + e.Message);
}
catch (UnauthorizedAccessException e) {
Log("*** Access Error: " + e.Message);
}
catch (Exception e) {
RuntimeUtil.ReportException(e);
}
});
_sftpThread.Start();
}
        // Disables the interactive controls while an operation runs, remembering the
        // focused control so EnableControls() can restore it.
        private void DisableControls(bool isCancelEnabled) {
            _prevActiveControl = this.ActiveControl;
            this.buttonDownload.Enabled = false;
            this.treeViewRemote.Enabled = false;
            this.buttonCancel.Enabled = isCancelEnabled;
        }
        // Re-enables the controls after an operation and restores the previous focus.
        private void EnableControls() {
            this.buttonDownload.Enabled = true;
            this.treeViewRemote.Enabled = true;
            this.ActiveControl = _prevActiveControl;
            this.buttonCancel.Enabled = false;
        }
#endregion
#region SFTP Initialize
        /// <summary>
        /// Initializes the SFTP session on a background thread, resets the tree to a
        /// fresh root node, and opens the remote home directory.
        /// (Method name "SFTPInitailize" is a historical typo kept for compatibility.)
        /// </summary>
        private void SFTPInitailize() {
            if (!CanExecuteSFTP())
                return;
            BeginSFTPThread(delegate() {
                SFTPInitailize_Init();
                // Tree mutation must happen on the UI thread.
                this.Invoke((MethodInvoker)delegate() {
                    treeViewRemote.BeginUpdate();
                    treeViewRemote.Nodes.Clear();
                    TreeNode rootNode = CreateRootNode();
                    treeViewRemote.Nodes.Add(rootNode);
                    treeViewRemote.EndUpdate();
                });
                Log("Retrieving home directory...");
                string homeDir = _sftp.GetRealPath(".");
                Log("...Done: " + homeDir);
                SFTPOpenDirectory_Core(homeDir, true);
            }, true, false);
        }
        // Performs the SFTP protocol handshake (runs on the background thread).
        private void SFTPInitailize_Init() {
            Log("Initializing SFTP...");
            _sftp.Init();
            Log("...Done");
        }
#endregion
#region SFTP Updating tree
        /// <summary>
        /// Lists a remote directory on a background thread and populates its tree node.
        /// </summary>
        /// <param name="fullPath">absolute remote path of the directory</param>
        /// <param name="expand">true to expand the node after it is filled</param>
        private void SFTPOpenDirectory(string fullPath, bool expand) {
            if (!CanExecuteSFTP())
                return;
            BeginSFTPThread(delegate() {
                SFTPOpenDirectory_Core(fullPath, expand);
            }, true, false);
        }
        /// <summary>
        /// Retrieves the entries of a remote directory and rebuilds the corresponding
        /// subtree on the UI thread. Runs on the background SFTP thread.
        /// </summary>
        /// <param name="fullPath">absolute remote path of the directory</param>
        /// <param name="expand">true to expand the directory node afterwards</param>
        private void SFTPOpenDirectory_Core(string fullPath, bool expand) {
            Log("Retrieving directory entries...: " + fullPath);
            SFTPFileInfo[] entries = _sftp.GetDirectoryEntries(fullPath);
            for (int i = 0; i < entries.Length; i++) {
                SFTPFileInfo ent = entries[i];
                if (UnixPermissions.IsSymbolicLink(ent.Permissions)) {
                    // If the symbolic link points a directory,
                    // replace the file information so as to open the node.
                    string path = CombineUnixPath(fullPath, ent.FileName);
                    SFTPFileAttributes attr;
                    try {
                        // followLink=false here: resolve the link target's attributes.
                        attr = _sftp.GetFileInformations(path, false);
                    }
                    catch (SFTPClientException e) {
                        if (!IsSFTPError(e, SFTPStatusCode.SSH_FX_NO_SUCH_FILE))
                            throw;
                        // file missing or dead symbolic link ?
                        attr = null;
                    }
                    if (attr != null) {
                        if (UnixPermissions.IsDirectory(attr.Permissions)) {
                            entries[i] = new SFTPFileInfo(ent.FileName, ent.LongName, attr);
                        }
                    }
                }
            }
            // Tree mutation must happen on the UI thread.
            this.Invoke((MethodInvoker)delegate() {
                treeViewRemote.BeginUpdate();
                TreeNode dirNode = MakeDirectoryTree(fullPath, expand);
                UpdateTreeDirectoryEntries(dirNode, entries);
                dirNode.EnsureVisible();
                treeViewRemote.EndUpdate();
            });
            Log("...Done");
        }
#endregion
#region SFTP Upload
        /// <summary>
        /// Uploads the given local files/directories (recursively) into a remote
        /// directory on a background thread, then refreshes that directory's node.
        /// </summary>
        /// <param name="localFiles">local paths to upload (files or directories)</param>
        /// <param name="remoteDirectoryPath">absolute remote destination directory</param>
        private void SFTPUpload(string[] localFiles, string remoteDirectoryPath) {
            if (!CanExecuteSFTP())
                return;
            _fileTransferCancellation = new Cancellation();
            BeginSFTPThread(delegate() {
                ClearProgressBar();
                Log("=== UPLOAD ===");
                Log("Remote: " + remoteDirectoryPath);
                bool continued = true;
                try {
                    // overwite becomes true once the user answers "Yes to all".
                    bool overwite = false;
                    foreach (string localFile in localFiles) {
                        string localFullPath = Path.GetFullPath(localFile);
                        continued = SFTPUpload_UploadRecursively(localFullPath, remoteDirectoryPath, ref overwite);
                        if (!continued)
                            break;
                    }
                }
                finally {
                    ClearProgressBar();
                }
                Log("UPLOAD " + (continued ? "completed." : "canceled."));
                SFTPOpenDirectory_Core(remoteDirectoryPath, true);
                _fileTransferCancellation = null;
            }, true, true);
        }
private bool SFTPUpload_UploadRecursively(string localFileFullPath, string remoteDirectoryPath, ref bool overwite) {
string fileName = Path.GetFileName(localFileFullPath);
string remoteFullPath = CombineUnixPath(remoteDirectoryPath, fileName);
if (Directory.Exists(localFileFullPath)) {
SFTPUpload_CreateRemoteDirectoryIfNotExist(fileName, remoteFullPath);
foreach (string path in Directory.GetDirectories(localFileFullPath)) {
bool cont = SFTPUpload_UploadRecursively(path, remoteFullPath, ref overwite);
if (!cont)
return false; // cancel
}
foreach (string path in Directory.GetFiles(localFileFullPath)) {
bool cont = SFTPUpload_UploadRecursively(path, remoteFullPath, ref overwite);
if (!cont)
return false; // cancel
}
return true;
}
else {
bool cont = SFTPUpload_UploadFile(fileName, localFileFullPath, remoteFullPath, ref overwite);
return cont;
}
}
        /// <summary>
        /// Uploads a single file, asking the user before overwriting an existing remote
        /// file unless "Yes to all" was chosen earlier (tracked via <paramref name="overwite"/>).
        /// </summary>
        /// <returns>false when the user canceled the transfer; true otherwise (including skip)</returns>
        private bool SFTPUpload_UploadFile(string fileName, string localFileFullPath, string remoteFullPath, ref bool overwite) {
            if (IsFileTransferCanceled()) {
                Log("Canceled");
                return false; // cancel
            }
            if (!overwite) {
                // Probe the remote path; SSH_FX_NO_SUCH_FILE means it doesn't exist yet.
                bool existence;
                try {
                    _sftp.GetFileInformations(remoteFullPath, true);
                    existence = true;
                }
                catch (SFTPClientException e) {
                    if (!IsSFTPError(e, SFTPStatusCode.SSH_FX_NO_SUCH_FILE))
                        throw;
                    existence = false;
                }
                if (existence) {
                    // Ask on the UI thread whether to overwrite.
                    DialogResult result = DialogResult.None;
                    this.Invoke((MethodInvoker)delegate() {
                        string caption = SFTPPlugin.Instance.StringResource.GetString("SFTPForm.Confirmation");
                        string format = SFTPPlugin.Instance.StringResource.GetString("SFTPForm.AskOverwriteFormat");
                        string message = String.Format(format, remoteFullPath);
                        using (YesNoAllDialog dialog = new YesNoAllDialog(message, caption)) {
                            result = dialog.ShowDialog(this);
                        }
                    });
                    if (result == DialogResult.Cancel) {
                        Log("Canceled");
                        return false; // cancel
                    }
                    if (result == DialogResult.No) {
                        Log(" | Skipped: " + localFileFullPath);
                        return true; // skip
                    }
                    if (result == YesNoAllDialog.YesToAll) {
                        // Suppress further prompts for the rest of this transfer.
                        overwite = true;
                    }
                }
            }
            FileInfo localFileInfo = new FileInfo(localFileFullPath);
            ulong fileSize = (ulong)localFileInfo.Length;
            _sftp.UploadFile(localFileFullPath, remoteFullPath, _fileTransferCancellation,
                delegate(SFTPFileTransferStatus status, ulong transmitted) {
                    ShowProgress(localFileFullPath, fileName, status, fileSize, transmitted);
                });
            if (IsFileTransferCanceled())
                return false; // canceled
            else
                return true;
        }
        /// <summary>
        /// Creates a remote directory unless it already exists.
        /// Existence is probed via GetFileInformations; SSH_FX_NO_SUCH_FILE means absent.
        /// </summary>
        private void SFTPUpload_CreateRemoteDirectoryIfNotExist(string localName, string remotePath) {
            try {
                _sftp.GetFileInformations(remotePath, true);
                Log(" | Directory already exists: " + remotePath);
                return;
            }
            catch (SFTPClientException e) {
                if (!IsSFTPError(e, SFTPStatusCode.SSH_FX_NO_SUCH_FILE))
                    throw;
            }
            Log(" | Create directory: " + remotePath);
            UpdateProgressBar(localName, 0, 0, false);
            _sftp.CreateDirectory(remotePath);
            UpdateProgressBar(localName, 0, 0, true);
        }
private void ShowProgress(string localFullPath, string fileName, SFTPFileTransferStatus status, ulong fileSize, ulong transmitted) {
switch (status) {
case SFTPFileTransferStatus.Open:
Log(" | File: " + localFullPath);
Log(" | ... Open");
UpdateProgressBar(fileName, fileSize, transmitted, false);
break;
case SFTPFileTransferStatus.Transmitting:
LogOverwrite(" | ... Transmitting");
UpdateProgressBar(fileName, fileSize, transmitted, false);
break;
case SFTPFileTransferStatus.Close:
LogOverwrite(" | ... Closing");
UpdateProgressBar(fileName, fileSize, transmitted, false);
break;
case SFTPFileTransferStatus.CompletedSuccess:
LogOverwrite(" | ... Done");
UpdateProgressBar(fileName, fileSize, transmitted, true);
break;
case SFTPFileTransferStatus.CompletedError:
LogOverwrite(" | ... Error");
UpdateProgressBar(fileName, fileSize, transmitted, true);
break;
case SFTPFileTransferStatus.CompletedAbort:
LogOverwrite(" | ... Aborted");
UpdateProgressBar(fileName, fileSize, transmitted, true);
break;
}
}
#endregion
#region SFTP Download
private void SFTPDownload(string[] remoteFiles, string localDirectoryPath) {
_fileTransferCancellation = new Cancellation();
BeginSFTPThread(delegate() {
ClearProgressBar();
Log("=== DOWNLOAD ===");
string localFullPath = Path.GetFullPath(localDirectoryPath);
bool continued = true;
try {
bool overwite = false;
foreach (string remotePath in remoteFiles) {
continued = SFTPDownload_DownloadRecursively(remotePath, localFullPath, ref overwite);
if (!continued)
break;
}
}
finally {
ClearProgressBar();
}
_fileTransferCancellation = null;
Log("DOWNLOAD " + (continued ? "completed." : "canceled."));
}, true, true);
}
        /// <summary>
        /// Downloads one remote path into the local directory; remote directories are
        /// created locally and descended into entry by entry.
        /// </summary>
        /// <returns>false when the user canceled the transfer; true otherwise (including skip)</returns>
        private bool SFTPDownload_DownloadRecursively(string remoteFilePath, string localDirectoryPath, ref bool overwite) {
            string fileName = GetUnixPathFileName(remoteFilePath);
            string localPath = Path.Combine(localDirectoryPath, fileName); // local path to save
            SFTPFileAttributes fileAttr;
            try {
                fileAttr = _sftp.GetFileInformations(remoteFilePath, false);
            }
            catch (SFTPClientException e) {
                if (!IsSFTPError(e, SFTPStatusCode.SSH_FX_NO_SUCH_FILE))
                    throw;
                // file missing or dead symbolic link ?
                Log(" | File: " + remoteFilePath);
                Log("*** Warning: " + e.Message);
                return true; // skip
            }
            if (UnixPermissions.IsDirectory(fileAttr.Permissions)) {
                if (!Directory.Exists(localPath))
                    Directory.CreateDirectory(localPath);
                SFTPFileInfo[] remoteFiles = _sftp.GetDirectoryEntries(remoteFilePath);
                foreach (SFTPFileInfo fileInfo in remoteFiles) {
                    // Skip the "." and ".." pseudo-entries.
                    if (IsDots(fileInfo.FileName))
                        continue;
                    string newRemoteFilePath = CombineUnixPath(remoteFilePath, fileInfo.FileName);
                    bool cont = SFTPDownload_DownloadRecursively(newRemoteFilePath, localPath, ref overwite);
                    if (!cont)
                        return false; // cancel
                }
                return true;
            }
            else {
                if (!Directory.Exists(localDirectoryPath))
                    Directory.CreateDirectory(localDirectoryPath);
                bool cont = SFTPDownload_DownloadFile(remoteFilePath, fileName, localPath, fileAttr, ref overwite);
                return cont;
            }
        }
        /// <summary>
        /// Downloads a single file, asking the user before overwriting an existing local
        /// file unless "Yes to all" was chosen earlier (tracked via <paramref name="overwite"/>).
        /// </summary>
        /// <returns>false when the user canceled the transfer; true otherwise (including skip)</returns>
        private bool SFTPDownload_DownloadFile(
                    string remoteFullPath, string fileName, string localFileFullPath,
                    SFTPFileAttributes fileAttr, ref bool overwite) {
            if (IsFileTransferCanceled()) {
                Log("Canceled");
                return false; // cancel
            }
            if (!overwite) {
                bool existence = File.Exists(localFileFullPath);
                if (existence) {
                    // Ask on the UI thread whether to overwrite.
                    DialogResult result = DialogResult.None;
                    this.Invoke((MethodInvoker)delegate() {
                        string caption = SFTPPlugin.Instance.StringResource.GetString("SFTPForm.Confirmation");
                        string format = SFTPPlugin.Instance.StringResource.GetString("SFTPForm.AskOverwriteFormat");
                        string message = String.Format(format, localFileFullPath);
                        using (YesNoAllDialog dialog = new YesNoAllDialog(message, caption)) {
                            result = dialog.ShowDialog(this);
                        }
                    });
                    if (result == DialogResult.Cancel) {
                        Log("Canceled");
                        return false; // cancel
                    }
                    if (result == DialogResult.No) {
                        Log(" | Skipped: " + localFileFullPath);
                        return true; // skip
                    }
                    if (result == YesNoAllDialog.YesToAll) {
                        // Suppress further prompts for the rest of this transfer.
                        overwite = true;
                    }
                }
            }
            ulong fileSize = fileAttr.FileSize;
            _sftp.DownloadFile(remoteFullPath, localFileFullPath, _fileTransferCancellation,
                delegate(SFTPFileTransferStatus status, ulong transmitted) {
                    ShowProgress(localFileFullPath, fileName, status, fileSize, transmitted);
                });
            if (IsFileTransferCanceled())
                return false; // canceled
            else
                return true;
        }
#endregion
#region Log display
        private int prevLineTop = 0;    // start offset (in textLog) of the most recently appended log line; used by LogOverwrite
private void ClearLog() {
if (_formClosed)
return;
if (this.InvokeRequired) {
this.Invoke((MethodInvoker)delegate() {
ClearLog();
});
return;
}
this.textLog.Text = String.Empty;
}
private void Log(string message) {
if (_formClosed)
return;
if (this.InvokeRequired) {
this.Invoke((MethodInvoker)delegate() {
Log(message);
});
return;
}
this.textLog.SelectionStart = this.textLog.TextLength;
if (this.textLog.TextLength > 0)
this.textLog.SelectedText = Environment.NewLine;
prevLineTop = this.textLog.TextLength;
this.textLog.SelectedText = message;
}
private void LogOverwrite(string message) {
if (_formClosed)
return;
if (this.InvokeRequired) {
this.Invoke((MethodInvoker)delegate() {
LogOverwrite(message);
});
return;
}
this.textLog.SelectionStart = prevLineTop;
this.textLog.SelectionLength = this.textLog.TextLength - prevLineTop;
this.textLog.SelectedText = message;
}
#endregion
#region Progress bar
private void ClearProgressBar() {
UpdateProgressBarCore(String.Empty, 0);
}
private void UpdateProgressBar(string targetFile, ulong total, ulong current, bool isCompleted) {
int progress;
if (total == 0) {
progress = isCompleted ? PROGRESSBAR_MAX : 0;
}
else if (total <= (ulong)Int32.MaxValue && current <= (ulong)Int32.MaxValue) {
progress = (int)((ulong)PROGRESSBAR_MAX * current / total);
}
else {
progress = (int)(PROGRESSBAR_MAX * ((double)current / (double)total));
}
UpdateProgressBarCore(targetFile, progress);
}
private void UpdateProgressBarCore(string targetFile, int progress) {
if (_formClosed)
return;
if (this.InvokeRequired) {
this.Invoke((MethodInvoker)delegate() {
UpdateProgressBarCore(targetFile, progress);
});
return;
}
this.labelProgress.Text = targetFile;
this.progressBar.Value = progress;
}
#endregion
#region Tree node operations
private TreeNode MakeDirectoryTree(string fullPath, bool expand) {
TreeNode rootNode;
if (treeViewRemote.Nodes.Count == 0) {
rootNode = CreateRootNode();
treeViewRemote.Nodes.Add(rootNode);
}
else {
rootNode = treeViewRemote.Nodes[0];
}
if (fullPath == "/")
return rootNode;
string[] pathElems = fullPath.Split('/');
TreeNode parentNode = rootNode;
for (int i = 0; i < pathElems.Length; i++) {
string dirName = pathElems[i];
if (i == 0 && dirName == String.Empty)
continue;
TreeNode dirNode = parentNode.Nodes[dirName];
if (dirNode == null) {
dirNode = CreateDirectoryNode(dirName);
parentNode.Nodes.Add(dirNode);
}
if (expand)
dirNode.Expand();
parentNode = dirNode;
}
return parentNode;
}
private void UpdateTreeDirectoryEntries(TreeNode directoryNode, SFTPFileInfo[] entries) {
TreeNodeCollection children = directoryNode.Nodes;
Dictionary<string, SFTPFileInfo> newEntries = new Dictionary<string, SFTPFileInfo>();
foreach (SFTPFileInfo ent in entries) {
if (IsDots(ent.FileName))
continue;
newEntries.Add(ent.FileName, ent);
}
List<TreeNode> nodesToDelete = new List<TreeNode>();
foreach (TreeNode node in children) {
if (!newEntries.ContainsKey(node.Name)) {
nodesToDelete.Add(node);
}
}
foreach (TreeNode node in nodesToDelete) {
children.Remove(node);
}
foreach (SFTPFileInfo ent in entries) {
if (IsDots(ent.FileName))
continue;
TreeNode entNode = children[ent.FileName];
if (entNode == null) {
entNode = CreateFileOrDirectoryNode(ent);
children.Add(entNode);
}
else {
UpdateTreeNode(entNode, ent);
}
}
}
private TreeNode CreateRootNode() {
TreeNode node = new TreeNode(_remoteName, IMAGE_INDEX_ROOT, IMAGE_INDEX_ROOT);
node.Name = String.Empty; // TreeNodeCollection uses this as a key.
node.Tag = NodeTag.CreateForRoot();
return node;
}
private TreeNode CreateDirectoryNode(string name) {
TreeNode node = new TreeNode(name, IMAGE_INDEX_FOLDER_CLOSE, IMAGE_INDEX_FOLDER_CLOSE);
node.Name = name; // TreeNodeCollection uses this as a key.
node.ToolTipText = name;
node.Tag = NodeTag.CreateForDirectory(name);
return node;
}
private TreeNode CreateFileOrDirectoryNode(SFTPFileInfo fileInfo) {
NodeTag nodeTag = NodeTag.CreateForFileOrDirectory(fileInfo);
int iconIndex = GetNodeImageIndex(nodeTag.Type);
TreeNode node = new TreeNode(fileInfo.FileName, iconIndex, iconIndex);
node.Name = fileInfo.FileName; // TreeNodeCollection uses this as a key.
node.Tag = nodeTag;
node.ToolTipText = GetTooltipText(fileInfo);
return node;
}
private void UpdateTreeNode(TreeNode node, SFTPFileInfo fileInfo) {
NodeTag nodeTag = NodeTag.CreateForFileOrDirectory(fileInfo);
node.SelectedImageIndex = node.ImageIndex = GetNodeImageIndex(nodeTag.Type);
node.Tag = nodeTag;
node.ToolTipText = GetTooltipText(fileInfo);
}
private int GetNodeImageIndex(NodeType nodeType) {
switch (nodeType) {
case NodeType.Root:
return IMAGE_INDEX_ROOT;
case NodeType.Directory:
return IMAGE_INDEX_FOLDER_CLOSE;
case NodeType.SymbolicLink:
return IMAGE_INDEX_SYMBOLICLINK;
case NodeType.File:
default:
return IMAGE_INDEX_FILE;
}
}
private string GetTooltipText(SFTPFileInfo fileInfo) {
return new StringBuilder()
.Append(UnixPermissions.Format(fileInfo.Permissions))
.Append(' ')
.Append(fileInfo.FileSize)
.Append(' ')
.Append(fileInfo.FileName)
.ToString();
}
private string GetPathOf(TreeNode node) {
string path = node.FullPath;
if (path.StartsWith(_remoteName)) {
path = path.Substring(_remoteName.Length);
if (path.Length == 0)
return "/";
else
return path;
}
else {
return path;
}
}
#endregion
#region Cancellation status
private bool IsFileTransferCanceled() {
Cancellation cancellation = _fileTransferCancellation;
return cancellation != null && cancellation.IsRequested;
}
#endregion
#region Common
private static string CombineUnixPath(string path1, string path2) {
return path1.TrimEnd('/') + "/" + path2;
}
private static string GetUnixPathFileName(string path) {
int s = path.LastIndexOf('/');
if (s >= 0)
return path.Substring(s + 1);
else
return path;
}
private static bool IsSFTPError(Exception e, uint expectedStatusCode) {
SFTPClientErrorException err = e as SFTPClientErrorException;
if (err == null) {
err = e.InnerException as SFTPClientErrorException;
if (err == null)
return false;
}
Debug.Assert(err != null);
return (err.Code == expectedStatusCode);
}
private static bool IsDots(string fileName) {
switch (fileName) {
case ".":
case "..":
return true;
default:
return false;
}
}
#endregion
#region Form event handlers
        // Fired once the form has become visible: kicks off SFTP initialization.
        private void SFTPForm_Shown(object sender, EventArgs e) {
            SFTPInitailize();   // (sic) the typo lives in the method's declaration
        }
private void SFTPForm_Load(object sender, EventArgs e) {
if (_ownerForm != null) {
_ownerForm.FormClosed += new FormClosedEventHandler(_ownerForm_FormClosed);
this.Location = new Point(
_ownerForm.Left + (_ownerForm.Width - this.Width) / 2,
_ownerForm.Top + (_ownerForm.Height - this.Height) / 2
);
}
}
        // Vetoes a user-initiated close while the background SFTP thread is still
        // running; otherwise marks the form closed, aborts any worker thread and
        // shuts down the SFTP connection.
        private void SFTPForm_FormClosing(object sender, FormClosingEventArgs e) {
            if (e.CloseReason == CloseReason.UserClosing && !_closedByOwner) {
                // A transfer is still in progress: refuse to close.
                if (_sftpThread != null && _sftpThread.IsAlive) {
                    e.Cancel = true;
                    return;
                }
            }
            _formClosed = true;
            if (_sftpThread != null && _sftpThread.IsAlive)
                _sftpThread.Abort(); // FIXME: we need graceful cancellation
            try {
                _sftp.Close();
            }
            catch (Exception ex) {
                // Closing failures are reported but never block form shutdown.
                RuntimeUtil.ReportException(ex);
            }
        }
        // Detaches from the owner form after this form has closed.
        // NOTE(review): _sftp is cleared only when an owner form exists; if this
        // form can be shown without an owner, confirm that is intentional.
        private void SFTPForm_FormClosed(object sender, FormClosedEventArgs e) {
            if (_ownerForm != null) {
                _ownerForm.FormClosed -= new FormClosedEventHandler(_ownerForm_FormClosed);
                _ownerForm = null;
                _sftp = null;
            }
        }
        // When the owner form closes, close this form too. Setting _closedByOwner
        // bypasses the "transfer still running" veto in SFTPForm_FormClosing.
        private void _ownerForm_FormClosed(object sender, FormClosedEventArgs e) {
            _closedByOwner = true;
            this.Close();
        }
#endregion
#region Button event handlers
private void buttonDownload_Click(object sender, EventArgs e) {
if (!CanExecuteSFTP())
return;
TreeNode[] selectedNodes = this.treeViewRemote.SelectedNodes;
if (selectedNodes.Length == 0)
return;
string[] selectedPaths = new string[selectedNodes.Length];
for (int i = 0; i < selectedNodes.Length; i++) {
selectedPaths[i] = GetPathOf(selectedNodes[i]);
}
using (FolderBrowserDialog dialog = new FolderBrowserDialog()) {
if (_saveFolderPath == null)
_saveFolderPath = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
dialog.SelectedPath = _saveFolderPath;
dialog.ShowNewFolderButton = true;
dialog.Description = SFTPPlugin.Instance.StringResource.GetString("SFTPForm.ChooseFolder");
DialogResult result = dialog.ShowDialog(this);
if (result != DialogResult.OK)
return;
_saveFolderPath = dialog.SelectedPath;
}
SFTPDownload(selectedPaths, _saveFolderPath);
}
private void buttonCancel_Click(object sender, EventArgs e) {
Cancellation cancellation = _fileTransferCancellation;
if (cancellation != null) {
cancellation.Cancel();
buttonCancel.Enabled = false;
}
}
#endregion
#region TreeView event handlers
private void treeViewRemote_BeforeExpand(object sender, TreeViewCancelEventArgs e) {
// Change directory's icon
TreeNode node = e.Node;
NodeTag tag = node.Tag as NodeTag;
if (tag != null && tag.Type == NodeType.Directory)
node.SelectedImageIndex = node.ImageIndex = IMAGE_INDEX_FOLDER_OPEN;
}
private void treeViewRemote_BeforeCollapse(object sender, TreeViewCancelEventArgs e) {
// Change directory's icon
TreeNode node = e.Node;
NodeTag tag = node.Tag as NodeTag;
if (tag != null && tag.Type == NodeType.Directory)
node.SelectedImageIndex = node.ImageIndex = IMAGE_INDEX_FOLDER_CLOSE;
}
private void treeViewRemote_SingleNodeSelected(object sender, TreeViewEventArgs e) {
if (e.Action != TreeViewAction.ByMouse && e.Action != TreeViewAction.ByKeyboard)
return;
if (_treeConstructing)
return;
// Retrieve directory entries
TreeNode node = e.Node;
NodeTag tag = node.Tag as NodeTag;
if (tag != null && (tag.Type == NodeType.Directory || tag.Type == NodeType.Root)) {
string fullPath = GetPathOf(node);
SFTPOpenDirectory(fullPath, false);
}
}
private void treeViewRemote_DragOver(object sender, DragEventArgs e) {
TreeNode node = GetDroppableNode(e);
if (node != null)
e.Effect = DragDropEffects.Copy;
else
e.Effect = DragDropEffects.None;
if (node != null) {
treeViewRemote.SelectNode(node);
}
}
private void treeViewRemote_DragDrop(object sender, DragEventArgs e) {
TreeNode node = GetDroppableNode(e);
if (node != null) {
string remotePath = GetPathOf(node);
string[] localFiles = e.Data.GetData(DataFormats.FileDrop) as string[];
if (localFiles != null) {
SFTPUpload(localFiles, remotePath);
}
}
}
private TreeNode GetDroppableNode(DragEventArgs e) {
if ((e.AllowedEffect & DragDropEffects.Copy) == DragDropEffects.Copy
&& e.Data.GetDataPresent(DataFormats.FileDrop)) {
Point clientPoint = treeViewRemote.PointToClient(new Point(e.X, e.Y));
TreeNode node = treeViewRemote.GetNodeAt(clientPoint);
if (node != null) {
NodeTag tag = node.Tag as NodeTag;
if (tag != null && tag.Type == NodeType.Directory) {
return node;
}
}
}
return null;
}
#endregion
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
using System;
using System.Collections.Generic;
using System.Text;
using System.Diagnostics.Contracts;
using System.Diagnostics.CodeAnalysis;
namespace Microsoft.Glee.Optimization
{
/// <summary>
/// The class keeps a factorization of the matrix A. In this case it is an equality
/// Lm*Pm*...*L1*P1*A=Un-1*...U0*E1*...*Ek. Li are lower triangular eta matrices,
/// Pi are transposition matrices,
/// Ui are upper triangular eta matrices with ones on the diagonal everywhere, and Ei are eta matrices.
/// </summary>
internal class StandardFactorization : Factorization
{
#region Object Invariant
        // Code Contracts object invariant: the dimension is never negative and
        // all matrix containers of the factorization stay allocated.
        [ContractInvariantMethod]
        void ObjectInvariant()
        {
            Contract.Invariant(dim >= 0);
            Contract.Invariant(A != null);
            Contract.Invariant(LP != null);
            Contract.Invariant(etaList != null);
            Contract.Invariant(U != null);
        }
#endregion
        readonly BMatrix A;                 // the basis matrix being factorized
        readonly int dim; //A is a dimXdim matrix
        readonly List<Matrix> LP = new List<Matrix>(); //it is the list of matrix L, P
        readonly List<EtaMatrix> etaList = new List<EtaMatrix>(); //it is the list of matrix eta matrices
        //int[] q;
        // double pivotEpsilon = 1.0e-1;
        readonly UMatrix U;                 // upper-triangular factor, updated in place
        bool failure = false;               // set when elimination finds no nonzero pivot
        /// <summary>
        /// Creates the initial factorization of the form Ln*Pn*...*L1*P1*A=Un-1*...U0.
        /// </summary>
        /// <param name="APar">the basis matrix to factorize (must actually be a BMatrix)</param>
        /// <param name="Upar">working matrix that receives the upper-triangular factor</param>
        internal StandardFactorization(Matrix APar, UMatrix Upar)
        {
            Contract.Requires(APar != null);
            Contract.Requires(Upar != null);

            this.A = (BMatrix)APar;
            this.dim = A.NumberOfColumns;
            this.U = Upar;
            InitMatrixUAndMarkovichNumbers();
            CalculateInitialFactorization();    // may set 'failure'; checked by Create()
#if DEBUGGLEE
            // if(calls>=63)
            // CheckFactorization();
#endif
        }
        /// <summary>
        /// Copies the basis columns of A into U, column by column (equivalent to
        /// the commented-out safe loop below). A column whose basis index refers
        /// to a slack/artificial variable becomes a unit column with the 1 placed
        /// at the row recorded in slacksAndArtificials.
        /// </summary>
        [SuppressMessage("Microsoft.Contracts", "TestAlwaysEvaluatingToAConstant")]
        unsafe private void InitMatrixUAndMarkovichNumbers()
        {
            /* the safe version
            for (int i = 0; i < dim; i++)
                for (int j = 0; j < dim; j++)
                    U[i, j] = A[i, j];
            */
            fixed (int* basisPin = this.A.basis)
            fixed (double* ACoeffPin = A.A.coeffs)
            fixed (int* slacksPin = A.A.slacksAndArtificials)
            fixed (double* uPin = U.Coeffs)
            {
                int nRegVars = A.A.nRegularVars;
                int* basis = basisPin;
                int* basisEnd = basis + dim;
                double* uStart = uPin;              // top of the current U column
                double* uEnd = uPin + dim * dim;
                for (; basis < basisEnd; basis++, uStart++)
                {
                    int j = *basis;//column
                    double* u = uStart;
                    if (j < nRegVars)
                    {
                        // Regular variable: copy its coefficient column; both U
                        // and A are walked with a stride of one row.
                        double* a = ACoeffPin + *basis;
                        for (; u < uEnd; u += dim, a += nRegVars)
                        {
                            *u = *a;
                        }
                    }
                    else
                    {
                        // Slack/artificial variable: zero the column, then set a
                        // single 1 at the recorded row.
                        for (; u < uEnd; u += dim)
                            *u = 0;
                        int placeOfOne = *(slacksPin + j - nRegVars);
                        *(uStart + placeOfOne * dim) = 1;
                    }
                }
            }
        }
#if DEBUGGLEE
internal void CheckFactorization(Matrix A) {
Matrix ls = A;
foreach (Matrix lp in this.LP)
ls = lp * ls;
Matrix f = U;
foreach (Matrix e in this.etaList)
f = f * e;
double dist = Matrix.Dist(ls, f);
Console.WriteLine("dist={0}", dist);
}
double det() {
double r = 1;
for (int i = 0; i < this.dim; i++)
r *= U[i, i];
foreach (EtaMatrix e in this.etaList) {
int j = e.EtaIndex;
r *= e[j, j];
}
return r;
}
#endif
        /// <summary>
        /// Gaussian elimination with partial pivoting over U, recording the
        /// lower-triangular eta matrices and the row transpositions in LP.
        /// Sets 'failure' and stops when some column has no nonzero pivot.
        /// </summary>
        [SuppressMessage("Microsoft.Contracts", "TestAlwaysEvaluatingToAConstant")]
        unsafe void CalculateInitialFactorization()
        {
            fixed (double* uPin = U.Coeffs)
            {
                double* uPivot = uPin;  // points at U[k,k]; advanced along the diagonal
                for (int k = 0; k < A.NumberOfRows; k++, uPivot += dim + 1)
                {
                    int pivotRow = FindPivot(k);
                    Contract.Assume(k < this.dim);
                    if (pivotRow == -1)
                    {
                        // No nonzero entry left in column k: elimination cannot continue.
                        failure = true;
                        return;
                    }
                    if (pivotRow != k)
                        SwapRows(k, pivotRow);
                    Contract.Assert(k <= this.dim); // SwapRows may have modified
                    double pivot = *uPivot;
                    Contract.Assume((double)pivot != 0.0d); // F: need quantified invariants
                    double[] column = CreateLMatrix(k, pivot);
                    if (!LIsUnitMatrix(column))
                        this.LP.Add(new LowerTriangEtaMatrix(k, column));
                    //divide k-th row by the pivot
                    DividePivotRowByPivot(k, pivot);
                    //substract the multiple of the k-th row from the lower rows
                    SubstractTheMultipleOfThePivotRowFromLowerRows(k);
                }
            }
        }
        /// <summary>
        /// Eliminates column k below the diagonal: for each row i &gt; k the row is
        /// updated as row_i += t * row_k with t = -U[i,k], and U[i,k] becomes 0.
        /// The pointer walk mirrors the commented-out safe version below; the
        /// 'markovitzNumber'/'max' statistics are computed but no longer stored.
        /// </summary>
        [SuppressMessage("Microsoft.Contracts", "TestAlwaysEvaluatingToAConstant")]
        unsafe void SubstractTheMultipleOfThePivotRowFromLowerRows(int k)
        {
            /* the safe version
             * for (int i = k + 1; i < dim; i++) {
                 double t = -U[i, k];
                 U[i, k] = 0;
                 if (t != 0) {
                     double max = 0;
                     int markovitzNumber = 0;
                     for (int j = k + 1; j < dim; j++) {
                         double m = (U[i, j] += t * U[k, j]);
                         if (m != 0) {
                             markovitzNumber++;
                             max = Math.Max(max, Math.Abs(m));
                         }
                     }
                     p[i] = markovitzNumber;
                     // rowMax[i]=max;
                 }
             }
             */
            fixed (double* uPin = U.Coeffs)
            {
                double* uPivotRowStart = uPin + dim * k + k + 1;    // &U[k, k+1]
                double* uPivotRowEnd = uPin + dim * (k + 1);        // one past row k
                double* iRow = uPivotRowStart + dim - 1;            // &U[k+1, k]
                double* iRowEnd = uPin + dim * dim;
                for (; iRow < iRowEnd; iRow += k)
                {
                    double t = *iRow;
                    if ((double)t != 0)
                    {
                        *iRow++ = 0;
                        double* uPivotRow = uPivotRowStart;
                        double max = 0;
                        int markovitzNumber = 0;
                        for (; uPivotRow < uPivotRowEnd; uPivotRow++, iRow++)
                        {
                            // NOTE: the sign convention differs from the safe
                            // version: here t = U[i,k] and the update subtracts.
                            double m = (*iRow -= t * (*uPivotRow));
                            if ((double)m != 0)
                            {
                                markovitzNumber++;
                                max = Math.Max(max, Math.Abs(m));
                            }
                        }
                    }
                    else
                        iRow += dim - k;    // row untouched: skip to &U[i+1, k]
                }
            }
        }
        /// <summary>
        /// Normalizes pivot row k: sets U[k,k] to 1 and divides the entries to
        /// the right of the diagonal by the pivot (no-op when the pivot is 1).
        /// </summary>
        /// <param name="k">index of the pivot row/column</param>
        /// <param name="pivot">the (nonzero) pivot value U[k,k]</param>
        [SuppressMessage("Microsoft.Contracts", "TestAlwaysEvaluatingToAConstant")]
        unsafe private void DividePivotRowByPivot(int k, double pivot)
        {
            Contract.Requires((double)pivot != 0.0d);

            /* the safe version
            if (pivot != 1) {
                U[k, k] = 1;
                for (int j = k + 1; j < dim; j++)
                    U[k, j] /= pivot;
            }
            */
            fixed (double* uPin = this.U.Coeffs)
                if ((double)pivot != 1)
                {
                    double* u = uPin + k * (dim + 1);   // &U[k, k]
                    double* uEnd = uPin + (k + 1) * dim;
                    *u = 1;
                    u++;
                    for (; u < uEnd; u++)
                        (*u) /= pivot;
                }
        }
unsafe private static bool LIsUnitMatrix(double[] column)
{
fixed (double* colPin = column)
{
double* col = colPin;
double* colEnd = col + column.Length;
bool unitMatrix = (double)*col == 1;
if (unitMatrix)
{
col++;
for (; col < colEnd && unitMatrix; col++)
if ((double)*col != 0)
unitMatrix = false;
}
return unitMatrix;
}
}
        /// <summary>
        /// Builds the eta column of the lower-triangular elimination matrix for
        /// step k: element 0 is 1/pivot and element i is -U[k+i, k]/pivot
        /// (see the commented-out safe version).
        /// </summary>
        /// <param name="k">current elimination step</param>
        /// <param name="pivot">the (nonzero) pivot value U[k,k]</param>
        /// <returns>a column of length dim - k</returns>
        [SuppressMessage("Microsoft.Contracts", "TestAlwaysEvaluatingToAConstant")]
        unsafe private double[] CreateLMatrix(int k, double pivot)
        {
            Contract.Requires(k <= dim);
            Contract.Requires(((double)pivot) != 0.0d);

            /*the safe version
            double[] column = new double[dim - k];
            column[0] = 1.0 / pivot;
            for (int i = 1; i < column.Length; i++)
                column[i] = -U[k + i, k] / pivot;
            return column;
            */
            double[] column = new double[dim - k];
            fixed (double* colPin = column)
            fixed (double* uPin = U.Coeffs)
            {
                double* col = colPin;
                double* colEnd = col + dim - k;
                *col = 1.0 / pivot;
                col++;
                double* u = uPin + dim * (k + 1) + k;   // &U[k+1, k], stride dim walks the column
                for (; col < colEnd; col++, u += dim)
                    *col = -(*u) / pivot;
            }
            return column;
        }
        /// <summary>
        /// Swaps rows k and pivotRow of U (columns k..dim-1 only, the earlier
        /// columns are already zero there) and records the transposition in LP.
        /// </summary>
        [SuppressMessage("Microsoft.Contracts", "TestAlwaysEvaluatingToAConstant")]
        unsafe private void SwapRows(int k, int pivotRow)
        {
            Contract.Requires(this.dim > k);
            Contract.Requires(this.dim > pivotRow);

            /* the safe version
            this.LP.Add(new TranspositionMatrix(dim, k, pivotRow));
            //swap k-th and pivotRow rows in U
            //start swapping from the k-th column since we have zeroes in the first k columns
            for (int j = k; j < dim; j++) {
                double t = U[pivotRow, j];
                U[pivotRow, j] = U[k, j];
                U[k, j] = t;
            }
            */
            this.LP.Add(new TranspositionMatrix(dim, k, pivotRow));
            //swap k-th and pivotRow rows in U
            //start swapping from the k-th column since we have zeroes in the first k columns
            fixed (double* uPin = this.U.Coeffs)
            {
                double* uPivotRow = uPin + pivotRow * dim + k;  // &U[pivotRow, k]
                double* kRow = uPin + k * (dim + 1);            // &U[k, k]
                double* kRowEnd = uPin + dim * (k + 1);         // one past row k
                for (; kRow < kRowEnd; kRow++, uPivotRow++)
                {
                    double t = *uPivotRow;
                    *uPivotRow = *kRow;
                    *kRow = t;
                }
            }
        }
        /// <summary>
        /// Chooses the pivot row for column k among rows k..dim-1;
        /// returns -1 when the column has no usable pivot.
        /// </summary>
        private int FindPivot(int k)
        {
            Contract.Requires(k >= 0);
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(Contract.Result<int>() < this.dim);

            return FindLargestPivot(k);
        }
#region
#if DEBUGGLEE // debugging routines
private void CheckMatrix(UMatrix U,int k) {
for (int i = 0; i < this.dim; i++)
CheckMatrixRow(i, U,k);
CheckBelowInTheColumn(U, k);
}
private void CheckBelowInTheColumn(UMatrix U, int k) {
if(U[k,k]!=1)
Console.WriteLine();
bool allZero=true;
for (int j = k + 1; j < dim&&allZero; j++)
if (U[j, k] != 0)
allZero = false;
if (allZero==false)
Console.WriteLine();
}
private void CheckMatrixRow(int i, UMatrix U, int k) {
bool allZero=false;
int j=0;
for (; j < dim; j++)
if (U[i, j]!= 0) {
allZero = false;
break;
}
if (allZero)
Console.WriteLine("all zero row");
}
private void CheckBasisOnZeroColumns(Matrix B) {
for (int i = 0; i < B.NumberOfColumns; i++)
CheckColumnOnZero( i,B);
}
private bool CheckColumnOnZero(int p, Matrix B) {
for (int i = 0; i < B.NumberOfRows; i++)
if (B[i, p] != 0)
return false;
return true;
}
#endif
#endregion
/// <summary>
///
/// </summary>
/// <param name="k">looking for the pivot in the k-th column and rows k,...,n-1</param>
///// <param name="pivotI"></param>
//int FindPivotCloseToOne(int k, double eps) {
// double distFromOne = Double.MaxValue;
// int pivotRow = -1;
// for (int i = k; i < dim; i++) {
// double d = Math.Abs(U[i, k]);
// if (d > eps) {
// d = Math.Abs(Math.Log(d));
// if (d < distFromOne) {
// distFromOne = d;
// pivotRow = i;
// if (d == 0)
// break;
// }
// }
// }
// return pivotRow;
//}
        /// <summary>
        /// Partial pivoting: returns the row i in k..dim-1 with the largest
        /// |U[i,k]|, or -1 when the whole column is zero. (The Markowitz
        /// tie-breaking logic exists only in the commented-out safe version.)
        /// </summary>
        [SuppressMessage("Microsoft.Contracts", "TestAlwaysEvaluatingToAConstant")]
        unsafe int FindLargestPivot(int k)
        {
            Contract.Requires(k >= 0);
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(Contract.Result<int>() < this.dim);

            /* the safe version
            double maxPivot = 0;
            int minP = Int32.MaxValue;//number of zeroes in the row
            int pivotRow = -1;
            for (int i = k; i < dim; i++) {
                double d = Math.Abs(U[i, k]);///rowMax[i];
                if (d > maxPivot) {
                    minP = markovitzNumbers[i];
                    pivotRow = i;
                    maxPivot = d;
                } else if (d == maxPivot && d > 0) {
                    int r = markovitzNumbers[i];//markovitz number
                    if (r < minP) {
                        pivotRow = i;
                        minP = r;
                    }
                }
            }
            */
            double maxPivot = 0;
            int pivotRow = -1;
            fixed (double* uPin = this.U.Coeffs)
            {
                double* u = uPin + (dim + 1) * k;   // &U[k, k]; stride dim walks down column k
                for (int i = k; i < dim; u += dim, i++)
                {
                    double d = *u;
                    if (d < 0)
                        d = -d;     // manual Math.Abs
                    if (d > maxPivot)
                    {
                        pivotRow = i;
                        maxPivot = d;
                    }
                }
            }
            return pivotRow;
        }
/*
private void InitMarkowitzNumbers() {
markovitzNumbers = new int[dim];
for (int i = 0; i < dim; i++)
for (int j = 0; j < A.NumberOfColumns; j++)
if (A[i, j]!=0)
markovitzNumbers[i]++;
}
*/
        /// <summary>
        /// Solves yB = cB in place: y holds cB on entry and the solution on exit.
        /// Applies the inverses of the eta matrices, then of U, then multiplies
        /// by the recorded L/P factors (derivation in the comment below).
        /// </summary>
        internal override void Solve_yBEquals_cB(double[] y)
        {
            /*
             * We have LB=UE or B =L(-1)UE. We have yB=c or y=cB(-1)=cE(-1)(U-1)L.
             * First we find cE(-1)=y, then yU(-1)=y, and then y=yL
             */
            //solving yE=cB or yE1...Ek=cB
            for (int i = this.etaList.Count - 1; i >= 0; i--)
                etaList[i].SolveLeftSystem(y);
            //solving xU=y, and putting the answer into y
            this.U.SolveLeftSystem(y);

            Vector v = new Vector(y);
            for (int i = LP.Count - 1; i > -1; i--)
                v *= LP[i];//will update coefficient of v, that is y
        }
        /// <summary>
        /// Solves Bd = a in place: a holds the right-hand side on entry and d on
        /// exit. Applies the L/P factors, then the inverse of U, then the
        /// inverses of the eta matrices.
        /// </summary>
        internal override void Solve_BdEqualsa(double[] a)
        {
            // We have LB=UE or B =L(-1)UE. We need to solve L(-1)UEd=a, or UEd=La
            //calculating La
            foreach (Matrix m in LP)
                a = m * a;
            //solving Ud=r
            U.SolveRightSystem(a);
            //solving Ed=d
            for (int i = 0; i < this.etaList.Count; i++)
                etaList[i].SolveRightSystem(a);
        }
        /// <summary>
        /// Appends an eta matrix produced by a basis update to the factorization.
        /// </summary>
        internal override void AddEtaMatrix(EtaMatrix e)
        {
            this.etaList.Add(e);
        }
internal static Factorization Create(Matrix A, UMatrix U)
{
Contract.Requires(A != null);
Contract.Requires(U != null);
StandardFactorization f = new StandardFactorization(A, U);
if (f.failure)
return null;
return f;
}
}
}
| |
namespace EnergyTrading.MDM.Test.Services
{
using System;
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
using Moq;
using EnergyTrading.MDM.Contracts.Sample; using EnergyTrading.Mdm.Contracts;
using EnergyTrading;
using EnergyTrading.Data;
using EnergyTrading.Mapping;
using EnergyTrading.Search;
using EnergyTrading.Validation;
using EnergyTrading.MDM;
using EnergyTrading.MDM.Messages;
using EnergyTrading.MDM.Services;
[TestFixture]
public class PartyRoleCrossMapFixture
{
        /// <summary>
        /// CrossMap must throw ArgumentNullException for a null request
        /// (declared via the ExpectedException attribute, so there is no
        /// explicit assert).
        /// </summary>
        [Test]
        [ExpectedException(typeof(ArgumentNullException))]
        public void NullRequestErrors()
        {
            // Arrange
            var validatorFactory = new Mock<IValidatorEngine>();
            var mappingEngine = new Mock<IMappingEngine>();
            var repository = new Mock<IRepository>();
            var searchCache = new Mock<ISearchCache>();
            var service = new PartyRoleService(validatorFactory.Object, mappingEngine.Object, repository.Object, searchCache.Object);

            // Act
            service.CrossMap(null);
        }
[Test]
public void UnsuccessfulMatchReturnsNotFound()
{
// Arrange
var validatorFactory = new Mock<IValidatorEngine>();
var mappingEngine = new Mock<IMappingEngine>();
var repository = new Mock<IRepository>();
var searchCache = new Mock<ISearchCache>();
var service = new PartyRoleService(validatorFactory.Object, mappingEngine.Object, repository.Object, searchCache.Object);
var list = new List<PartyRoleMapping>();
repository.Setup(x => x.Queryable<PartyRoleMapping>()).Returns(list.AsQueryable());
var request = new CrossMappingRequest { SystemName = "Endur", Identifier = "A", ValidAt = SystemTime.UtcNow(), TargetSystemName = "Trayport" };
// Act
var contract = service.CrossMap(request);
// Assert
Assert.IsNotNull(contract, "Contract null");
Assert.IsFalse(contract.IsValid, "Contract valid");
Assert.AreEqual(ErrorType.NotFound, contract.Error.Type, "ErrorType difers"); ;
}
        /// <summary>
        /// Happy path: a mapping for the source system exists and the party role
        /// has a default mapping for the target system. The request spells the
        /// target as "trayport" while the system is named "Trayport", so this
        /// test also pins case-insensitive target-system matching. The returned
        /// contract must carry exactly the mapped target identifier.
        /// </summary>
        [Test]
        public void SuccessMatch()
        {
            // Arrange
            var validatorFactory = new Mock<IValidatorEngine>();
            var mappingEngine = new Mock<IMappingEngine>();
            var repository = new Mock<IRepository>();
            var searchCache = new Mock<ISearchCache>();
            var service = new PartyRoleService(validatorFactory.Object, mappingEngine.Object, repository.Object, searchCache.Object);

            // Domain details
            var system = new MDM.SourceSystem { Name = "Endur" };
            var mapping = new PartyRoleMapping
            {
                System = system,
                MappingValue = "A",
            };
            var targetSystem = new MDM.SourceSystem { Name = "Trayport" };
            var targetMapping = new PartyRoleMapping
            {
                System = targetSystem,
                MappingValue = "B",
                IsDefault = true
            };

            var details = new MDM.PartyRoleDetails
            {
                Name = "PartyRole 1"
            };
            var party = new MDM.PartyRole
            {
                Id = 1
            };
            party.AddDetails(details);
            party.ProcessMapping(mapping);
            party.ProcessMapping(targetMapping);

            // Contract details
            var targetIdentifier = new MdmId
            {
                SystemName = "Trayport",
                Identifier = "B"
            };
            mappingEngine.Setup(x => x.Map<PartyRoleMapping, MdmId>(targetMapping)).Returns(targetIdentifier);

            var list = new List<PartyRoleMapping> { mapping };
            repository.Setup(x => x.Queryable<PartyRoleMapping>()).Returns(list.AsQueryable());

            var request = new CrossMappingRequest
            {
                SystemName = "Endur",
                Identifier = "A",
                TargetSystemName = "trayport",
                ValidAt = SystemTime.UtcNow(),
                Version = 1
            };

            // Act
            var response = service.CrossMap(request);
            var candidate = response.Contract;

            // Assert
            Assert.IsNotNull(response, "Contract null");
            Assert.IsNotNull(candidate, "Mapping null");
            Assert.AreEqual(1, candidate.Mappings.Count, "Identifier count incorrect");
            Assert.AreSame(targetIdentifier, candidate.Mappings[0], "Different identifier assigned");
        }
        /// <summary>
        /// Same arrangement as SuccessMatch, but the request carries Version = 0:
        /// the response is still valid yet contains no contract body (presumably
        /// because the caller's copy is already current — confirm against the
        /// CrossMap service contract).
        /// </summary>
        [Test]
        public void SuccessMatchSameVersion()
        {
            // Arrange
            var validatorFactory = new Mock<IValidatorEngine>();
            var mappingEngine = new Mock<IMappingEngine>();
            var repository = new Mock<IRepository>();
            var searchCache = new Mock<ISearchCache>();
            var service = new PartyRoleService(validatorFactory.Object, mappingEngine.Object, repository.Object, searchCache.Object);

            // Domain details
            var system = new MDM.SourceSystem { Name = "Endur" };
            var mapping = new PartyRoleMapping
            {
                System = system,
                MappingValue = "A",
            };
            var targetSystem = new MDM.SourceSystem { Name = "Trayport" };
            var targetMapping = new PartyRoleMapping
            {
                System = targetSystem,
                MappingValue = "B",
                IsDefault = true
            };

            var details = new MDM.PartyRoleDetails
            {
                Name = "PartyRole 1"
            };
            var party = new MDM.PartyRole
            {
                Id = 1
            };
            party.AddDetails(details);
            party.ProcessMapping(mapping);
            party.ProcessMapping(targetMapping);

            // Contract details
            var targetIdentifier = new MdmId
            {
                SystemName = "Trayport",
                Identifier = "B"
            };
            mappingEngine.Setup(x => x.Map<PartyRoleMapping, MdmId>(targetMapping)).Returns(targetIdentifier);

            var list = new List<PartyRoleMapping> { mapping };
            repository.Setup(x => x.Queryable<PartyRoleMapping>()).Returns(list.AsQueryable());

            var request = new CrossMappingRequest
            {
                SystemName = "Endur",
                Identifier = "A",
                TargetSystemName = "trayport",
                ValidAt = SystemTime.UtcNow(),
                Version = 0
            };

            // Act
            var response = service.CrossMap(request);
            var candidate = response.Contract;

            // Assert
            Assert.IsNotNull(response, "Contract null");
            Assert.IsTrue(response.IsValid, "Contract invalid");
            Assert.IsNull(candidate, "Mapping not null");
        }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using factor10.VisionThing.Effects;
using SharpDX;
using SharpDX.Toolkit;
using SharpDX.Toolkit.Input;
namespace factor10.VisionThing.CameraStuff
{
public class Camera
{
        private Matrix _view;                   // backing store for View
        private Matrix _projection;             // backing store for Projection
        private bool _dirtyViewProj;            // true when _viewProjection must be recomputed
        private Matrix _viewProjection;         // cached View * Projection

        public Vector3 Position { get; protected set; }     // camera location, maintained by Update()
        public Vector3 Target { get; protected set; }       // look-at point, maintained by Update()
        public Vector3 Forward { get; protected set; }      // normalized Target - Position
        public Vector3 Left { get; protected set; }         // normalized Up x Forward
        public Vector3 Up { get; set; }
        public float Yaw { get; protected set; }            // derived from the position->target line
        public float Pitch { get; protected set; }          // derived from the position->target line
        public readonly Vector2 ClientSize;                 // viewport size in pixels
        private Vector2 _lastMousePosition;                 // where the current drag began
        private readonly List<Keys> _downKeys = new List<Keys>();   // keys held this frame (filled by UpdateInputDevices)
        public readonly KeyboardManager KeyboardManager;    // null when built via the manager-less constructor
        public readonly MouseManager MouseManager;          // null when built via the manager-less constructor
        public readonly PointerManager PointerManager;      // may be null; checked before use
        public KeyboardState KeyboardState;                 // last sampled states (UpdateInputDevices)
        public MouseState MouseState;
        public PointerState PointerState;
        public readonly float ZNear;                        // near clip plane distance
        public readonly float ZFar;                         // far clip plane distance
        public float MovingSpeed = 30;                      // translation speed used by UpdateFreeFlyingCamera
        /// <summary>
        /// Creates a camera with a right-handed perspective projection
        /// (45-degree vertical field of view). The input managers are left null,
        /// so this instance is not meant to be driven by UpdateFreeFlyingCamera.
        /// </summary>
        /// <param name="clientSize">viewport size in pixels (used for the aspect ratio)</param>
        /// <param name="position">initial camera position</param>
        /// <param name="target">initial look-at point</param>
        /// <param name="zNear">near clip plane distance</param>
        /// <param name="zFar">far clip plane distance</param>
        public Camera(
            Vector2 clientSize,
            Vector3 position,
            Vector3 target,
            float zNear = 1,
            float zFar = 20000)
        {
            ClientSize = clientSize;
            Up = Vector3.Up;
            Update(position, target);
            ZNear = zNear;
            ZFar = zFar;
            Projection = Matrix.PerspectiveFovRH(
                MathUtil.PiOverFour,
                clientSize.X / clientSize.Y,
                ZNear, ZFar);
        }
        /// <summary>
        /// Creates a camera wired to keyboard/mouse/pointer managers so it can be
        /// driven interactively via UpdateInputDevices / UpdateFreeFlyingCamera.
        /// </summary>
        /// <param name="clientSize">viewport size in pixels (used for the aspect ratio)</param>
        /// <param name="keyboardManager">keyboard input source</param>
        /// <param name="mouseManager">mouse input source</param>
        /// <param name="pointerManager">pointer (touch) input source; may be null</param>
        /// <param name="position">initial camera position</param>
        /// <param name="target">initial look-at point</param>
        /// <param name="nearPlane">near clip plane distance</param>
        /// <param name="farPlane">far clip plane distance</param>
        public Camera(
            Vector2 clientSize,
            KeyboardManager keyboardManager,
            MouseManager mouseManager,
            PointerManager pointerManager,
            Vector3 position,
            Vector3 target,
            float nearPlane = 1,
            float farPlane = 20000)
            : this(clientSize, position, target, nearPlane, farPlane)
        {
            KeyboardManager = keyboardManager;
            MouseManager = mouseManager;
            PointerManager = pointerManager;
        }
        /// <summary>
        /// View matrix. Setting it invalidates the cached view-projection
        /// product and the cached bounding frustum.
        /// </summary>
        public Matrix View
        {
            get { return _view; }
            set
            {
                _view = value;
                _dirtyViewProj = true;
                _boundingFrustum = null;
            }
        }

        /// <summary>
        /// Projection matrix. Setting it invalidates the cached view-projection
        /// product and the cached bounding frustum.
        /// </summary>
        public Matrix Projection
        {
            get { return _projection; }
            set
            {
                _projection = value;
                _dirtyViewProj = true;
                _boundingFrustum = null;
            }
        }

        /// <summary>
        /// View * Projection, recomputed lazily after either factor changes.
        /// </summary>
        public Matrix ViewProjection
        {
            get
            {
                if (_dirtyViewProj)
                {
                    _viewProjection = View*Projection;
                    _boundingFrustum = null;
                    _dirtyViewProj = false;
                }
                return _viewProjection;
            }
        }
public Vector3 Front
{
get
{
var front = Target - Position;
front.Normalize();
return front;
}
}
        /// <summary>
        /// Repositions the camera and recomputes all derived state: the view
        /// matrix, the yaw/pitch implied by the position-to-target line, and the
        /// Forward/Left basis vectors.
        /// </summary>
        /// <param name="position">new camera position</param>
        /// <param name="target">new look-at point</param>
        public void Update(
            Vector3 position,
            Vector3 target)
        {
            Position = position;
            Target = target;
            View = Matrix.LookAtRH(
                Position,
                Target,
                Vector3.Up);
            // Recover yaw/pitch from the current orientation so that the
            // free-fly controls (which accumulate Yaw/Pitch) continue smoothly.
            Yaw = (float)Math.Atan2(position.X - target.X, position.Z - target.Z);
            Pitch = -(float)Math.Asin((position.Y - target.Y) / Vector3.Distance(position, target));
            Forward = Vector3.Normalize(target - position);
            Left = Vector3.Normalize(Vector3.Cross(Up, Forward));
        }
        private BoundingFrustum? _boundingFrustum;  // lazy cache; reset whenever View/Projection change
        private int _lastWheelDelta;                // wheel reading from the previous frame
        private float _moveMoveback;                // countdown (seconds) until the cursor snaps back to the drag origin

        /// <summary>
        /// Frustum for the current view-projection, created lazily and cached
        /// until the view or projection matrix changes.
        /// </summary>
        public BoundingFrustum BoundingFrustum
        {
            get { return (_boundingFrustum ?? (_boundingFrustum = new BoundingFrustum(ViewProjection))).Value; }
        }
public void UpdateInputDevices()
{
KeyboardState = KeyboardManager.GetState();
KeyboardState.GetDownKeys(_downKeys);
MouseState = MouseManager.GetState();
if(PointerManager!=null)
PointerState = PointerManager.GetState();
}
        /// <summary>
        /// Per-frame free-fly controls: left mouse drag turns the camera, right
        /// drag / wheel translates it, WASD+R/F translate, arrow keys rotate and
        /// Shift multiplies the movement speed by 5. Requires UpdateInputDevices
        /// to have been called for the same frame.
        /// </summary>
        /// <param name="gameTime">frame timing used to scale movement and rotation</param>
        public void UpdateFreeFlyingCamera(GameTime gameTime)
        {
            var step = (float)gameTime.ElapsedGameTime.TotalSeconds;
            var mouseWheelChanged = MouseState.WheelDelta != _lastWheelDelta;
            if (_moveMoveback > 0)
            {
                // Countdown running: when it expires, snap the cursor back to
                // where the drag began.
                _moveMoveback -= step;
                if(_moveMoveback<=0)
                    MouseManager.SetPosition(_lastMousePosition);
            }
            // Nothing pressed, no wheel movement, no keys held: nothing to do.
            if (!MouseState.LeftButton.Down && !MouseState.RightButton.Down && !mouseWheelChanged && !_downKeys.Any())
                return;
            var mousePos = new Vector2(MouseState.X, MouseState.Y);
            if (MouseState.LeftButton.Pressed || MouseState.RightButton.Pressed)
                _lastMousePosition = mousePos;      // a new drag starts here
            var delta = (_lastMousePosition - mousePos)*ClientSize*step*5;
            if (mouseWheelChanged)
            {
                delta.Y += (MouseState.WheelDelta - _lastWheelDelta)*1.0f;
                _lastWheelDelta = MouseState.WheelDelta;
            }
            var pos = Position;
            //if (PointerState.Points.Any())
            //    foreach(var point in PointerState.Points)
            //        switch (point.EventType)
            //        {
            //            case PointerEventType.Pressed:
            //                _lastPointerPosition = point.Position;
            //                break;
            //            case PointerEventType.Moved:
            //                delta += _lastMousePosition - point.Position;
            //                Yaw += MathUtil.DegreesToRadians(delta.X*0.50f);
            //                Pitch += MathUtil.DegreesToRadians(delta.Y*0.50f);
            //                _lastPointerPosition = point.Position;
            //                break;
            //        }
            //else
            if (MouseState.LeftButton.Down)
            {
                // Left drag: look around.
                Yaw += MathUtil.DegreesToRadians(delta.X*0.50f);
                Pitch += MathUtil.DegreesToRadians(delta.Y*0.50f);
                if(_moveMoveback<=0)
                    _moveMoveback = 0.05f;
            }
            else if (MouseState.RightButton.Down || mouseWheelChanged)
            {
                // Right drag / wheel: translate sideways and along the view direction.
                pos -= Forward*delta.Y;
                pos += Left*delta.X;
                if (_moveMoveback <= 0)
                    _moveMoveback = 0.05f;
            }
            var rotStep = step*1.5f;
            step *= MovingSpeed;
            if (_downKeys.Contains(Keys.Shift))
                step *= 5;
            if (_downKeys.Contains(Keys.R))
                pos.Y += step;
            if (KeyboardState.IsKeyDown(Keys.F))
                pos.Y -= step;
            if (KeyboardState.IsKeyDown(Keys.A))
                pos += Left*step;
            if (KeyboardState.IsKeyDown(Keys.D))
                pos -= Left*step;
            if (KeyboardState.IsKeyDown(Keys.W))
                pos += Forward*step;
            if (KeyboardState.IsKeyDown(Keys.S))
                pos -= Forward*step;
            if (KeyboardState.IsKeyDown(Keys.Left))
                Yaw += rotStep;
            if (KeyboardState.IsKeyDown(Keys.Right))
                Yaw -= rotStep;
            if (KeyboardState.IsKeyDown(Keys.Up))
                Pitch += rotStep;
            if (KeyboardState.IsKeyDown(Keys.Down))
                Pitch -= rotStep;
            // Rebuild the orientation from the accumulated yaw/pitch and aim at a
            // point 10 units ahead along the rotated forward axis.
            var rotation = Matrix.RotationYawPitchRoll(Yaw, Pitch, 0);
            Update(
                pos,
                pos + Vector3.TransformCoordinate(Vector3.ForwardRH*10, rotation));
        }
        /// <summary>
        /// Copies this camera's view/projection matrices and world position into
        /// the given effect so it renders from this camera's point of view.
        /// </summary>
        /// <param name="effect">Effect to receive the camera parameters.</param>
        public void UpdateEffect(IVEffect effect)
        {
            effect.View = View;
            effect.Projection = Projection;
            effect.CameraPosition = Position;
        }
public Ray GetPickingRay()
{
var viewProj = View * Projection;
var mouseNearVector = new Vector3(MouseState.X, MouseState.Y, ZNear);
Vector3 pointNear;
Vector3.Unproject(ref mouseNearVector, 0, 0, 1, 1, ZNear, ZFar, ref viewProj, out pointNear);
var mouseFarVector = new Vector3(MouseState.X, MouseState.Y, ZFar);
Vector3 pointFar;
Vector3.Unproject(ref mouseFarVector, 0, 0, 1, 1, ZNear, ZFar, ref viewProj, out pointFar);
return new Ray(pointNear, Vector3.Normalize(pointFar - pointNear));
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Management.SiteRecovery;
using Microsoft.Azure.Management.SiteRecovery.Models;
namespace Microsoft.Azure.Management.SiteRecovery
{
    public static partial class RecoveryServicesProviderOperationsExtensions
    {
        // NOTE(review): tool-generated wrapper class (see file header) — do not
        // hand-edit; changes are lost on regeneration. Each synchronous method
        // wraps its *Async counterpart via
        // Task.Factory.StartNew(...).Unwrap().GetAwaiter().GetResult() so the
        // async call is started on a thread-pool thread (avoiding
        // synchronization-context deadlocks) before being blocked on.
        /// <summary>
        /// Deletes a provider
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Name of provider's fabric
        /// </param>
        /// <param name='providerName'>
        /// Required. Provider Name.
        /// </param>
        /// <param name='input'>
        /// Required. Deletion input.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static LongRunningOperationResponse BeginDeleting(this IRecoveryServicesProviderOperations operations, string fabricName, string providerName, RecoveryServicesProviderDeletionInput input, CustomRequestHeaders customRequestHeaders)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IRecoveryServicesProviderOperations)s).BeginDeletingAsync(fabricName, providerName, input, customRequestHeaders);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }
        /// <summary>
        /// Deletes a provider
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Name of provider's fabric
        /// </param>
        /// <param name='providerName'>
        /// Required. Provider Name.
        /// </param>
        /// <param name='input'>
        /// Required. Deletion input.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static Task<LongRunningOperationResponse> BeginDeletingAsync(this IRecoveryServicesProviderOperations operations, string fabricName, string providerName, RecoveryServicesProviderDeletionInput input, CustomRequestHeaders customRequestHeaders)
        {
            return operations.BeginDeletingAsync(fabricName, providerName, input, customRequestHeaders, CancellationToken.None);
        }
        /// <summary>
        /// Purges a provider
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Name of provider's fabric
        /// </param>
        /// <param name='providerName'>
        /// Required. Provider Name.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static LongRunningOperationResponse BeginPurging(this IRecoveryServicesProviderOperations operations, string fabricName, string providerName, CustomRequestHeaders customRequestHeaders)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IRecoveryServicesProviderOperations)s).BeginPurgingAsync(fabricName, providerName, customRequestHeaders);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }
        /// <summary>
        /// Purges a provider
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Name of provider's fabric
        /// </param>
        /// <param name='providerName'>
        /// Required. Provider Name.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static Task<LongRunningOperationResponse> BeginPurgingAsync(this IRecoveryServicesProviderOperations operations, string fabricName, string providerName, CustomRequestHeaders customRequestHeaders)
        {
            return operations.BeginPurgingAsync(fabricName, providerName, customRequestHeaders, CancellationToken.None);
        }
        /// <summary>
        /// Refreshes a provider
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Name of provider's fabric
        /// </param>
        /// <param name='providerName'>
        /// Required. Name of provider
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static LongRunningOperationResponse BeginRefreshing(this IRecoveryServicesProviderOperations operations, string fabricName, string providerName, CustomRequestHeaders customRequestHeaders)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IRecoveryServicesProviderOperations)s).BeginRefreshingAsync(fabricName, providerName, customRequestHeaders);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }
        /// <summary>
        /// Refreshes a provider
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Name of provider's fabric
        /// </param>
        /// <param name='providerName'>
        /// Required. Name of provider
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static Task<LongRunningOperationResponse> BeginRefreshingAsync(this IRecoveryServicesProviderOperations operations, string fabricName, string providerName, CustomRequestHeaders customRequestHeaders)
        {
            return operations.BeginRefreshingAsync(fabricName, providerName, customRequestHeaders, CancellationToken.None);
        }
        /// <summary>
        /// Deletes a provider
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Name of provider's fabric
        /// </param>
        /// <param name='providerName'>
        /// Required. Provider Name.
        /// </param>
        /// <param name='input'>
        /// Required. Deletion input.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static LongRunningOperationResponse Delete(this IRecoveryServicesProviderOperations operations, string fabricName, string providerName, RecoveryServicesProviderDeletionInput input, CustomRequestHeaders customRequestHeaders)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IRecoveryServicesProviderOperations)s).DeleteAsync(fabricName, providerName, input, customRequestHeaders);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }
        /// <summary>
        /// Deletes a provider
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Name of provider's fabric
        /// </param>
        /// <param name='providerName'>
        /// Required. Provider Name.
        /// </param>
        /// <param name='input'>
        /// Required. Deletion input.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static Task<LongRunningOperationResponse> DeleteAsync(this IRecoveryServicesProviderOperations operations, string fabricName, string providerName, RecoveryServicesProviderDeletionInput input, CustomRequestHeaders customRequestHeaders)
        {
            return operations.DeleteAsync(fabricName, providerName, input, customRequestHeaders, CancellationToken.None);
        }
        /// <summary>
        /// Get the server object by Id.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Fabric Name.
        /// </param>
        /// <param name='providerName'>
        /// Required. Provider Name.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// The response model for the provider object
        /// </returns>
        public static RecoveryServicesProviderResponse Get(this IRecoveryServicesProviderOperations operations, string fabricName, string providerName, CustomRequestHeaders customRequestHeaders)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IRecoveryServicesProviderOperations)s).GetAsync(fabricName, providerName, customRequestHeaders);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }
        /// <summary>
        /// Get the server object by Id.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Fabric Name.
        /// </param>
        /// <param name='providerName'>
        /// Required. Provider Name.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// The response model for the provider object
        /// </returns>
        public static Task<RecoveryServicesProviderResponse> GetAsync(this IRecoveryServicesProviderOperations operations, string fabricName, string providerName, CustomRequestHeaders customRequestHeaders)
        {
            return operations.GetAsync(fabricName, providerName, customRequestHeaders, CancellationToken.None);
        }
        /// <summary>
        /// The Get Operation Status operation returns the status of the
        /// specified operation. After calling an asynchronous operation, you
        /// can call Get Operation Status to determine whether the operation
        /// has succeeded, failed, or is still in progress.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='operationStatusLink'>
        /// Required. Location value returned by the Begin operation.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static LongRunningOperationResponse GetDeleteStatus(this IRecoveryServicesProviderOperations operations, string operationStatusLink)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IRecoveryServicesProviderOperations)s).GetDeleteStatusAsync(operationStatusLink);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }
        /// <summary>
        /// The Get Operation Status operation returns the status of the
        /// specified operation. After calling an asynchronous operation, you
        /// can call Get Operation Status to determine whether the operation
        /// has succeeded, failed, or is still in progress.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='operationStatusLink'>
        /// Required. Location value returned by the Begin operation.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static Task<LongRunningOperationResponse> GetDeleteStatusAsync(this IRecoveryServicesProviderOperations operations, string operationStatusLink)
        {
            return operations.GetDeleteStatusAsync(operationStatusLink, CancellationToken.None);
        }
        /// <summary>
        /// The Get Operation Status operation returns the status of the
        /// specified operation. After calling an asynchronous operation, you
        /// can call Get Operation Status to determine whether the operation
        /// has succeeded, failed, or is still in progress.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='operationStatusLink'>
        /// Required. Location value returned by the Begin operation.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static LongRunningOperationResponse GetPurgeStatus(this IRecoveryServicesProviderOperations operations, string operationStatusLink)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IRecoveryServicesProviderOperations)s).GetPurgeStatusAsync(operationStatusLink);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }
        /// <summary>
        /// The Get Operation Status operation returns the status of the
        /// specified operation. After calling an asynchronous operation, you
        /// can call Get Operation Status to determine whether the operation
        /// has succeeded, failed, or is still in progress.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='operationStatusLink'>
        /// Required. Location value returned by the Begin operation.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static Task<LongRunningOperationResponse> GetPurgeStatusAsync(this IRecoveryServicesProviderOperations operations, string operationStatusLink)
        {
            return operations.GetPurgeStatusAsync(operationStatusLink, CancellationToken.None);
        }
        /// <summary>
        /// The Get Operation Status operation returns the status of the
        /// specified operation. After calling an asynchronous operation, you
        /// can call Get Operation Status to determine whether the operation
        /// has succeeded, failed, or is still in progress.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='operationStatusLink'>
        /// Required. Location value returned by the Begin operation.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static LongRunningOperationResponse GetRefreshStatus(this IRecoveryServicesProviderOperations operations, string operationStatusLink)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IRecoveryServicesProviderOperations)s).GetRefreshStatusAsync(operationStatusLink);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }
        /// <summary>
        /// The Get Operation Status operation returns the status of the
        /// specified operation. After calling an asynchronous operation, you
        /// can call Get Operation Status to determine whether the operation
        /// has succeeded, failed, or is still in progress.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='operationStatusLink'>
        /// Required. Location value returned by the Begin operation.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static Task<LongRunningOperationResponse> GetRefreshStatusAsync(this IRecoveryServicesProviderOperations operations, string operationStatusLink)
        {
            return operations.GetRefreshStatusAsync(operationStatusLink, CancellationToken.None);
        }
        /// <summary>
        /// Get the list of all servers under the vault for given fabric.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Fabric Name.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// The response model for the list servers operation.
        /// </returns>
        public static RecoveryServicesProviderListResponse List(this IRecoveryServicesProviderOperations operations, string fabricName, CustomRequestHeaders customRequestHeaders)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IRecoveryServicesProviderOperations)s).ListAsync(fabricName, customRequestHeaders);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }
        /// <summary>
        /// Get the list of all servers under the vault for given fabric.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Fabric Name.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// The response model for the list servers operation.
        /// </returns>
        public static Task<RecoveryServicesProviderListResponse> ListAsync(this IRecoveryServicesProviderOperations operations, string fabricName, CustomRequestHeaders customRequestHeaders)
        {
            return operations.ListAsync(fabricName, customRequestHeaders, CancellationToken.None);
        }
        /// <summary>
        /// Get the list of all servers under the vault.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// The response model for the list servers operation.
        /// </returns>
        public static RecoveryServicesProviderListResponse ListAll(this IRecoveryServicesProviderOperations operations, CustomRequestHeaders customRequestHeaders)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IRecoveryServicesProviderOperations)s).ListAllAsync(customRequestHeaders);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }
        /// <summary>
        /// Get the list of all servers under the vault.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// The response model for the list servers operation.
        /// </returns>
        public static Task<RecoveryServicesProviderListResponse> ListAllAsync(this IRecoveryServicesProviderOperations operations, CustomRequestHeaders customRequestHeaders)
        {
            return operations.ListAllAsync(customRequestHeaders, CancellationToken.None);
        }
        /// <summary>
        /// Purges a provider
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Name of provider's fabric
        /// </param>
        /// <param name='providerName'>
        /// Required. Provider Name.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static LongRunningOperationResponse Purge(this IRecoveryServicesProviderOperations operations, string fabricName, string providerName, CustomRequestHeaders customRequestHeaders)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IRecoveryServicesProviderOperations)s).PurgeAsync(fabricName, providerName, customRequestHeaders);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }
        /// <summary>
        /// Purges a provider
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Name of provider's fabric
        /// </param>
        /// <param name='providerName'>
        /// Required. Provider Name.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static Task<LongRunningOperationResponse> PurgeAsync(this IRecoveryServicesProviderOperations operations, string fabricName, string providerName, CustomRequestHeaders customRequestHeaders)
        {
            return operations.PurgeAsync(fabricName, providerName, customRequestHeaders, CancellationToken.None);
        }
        /// <summary>
        /// Refreshes a provider
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Name of provider's fabric
        /// </param>
        /// <param name='providerName'>
        /// Required. Name of provider
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static LongRunningOperationResponse Refresh(this IRecoveryServicesProviderOperations operations, string fabricName, string providerName, CustomRequestHeaders customRequestHeaders)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IRecoveryServicesProviderOperations)s).RefreshAsync(fabricName, providerName, customRequestHeaders);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }
        /// <summary>
        /// Refreshes a provider
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.SiteRecovery.IRecoveryServicesProviderOperations.
        /// </param>
        /// <param name='fabricName'>
        /// Required. Name of provider's fabric
        /// </param>
        /// <param name='providerName'>
        /// Required. Name of provider
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <returns>
        /// A standard service response for long running operations.
        /// </returns>
        public static Task<LongRunningOperationResponse> RefreshAsync(this IRecoveryServicesProviderOperations operations, string fabricName, string providerName, CustomRequestHeaders customRequestHeaders)
        {
            return operations.RefreshAsync(fabricName, providerName, customRequestHeaders, CancellationToken.None);
        }
    }
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Threading;
using Pk2 = PICkit2V2.PICkitFunctions;
using KONST = PICkit2V2.Constants;
namespace PICkit2V2
{
class Pk2BootLoader
{
public static bool ReadHexAndDownload(string fileName, ref ushort pk2num)
{
try
{
FileInfo hexFile = new FileInfo(fileName);
TextReader hexRead = hexFile.OpenText();
byte[] flashWriteData = new byte[3+32]; // 3 address bytes plus 32 data bytes.
string fileLine = hexRead.ReadLine();
if (fileLine != null)
{
Pk2.EnterBootloader();
Pk2.ResetPk2Number();
Thread.Sleep(3000);
int i;
pk2num = 0;
for (i = 0; i < 10; i++)
{ //timijk, put bootloader into pickitBT??
//if (Pk2.DetectPICkit2Device(pk2num, false, true) == Constants.PICkit2USB.bootloader)
if (Pk2.DetectPICkit2Device(pk2num, (Pk2.spHandle != null), true) == Constants.PICkit2USB.bootloader)
{
if (Pk2.VerifyBootloaderMode())
{
break;
}
}
else
{
pk2num++; // look for PK2 with bootloader.
}
Thread.Sleep(500);
}
if (i == 10)
{
hexRead.Close();
return false;
}
}
// erase PICkit 2 firmware flash
Pk2.BL_EraseFlash();
//bool second16 = false;
//while (fileLine != null)
//{
// if ((fileLine[0] == ':') && (fileLine.Length >= 11))
// { // skip line if not hex line entry,or not minimum length ":BBAAAATTCC"
// int byteCount = Int32.Parse(fileLine.Substring(1, 2), System.Globalization.NumberStyles.HexNumber);
// int fileAddress = Int32.Parse(fileLine.Substring(3, 4), System.Globalization.NumberStyles.HexNumber);
// int recordType = Int32.Parse(fileLine.Substring(7, 2), System.Globalization.NumberStyles.HexNumber);
// if ((second16) && ((fileAddress & 0x00000010) == 0))
// {// if just moved to new 32-byte boundary.
// Pk2.BL_WriteFlash(flashWriteData);
// for (int x = 0; x < flashWriteData.Length; x++)
// { // clear array for skipped bytes in hex file
// flashWriteData[x] = 0xFF;
// }
// }
// second16 = ((fileAddress & 0x00000010) == 0x10);
// if (recordType == 0)
// { // Data Record}
// if ((fileAddress >= 0x2000) && (fileAddress < 0x7FE0))
// { // don't program 5555 key at last address until after verification.
// if (!second16)
// {
// int rowAddress = fileAddress & 0xFFE0;
// flashWriteData[0] = (byte)(rowAddress & 0xFF);
// flashWriteData[1] = (byte)((rowAddress >> 8) & 0xFF);
// flashWriteData[2] = 0x00; // address upper
// }
// if (fileLine.Length >= (11 + (2 * byteCount)))
// { // skip if line isn't long enough for bytecount.
// int startByte = fileAddress & 0x000F;
// int endByte = startByte + byteCount;
// int offset = 3;
// if (second16)
// {
// offset = 19;
// }
// for (int rowByte = 0; rowByte < 16; rowByte++)
// {
// if ((rowByte >= startByte) && (rowByte < endByte))
// {
// // get the byte value from hex file
// uint wordByte = UInt32.Parse(fileLine.Substring((9 + (2 * (rowByte - startByte))), 2), System.Globalization.NumberStyles.HexNumber);
// flashWriteData[offset + rowByte] = (byte)(wordByte & 0xFF);
// }
// }
// }
// }
// } // end if (recordType == 0)
// if (recordType == 1)
// { // end of record
// break;
// }
// }
// fileLine = hexRead.ReadLine();
//}
for (int x = 0; x < flashWriteData.Length; x++)
{ // clear array for skipped bytes in hex file
flashWriteData[x] = 0xFF;
}
int rowAddress=0;
int prvRowAddress=0;
while (fileLine != null)
{
if ((fileLine[0] == ':') && (fileLine.Length >= 11))
{ // skip line if not hex line entry,or not minimum length ":BBAAAATTCC"
int byteCount = Int32.Parse(fileLine.Substring(1, 2), System.Globalization.NumberStyles.HexNumber);
int fileAddress = Int32.Parse(fileLine.Substring(3, 4), System.Globalization.NumberStyles.HexNumber);
int recordType = Int32.Parse(fileLine.Substring(7, 2), System.Globalization.NumberStyles.HexNumber);
rowAddress = fileAddress & 0xFFE0;
if( prvRowAddress!=0 && prvRowAddress!=rowAddress)
{
Pk2.BL_WriteFlash(flashWriteData);
for (int x = 0; x < flashWriteData.Length; x++)
{ // clear array for skipped bytes in hex file
flashWriteData[x] = 0xFF;
}
prvRowAddress = 0;
}
// second16 = ((fileAddress & 0x00000010) == 0x10);
if (recordType == 0)
{ // Data Record}
if ((fileAddress >= 0x2000) && (fileAddress < 0x7FE0))
{ // don't program 5555 key at last address until after verification.
if (prvRowAddress != rowAddress)
{
flashWriteData[0] = (byte)(rowAddress & 0xFF);
flashWriteData[1] = (byte)((rowAddress >> 8) & 0xFF);
flashWriteData[2] = 0x00; // address upper
prvRowAddress = rowAddress;
}
if (fileLine.Length >= (11 + (2 * byteCount)))
{ // skip if line isn't long enough for bytecount.
int addrIdx = fileAddress & 0x001F;
int offset = 3;
for (int j = 0; j < byteCount; j++)
{
uint wordByte = UInt32.Parse(fileLine.Substring((9 + (2 * j)), 2), System.Globalization.NumberStyles.HexNumber);
flashWriteData[offset + addrIdx] = (byte)(wordByte & 0xFF);
addrIdx++;
if( addrIdx==0x0020)
{
Pk2.BL_WriteFlash(flashWriteData);
for (int x = 0; x < flashWriteData.Length; x++)
{ // clear array for skipped bytes in hex file
flashWriteData[x] = 0xFF;
}
prvRowAddress = 0;
if (j<byteCount-1)
{
rowAddress = (fileAddress + 0x20) & 0xFFE0;
flashWriteData[0] = (byte)(rowAddress & 0xFF);
flashWriteData[1] = (byte)((rowAddress >> 8) & 0xFF);
flashWriteData[2] = 0x00; // address upper
prvRowAddress = rowAddress;
addrIdx = 0;
}
}
}
}
}
} // end if (recordType == 0)
if (recordType == 1)
{ // end of record
break;
}
}
fileLine = hexRead.ReadLine();
}
if(prvRowAddress!=0) Pk2.BL_WriteFlash(flashWriteData); // write last row
hexRead.Close();
return true;
}
catch( Exception e)
{
return false;
}
}
        /// <summary>
        /// Re-reads device flash over USB and compares it byte-for-byte against the
        /// data records of the given Intel-HEX file.
        /// </summary>
        /// <param name="fileName">Path of the Intel-HEX file to verify against.</param>
        /// <returns>
        /// true when every checked byte matches device flash; false on any mismatch
        /// or on any exception (e.g. the file cannot be opened or parsed).
        /// </returns>
        public static bool ReadHexAndVerify(string fileName)
        {
            try
            {
                FileInfo hexFile = new FileInfo(fileName);
                TextReader hexRead = hexFile.OpenText();
                string fileLine = hexRead.ReadLine();
                bool verified = true;
                int lastAddress = 0;    // start of the 16-byte flash block last read from the device
                int usbRdAddr = 0;      // index into the current 16-byte device read buffer
                while (fileLine != null)
                {
                    // Intel-HEX record layout: ":BBAAAATTDD..CC" (byte count, address, type, data, checksum)
                    if ((fileLine[0] == ':') && (fileLine.Length >= 11))
                    { // skip line if not hex line entry,or not minimum length ":BBAAAATTCC"
                        int byteCount = Int32.Parse(fileLine.Substring(1, 2), System.Globalization.NumberStyles.HexNumber);
                        int fileAddress = Int32.Parse(fileLine.Substring(3, 4), System.Globalization.NumberStyles.HexNumber);
                        int recordType = Int32.Parse(fileLine.Substring(7, 2), System.Globalization.NumberStyles.HexNumber);
                        if (recordType == 0)
                        { // Data Record}
                            // Only verify the application region; addresses below 0x2000
                            // and at/above 0x7FE0 belong to the bootloader and are skipped.
                            if ((fileAddress >= 0x2000) && (fileAddress < 0x7FE0))
                            { // don't check bootloader stuff.
                                int startByte = fileAddress & 0x000F; // read 16 bytes at a time.
                                int firstAddress = fileAddress & 0xFFF0;
                                if (lastAddress != firstAddress)
                                { // only read if next line in different 16-byte block
                                    Pk2.BL_ReadFlash16(firstAddress);
                                }
                                if (fileLine.Length >= (11 + (2 * byteCount)))
                                { // skip if line isn't long enough for bytecount.
                                    usbRdAddr = startByte;
                                    for (int lineByte = 0; lineByte < byteCount; lineByte++)
                                    {
                                        // get the byte value from hex file
                                        uint wordByte = UInt32.Parse(fileLine.Substring((9 + (2 * lineByte)), 2), System.Globalization.NumberStyles.HexNumber);
                                        // Crossed into the next 16-byte block: fetch it from the device.
                                        if (usbRdAddr == 0x10)
                                        {
                                            firstAddress += 0x10;
                                            Pk2.BL_ReadFlash16(firstAddress);
                                            usbRdAddr = 0;
                                        }
                                        // NOTE(review): device data appears to start at offset 6 of the
                                        // USB reply buffer — confirm against Pk2.BL_ReadFlash16's layout.
                                        if (Pk2.Usb_read_array[6 + usbRdAddr++] != (byte)(wordByte & 0xFF))
                                        {
                                            verified = false;
                                            recordType = 1; // force the end-of-record path below to stop the outer loop
                                            break;
                                        }
                                    }
                                }
                                lastAddress = firstAddress;
                            }
                        } // end if (recordType == 0)
                        if (recordType == 1)
                        { // end of record
                            break;
                        }
                    }
                    fileLine = hexRead.ReadLine();
                }
                hexRead.Close();
                return verified;
            }
            catch
            {
                // Best-effort: any I/O or parse failure is reported as "not verified".
                return false;
            }
        }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.IO;
using System.Text;
namespace System.Diagnostics
{
/// <summary>
/// Provides version information for a physical file on disk.
/// </summary>
public sealed partial class FileVersionInfo
{
private readonly string _fileName;
private string _companyName;
private string _fileDescription;
private string _fileVersion;
private string _internalName;
private string _legalCopyright;
private string _originalFilename;
private string _productName;
private string _productVersion;
private string _comments;
private string _legalTrademarks;
private string _privateBuild;
private string _specialBuild;
private string _language;
private int _fileMajor;
private int _fileMinor;
private int _fileBuild;
private int _filePrivate;
private int _productMajor;
private int _productMinor;
private int _productBuild;
private int _productPrivate;
private bool _isDebug;
private bool _isPatched;
private bool _isPrivateBuild;
private bool _isPreRelease;
private bool _isSpecialBuild;
/// <summary>
/// Gets the comments associated with the file.
/// </summary>
public string Comments
{
get { return _comments; }
}
/// <summary>
/// Gets the name of the company that produced the file.
/// </summary>
public string CompanyName
{
get { return _companyName; }
}
/// <summary>
/// Gets the build number of the file.
/// </summary>
public int FileBuildPart
{
get { return _fileBuild; }
}
/// <summary>
/// Gets the description of the file.
/// </summary>
public string FileDescription
{
get { return _fileDescription; }
}
/// <summary>
/// Gets the major part of the version number.
/// </summary>
public int FileMajorPart
{
get { return _fileMajor; }
}
/// <summary>
/// Gets the minor part of the version number of the file.
/// </summary>
public int FileMinorPart
{
get { return _fileMinor; }
}
/// <summary>
/// Gets the name of the file that this instance of <see cref="FileVersionInfo" /> describes.
/// </summary>
public string FileName
{
get { return _fileName; }
}
/// <summary>
/// Gets the file private part number.
/// </summary>
public int FilePrivatePart
{
get { return _filePrivate; }
}
/// <summary>
/// Gets the file version number.
/// </summary>
public string FileVersion
{
get { return _fileVersion; }
}
/// <summary>
/// Gets the internal name of the file, if one exists.
/// </summary>
public string InternalName
{
get { return _internalName; }
}
/// <summary>
/// Gets a value that specifies whether the file contains debugging information
/// or is compiled with debugging features enabled.
/// </summary>
public bool IsDebug
{
get { return _isDebug; }
}
/// <summary>
/// Gets a value that specifies whether the file has been modified and is not identical to
/// the original shipping file of the same version number.
/// </summary>
public bool IsPatched
{
get { return _isPatched; }
}
/// <summary>
/// Gets a value that specifies whether the file was built using standard release procedures.
/// </summary>
public bool IsPrivateBuild
{
get { return _isPrivateBuild; }
}
/// <summary>
/// Gets a value that specifies whether the file
/// is a development version, rather than a commercially released product.
/// </summary>
public bool IsPreRelease
{
get { return _isPreRelease; }
}
/// <summary>
/// Gets a value that specifies whether the file is a special build.
/// </summary>
public bool IsSpecialBuild
{
get { return _isSpecialBuild; }
}
/// <summary>
/// Gets the default language string for the version info block.
/// </summary>
public string Language
{
get { return _language; }
}
/// <summary>
/// Gets all copyright notices that apply to the specified file.
/// </summary>
public string LegalCopyright
{
get { return _legalCopyright; }
}
/// <summary>
/// Gets the trademarks and registered trademarks that apply to the file.
/// </summary>
public string LegalTrademarks
{
get { return _legalTrademarks; }
}
/// <summary>
/// Gets the name the file was created with.
/// </summary>
public string OriginalFilename
{
get { return _originalFilename; }
}
/// <summary>
/// Gets information about a private version of the file.
/// </summary>
public string PrivateBuild
{
get { return _privateBuild; }
}
/// <summary>
/// Gets the build number of the product this file is associated with.
/// </summary>
public int ProductBuildPart
{
get { return _productBuild; }
}
/// <summary>
/// Gets the major part of the version number for the product this file is associated with.
/// </summary>
public int ProductMajorPart
{
get { return _productMajor; }
}
/// <summary>
/// Gets the minor part of the version number for the product the file is associated with.
/// </summary>
public int ProductMinorPart
{
get { return _productMinor; }
}
/// <summary>
/// Gets the name of the product this file is distributed with.
/// </summary>
public string ProductName
{
get { return _productName; }
}
/// <summary>
/// Gets the private part number of the product this file is associated with.
/// </summary>
public int ProductPrivatePart
{
get { return _productPrivate; }
}
/// <summary>
/// Gets the version of the product this file is distributed with.
/// </summary>
public string ProductVersion
{
get { return _productVersion; }
}
/// <summary>
/// Gets the special build information for the file.
/// </summary>
public string SpecialBuild
{
get { return _specialBuild; }
}
/// <summary>
/// Returns a <see cref="FileVersionInfo" /> representing the version information associated with the specified file.
/// </summary>
public static FileVersionInfo GetVersionInfo(string fileName)
{
// Check for the existence of the file. File.Exists returns false if Read permission is denied.
if (!File.Exists(fileName))
{
throw new FileNotFoundException(fileName);
}
return new FileVersionInfo(fileName);
}
/// <summary>
/// Returns a partial list of properties in <see cref="FileVersionInfo" />
/// and their values.
/// </summary>
public override string ToString()
{
// An initial capacity of 512 was chosen because it is large enough to cover
// the size of the static strings with enough capacity left over to cover
// average length property values.
var sb = new StringBuilder(512);
sb.Append("File: ").AppendLine(FileName);
sb.Append("InternalName: ").AppendLine(InternalName);
sb.Append("OriginalFilename: ").AppendLine(OriginalFilename);
sb.Append("FileVersion: ").AppendLine(FileVersion);
sb.Append("FileDescription: ").AppendLine(FileDescription);
sb.Append("Product: ").AppendLine(ProductName);
sb.Append("ProductVersion: ").AppendLine(ProductVersion);
sb.Append("Debug: ").AppendLine(IsDebug.ToString());
sb.Append("Patched: ").AppendLine(IsPatched.ToString());
sb.Append("PreRelease: ").AppendLine(IsPreRelease.ToString());
sb.Append("PrivateBuild: ").AppendLine(IsPrivateBuild.ToString());
sb.Append("SpecialBuild: ").AppendLine(IsSpecialBuild.ToString());
sb.Append("Language: ").AppendLine(Language);
return sb.ToString();
}
}
}
| |
/*
*************************************************************************
** Custom classes used by C#
*************************************************************************
*/
using System;
using System.Diagnostics;
using System.IO;
#if !(SQLITE_SILVERLIGHT || WINDOWS_MOBILE || SQLITE_WINRT)
using System.Management;
#endif
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
#if SQLITE_WINRT
using System.Reflection;
#endif
using i64 = System.Int64;
using u32 = System.UInt32;
using time_t = System.Int64;
namespace Community.CsharpSqlite
{
using sqlite3_value = Sqlite3.Mem;
public partial class Sqlite3
{
static int atoi( byte[] inStr )
{
return atoi( Encoding.UTF8.GetString( inStr, 0, inStr.Length ) );
}
    // C-style atoi: parse the leading run of digit/'-' characters of inStr as a
    // base-10 Int32; returns 0 when that prefix is empty or fails to parse.
    // NOTE(review): '-' is accepted anywhere in the prefix (e.g. "1-2"), which
    // then fails to parse and yields 0 — presumably acceptable for this port.
    static int atoi( string inStr )
    {
      int i;
      // Find the end of the numeric prefix.
      for ( i = 0; i < inStr.Length; i++ )
      {
        if ( !sqlite3Isdigit( inStr[i] ) && inStr[i] != '-' )
          break;
      }
      int result = 0;
#if WINDOWS_MOBILE
	  try { result = Int32.Parse(inStr.Substring(0, i)); }
	  catch { }
	  return result;
#else
      // TryParse avoids the exception cost on malformed input.
      return ( Int32.TryParse( inStr.Substring( 0, i ), out result ) ? result : 0 );
#endif
    }
    // C-style fprintf: format via sqlite3_mprintf and write to the given TextWriter.
    static void fprintf( TextWriter tw, string zFormat, params object[] ap )
    {
      tw.Write( sqlite3_mprintf( zFormat, ap ) );
    }
    // C-style printf: format via sqlite3_mprintf and write to stdout.
    // No-op on WinRT, where there is no console.
    static void printf( string zFormat, params object[] ap )
    {
#if !SQLITE_WINRT
      Console.Out.Write( sqlite3_mprintf( zFormat, ap ) );
#endif
    }
//Byte Buffer Testing
static int memcmp( byte[] bA, byte[] bB, int Limit )
{
if ( bA.Length < Limit )
return ( bA.Length < bB.Length ) ? -1 : +1;
if ( bB.Length < Limit )
return +1;
for ( int i = 0; i < Limit; i++ )
{
if ( bA[i] != bB[i] )
return ( bA[i] < bB[i] ) ? -1 : 1;
}
return 0;
}
//Byte Buffer & String Testing
static int memcmp( string A, byte[] bB, int Limit )
{
if ( A.Length < Limit )
return ( A.Length < bB.Length ) ? -1 : +1;
if ( bB.Length < Limit )
return +1;
char[] cA = A.ToCharArray();
for ( int i = 0; i < Limit; i++ )
{
if ( cA[i] != bB[i] )
return ( cA[i] < bB[i] ) ? -1 : 1;
}
return 0;
}
//byte with Offset & String Testing
static int memcmp( byte[] a, int Offset, byte[] b, int Limit )
{
if ( a.Length < Offset + Limit )
return ( a.Length - Offset < b.Length ) ? -1 : +1;
if ( b.Length < Limit )
return +1;
for ( int i = 0; i < Limit; i++ )
{
if ( a[i + Offset] != b[i] )
return ( a[i + Offset] < b[i] ) ? -1 : 1;
}
return 0;
}
//byte with Offset & String Testing
static int memcmp( byte[] a, int Aoffset, byte[] b, int Boffset, int Limit )
{
if ( a.Length < Aoffset + Limit )
return ( a.Length - Aoffset < b.Length - Boffset ) ? -1 : +1;
if ( b.Length < Boffset + Limit )
return +1;
for ( int i = 0; i < Limit; i++ )
{
if ( a[i + Aoffset] != b[i + Boffset] )
return ( a[i + Aoffset] < b[i + Boffset] ) ? -1 : 1;
}
return 0;
}
static int memcmp( byte[] a, int Offset, string b, int Limit )
{
if ( a.Length < Offset + Limit )
return ( a.Length - Offset < b.Length ) ? -1 : +1;
if ( b.Length < Limit )
return +1;
for ( int i = 0; i < Limit; i++ )
{
if ( a[i + Offset] != b[i] )
return ( a[i + Offset] < b[i] ) ? -1 : 1;
}
return 0;
}
//String Testing
static int memcmp( string A, string B, int Limit )
{
if ( A.Length < Limit )
return ( A.Length < B.Length ) ? -1 : +1;
if ( B.Length < Limit )
return +1;
int rc;
if ( ( rc = String.Compare( A, 0, B, 0, Limit, StringComparison.Ordinal ) ) == 0 )
return 0;
return rc < 0 ? -1 : +1;
}
// ----------------------------
// ** Builtin Functions
// ----------------------------
static Regex oRegex = null;
/*
** The regexp() function. two arguments are both strings
** Collating sequences are not used.
*/
static void regexpFunc(
sqlite3_context context,
int argc,
sqlite3_value[] argv
)
{
string zTest; /* The input string A */
string zRegex; /* The regex string B */
Debug.Assert( argc == 2 );
UNUSED_PARAMETER( argc );
zRegex = sqlite3_value_text( argv[0] );
zTest = sqlite3_value_text( argv[1] );
if ( zTest == null || String.IsNullOrEmpty( zRegex ) )
{
sqlite3_result_int( context, 0 );
return;
}
if ( oRegex == null || oRegex.ToString() == zRegex )
{
oRegex = new Regex( zRegex, RegexOptions.IgnoreCase );
}
sqlite3_result_int( context, oRegex.IsMatch( zTest ) ? 1 : 0 );
}
    // ----------------------------
    // ** Conversion routines
    // ----------------------------
    // Emulation of C's va_list for the printf-style routines in this port.
    // va_start records the format string and resets the cursor; each typed
    // va_arg overload consumes the next slot of the args array.  The state is
    // held in static fields, so argument walks are NOT reentrant;
    // lock_va_list is presumably intended to guard that — TODO confirm callers.
    static Object lock_va_list = new Object();
    static string vaFORMAT;   // format string of the walk in progress
    static int vaNEXT;        // index of the next argument slot to consume
    static void va_start( object[] ap, string zFormat )
    {
      vaFORMAT = zFormat;
      vaNEXT = 0;
    }
    static Boolean va_arg( object[] ap, Boolean sysType )
    {
      return Convert.ToBoolean( ap[vaNEXT++] );
    }
    static Byte[] va_arg( object[] ap, Byte[] sysType )
    {
      return (Byte[])ap[vaNEXT++];
    }
    static Byte[][] va_arg( object[] ap, Byte[][] sysType )
    {
      // A null slot is consumed and surfaced as null.
      if ( ap[vaNEXT] == null )
      {
        {
          vaNEXT++;
          return null;
        }
      }
      else
      {
        return (Byte[][])ap[vaNEXT++];
      }
    }
    static Char va_arg( object[] ap, Char sysType )
    {
      // Boxed integer zero is mapped to the character '0' (C NUL stand-in
      // convention of this port — see the Int64 branch below as well).
      if ( ap[vaNEXT] is Int32 && (int)ap[vaNEXT] == 0 )
      {
        vaNEXT++;
        return (char)'0';
      }
      else
      {
        if ( ap[vaNEXT] is Int64 )
          if ( (i64)ap[vaNEXT] == 0 )
          {
            vaNEXT++;
            return (char)'0';
          }
          else
            return (char)( (i64)ap[vaNEXT++] );
        else
          return (char)ap[vaNEXT++];
      }
    }
    static Double va_arg( object[] ap, Double sysType )
    {
      return Convert.ToDouble( ap[vaNEXT++] );
    }
    static dxLog va_arg( object[] ap, dxLog sysType )
    {
      return (dxLog)ap[vaNEXT++];
    }
    static Int64 va_arg( object[] ap, Int64 sysType )
    {
      // Non-numeric slots fall back to their hash code (used when a pointer
      // value is being formatted with %p-style specifiers).
      if ( ap[vaNEXT] is System.Int64)
        return Convert.ToInt64( ap[vaNEXT++] );
      else
        return (Int64)( ap[vaNEXT++].GetHashCode() );
    }
    static Int32 va_arg( object[] ap, Int32 sysType )
    {
      // Reinterpret values above Int32.MaxValue as their two's-complement
      // negative Int32 form instead of throwing an OverflowException.
      if ( Convert.ToInt64( ap[vaNEXT] ) > 0 && ( Convert.ToUInt32( ap[vaNEXT] ) > Int32.MaxValue ) )
        return (Int32)( Convert.ToUInt32( ap[vaNEXT++] ) - System.UInt32.MaxValue - 1 );
      else
        return (Int32)Convert.ToInt32( ap[vaNEXT++] );
    }
    static Int32[] va_arg( object[] ap, Int32[] sysType )
    {
      if ( ap[vaNEXT] == null )
      {
        {
          vaNEXT++;
          return null;
        }
      }
      else
      {
        return (Int32[])ap[vaNEXT++];
      }
    }
    static MemPage va_arg( object[] ap, MemPage sysType )
    {
      return (MemPage)ap[vaNEXT++];
    }
    static Object va_arg( object[] ap, Object sysType )
    {
      return (Object)ap[vaNEXT++];
    }
    static sqlite3 va_arg( object[] ap, sqlite3 sysType )
    {
      return (sqlite3)ap[vaNEXT++];
    }
    static sqlite3_mem_methods va_arg( object[] ap, sqlite3_mem_methods sysType )
    {
      return (sqlite3_mem_methods)ap[vaNEXT++];
    }
    static sqlite3_mutex_methods va_arg( object[] ap, sqlite3_mutex_methods sysType )
    {
      return (sqlite3_mutex_methods)ap[vaNEXT++];
    }
    static SrcList va_arg( object[] ap, SrcList sysType )
    {
      return (SrcList)ap[vaNEXT++];
    }
static String va_arg( object[] ap, String sysType )
{
if ( ap.Length < vaNEXT - 1 || ap[vaNEXT] == null )
{
vaNEXT++;
return "NULL";
}
else
{
if ( ap[vaNEXT] is Byte[] )
if ( Encoding.UTF8.GetString( (byte[])ap[vaNEXT], 0, ( (byte[])ap[vaNEXT] ).Length ) == "\0" )
{
vaNEXT++;
return "";
}
else
return Encoding.UTF8.GetString( (byte[])ap[vaNEXT], 0, ( (byte[])ap[vaNEXT++] ).Length );
else if ( ap[vaNEXT] is Int32 )
{
vaNEXT++;
return null;
}
else if ( ap[vaNEXT] is StringBuilder )
return (String)ap[vaNEXT++].ToString();
else if ( ap[vaNEXT] is Char )
return ( (Char)ap[vaNEXT++] ).ToString();
else
return (String)ap[vaNEXT++];
}
}
    static Token va_arg( object[] ap, Token sysType )
    {
      return (Token)ap[vaNEXT++];
    }
    static UInt32 va_arg( object[] ap, UInt32 sysType )
    {
      // Reference-typed slots are formatted via their hash code (pointer-style
      // output); value types go through a normal numeric conversion.
#if SQLITE_WINRT
      Type t = ap[vaNEXT].GetType();
      if ( t.GetTypeInfo().IsClass )
#else
      if ( ap[vaNEXT].GetType().IsClass )
#endif
      {
        return (UInt32)ap[vaNEXT++].GetHashCode();
      }
      else
      {
        return (UInt32)Convert.ToUInt32( ap[vaNEXT++] );
      }
    }
    static UInt64 va_arg( object[] ap, UInt64 sysType )
    {
      // Same class-vs-value split as the UInt32 overload above.
#if SQLITE_WINRT
      Type t = ap[vaNEXT].GetType();
      if (t.GetTypeInfo().IsClass)
#else
      if ( ap[vaNEXT].GetType().IsClass )
#endif
      {
        return (UInt64)ap[vaNEXT++].GetHashCode();
      }
      else
      {
        return (UInt64)Convert.ToUInt64( ap[vaNEXT++] );
      }
    }
    static void_function va_arg( object[] ap, void_function sysType )
    {
      return (void_function)ap[vaNEXT++];
    }
    // va_end releases the argument array and resets the static walk state.
    static void va_end( ref string[] ap )
    {
      ap = null;
      vaNEXT = -1;
      vaFORMAT = "";
    }
    static void va_end( ref object[] ap )
    {
      ap = null;
      vaNEXT = -1;
      vaFORMAT = "";
    }
public static tm localtime( time_t baseTime )
{
System.DateTime RefTime = new System.DateTime( 1970, 1, 1, 0, 0, 0, 0 );
RefTime = RefTime.AddSeconds( Convert.ToDouble( baseTime ) ).ToLocalTime();
tm tm = new tm();
tm.tm_sec = RefTime.Second;
tm.tm_min = RefTime.Minute;
tm.tm_hour = RefTime.Hour;
tm.tm_mday = RefTime.Day;
tm.tm_mon = RefTime.Month;
tm.tm_year = RefTime.Year;
tm.tm_wday = (int)RefTime.DayOfWeek;
tm.tm_yday = RefTime.DayOfYear;
tm.tm_isdst = RefTime.IsDaylightSavingTime() ? 1 : 0;
return tm;
}
public static long ToUnixtime( System.DateTime date )
{
System.DateTime unixStartTime = new System.DateTime( 1970, 1, 1, 0, 0, 0, 0 );
System.TimeSpan timeSpan = date - unixStartTime;
return Convert.ToInt64( timeSpan.TotalSeconds );
}
public static System.DateTime ToCSharpTime( long unixTime )
{
System.DateTime unixStartTime = new System.DateTime( 1970, 1, 1, 0, 0, 0, 0 );
return unixStartTime.AddSeconds( Convert.ToDouble( unixTime ) );
}
    // Managed counterpart of C's 'struct tm' (broken-down calendar time),
    // populated by localtime() above.
    public class tm
    {
      public int tm_sec;     /* seconds after the minute - [0,59] */
      public int tm_min;     /* minutes after the hour - [0,59] */
      public int tm_hour;    /* hours since midnight - [0,23] */
      public int tm_mday;    /* day of the month - [1,31] */
      public int tm_mon;     /* months since January - [0,11] */
      public int tm_year;    /* years since 1900 */
      public int tm_wday;    /* days since Sunday - [0,6] */
      public int tm_yday;    /* days since January 1 - [0,365] */
      public int tm_isdst;   /* daylight savings time flag */
    };
    // Managed counterpart of the Win32 FILETIME structure (64-bit time value
    // split into two 32-bit halves).
    public struct FILETIME
    {
      public u32 dwLowDateTime;
      public u32 dwHighDateTime;
    }
    // Example (C#)
    // Query WMI for the physical sector size of the disk hosting 'diskPath'
    // (e.g. "C:\"); falls back to 4096 on any failure or on platforms where
    // System.Management is unavailable.
    public static int GetbytesPerSector( StringBuilder diskPath )
    {
#if !(SQLITE_SILVERLIGHT || WINDOWS_MOBILE || SQLITE_WINRT)
      // Trailing character (presumably the path separator) is stripped to form
      // the WMI DeviceID, e.g. "C:\" -> "C:".
      ManagementObjectSearcher mosLogicalDisks = new ManagementObjectSearcher( "select * from Win32_LogicalDisk where DeviceID = '" + diskPath.ToString().Remove( diskPath.Length - 1, 1 ) + "'" );
      try
      {
        foreach ( ManagementObject moLogDisk in mosLogicalDisks.Get() )
        {
          ManagementObjectSearcher mosDiskDrives = new ManagementObjectSearcher( "select * from Win32_DiskDrive where SystemName = '" + moLogDisk["SystemName"] + "'" );
          foreach ( ManagementObject moPDisk in mosDiskDrives.Get() )
          {
            // First matching drive wins.
            return int.Parse( moPDisk["BytesPerSector"].ToString() );
          }
        }
      }
      catch
      {
        // Best-effort: WMI failures fall through to the default below.
      }
      return 4096;
#else
      return 4096;
#endif
    }
static void SWAP<T>( ref T A, ref T B )
{
T t = A;
A = B;
B = t;
}
    // Step function of the x_count() test aggregate: counts/sums its argument,
    // deliberately erroring on the magic values 40 and 41 so the test suite can
    // exercise aggregate error paths.
    static void x_CountStep(
    sqlite3_context context,
    int argc,
    sqlite3_value[] argv
    )
    {
      SumCtx p;
      int type;
      Debug.Assert( argc <= 1 );
      // Per-aggregate state lives on the Mem returned by the aggregate context.
      Mem pMem = sqlite3_aggregate_context( context, 1 );//sizeof(*p));
      if ( pMem._SumCtx == null )
        pMem._SumCtx = new SumCtx();
      p = pMem._SumCtx;
      if ( p.Context == null )
        p.Context = pMem;
      if ( argc == 0 || SQLITE_NULL == sqlite3_value_type( argv[0] ) )
      {
        // Zero-arg / NULL form: behave like count(*), bumping the sum by one.
        p.cnt++;
        p.iSum += 1;
      }
      else
      {
        type = sqlite3_value_numeric_type( argv[0] );
        if ( p != null && type != SQLITE_NULL )
        {
          p.cnt++;
          if ( type == SQLITE_INTEGER )
          {
            i64 v = sqlite3_value_int64( argv[0] );
            // Magic values used by the test harness to force an error result.
            if ( v == 40 || v == 41 )
            {
              sqlite3_result_error( context, "value of " + v + " handed to x_count", -1 );
              return;
            }
            else
            {
              // NOTE(review): 'v' appears to be added twice on the non-overflow
              // path (here, and again via iNewSum = p.iSum + v below) — confirm
              // against the upstream test_func.c x_count implementation.
              p.iSum += v;
              if ( !( p.approx | p.overflow != 0 ) )
              {
                i64 iNewSum = p.iSum + v;
                // Signed-overflow detection via the sign bits of the operands
                // and the result.
                int s1 = (int)( p.iSum >> ( sizeof( i64 ) * 8 - 1 ) );
                int s2 = (int)( v >> ( sizeof( i64 ) * 8 - 1 ) );
                int s3 = (int)( iNewSum >> ( sizeof( i64 ) * 8 - 1 ) );
                p.overflow = ( ( s1 & s2 & ~s3 ) | ( ~s1 & ~s2 & s3 ) ) != 0 ? 1 : 0;
                p.iSum = iNewSum;
              }
            }
          }
          else
          {
            // Any non-integer numeric flips the aggregate into floating point.
            p.rSum += sqlite3_value_double( argv[0] );
            p.approx = true;
          }
        }
      }
    }
    // Finalize function of the x_count() test aggregate: reports overflow,
    // returns the float sum when approximate, errors on the magic total 42,
    // otherwise returns the integer sum.
    static void x_CountFinalize( sqlite3_context context )
    {
      SumCtx p;
      Mem pMem = sqlite3_aggregate_context( context, 0 );
      p = pMem._SumCtx;
      if ( p != null && p.cnt > 0 )
      {
        if ( p.overflow != 0 )
        {
          sqlite3_result_error( context, "integer overflow", -1 );
        }
        else if ( p.approx )
        {
          sqlite3_result_double( context, p.rSum );
        }
        else if ( p.iSum == 42 )
        {
          // Deliberate test-harness error path.
          sqlite3_result_error( context, "x_count totals to 42", -1 );
        }
        else
        {
          sqlite3_result_int64( context, p.iSum );
        }
      }
    }
#if SQLITE_MUTEX_W32
    //---------------------WIN32 Definitions
    // Managed stand-ins for the Win32 APIs used by the W32 mutex implementation.
    static int GetCurrentThreadId()
    {
      return Thread.CurrentThread.ManagedThreadId;
    }
    // NOTE(review): 'location' is passed by value, so Interlocked.Increment
    // mutates only the local copy — the caller's variable is never updated.
    // Confirm callers only use the return value.
    static long InterlockedIncrement( long location )
    {
      Interlocked.Increment( ref location );
      return location;
    }
    // Critical sections are emulated with Monitor on an arbitrary object.
    static void EnterCriticalSection( Object mtx )
    {
      //long mid = mtx.GetHashCode();
      //int tid = Thread.CurrentThread.ManagedThreadId;
      //long ticks = cnt++;
      //Debug.WriteLine(String.Format( "{2}: +EnterCriticalSection; Mutex {0} Thread {1}", mtx.GetHashCode(), Thread.CurrentThread.ManagedThreadId, ticks) );
      Monitor.Enter( mtx );
    }
    // Monitor needs no explicit initialization, so these are no-ops.
    static void InitializeCriticalSection( Object mtx )
    {
      //Debug.WriteLine(String.Format( "{2}: +InitializeCriticalSection; Mutex {0} Thread {1}", mtx.GetHashCode(), Thread.CurrentThread.ManagedThreadId, System.DateTime.Now.Ticks ));
    }
    static void DeleteCriticalSection( Object mtx )
    {
      //Debug.WriteLine(String.Format( "{2}: +DeleteCriticalSection; Mutex {0} Thread {1}", mtx.GetHashCode(), Thread.CurrentThread.ManagedThreadId, System.DateTime.Now.Ticks) );
    }
    static void LeaveCriticalSection( Object mtx )
    {
      //Debug.WriteLine(String.Format("{2}: +LeaveCriticalSection; Mutex {0} Thread {1}", mtx.GetHashCode(), Thread.CurrentThread.ManagedThreadId, System.DateTime.Now.Ticks ));
      Monitor.Exit( mtx );
    }
#endif
    // Miscellaneous Windows Constants
    //#define ERROR_FILE_NOT_FOUND 2L
    //#define ERROR_HANDLE_DISK_FULL 39L
    //#define ERROR_NOT_SUPPORTED 50L
    //#define ERROR_DISK_FULL 112L
    const long ERROR_FILE_NOT_FOUND = 2L;
    const long ERROR_HANDLE_DISK_FULL = 39L;
    const long ERROR_NOT_SUPPORTED = 50L;
    const long ERROR_DISK_FULL = 112L;
    // ASCII upper-to-lower-case mapping table with safe indexers: indexes past
    // the table (or any index when SQLITE_ASCII is not defined and the table is
    // empty) are returned unchanged.
    private class SQLite3UpperToLower
    {
      static int[] sqlite3UpperToLower = new int[] {
#if SQLITE_ASCII
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 97, 98, 99,100,101,102,103,
104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,
122, 91, 92, 93, 94, 95, 96, 97, 98, 99,100,101,102,103,104,105,106,107,
108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,
126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,
144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,
162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,
180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,
198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,
216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,
234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,
252,253,254,255
#endif
};
      // Indexer for signed indexes; out-of-range values map to themselves.
      public int this[int index]
      {
        get
        {
          if ( index < sqlite3UpperToLower.Length )
            return sqlite3UpperToLower[index];
          else
            return index;
        }
      }
      // Indexer for unsigned indexes; out-of-range values map to themselves.
      public int this[u32 index]
      {
        get
        {
          if ( index < sqlite3UpperToLower.Length )
            return sqlite3UpperToLower[index];
          else
            return (int)index;
        }
      }
    }
    // Shared singleton instances used throughout the port.
    static SQLite3UpperToLower sqlite3UpperToLower = new SQLite3UpperToLower();
    static SQLite3UpperToLower UpperToLower = sqlite3UpperToLower;
}
}
| |
using NUnit.Framework;
using System;
using System.Collections.Generic;
using System.Linq;
using Umbraco.Core;
using Umbraco.Core.Exceptions;
using Umbraco.Core.Models;
using Umbraco.Core.Models.Rdbms;
using Umbraco.Core.Services;
using Umbraco.Tests.TestHelpers;
using Umbraco.Tests.TestHelpers.Entities;
namespace Umbraco.Tests.Services
{
[DatabaseTestBehavior(DatabaseBehavior.NewDbFileAndSchemaPerTest)]
[TestFixture, RequiresSTA]
public class ContentTypeServiceTests : BaseServiceTest
{
        // Per-test setup/teardown simply delegate to the base service-test
        // fixture (fresh DB file and schema per test, per the class attribute).
        [SetUp]
        public override void Initialize()
        {
            base.Initialize();
        }
        [TearDown]
        public override void TearDown()
        {
            base.TearDown();
        }
        // Removing a property type from a saved content type must also strip
        // the corresponding property (and property type) from existing content.
        [Test]
        public void Deleting_PropertyType_Removes_The_Property_From_Content()
        {
            IContentType contentType1 = MockedContentTypes.CreateTextpageContentType("test1", "Test1");
            ServiceContext.ContentTypeService.Save(contentType1);
            IContent contentItem = MockedContent.CreateTextpageContent(contentType1, "Testing", -1);
            ServiceContext.ContentService.SaveAndPublishWithStatus(contentItem);
            var initProps = contentItem.Properties.Count;
            var initPropTypes = contentItem.PropertyTypes.Count();
            //remove a property
            contentType1.RemovePropertyType(contentType1.PropertyTypes.First().Alias);
            ServiceContext.ContentTypeService.Save(contentType1);
            //re-load it from the db
            contentItem = ServiceContext.ContentService.GetById(contentItem.Id);
            Assert.AreEqual(initPropTypes - 1, contentItem.PropertyTypes.Count());
            Assert.AreEqual(initProps - 1, contentItem.Properties.Count);
        }
        // Renaming a content type's alias must rebuild the cached XML of all
        // content of that type (root element carries the alias), while content
        // of other types is left untouched.
        [Test]
        public void Rebuild_Content_Xml_On_Alias_Change()
        {
            var contentType1 = MockedContentTypes.CreateTextpageContentType("test1", "Test1");
            var contentType2 = MockedContentTypes.CreateTextpageContentType("test2", "Test2");
            ServiceContext.ContentTypeService.Save(contentType1);
            ServiceContext.ContentTypeService.Save(contentType2);
            var contentItems1 = MockedContent.CreateTextpageContent(contentType1, -1, 10).ToArray();
            contentItems1.ForEach(x => ServiceContext.ContentService.SaveAndPublishWithStatus(x));
            var contentItems2 = MockedContent.CreateTextpageContent(contentType2, -1, 5).ToArray();
            contentItems2.ForEach(x => ServiceContext.ContentService.SaveAndPublishWithStatus(x));
            //only update the contentType1 alias which will force an xml rebuild for all content of that type
            contentType1.Alias = "newAlias";
            ServiceContext.ContentTypeService.Save(contentType1);
            foreach (var c in contentItems1)
            {
                // Cached XML is read straight from the cmsContentXml table.
                var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id });
                Assert.IsNotNull(xml);
                Assert.IsTrue(xml.Xml.StartsWith("<newAlias"));
            }
            foreach (var c in contentItems2)
            {
                var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id });
                Assert.IsNotNull(xml);
                Assert.IsTrue(xml.Xml.StartsWith("<test2")); //should remain the same
            }
        }
        // Removing a property type must rebuild the cached XML so that the
        // removed property's element no longer appears.
        [Test]
        public void Rebuild_Content_Xml_On_Property_Removal()
        {
            var contentType1 = MockedContentTypes.CreateTextpageContentType("test1", "Test1");
            ServiceContext.ContentTypeService.Save(contentType1);
            var contentItems1 = MockedContent.CreateTextpageContent(contentType1, -1, 10).ToArray();
            contentItems1.ForEach(x => ServiceContext.ContentService.SaveAndPublishWithStatus(x));
            var alias = contentType1.PropertyTypes.First().Alias;
            var elementToMatch = "<" + alias + ">";
            foreach (var c in contentItems1)
            {
                var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id });
                Assert.IsNotNull(xml);
                Assert.IsTrue(xml.Xml.Contains(elementToMatch)); //verify that it is there before we remove the property
            }
            //remove a property
            contentType1.RemovePropertyType(contentType1.PropertyTypes.First().Alias);
            ServiceContext.ContentTypeService.Save(contentType1);
            var reQueried = ServiceContext.ContentTypeService.GetContentType(contentType1.Id);
            var reContent = ServiceContext.ContentService.GetById(contentItems1.First().Id);
            foreach (var c in contentItems1)
            {
                var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id });
                Assert.IsNotNull(xml);
                Assert.IsFalse(xml.Xml.Contains(elementToMatch)); //verify that it is no longer there
            }
        }
        // The saved 11-level hierarchy has 10 descendants below its root.
        [Test]
        public void Get_Descendants()
        {
            // Arrange
            var contentTypeService = ServiceContext.ContentTypeService;
            var hierarchy = CreateContentTypeHierarchy();
            contentTypeService.Save(hierarchy, 0); //ensure they are saved!
            var master = hierarchy.First();
            //Act
            var descendants = master.Descendants();
            //Assert
            Assert.AreEqual(10, descendants.Count());
        }
        // DescendantsAndSelf additionally includes the root itself (10 + 1).
        [Test]
        public void Get_Descendants_And_Self()
        {
            // Arrange
            var contentTypeService = ServiceContext.ContentTypeService;
            var hierarchy = CreateContentTypeHierarchy();
            contentTypeService.Save(hierarchy, 0); //ensure they are saved!
            var master = hierarchy.First();
            //Act
            var descendants = master.DescendantsAndSelf();
            //Assert
            Assert.AreEqual(11, descendants.Count());
        }
        // Looking up a media type by an unknown GUID returns null rather than throwing.
        [Test]
        public void Get_With_Missing_Guid()
        {
            // Arrange
            var contentTypeService = ServiceContext.ContentTypeService;
            //Act
            var result = contentTypeService.GetMediaType(Guid.NewGuid());
            //Assert
            Assert.IsNull(result);
        }
        // Bulk-saving a chain of new content types must assign identities to all
        // of them and wire each ParentId to the previously saved type.
        [Test]
        public void Can_Bulk_Save_New_Hierarchy_Content_Types()
        {
            // Arrange
            var contentTypeService = ServiceContext.ContentTypeService;
            var hierarchy = CreateContentTypeHierarchy();
            // Act
            contentTypeService.Save(hierarchy, 0);
            Assert.That(hierarchy.Any(), Is.True);
            Assert.That(hierarchy.Any(x => x.HasIdentity == false), Is.False);
            //all parent id's should be ok, they are lazy and if they equal zero an exception will be thrown
            Assert.DoesNotThrow(() => hierarchy.Any(x => x.ParentId != 0));
            for (var i = 0; i < hierarchy.Count(); i++)
            {
                if (i == 0) continue;
                Assert.AreEqual(hierarchy.ElementAt(i).ParentId, hierarchy.ElementAt(i - 1).Id);
            }
        }
        // End-to-end: save a base type plus a derived type, then create and
        // publish content of the derived type.
        [Test]
        public void Can_Save_ContentType_Structure_And_Create_Content_Based_On_It()
        {
            // Arrange
            var cs = ServiceContext.ContentService;
            var cts = ServiceContext.ContentTypeService;
            var dtdYesNo = ServiceContext.DataTypeService.GetDataTypeDefinitionById(-49);
            var ctBase = new ContentType(-1) { Name = "Base", Alias = "Base", Icon = "folder.gif", Thumbnail = "folder.png" };
            ctBase.AddPropertyType(new PropertyType(dtdYesNo, Constants.Conventions.Content.NaviHide)
            {
                Name = "Hide From Navigation",
            }
                /*,"Navigation"*/);
            cts.Save(ctBase);
            const string contentTypeAlias = "HomePage";
            var ctHomePage = new ContentType(ctBase, contentTypeAlias)
            {
                Name = "Home Page",
                Alias = contentTypeAlias,
                Icon = "settingDomain.gif",
                Thumbnail = "folder.png",
                AllowedAsRoot = true
            };
            ctHomePage.AddPropertyType(new PropertyType(dtdYesNo, "someProperty") { Name = "Some property" }
                /*,"Navigation"*/);
            cts.Save(ctHomePage);
            // Act
            var homeDoc = cs.CreateContent("Home Page", -1, contentTypeAlias);
            cs.SaveAndPublishWithStatus(homeDoc);
            // Assert
            Assert.That(ctBase.HasIdentity, Is.True);
            Assert.That(ctHomePage.HasIdentity, Is.True);
            Assert.That(homeDoc.HasIdentity, Is.True);
            Assert.That(homeDoc.ContentTypeId, Is.EqualTo(ctHomePage.Id));
        }
        // Saving a new content type must assign sequential sort orders (0,1,2)
        // to its property types.
        [Test]
        public void Create_Content_Type_Ensures_Sort_Orders()
        {
            var service = ServiceContext.ContentTypeService;
            var contentType = new ContentType(-1)
            {
                Alias = "test",
                Name = "Test",
                Description = "ContentType used for simple text pages",
                Icon = ".sprTreeDoc3",
                Thumbnail = "doc2.png",
                SortOrder = 1,
                CreatorId = 0,
                Trashed = false
            };
            contentType.AddPropertyType(new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title") { Name = "Title", Description = "", Mandatory = false, DataTypeDefinitionId = -88 });
            contentType.AddPropertyType(new PropertyType(Constants.PropertyEditors.TinyMCEAlias, DataTypeDatabaseType.Ntext, "bodyText") { Name = "Body Text", Description = "", Mandatory = false, DataTypeDefinitionId = -87 });
            contentType.AddPropertyType(new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author") { Name = "Author", Description = "Name of the author", Mandatory = false, DataTypeDefinitionId = -88 });
            service.Save(contentType);
            var sortOrders = contentType.PropertyTypes.Select(x => x.SortOrder).ToArray();
            Assert.AreEqual(1, sortOrders.Count(x => x == 0));
            Assert.AreEqual(1, sortOrders.Count(x => x == 1));
            Assert.AreEqual(1, sortOrders.Count(x => x == 2));
        }
        // Composing a type with another that shares an ancestor must be rejected
        // (AddContentType returns false for the invalid composition).
        [Test]
        public void Can_Create_And_Save_ContentType_Composition()
        {
            /*
             * Global
             * - Components
             * - Category
             */
            var service = ServiceContext.ContentTypeService;
            var global = MockedContentTypes.CreateSimpleContentType("global", "Global");
            service.Save(global);
            var components = MockedContentTypes.CreateSimpleContentType("components", "Components", global, true);
            service.Save(components);
            var component = MockedContentTypes.CreateSimpleContentType("component", "Component", components, true);
            service.Save(component);
            var category = MockedContentTypes.CreateSimpleContentType("category", "Category", global, true);
            service.Save(category);
            var success = category.AddContentType(component);
            Assert.That(success, Is.False);
        }
        // Deleting a parent content type must cascade: the child type and the
        // content created from it are removed as well.
        [Test]
        public void Can_Delete_Parent_ContentType_When_Child_Has_Content()
        {
            var cts = ServiceContext.ContentTypeService;
            var contentType = MockedContentTypes.CreateSimpleContentType("page", "Page", null, true);
            cts.Save(contentType);
            var childContentType = MockedContentTypes.CreateSimpleContentType("childPage", "Child Page", contentType, true, "Child Content");
            cts.Save(childContentType);
            var cs = ServiceContext.ContentService;
            var content = cs.CreateContent("Page 1", -1, childContentType.Alias);
            cs.Save(content);
            cts.Delete(contentType);
            // The in-memory entities keep their assigned ids after deletion...
            Assert.IsNotNull(content.Id);
            Assert.AreNotEqual(0, content.Id);
            Assert.IsNotNull(childContentType.Id);
            Assert.AreNotEqual(0, childContentType.Id);
            Assert.IsNotNull(contentType.Id);
            Assert.AreNotEqual(0, contentType.Id);
            // ...but none of them can be re-fetched from the database.
            var deletedContent = cs.GetById(content.Id);
            var deletedChildContentType = cts.GetContentType(childContentType.Id);
            var deletedContentType = cts.GetContentType(contentType.Id);
            Assert.IsNull(deletedChildContentType);
            Assert.IsNull(deletedContent);
            Assert.IsNull(deletedContentType);
        }
[Test]
public void Can_Create_Container()
{
    // Arrange
    var contentTypeService = ServiceContext.ContentTypeService;

    // Act: persist a new document-type container.
    var container = new EntityContainer(Constants.ObjectTypes.DocumentTypeGuid) { Name = "container1" };
    contentTypeService.SaveContentTypeContainer(container);

    // Assert: it can be read back using the id assigned on save.
    var createdContainer = contentTypeService.GetContentTypeContainer(container.Id);
    Assert.IsNotNull(createdContainer);
}
[Test]
public void Can_Get_All_Containers()
{
    // Arrange
    var contentTypeService = ServiceContext.ContentTypeService;

    // Act: save two document-type containers.
    foreach (var containerName in new[] { "container1", "container2" })
    {
        var container = new EntityContainer(Constants.ObjectTypes.DocumentTypeGuid) { Name = containerName };
        contentTypeService.SaveContentTypeContainer(container);
    }

    // Assert: querying with an empty id filter returns every container.
    var containers = contentTypeService.GetContentTypeContainers(new int[0]);
    Assert.AreEqual(2, containers.Count());
}
/// <summary>
/// Deleting a single content type should raise DeletedContentType reporting exactly one deleted entity.
/// </summary>
[Test]
public void Deleting_ContentType_Sends_Correct_Number_Of_DeletedEntities_In_Events()
{
    var cts = ServiceContext.ContentTypeService;
    var deletedEntities = 0;
    var contentType = MockedContentTypes.CreateSimpleContentType("page", "Page");
    cts.Save(contentType);
    // NOTE(review): DeletedContentType is a static event and this lambda is never
    // unsubscribed, so the subscription can leak into other tests — confirm the test
    // base resets static event handlers between tests.
    ContentTypeService.DeletedContentType += (sender, args) =>
    {
        deletedEntities += args.DeletedEntities.Count();
    };
    cts.Delete(contentType);
    // Fix: NUnit's Assert.AreEqual signature is (expected, actual); the original call
    // had the arguments reversed, which yields a misleading failure message.
    Assert.AreEqual(1, deletedEntities);
}
// Two independent deletes should each raise DeletedContentType, accumulating two
// deleted entities in total.
[Test]
public void Deleting_Multiple_ContentTypes_Sends_Correct_Number_Of_DeletedEntities_In_Events()
{
var cts = ServiceContext.ContentTypeService;
var deletedEntities = 0;
var contentType = MockedContentTypes.CreateSimpleContentType("page", "Page");
cts.Save(contentType);
var contentType2 = MockedContentTypes.CreateSimpleContentType("otherPage", "Other page");
cts.Save(contentType2);
// NOTE(review): static event subscription is never removed — confirm the test base
// resets handlers between tests, otherwise counts can leak across tests.
ContentTypeService.DeletedContentType += (sender, args) =>
{
deletedEntities += args.DeletedEntities.Count();
};
cts.Delete(contentType);
cts.Delete(contentType2);
Assert.AreEqual(2, deletedEntities);
}
// Deleting a parent content type that has a child should report both the parent and
// the child as deleted entities via the event.
[Test]
public void Deleting_ContentType_With_Child_Sends_Correct_Number_Of_DeletedEntities_In_Events()
{
var cts = ServiceContext.ContentTypeService;
var deletedEntities = 0;
var contentType = MockedContentTypes.CreateSimpleContentType("page", "Page");
cts.Save(contentType);
var contentType2 = MockedContentTypes.CreateSimpleContentType("subPage", "Sub page");
contentType2.ParentId = contentType.Id;
cts.Save(contentType2);
// NOTE(review): static event subscription is never removed — confirm the test base
// resets handlers between tests, otherwise counts can leak across tests.
ContentTypeService.DeletedContentType += (sender, args) =>
{
deletedEntities += args.DeletedEntities.Count();
};
cts.Delete(contentType);
Assert.AreEqual(2, deletedEntities);
}
// Adding and then removing a composition must also remove the composition's property
// types from the composed type's CompositionPropertyTypes.
[Test]
public void Can_Remove_ContentType_Composition_From_ContentType()
{
//Test for U4-2234
var cts = ServiceContext.ContentTypeService;
//Arrange
var component = CreateComponent();
cts.Save(component);
var banner = CreateBannerComponent(component);
cts.Save(banner);
var site = CreateSite();
cts.Save(site);
var homepage = CreateHomepage(site);
cts.Save(homepage);
//Add banner to homepage
var added = homepage.AddContentType(banner);
cts.Save(homepage);
//Assert composition
var bannerExists = homepage.ContentTypeCompositionExists(banner.Alias);
var bannerPropertyExists = homepage.CompositionPropertyTypes.Any(x => x.Alias.Equals("bannerName"));
Assert.That(added, Is.True);
Assert.That(bannerExists, Is.True);
Assert.That(bannerPropertyExists, Is.True);
Assert.That(homepage.CompositionPropertyTypes.Count(), Is.EqualTo(6));
//Remove banner from homepage
var removed = homepage.RemoveContentType(banner.Alias);
cts.Save(homepage);
//Assert composition
var bannerStillExists = homepage.ContentTypeCompositionExists(banner.Alias);
var bannerPropertyStillExists = homepage.CompositionPropertyTypes.Any(x => x.Alias.Equals("bannerName"));
Assert.That(removed, Is.True);
Assert.That(bannerStillExists, Is.False);
Assert.That(bannerPropertyStillExists, Is.False);
// Removing the banner composition drops its property types: 6 -> 4.
Assert.That(homepage.CompositionPropertyTypes.Count(), Is.EqualTo(4));
}
// DeepCloneWithResetIdentities should produce a saveable copy that keeps the original's
// structure (parent, level, compositions, property counts) but gets fresh identities
// (id, key, path, sort order, property type/group ids).
[Test]
public void Can_Copy_ContentType_By_Performing_Clone()
{
// Arrange
var service = ServiceContext.ContentTypeService;
var metaContentType = MockedContentTypes.CreateMetaContentType();
service.Save(metaContentType);
var simpleContentType = MockedContentTypes.CreateSimpleContentType("category", "Category", metaContentType);
service.Save(simpleContentType);
var categoryId = simpleContentType.Id;
// Act
var sut = simpleContentType.DeepCloneWithResetIdentities("newcategory");
service.Save(sut);
// Assert
Assert.That(sut.HasIdentity, Is.True);
var contentType = service.GetContentType(sut.Id);
var category = service.GetContentType(categoryId);
// Structure is preserved...
Assert.That(contentType.CompositionAliases().Any(x => x.Equals("meta")), Is.True);
Assert.AreEqual(contentType.ParentId, category.ParentId);
Assert.AreEqual(contentType.Level, category.Level);
Assert.AreEqual(contentType.PropertyTypes.Count(), category.PropertyTypes.Count());
// ...but all identities differ from the original.
Assert.AreNotEqual(contentType.Id, category.Id);
Assert.AreNotEqual(contentType.Key, category.Key);
Assert.AreNotEqual(contentType.Path, category.Path);
Assert.AreNotEqual(contentType.SortOrder, category.SortOrder);
Assert.AreNotEqual(contentType.PropertyTypes.First(x => x.Alias.Equals("title")).Id, category.PropertyTypes.First(x => x.Alias.Equals("title")).Id);
Assert.AreNotEqual(contentType.PropertyGroups.First(x => x.Name.Equals("Content")).Id, category.PropertyGroups.First(x => x.Name.Equals("Content")).Id);
}
// Cloning and then re-parenting: the clone should swap its composition from parent1 to
// parent2, take parent2 as its ParentId/path, and receive fresh identities.
[Test]
public void Can_Copy_ContentType_To_New_Parent_By_Performing_Clone()
{
// Arrange
var service = ServiceContext.ContentTypeService;
var parentContentType1 = MockedContentTypes.CreateSimpleContentType("parent1", "Parent1");
service.Save(parentContentType1);
var parentContentType2 = MockedContentTypes.CreateSimpleContentType("parent2", "Parent2", null, true);
service.Save(parentContentType2);
var simpleContentType = MockedContentTypes.CreateSimpleContentType("category", "Category", parentContentType1, true);
service.Save(simpleContentType);
// Act
var clone = simpleContentType.DeepCloneWithResetIdentities("newcategory");
// Re-parent: remove the inherited composition and attach the new parent.
clone.RemoveContentType("parent1");
clone.AddContentType(parentContentType2);
clone.ParentId = parentContentType2.Id;
service.Save(clone);
// Assert
Assert.That(clone.HasIdentity, Is.True);
var clonedContentType = service.GetContentType(clone.Id);
var originalContentType = service.GetContentType(simpleContentType.Id);
Assert.That(clonedContentType.CompositionAliases().Any(x => x.Equals("parent2")), Is.True);
Assert.That(clonedContentType.CompositionAliases().Any(x => x.Equals("parent1")), Is.False);
Assert.AreEqual(clonedContentType.Path, "-1," + parentContentType2.Id + "," + clonedContentType.Id);
Assert.AreEqual(clonedContentType.PropertyTypes.Count(), originalContentType.PropertyTypes.Count());
Assert.AreNotEqual(clonedContentType.ParentId, originalContentType.ParentId);
Assert.AreEqual(clonedContentType.ParentId, parentContentType2.Id);
Assert.AreNotEqual(clonedContentType.Id, originalContentType.Id);
Assert.AreNotEqual(clonedContentType.Key, originalContentType.Key);
Assert.AreNotEqual(clonedContentType.Path, originalContentType.Path);
Assert.AreNotEqual(clonedContentType.PropertyTypes.First(x => x.Alias.StartsWith("title")).Id, originalContentType.PropertyTypes.First(x => x.Alias.StartsWith("title")).Id);
Assert.AreNotEqual(clonedContentType.PropertyGroups.First(x => x.Name.StartsWith("Content")).Id, originalContentType.PropertyGroups.First(x => x.Name.StartsWith("Content")).Id);
}
// Service-level Copy to root: the copy loses its composition (it's moved to root),
// keeps property structure, and every property type/group gets its own identity.
[Test]
public void Can_Copy_ContentType_With_Service_To_Root()
{
// Arrange
var service = ServiceContext.ContentTypeService;
var metaContentType = MockedContentTypes.CreateMetaContentType();
service.Save(metaContentType);
var simpleContentType = MockedContentTypes.CreateSimpleContentType("category", "Category", metaContentType);
service.Save(simpleContentType);
var categoryId = simpleContentType.Id;
// Act
var clone = service.Copy(simpleContentType, "newcategory", "new category");
// Assert
Assert.That(clone.HasIdentity, Is.True);
var cloned = service.GetContentType(clone.Id);
var original = service.GetContentType(categoryId);
Assert.That(cloned.CompositionAliases().Any(x => x.Equals("meta")), Is.False); //it's been copied to root
Assert.AreEqual(cloned.ParentId, -1);
Assert.AreEqual(cloned.Level, 1);
Assert.AreEqual(cloned.PropertyTypes.Count(), original.PropertyTypes.Count());
Assert.AreEqual(cloned.PropertyGroups.Count(), original.PropertyGroups.Count());
// Each cloned group mirrors the original's property count, and every cloned
// property type must have been persisted (HasIdentity).
for (int i = 0; i < cloned.PropertyGroups.Count; i++)
{
Assert.AreEqual(cloned.PropertyGroups[i].PropertyTypes.Count, original.PropertyGroups[i].PropertyTypes.Count);
foreach (var propertyType in cloned.PropertyGroups[i].PropertyTypes)
{
Assert.IsTrue(propertyType.HasIdentity);
}
}
foreach (var propertyType in cloned.PropertyTypes)
{
Assert.IsTrue(propertyType.HasIdentity);
}
Assert.AreNotEqual(cloned.Id, original.Id);
Assert.AreNotEqual(cloned.Key, original.Key);
Assert.AreNotEqual(cloned.Path, original.Path);
Assert.AreNotEqual(cloned.SortOrder, original.SortOrder);
Assert.AreNotEqual(cloned.PropertyTypes.First(x => x.Alias.Equals("title")).Id, original.PropertyTypes.First(x => x.Alias.Equals("title")).Id);
Assert.AreNotEqual(cloned.PropertyGroups.First(x => x.Name.Equals("Content")).Id, original.PropertyGroups.First(x => x.Name.Equals("Content")).Id);
}
// Service-level Copy with an explicit parent: the copy composes with the new parent
// (not the old one), lives under the new parent's path, and gets fresh identities.
[Test]
public void Can_Copy_ContentType_To_New_Parent_With_Service()
{
// Arrange
var service = ServiceContext.ContentTypeService;
var parentContentType1 = MockedContentTypes.CreateSimpleContentType("parent1", "Parent1");
service.Save(parentContentType1);
var parentContentType2 = MockedContentTypes.CreateSimpleContentType("parent2", "Parent2", null, true);
service.Save(parentContentType2);
var simpleContentType = MockedContentTypes.CreateSimpleContentType("category", "Category", parentContentType1, true);
service.Save(simpleContentType);
// Act
var clone = service.Copy(simpleContentType, "newAlias", "new alias", parentContentType2);
// Assert
Assert.That(clone.HasIdentity, Is.True);
var clonedContentType = service.GetContentType(clone.Id);
var originalContentType = service.GetContentType(simpleContentType.Id);
Assert.That(clonedContentType.CompositionAliases().Any(x => x.Equals("parent2")), Is.True);
Assert.That(clonedContentType.CompositionAliases().Any(x => x.Equals("parent1")), Is.False);
Assert.AreEqual(clonedContentType.Path, "-1," + parentContentType2.Id + "," + clonedContentType.Id);
Assert.AreEqual(clonedContentType.PropertyTypes.Count(), originalContentType.PropertyTypes.Count());
Assert.AreNotEqual(clonedContentType.ParentId, originalContentType.ParentId);
Assert.AreEqual(clonedContentType.ParentId, parentContentType2.Id);
Assert.AreNotEqual(clonedContentType.Id, originalContentType.Id);
Assert.AreNotEqual(clonedContentType.Key, originalContentType.Key);
Assert.AreNotEqual(clonedContentType.Path, originalContentType.Path);
Assert.AreNotEqual(clonedContentType.PropertyTypes.First(x => x.Alias.StartsWith("title")).Id, originalContentType.PropertyTypes.First(x => x.Alias.StartsWith("title")).Id);
Assert.AreNotEqual(clonedContentType.PropertyGroups.First(x => x.Name.StartsWith("Content")).Id, originalContentType.PropertyGroups.First(x => x.Name.StartsWith("Content")).Id);
}
[Test]
public void Cannot_Add_Duplicate_PropertyType_Alias_To_Referenced_Composition()
{
    //Related the second issue in screencast from this post http://issues.umbraco.org/issue/U4-5986
    // Arrange: parent -> child, where the child also references the "Meta" composition.
    var contentTypeService = ServiceContext.ContentTypeService;

    var parentType = MockedContentTypes.CreateSimpleContentType();
    contentTypeService.Save(parentType);

    var childType = MockedContentTypes.CreateSimpleContentType("simpleChildPage", "Simple Child Page", parentType, true);
    contentTypeService.Save(childType);

    var metaComposition = MockedContentTypes.CreateMetaContentType();
    contentTypeService.Save(metaComposition);

    //Adding Meta-composition to child doc type
    childType.AddContentType(metaComposition);
    contentTypeService.Save(childType);

    // Act: add a "title" property to the composition; the alias already exists on the child.
    var conflictingTitle = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
    {
        Name = "Title",
        Description = "",
        Mandatory = false,
        SortOrder = 1,
        DataTypeDefinitionId = -88
    };
    var wasAdded = metaComposition.AddPropertyType(conflictingTitle, "Meta");

    // Assert: the in-memory add succeeds, but persisting the now-invalid composition throws,
    // and the referencing child type remains readable.
    Assert.That(wasAdded, Is.True);
    Assert.Throws<InvalidCompositionException>(() => contentTypeService.Save(metaComposition));
    Assert.DoesNotThrow(() => contentTypeService.GetContentType("simpleChildPage"));
}
// Adding the same property alias at multiple points of a composition graph: the
// in-memory adds may succeed (except where the direct ancestry already blocks it),
// but saving any type whose graph now contains a duplicate alias must throw.
[Test]
public void Cannot_Add_Duplicate_PropertyType_Alias_In_Composition_Graph()
{
// Arrange
var service = ServiceContext.ContentTypeService;
var basePage = MockedContentTypes.CreateSimpleContentType("basePage", "Base Page", null, true);
service.Save(basePage);
var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", basePage);
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateSimpleContentType("advancedPage", "Advanced Page", contentPage, true);
service.Save(advancedPage);
var metaComposition = MockedContentTypes.CreateMetaContentType();
service.Save(metaComposition);
var seoComposition = MockedContentTypes.CreateSeoContentType();
service.Save(seoComposition);
var metaAdded = contentPage.AddContentType(metaComposition);
service.Save(contentPage);
var seoAdded = advancedPage.AddContentType(seoComposition);
service.Save(advancedPage);
// Act
var duplicatePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
{
Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var addedToBasePage = basePage.AddPropertyType(duplicatePropertyType, "Content");
// advancedPage inherits "title" through its ancestry, so this add is rejected in memory.
var addedToAdvancedPage = advancedPage.AddPropertyType(duplicatePropertyType, "Content");
var addedToMeta = metaComposition.AddPropertyType(duplicatePropertyType, "Meta");
var addedToSeo = seoComposition.AddPropertyType(duplicatePropertyType, "Seo");
// Assert
Assert.That(metaAdded, Is.True);
Assert.That(seoAdded, Is.True);
Assert.That(addedToBasePage, Is.True);
Assert.That(addedToAdvancedPage, Is.False);
Assert.That(addedToMeta, Is.True);
Assert.That(addedToSeo, Is.True);
Assert.Throws<InvalidCompositionException>(() => service.Save(basePage));
Assert.Throws<InvalidCompositionException>(() => service.Save(metaComposition));
Assert.Throws<InvalidCompositionException>(() => service.Save(seoComposition));
// The previously saved types must still be readable after the failed saves.
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
Assert.DoesNotThrow(() => service.GetContentType("meta"));
Assert.DoesNotThrow(() => service.GetContentType("seo"));
}
// A duplicate alias introduced at the ROOT must be rejected on save when the conflict
// only appears three levels down, via a descendant's composition.
[Test]
public void Cannot_Add_Duplicate_PropertyType_Alias_At_Root_Which_Conflicts_With_Third_Levels_Composition()
{
/*
 * BasePage, gets 'Title' added but should not be allowed
 * -- Content Page
 * ---- Advanced Page -> Content Meta
 * Content Meta :: Composition, has 'Title'
 *
 * Content Meta has 'Title' PropertyType
 * Adding 'Title' to BasePage should fail
 */
// Arrange
var service = ServiceContext.ContentTypeService;
var basePage = MockedContentTypes.CreateBasicContentType();
service.Save(basePage);
var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage);
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage);
service.Save(advancedPage);
var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType();
service.Save(contentMetaComposition);
// Act
var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
{
Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content");
service.Save(basePage);
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content");
service.Save(contentPage);
var compositionAdded = advancedPage.AddContentType(contentMetaComposition);
service.Save(advancedPage);
//NOTE: It should not be possible to Save 'BasePage' with the Title PropertyType added
var titlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
{
Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var titleAdded = basePage.AddPropertyType(titlePropertyType, "Content");
// Assert
Assert.That(bodyTextAdded, Is.True);
Assert.That(authorAdded, Is.True);
// The in-memory add succeeds — the conflict is only detected on Save.
Assert.That(titleAdded, Is.True);
Assert.That(compositionAdded, Is.True);
Assert.Throws<InvalidCompositionException>(() => service.Save(basePage));
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
}
[Test]
public void Cannot_Save_ContentType_With_Empty_Name()
{
    // A content type whose Name is empty must be rejected on save.
    var invalidContentType = MockedContentTypes.CreateSimpleContentType("contentType", string.Empty);

    Assert.Throws<ArgumentException>(
        () => ServiceContext.ContentTypeService.Save(invalidContentType));
}
// Renaming a property alias on one composition so that it collides with an alias
// provided by another composition used in the same graph must fail on save.
[Test]
public void Cannot_Rename_PropertyType_Alias_On_Composition_Which_Would_Cause_Conflict_In_Other_Composition()
{
/*
 * Meta renames alias to 'title'
 * Seo has 'Title'
 * BasePage
 * -- ContentPage
 * ---- AdvancedPage -> Seo
 * ------ MoreAdvanedPage -> Meta
 */
// Arrange
var service = ServiceContext.ContentTypeService;
var basePage = MockedContentTypes.CreateBasicContentType();
service.Save(basePage);
var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage);
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage);
service.Save(advancedPage);
var moreAdvancedPage = MockedContentTypes.CreateBasicContentType("moreAdvancedPage", "More Advanced Page", advancedPage);
service.Save(moreAdvancedPage);
var seoComposition = MockedContentTypes.CreateSeoContentType();
service.Save(seoComposition);
var metaComposition = MockedContentTypes.CreateMetaContentType();
service.Save(metaComposition);
// Act
var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
{
Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content");
service.Save(basePage);
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content");
service.Save(contentPage);
var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle")
{
Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var subtitleAdded = advancedPage.AddPropertyType(subtitlePropertyType, "Content");
service.Save(advancedPage);
var titlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
{
Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var titleAdded = seoComposition.AddPropertyType(titlePropertyType, "Content");
service.Save(seoComposition);
var seoCompositionAdded = advancedPage.AddContentType(seoComposition);
var metaCompositionAdded = moreAdvancedPage.AddContentType(metaComposition);
service.Save(advancedPage);
service.Save(moreAdvancedPage);
// Rename Meta's "metakeywords" to "title" — this collides with Seo's "title" via
// MoreAdvancedPage's graph, so saving the Meta composition must throw.
var keywordsPropertyType = metaComposition.PropertyTypes.First(x => x.Alias.Equals("metakeywords"));
keywordsPropertyType.Alias = "title";
// Assert
Assert.That(bodyTextAdded, Is.True);
Assert.That(subtitleAdded, Is.True);
Assert.That(authorAdded, Is.True);
Assert.That(titleAdded, Is.True);
Assert.That(seoCompositionAdded, Is.True);
Assert.That(metaCompositionAdded, Is.True);
Assert.Throws<InvalidCompositionException>(() => service.Save(metaComposition));
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
Assert.DoesNotThrow(() => service.GetContentType("moreAdvancedPage"));
}
// After a composition is saved and referenced, it must still be possible to add
// further (non-conflicting) properties to it and save again without error.
[Test]
public void Can_Add_Additional_Properties_On_Composition_Once_Composition_Has_Been_Saved()
{
/*
 * Meta renames alias to 'title'
 * Seo has 'Title'
 * BasePage
 * -- ContentPage
 * ---- AdvancedPage -> Seo
 * ------ MoreAdvancedPage -> Meta
 */
// NOTE(review): the block comment above appears copy-pasted from the previous test —
// no alias rename happens here; confirm and update the description.
// Arrange
var service = ServiceContext.ContentTypeService;
var basePage = MockedContentTypes.CreateBasicContentType();
service.Save(basePage);
var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage);
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage);
service.Save(advancedPage);
var moreAdvancedPage = MockedContentTypes.CreateBasicContentType("moreAdvancedPage", "More Advanced Page", advancedPage);
service.Save(moreAdvancedPage);
var seoComposition = MockedContentTypes.CreateSeoContentType();
service.Save(seoComposition);
var metaComposition = MockedContentTypes.CreateMetaContentType();
service.Save(metaComposition);
// Act
var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
{
Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content");
service.Save(basePage);
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content");
service.Save(contentPage);
var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle")
{
Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var subtitleAdded = advancedPage.AddPropertyType(subtitlePropertyType, "Content");
service.Save(advancedPage);
var titlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
{
Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var titleAdded = seoComposition.AddPropertyType(titlePropertyType, "Content");
service.Save(seoComposition);
var seoCompositionAdded = advancedPage.AddContentType(seoComposition);
var metaCompositionAdded = moreAdvancedPage.AddContentType(metaComposition);
service.Save(advancedPage);
service.Save(moreAdvancedPage);
// Assert
Assert.That(bodyTextAdded, Is.True);
Assert.That(subtitleAdded, Is.True);
Assert.That(authorAdded, Is.True);
Assert.That(titleAdded, Is.True);
Assert.That(seoCompositionAdded, Is.True);
Assert.That(metaCompositionAdded, Is.True);
// Adding a new, unique property to the already-referenced Seo composition must succeed.
var testPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "test")
{
Name = "Test", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var testAdded = seoComposition.AddPropertyType(testPropertyType, "Content");
service.Save(seoComposition);
Assert.That(testAdded, Is.True);
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
Assert.DoesNotThrow(() => service.GetContentType("moreAdvancedPage"));
}
// Renaming a child's property group to a name already used by the parent's group
// must be rejected (the PropertyGroups collection throws on the duplicate add).
[Test]
public void Cannot_Rename_PropertyGroup_On_Child_Avoiding_Conflict_With_Parent_PropertyGroup()
{
// Arrange
var service = ServiceContext.ContentTypeService;
var page = MockedContentTypes.CreateSimpleContentType("page", "Page", null, true, "Content");
service.Save(page);
var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", page, true, "Content_");
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateSimpleContentType("advancedPage", "Advanced Page", contentPage, true, "Details");
service.Save(advancedPage);
var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType();
service.Save(contentMetaComposition);
// Act
var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle")
{
Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var subtitleAdded = contentPage.AddPropertyType(subtitlePropertyType, "Content");
var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content");
service.Save(contentPage);
var compositionAdded = contentPage.AddContentType(contentMetaComposition);
service.Save(contentPage);
//Change the name of the tab on the "root" content type 'page'.
var propertyGroup = contentPage.PropertyGroups["Content_"];
// Re-adding the group under the parent's "Content" name must throw.
Assert.Throws<Exception>(() => contentPage.PropertyGroups.Add(new PropertyGroup
{
Id = propertyGroup.Id,
Name = "Content",
SortOrder = 0
}));
// Assert
Assert.That(compositionAdded, Is.True);
Assert.That(subtitleAdded, Is.True);
Assert.That(authorAdded, Is.True);
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
}
// Renaming a property alias on a child so that it duplicates an alias inherited from
// an ancestor must be rejected when the child is saved.
[Test]
public void Cannot_Rename_PropertyType_Alias_Causing_Conflicts_With_Parents()
{
// Arrange
var service = ServiceContext.ContentTypeService;
var basePage = MockedContentTypes.CreateBasicContentType();
service.Save(basePage);
var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage);
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage);
service.Save(advancedPage);
// Act
var titlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
{
Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var titleAdded = basePage.AddPropertyType(titlePropertyType, "Content");
var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
{
Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var bodyTextAdded = contentPage.AddPropertyType(bodyTextPropertyType, "Content");
var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle")
{
Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var subtitleAdded = contentPage.AddPropertyType(subtitlePropertyType, "Content");
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var authorAdded = advancedPage.AddPropertyType(authorPropertyType, "Content");
service.Save(basePage);
service.Save(contentPage);
service.Save(advancedPage);
//Rename the PropertyType to something that already exists in the Composition - NOTE this should not be allowed and Saving should throw an exception
var authorPropertyTypeToRename = advancedPage.PropertyTypes.First(x => x.Alias.Equals("author"));
authorPropertyTypeToRename.Alias = "title";
// Assert
Assert.That(bodyTextAdded, Is.True);
Assert.That(authorAdded, Is.True);
Assert.That(titleAdded, Is.True);
Assert.That(subtitleAdded, Is.True);
Assert.Throws<InvalidCompositionException>(() => service.Save(advancedPage));
// Existing saved types remain readable after the failed save.
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
}
// An alias is only a conflict within a type's own composition graph: "title" lives in
// ContentMeta, which is NOT part of contentPage's graph, so the add and save succeed.
[Test]
public void Can_Add_PropertyType_Alias_Which_Exists_In_Composition_Outside_Graph()
{
/*
 * Meta (Composition)
 * Content Meta (Composition) has 'Title' -> Meta
 * BasePage
 * -- ContentPage gets 'Title' added -> Meta
 * ---- Advanced Page
 */
// Arrange
var service = ServiceContext.ContentTypeService;
var basePage = MockedContentTypes.CreateSimpleContentType("basePage", "Base Page", null, true);
service.Save(basePage);
var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", basePage, true);
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateSimpleContentType("advancedPage", "Advanced Page", contentPage, true);
service.Save(advancedPage);
var metaComposition = MockedContentTypes.CreateMetaContentType();
service.Save(metaComposition);
var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType();
service.Save(contentMetaComposition);
var metaAdded = contentPage.AddContentType(metaComposition);
service.Save(contentPage);
var metaAddedToComposition = contentMetaComposition.AddContentType(metaComposition);
service.Save(contentMetaComposition);
// Act
var propertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
{
Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var addedToContentPage = contentPage.AddPropertyType(propertyType, "Content");
// Assert
Assert.That(metaAdded, Is.True);
Assert.That(metaAddedToComposition, Is.True);
Assert.That(addedToContentPage, Is.True);
Assert.DoesNotThrow(() => service.Save(contentPage));
}
// Renaming a parent's property group must not propagate to inherited/composed groups:
// the composed group keeps its name, and the composition still counts correctly.
[Test]
public void Can_Rename_PropertyGroup_With_Inherited_PropertyGroups()
{
//Related the first issue in screencast from this post http://issues.umbraco.org/issue/U4-5986
// Arrange
var service = ServiceContext.ContentTypeService;
var page = MockedContentTypes.CreateSimpleContentType("page", "Page", null, false, "Content_");
service.Save(page);
var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", page, true);
service.Save(contentPage);
var composition = MockedContentTypes.CreateMetaContentType();
composition.AddPropertyGroup("Content");
service.Save(composition);
//Adding Meta-composition to child doc type
contentPage.AddContentType(composition);
service.Save(contentPage);
// Act
var propertyTypeOne = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "testTextbox")
{
Name = "Test Textbox", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var firstOneAdded = contentPage.AddPropertyType(propertyTypeOne, "Content_");
var propertyTypeTwo = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "anotherTextbox")
{
Name = "Another Test Textbox", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var secondOneAdded = contentPage.AddPropertyType(propertyTypeTwo, "Content");
service.Save(contentPage);
Assert.That(page.PropertyGroups.Contains("Content_"), Is.True);
// Rename the parent's "Content_" group to "ContentTab" (same Id, new Name).
var propertyGroup = page.PropertyGroups["Content_"];
page.PropertyGroups.Add(new PropertyGroup{ Id = propertyGroup.Id, Name = "ContentTab", SortOrder = 0});
service.Save(page);
// Assert
Assert.That(firstOneAdded, Is.True);
Assert.That(secondOneAdded, Is.True);
var contentType = service.GetContentType("contentPage");
Assert.That(contentType, Is.Not.Null);
var compositionPropertyGroups = contentType.CompositionPropertyGroups;
// now it is still 1, because we don't propagate renames anymore
Assert.That(compositionPropertyGroups.Count(x => x.Name.Equals("Content_")), Is.EqualTo(1));
var propertyTypeCount = contentType.PropertyTypes.Count();
var compPropertyTypeCount = contentType.CompositionPropertyTypes.Count();
Assert.That(propertyTypeCount, Is.EqualTo(5));
Assert.That(compPropertyTypeCount, Is.EqualTo(10));
}
        [Test]
        public void Can_Rename_PropertyGroup_On_Parent_Without_Causing_Duplicate_PropertyGroups()
        {
            // Arrange: three-level hierarchy (page -> contentPage -> advancedPage),
            // each level with its own tab ("Content_", "Contentx", "Contenty"),
            // plus a "Content Meta" composition on contentPage.
            var service = ServiceContext.ContentTypeService;
            var page = MockedContentTypes.CreateSimpleContentType("page", "Page", null, true, "Content_");
            service.Save(page);
            var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", page, true, "Contentx");
            service.Save(contentPage);
            var advancedPage = MockedContentTypes.CreateSimpleContentType("advancedPage", "Advanced Page", contentPage, true, "Contenty");
            service.Save(advancedPage);
            var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType();
            service.Save(contentMetaComposition);
            var compositionAdded = contentPage.AddContentType(contentMetaComposition);
            service.Save(contentPage);
            // Act: spread property types across ancestor tabs and the composition.
            var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
            {
                Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle")
            {
                Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            var bodyTextAdded = contentPage.AddPropertyType(bodyTextPropertyType, "Content_");//Will be added to the parent tab
            var subtitleAdded = contentPage.AddPropertyType(subtitlePropertyType, "Content");//Will be added to the "Content Meta" composition
            service.Save(contentPage);
            var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
            {
                Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            var descriptionPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "description")
            {
                Name = "Description", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            var keywordsPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "keywords")
            {
                Name = "Keywords", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            var authorAdded = advancedPage.AddPropertyType(authorPropertyType, "Content_");//Will be added to an ancestor tab
            var descriptionAdded = advancedPage.AddPropertyType(descriptionPropertyType, "Contentx");//Will be added to a parent tab
            var keywordsAdded = advancedPage.AddPropertyType(keywordsPropertyType, "Content");//Will be added to the "Content Meta" composition
            service.Save(advancedPage);
            //Change the name of the tab on the "root" content type 'page'.
            // Rename is performed by re-adding a group with the same Id under the
            // new name "Content".
            var propertyGroup = page.PropertyGroups["Content_"];
            page.PropertyGroups.Add(new PropertyGroup { Id = propertyGroup.Id, Name = "Content", SortOrder = 0 });
            service.Save(page);
            // Assert: the rename on the parent must not be propagated to the
            // descendants, so the old "Content_" tab still shows up in their
            // composition groups.
            Assert.That(compositionAdded, Is.True);
            Assert.That(bodyTextAdded, Is.True);
            Assert.That(subtitleAdded, Is.True);
            Assert.That(authorAdded, Is.True);
            Assert.That(descriptionAdded, Is.True);
            Assert.That(keywordsAdded, Is.True);
            Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
            Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
            var advancedPageReloaded = service.GetContentType("advancedPage");
            var contentUnderscoreTabExists = advancedPageReloaded.CompositionPropertyGroups.Any(x => x.Name.Equals("Content_"));
            // now is true, because we don't propagate renames anymore
            Assert.That(contentUnderscoreTabExists, Is.True);
            var numberOfContentTabs = advancedPageReloaded.CompositionPropertyGroups.Count(x => x.Name.Equals("Content"));
            Assert.That(numberOfContentTabs, Is.EqualTo(4));
        }
        [Test]
        public void Can_Rename_PropertyGroup_On_Parent_Without_Causing_Duplicate_PropertyGroups_v2()
        {
            // Arrange: parent "page" owns tab "Content_", child "contentPage"
            // owns tab "Content"; a "Content Meta" composition is added to the
            // child after its own properties.
            var service = ServiceContext.ContentTypeService;
            var page = MockedContentTypes.CreateSimpleContentType("page", "Page", null, true, "Content_");
            service.Save(page);
            var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", page, true, "Content");
            service.Save(contentPage);
            var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType();
            service.Save(contentMetaComposition);
            // Act
            var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
            {
                Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle")
            {
                Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
            {
                Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            var bodyTextAdded = page.AddPropertyType(bodyTextPropertyType, "Content_");
            var subtitleAdded = contentPage.AddPropertyType(subtitlePropertyType, "Content");
            var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content_");
            service.Save(page);
            service.Save(contentPage);
            var compositionAdded = contentPage.AddContentType(contentMetaComposition);
            service.Save(contentPage);
            //Change the name of the tab on the "root" content type 'page'.
            // Renaming the parent's "Content_" tab to "Content" would collide
            // with the child's own "Content" tab if renames were propagated;
            // loading the child must still succeed afterwards.
            var propertyGroup = page.PropertyGroups["Content_"];
            page.PropertyGroups.Add(new PropertyGroup { Id = propertyGroup.Id, Name = "Content", SortOrder = 0 });
            service.Save(page);
            // Assert
            Assert.That(compositionAdded, Is.True);
            Assert.That(bodyTextAdded, Is.True);
            Assert.That(subtitleAdded, Is.True);
            Assert.That(authorAdded, Is.True);
            Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
        }
[Test]
public void Can_Remove_PropertyGroup_On_Parent_Without_Causing_Duplicate_PropertyGroups()
{
// Arrange
var service = ServiceContext.ContentTypeService;
var basePage = MockedContentTypes.CreateBasicContentType();
service.Save(basePage);
var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage);
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage);
service.Save(advancedPage);
var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType();
service.Save(contentMetaComposition);
// Act
var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
{
Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content");
service.Save(basePage);
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content");
service.Save(contentPage);
var compositionAdded = contentPage.AddContentType(contentMetaComposition);
service.Save(contentPage);
basePage.RemovePropertyGroup("Content");
service.Save(basePage);
// Assert
Assert.That(bodyTextAdded, Is.True);
Assert.That(authorAdded, Is.True);
Assert.That(compositionAdded, Is.True);
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
var contentType = service.GetContentType("contentPage");
var propertyGroup = contentType.PropertyGroups["Content"];
}
[Test]
public void Can_Remove_PropertyGroup_Without_Removing_Property_Types()
{
var service = ServiceContext.ContentTypeService;
var basePage = (IContentType)MockedContentTypes.CreateBasicContentType();
basePage.AddPropertyGroup("Content");
basePage.AddPropertyGroup("Meta");
service.Save(basePage);
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author",
Description = "",
Mandatory = false,
SortOrder = 1,
DataTypeDefinitionId = -88
};
var authorAdded = basePage.AddPropertyType(authorPropertyType, "Content");
var titlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
{
Name = "Title",
Description = "",
Mandatory = false,
SortOrder = 1,
DataTypeDefinitionId = -88
};
var titleAdded = basePage.AddPropertyType(authorPropertyType, "Meta");
service.Save(basePage);
basePage = service.GetContentType(basePage.Id);
var totalPt = basePage.PropertyTypes.Count();
basePage.RemovePropertyGroup("Content");
service.Save(basePage);
basePage = service.GetContentType(basePage.Id);
Assert.AreEqual(totalPt, basePage.PropertyTypes.Count());
}
        [Test]
        public void Can_Add_PropertyGroup_With_Same_Name_On_Parent_and_Child()
        {
            /*
             * Hierarchy under test:
             * BasePage
             * - Content Page
             * -- Advanced Page
             * Content Meta :: Composition
             */
            // Arrange
            var service = ServiceContext.ContentTypeService;
            var basePage = MockedContentTypes.CreateBasicContentType();
            service.Save(basePage);
            var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage);
            service.Save(contentPage);
            var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage);
            service.Save(advancedPage);
            var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType();
            service.Save(contentMetaComposition);
            // Act: parent, child, and composition each end up with their own
            // "Content" group.
            var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
            {
                Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content");
            service.Save(contentPage);
            var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
            {
                Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            var bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content");
            service.Save(basePage);
            var compositionAdded = contentPage.AddContentType(contentMetaComposition);
            service.Save(contentPage);
            // Assert
            Assert.That(bodyTextAdded, Is.True);
            Assert.That(authorAdded, Is.True);
            Assert.That(compositionAdded, Is.True);
            Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
            Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
            var contentType = service.GetContentType("contentPage");
            // The indexer throws if the group is missing, so this doubles as an
            // implicit assertion that the child's own "Content" group exists.
            var propertyGroup = contentType.PropertyGroups["Content"];
            var numberOfContentTabs = contentType.CompositionPropertyGroups.Count(x => x.Name.Equals("Content"));
            Assert.That(numberOfContentTabs, Is.EqualTo(3));
            //Ensure that adding a new PropertyType to the "Content"-tab also adds it to the right group
            var descriptionPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext)
            {
                Alias = "description", Name = "Description", Description = "", Mandatory = false, SortOrder = 1,DataTypeDefinitionId = -88
            };
            var descriptionAdded = contentType.AddPropertyType(descriptionPropertyType, "Content");
            service.Save(contentType);
            Assert.That(descriptionAdded, Is.True);
            var contentPageReloaded = service.GetContentType("contentPage");
            var propertyGroupReloaded = contentPageReloaded.PropertyGroups["Content"];
            var hasDescriptionPropertyType = propertyGroupReloaded.PropertyTypes.Contains("description");
            Assert.That(hasDescriptionPropertyType, Is.True);
            var descriptionPropertyTypeReloaded = propertyGroupReloaded.PropertyTypes["description"];
            // The lazily-resolved PropertyGroupId must not have been evaluated
            // merely by loading the type.
            Assert.That(descriptionPropertyTypeReloaded.PropertyGroupId.IsValueCreated, Is.False);
        }
[Test]
public void Empty_Description_Is_Always_Null_After_Saving_Content_Type()
{
var service = ServiceContext.ContentTypeService;
var contentType = MockedContentTypes.CreateBasicContentType();
contentType.Description = null;
service.Save(contentType);
var contentType2 = MockedContentTypes.CreateBasicContentType("basePage2", "Base Page 2");
contentType2.Description = string.Empty;
service.Save(contentType2);
Assert.IsNull(contentType.Description);
Assert.IsNull(contentType2.Description);
}
[Test]
public void Empty_Description_Is_Always_Null_After_Saving_Media_Type()
{
var service = ServiceContext.ContentTypeService;
var mediaType = MockedContentTypes.CreateSimpleMediaType("mediaType", "Media Type");
mediaType.Description = null;
service.Save(mediaType);
var mediaType2 = MockedContentTypes.CreateSimpleMediaType("mediaType2", "Media Type 2");
mediaType2.Description = string.Empty;
service.Save(mediaType2);
Assert.IsNull(mediaType.Description);
Assert.IsNull(mediaType2.Description);
}
private ContentType CreateComponent()
{
var component = new ContentType(-1)
{
Alias = "component",
Name = "Component",
Description = "ContentType used for Component grouping",
Icon = ".sprTreeDoc3",
Thumbnail = "doc.png",
SortOrder = 1,
CreatorId = 0,
Trashed = false
};
var contentCollection = new PropertyTypeCollection();
contentCollection.Add(new PropertyType("test", DataTypeDatabaseType.Ntext, "componentGroup") { Name = "Component Group", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 });
component.PropertyGroups.Add(new PropertyGroup(contentCollection) { Name = "Component", SortOrder = 1 });
return component;
}
private ContentType CreateBannerComponent(ContentType parent)
{
const string contentTypeAlias = "banner";
var banner = new ContentType(parent, contentTypeAlias)
{
Alias = contentTypeAlias,
Name = "Banner Component",
Description = "ContentType used for Banner Component",
Icon = ".sprTreeDoc3",
Thumbnail = "doc.png",
SortOrder = 1,
CreatorId = 0,
Trashed = false
};
var propertyType = new PropertyType("test", DataTypeDatabaseType.Ntext, "bannerName")
{
Name = "Banner Name",
Description = "",
Mandatory = false,
SortOrder = 2,
DataTypeDefinitionId = -88
};
banner.AddPropertyType(propertyType, "Component");
return banner;
}
private ContentType CreateSite()
{
var site = new ContentType(-1)
{
Alias = "site",
Name = "Site",
Description = "ContentType used for Site inheritence",
Icon = ".sprTreeDoc3",
Thumbnail = "doc.png",
SortOrder = 2,
CreatorId = 0,
Trashed = false
};
var contentCollection = new PropertyTypeCollection();
contentCollection.Add(new PropertyType("test", DataTypeDatabaseType.Ntext, "hostname") { Name = "Hostname", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 });
site.PropertyGroups.Add(new PropertyGroup(contentCollection) { Name = "Site Settings", SortOrder = 1 });
return site;
}
private ContentType CreateHomepage(ContentType parent)
{
const string contentTypeAlias = "homepage";
var contentType = new ContentType(parent, contentTypeAlias)
{
Alias = contentTypeAlias,
Name = "Homepage",
Description = "ContentType used for the Homepage",
Icon = ".sprTreeDoc3",
Thumbnail = "doc.png",
SortOrder = 1,
CreatorId = 0,
Trashed = false
};
var contentCollection = new PropertyTypeCollection();
contentCollection.Add(new PropertyType("test", DataTypeDatabaseType.Ntext, "title") { Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 });
contentCollection.Add(new PropertyType("test", DataTypeDatabaseType.Ntext, "bodyText") { Name = "Body Text", Description = "", Mandatory = false, SortOrder = 2, DataTypeDefinitionId = -87 });
contentCollection.Add(new PropertyType("test", DataTypeDatabaseType.Ntext, "author") { Name = "Author", Description = "Name of the author", Mandatory = false, SortOrder = 3, DataTypeDefinitionId = -88 });
contentType.PropertyGroups.Add(new PropertyGroup(contentCollection) { Name = "Content", SortOrder = 1 });
return contentType;
}
private IContentType[] CreateContentTypeHierarchy()
{
//create the master type
var masterContentType = MockedContentTypes.CreateSimpleContentType("masterContentType", "MasterContentType");
masterContentType.Key = new Guid("C00CA18E-5A9D-483B-A371-EECE0D89B4AE");
ServiceContext.ContentTypeService.Save(masterContentType);
//add the one we just created
var list = new List<IContentType> { masterContentType };
for (var i = 0; i < 10; i++)
{
var contentType = MockedContentTypes.CreateSimpleContentType("childType" + i, "ChildType" + i,
//make the last entry in the list, this one's parent
list.Last(), true);
list.Add(contentType);
}
return list.ToArray();
}
}
}
| |
using System;
#nullable enable
// Parser/compiler stress fixture: Ors() consists of one enormous boolean
// expression made of hundreds of chained '||' clauses. The ranges resemble
// Unicode identifier-character tables, but the point of this type is the
// sheer size of the expression, not the classification result — do not
// "simplify" it into a table-driven lookup.
public class StressTest
{
    // Code point under test; compared against char literals and ints below.
    int ch;

    // Returns ch unchanged when ch falls inside one of the ranges below,
    // otherwise ch + 1.
    int Ors()
    {
        if (ch >= '0' && ch <= '9'
            || ch >= 'A' && ch <= 'Z'
            || ch == '_'
            || ch >= 'a' && ch <= 'z'
            || ch == 170
            || ch == 181
            || ch == 186
            || ch >= 192 && ch <= 214
            || ch >= 216 && ch <= 246
            || ch >= 248 && ch <= 705
            || ch >= 710 && ch <= 721
            || ch >= 736 && ch <= 740
            || ch == 748
            || ch == 750
            || ch >= 768 && ch <= 884
            || ch >= 886 && ch <= 887
            || ch >= 890 && ch <= 893
            || ch == 902
            || ch >= 904 && ch <= 906
            || ch == 908
            || ch >= 910 && ch <= 929
            || ch >= 931 && ch <= 1013
            || ch >= 1015 && ch <= 1153
            || ch >= 1155 && ch <= 1159
            || ch >= 1162 && ch <= 1319
            || ch >= 1329 && ch <= 1366
            || ch == 1369
            || ch >= 1377 && ch <= 1415
            || ch >= 1425 && ch <= 1469
            || ch == 1471
            || ch >= 1473 && ch <= 1474
            || ch >= 1476 && ch <= 1477
            || ch == 1479
            || ch >= 1488 && ch <= 1514
            || ch >= 1520 && ch <= 1522
            || ch >= 1552 && ch <= 1562
            || ch >= 1568 && ch <= 1641
            || ch >= 1646 && ch <= 1747
            || ch >= 1749 && ch <= 1756
            || ch >= 1759 && ch <= 1768
            || ch >= 1770 && ch <= 1788
            || ch == 1791
            || ch >= 1808 && ch <= 1866
            || ch >= 1869 && ch <= 1969
            || ch >= 1984 && ch <= 2037
            || ch == 2042
            || ch >= 2048 && ch <= 2093
            || ch >= 2112 && ch <= 2139
            || ch == 2208
            || ch >= 2210 && ch <= 2220
            || ch >= 2276 && ch <= 2302
            || ch >= 2304 && ch <= 2403
            || ch >= 2406 && ch <= 2415
            || ch >= 2417 && ch <= 2423
            || ch >= 2425 && ch <= 2431
            || ch >= 2433 && ch <= 2435
            || ch >= 2437 && ch <= 2444
            || ch >= 2447 && ch <= 2448
            || ch >= 2451 && ch <= 2472
            || ch >= 2474 && ch <= 2480
            || ch == 2482
            || ch >= 2486 && ch <= 2489
            || ch >= 2492 && ch <= 2500
            || ch >= 2503 && ch <= 2504
            || ch >= 2507 && ch <= 2510
            || ch == 2519
            || ch >= 2524 && ch <= 2525
            || ch >= 2527 && ch <= 2531
            || ch >= 2534 && ch <= 2545
            || ch >= 2561 && ch <= 2563
            || ch >= 2565 && ch <= 2570
            || ch >= 2575 && ch <= 2576
            || ch >= 2579 && ch <= 2600
            || ch >= 2602 && ch <= 2608
            || ch >= 2610 && ch <= 2611
            || ch >= 2613 && ch <= 2614
            || ch >= 2616 && ch <= 2617
            || ch == 2620
            || ch >= 2622 && ch <= 2626
            || ch >= 2631 && ch <= 2632
            || ch >= 2635 && ch <= 2637
            || ch == 2641
            || ch >= 2649 && ch <= 2652
            || ch == 2654
            || ch >= 2662 && ch <= 2677
            || ch >= 2689 && ch <= 2691
            || ch >= 2693 && ch <= 2701
            || ch >= 2703 && ch <= 2705
            || ch >= 2707 && ch <= 2728
            || ch >= 2730 && ch <= 2736
            || ch >= 2738 && ch <= 2739
            || ch >= 2741 && ch <= 2745
            || ch >= 2748 && ch <= 2757
            || ch >= 2759 && ch <= 2761
            || ch >= 2763 && ch <= 2765
            || ch == 2768
            || ch >= 2784 && ch <= 2787
            || ch >= 2790 && ch <= 2799
            || ch >= 2817 && ch <= 2819
            || ch >= 2821 && ch <= 2828
            || ch >= 2831 && ch <= 2832
            || ch >= 2835 && ch <= 2856
            || ch >= 2858 && ch <= 2864
            || ch >= 2866 && ch <= 2867
            || ch >= 2869 && ch <= 2873
            || ch >= 2876 && ch <= 2884
            || ch >= 2887 && ch <= 2888
            || ch >= 2891 && ch <= 2893
            || ch >= 2902 && ch <= 2903
            || ch >= 2908 && ch <= 2909
            || ch >= 2911 && ch <= 2915
            || ch >= 2918 && ch <= 2927
            || ch == 2929
            || ch >= 2946 && ch <= 2947
            || ch >= 2949 && ch <= 2954
            || ch >= 2958 && ch <= 2960
            || ch >= 2962 && ch <= 2965
            || ch >= 2969 && ch <= 2970
            || ch == 2972
            || ch >= 2974 && ch <= 2975
            || ch >= 2979 && ch <= 2980
            || ch >= 2984 && ch <= 2986
            || ch >= 2990 && ch <= 3001
            || ch >= 3006 && ch <= 3010
            || ch >= 3014 && ch <= 3016
            || ch >= 3018 && ch <= 3021
            || ch == 3024
            || ch == 3031
            || ch >= 3046 && ch <= 3055
            || ch >= 3073 && ch <= 3075
            || ch >= 3077 && ch <= 3084
            || ch >= 3086 && ch <= 3088
            || ch >= 3090 && ch <= 3112
            || ch >= 3114 && ch <= 3123
            || ch >= 3125 && ch <= 3129
            || ch >= 3133 && ch <= 3140
            || ch >= 3142 && ch <= 3144
            || ch >= 3146 && ch <= 3149
            || ch >= 3157 && ch <= 3158
            || ch >= 3160 && ch <= 3161
            || ch >= 3168 && ch <= 3171
            || ch >= 3174 && ch <= 3183
            || ch >= 3202 && ch <= 3203
            || ch >= 3205 && ch <= 3212
            || ch >= 3214 && ch <= 3216
            || ch >= 3218 && ch <= 3240
            || ch >= 3242 && ch <= 3251
            || ch >= 3253 && ch <= 3257
            || ch >= 3260 && ch <= 3268
            || ch >= 3270 && ch <= 3272
            || ch >= 3274 && ch <= 3277
            || ch >= 3285 && ch <= 3286
            || ch == 3294
            || ch >= 3296 && ch <= 3299
            || ch >= 3302 && ch <= 3311
            || ch >= 3313 && ch <= 3314
            || ch >= 3330 && ch <= 3331
            || ch >= 3333 && ch <= 3340
            || ch >= 3342 && ch <= 3344
            || ch >= 3346 && ch <= 3386
            || ch >= 3389 && ch <= 3396
            || ch >= 3398 && ch <= 3400
            || ch >= 3402 && ch <= 3406
            || ch == 3415
            || ch >= 3424 && ch <= 3427
            || ch >= 3430 && ch <= 3439
            || ch >= 3450 && ch <= 3455
            || ch >= 3458 && ch <= 3459
            || ch >= 3461 && ch <= 3478
            || ch >= 3482 && ch <= 3505
            || ch >= 3507 && ch <= 3515
            || ch == 3517
            || ch >= 3520 && ch <= 3526
            || ch == 3530
            || ch >= 3535 && ch <= 3540
            || ch == 3542
            || ch >= 3544 && ch <= 3551
            || ch >= 3570 && ch <= 3571
            || ch >= 3585 && ch <= 3642
            || ch >= 3648 && ch <= 3662
            || ch >= 3664 && ch <= 3673
            || ch >= 3713 && ch <= 3714
            || ch == 3716
            || ch >= 3719 && ch <= 3720
            || ch == 3722
            || ch == 3725
            || ch >= 3732 && ch <= 3735
            || ch >= 3737 && ch <= 3743
            || ch >= 3745 && ch <= 3747
            || ch == 3749
            || ch == 3751
            || ch >= 3754 && ch <= 3755
            || ch >= 3757 && ch <= 3769
            || ch >= 3771 && ch <= 3773
            || ch >= 3776 && ch <= 3780
            || ch == 3782
            || ch >= 3784 && ch <= 3789
            || ch >= 3792 && ch <= 3801
            || ch >= 3804 && ch <= 3807
            || ch == 3840
            || ch >= 3864 && ch <= 3865
            || ch >= 3872 && ch <= 3881
            || ch == 3893
            || ch == 3895
            || ch == 3897
            || ch >= 3902 && ch <= 3911
            || ch >= 3913 && ch <= 3948
            || ch >= 3953 && ch <= 3972
            || ch >= 3974 && ch <= 3991
            || ch >= 3993 && ch <= 4028
            || ch == 4038
            || ch >= 4096 && ch <= 4169
            || ch >= 4176 && ch <= 4253
            || ch >= 4256 && ch <= 4293
            || ch == 4295
            || ch == 4301
            || ch >= 4304 && ch <= 4346
            || ch >= 4348 && ch <= 4680
            || ch >= 4682 && ch <= 4685
            || ch >= 4688 && ch <= 4694
            || ch == 4696
            || ch >= 4698 && ch <= 4701
            || ch >= 4704 && ch <= 4744
            || ch >= 4746 && ch <= 4749
            || ch >= 4752 && ch <= 4784
            || ch >= 4786 && ch <= 4789
            || ch >= 4792 && ch <= 4798
            || ch == 4800
            || ch >= 4802 && ch <= 4805
            || ch >= 4808 && ch <= 4822
            || ch >= 4824 && ch <= 4880
            || ch >= 4882 && ch <= 4885
            || ch >= 4888 && ch <= 4954
            || ch >= 4957 && ch <= 4959
            || ch >= 4992 && ch <= 5007
            || ch >= 5024 && ch <= 5108
            || ch >= 5121 && ch <= 5740
            || ch >= 5743 && ch <= 5759
            || ch >= 5761 && ch <= 5786
            || ch >= 5792 && ch <= 5866
            || ch >= 5870 && ch <= 5872
            || ch >= 5888 && ch <= 5900
            || ch >= 5902 && ch <= 5908
            || ch >= 5920 && ch <= 5940
            || ch >= 5952 && ch <= 5971
            || ch >= 5984 && ch <= 5996
            || ch >= 5998 && ch <= 6000
            || ch >= 6002 && ch <= 6003
            || ch >= 6016 && ch <= 6099
            || ch == 6103
            || ch >= 6108 && ch <= 6109
            || ch >= 6112 && ch <= 6121
            || ch >= 6155 && ch <= 6157
            || ch >= 6160 && ch <= 6169
            || ch >= 6176 && ch <= 6263
            || ch >= 6272 && ch <= 6314
            || ch >= 6320 && ch <= 6389
            || ch >= 6400 && ch <= 6428
            || ch >= 6432 && ch <= 6443
            || ch >= 6448 && ch <= 6459
            || ch >= 6470 && ch <= 6509
            || ch >= 6512 && ch <= 6516
            || ch >= 6528 && ch <= 6571
            || ch >= 6576 && ch <= 6601
            || ch >= 6608 && ch <= 6617
            || ch >= 6656 && ch <= 6683
            || ch >= 6688 && ch <= 6750
            || ch >= 6752 && ch <= 6780
            || ch >= 6783 && ch <= 6793
            || ch >= 6800 && ch <= 6809
            || ch == 6823
            || ch >= 6912 && ch <= 6987
            || ch >= 6992 && ch <= 7001
            || ch >= 7019 && ch <= 7027
            || ch >= 7040 && ch <= 7155
            || ch >= 7168 && ch <= 7223
            || ch >= 7232 && ch <= 7241
            || ch >= 7245 && ch <= 7293
            || ch >= 7376 && ch <= 7378
            || ch >= 7380 && ch <= 7414
            || ch >= 7424 && ch <= 7654
            || ch >= 7676 && ch <= 7957
            || ch >= 7960 && ch <= 7965
            || ch >= 7968 && ch <= 8005
            || ch >= 8008 && ch <= 8013
            || ch >= 8016 && ch <= 8023
            || ch == 8025
            || ch == 8027
            || ch == 8029
            || ch >= 8031 && ch <= 8061
            || ch >= 8064 && ch <= 8116
            || ch >= 8118 && ch <= 8124
            || ch == 8126
            || ch >= 8130 && ch <= 8132
            || ch >= 8134 && ch <= 8140
            || ch >= 8144 && ch <= 8147
            || ch >= 8150 && ch <= 8155
            || ch >= 8160 && ch <= 8172
            || ch >= 8178 && ch <= 8180
            || ch >= 8182 && ch <= 8188
            || ch >= 8204 && ch <= 8205
            || ch >= 8255 && ch <= 8256
            || ch == 8276
            || ch == 8305
            || ch == 8319
            || ch >= 8336 && ch <= 8348
            || ch >= 8400 && ch <= 8412
            || ch == 8417
            || ch >= 8421 && ch <= 8432
            || ch == 8450
            || ch == 8455
            || ch >= 8458 && ch <= 8467
            || ch == 8469
            || ch >= 8473 && ch <= 8477
            || ch == 8484
            || ch == 8486
            || ch == 8488
            || ch >= 8490 && ch <= 8493
            || ch >= 8495 && ch <= 8505
            || ch >= 8508 && ch <= 8511
            || ch >= 8517 && ch <= 8521
            || ch == 8526
            || ch >= 8544 && ch <= 8584
            || ch >= 11264 && ch <= 11310
            || ch >= 11312 && ch <= 11358
            || ch >= 11360 && ch <= 11492
            || ch >= 11499 && ch <= 11507
            || ch >= 11520 && ch <= 11557
            || ch == 11559
            || ch == 11565
            || ch >= 11568 && ch <= 11623
            || ch == 11631
            || ch >= 11647 && ch <= 11670
            || ch >= 11680 && ch <= 11686
            || ch >= 11688 && ch <= 11694
            || ch >= 11696 && ch <= 11702
            || ch >= 11704 && ch <= 11710
            || ch >= 11712 && ch <= 11718
            || ch >= 11720 && ch <= 11726
            || ch >= 11728 && ch <= 11734
            || ch >= 11736 && ch <= 11742
            || ch >= 11744 && ch <= 11775
            || ch == 11823
            || ch >= 12293 && ch <= 12295
            || ch >= 12321 && ch <= 12335
            || ch >= 12337 && ch <= 12341
            || ch >= 12344 && ch <= 12348
            || ch >= 12353 && ch <= 12438
            || ch >= 12441 && ch <= 12442
            || ch >= 12445 && ch <= 12447
            || ch >= 12449 && ch <= 12538
            || ch >= 12540 && ch <= 12543
            || ch >= 12549 && ch <= 12589
            || ch >= 12593 && ch <= 12686
            || ch >= 12704 && ch <= 12730
            || ch >= 12784 && ch <= 12799
            || ch >= 13312 && ch <= 19893
            || ch >= 19968 && ch <= 40908
            || ch >= 40960 && ch <= 42124
            || ch >= 42192 && ch <= 42237
            || ch >= 42240 && ch <= 42508
            || ch >= 42512 && ch <= 42539
            || ch >= 42560 && ch <= 42607
            || ch >= 42612 && ch <= 42621
            || ch >= 42623 && ch <= 42647
            || ch >= 42655 && ch <= 42737
            || ch >= 42775 && ch <= 42783
            || ch >= 42786 && ch <= 42888
            || ch >= 42891 && ch <= 42894
            || ch >= 42896 && ch <= 42899
            || ch >= 42912 && ch <= 42922
            || ch >= 43000 && ch <= 43047
            || ch >= 43072 && ch <= 43123
            || ch >= 43136 && ch <= 43204
            || ch >= 43216 && ch <= 43225
            || ch >= 43232 && ch <= 43255
            || ch == 43259
            || ch >= 43264 && ch <= 43309
            || ch >= 43312 && ch <= 43347
            || ch >= 43360 && ch <= 43388
            || ch >= 43392 && ch <= 43456
            || ch >= 43471 && ch <= 43481
            || ch >= 43520 && ch <= 43574
            || ch >= 43584 && ch <= 43597
            || ch >= 43600 && ch <= 43609
            || ch >= 43616 && ch <= 43638
            || ch >= 43642 && ch <= 43643
            || ch >= 43648 && ch <= 43714
            || ch >= 43739 && ch <= 43741
            || ch >= 43744 && ch <= 43759
            || ch >= 43762 && ch <= 43766
            || ch >= 43777 && ch <= 43782
            || ch >= 43785 && ch <= 43790
            || ch >= 43793 && ch <= 43798
            || ch >= 43808 && ch <= 43814
            || ch >= 43816 && ch <= 43822
            || ch >= 43968 && ch <= 44010
            || ch >= 44012 && ch <= 44013
            || ch >= 44016 && ch <= 44025
            || ch >= 44032 && ch <= 55203
            || ch >= 55216 && ch <= 55238
            || ch >= 55243 && ch <= 55291
            || ch >= 63744 && ch <= 64109
            || ch >= 64112 && ch <= 64217
            || ch >= 64256 && ch <= 64262
            || ch >= 64275 && ch <= 64279
            || ch >= 64285 && ch <= 64296
            || ch >= 64298 && ch <= 64310
            || ch >= 64312 && ch <= 64316
            || ch == 64318
            || ch >= 64320 && ch <= 64321
            || ch >= 64323 && ch <= 64324
            || ch >= 64326 && ch <= 64433
            || ch >= 64467 && ch <= 64829
            || ch >= 64848 && ch <= 64911
            || ch >= 64914 && ch <= 64967
            || ch >= 65008 && ch <= 65019
            || ch >= 65024 && ch <= 65039
            || ch >= 65056 && ch <= 65062
            || ch >= 65075 && ch <= 65076
            || ch >= 65101 && ch <= 65103
            || ch >= 65136 && ch <= 65140
            || ch >= 65142 && ch <= 65276
            || ch >= 65296 && ch <= 65305
            || ch >= 65313 && ch <= 65338
            || ch == 65343
            || ch >= 65345 && ch <= 65370
            || ch >= 65382 && ch <= 65470
            || ch >= 65474 && ch <= 65479
            || ch >= 65482 && ch <= 65487
            || ch >= 65490 && ch <= 65495
            || ch >= 65498 && ch <= 65500) { return ch; }
        else { return ch + 1; }
    }
}
| |
/*
* Copyright (c) Citrix Systems, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1) Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2) Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using CookComputing.XmlRpc;
namespace XenAPI
{
/// <summary>
/// Pool-wide updates to the host software
/// First published in .
/// </summary>
public partial class Pool_update : XenObject<Pool_update>
{
        /// <summary>
        /// Creates an empty Pool_update; fields are populated later, e.g. via
        /// UpdateFrom/UpdateFromProxy or the marshalling constructors.
        /// </summary>
        public Pool_update()
        {
        }
        /// <summary>
        /// Creates a Pool_update with every field supplied explicitly.
        /// </summary>
        public Pool_update(string uuid,
            string name_label,
            string name_description,
            long installation_size,
            string key,
            List<update_after_apply_guidance> after_apply_guidance,
            XenRef<VDI> vdi,
            List<XenRef<Host>> hosts)
        {
            this.uuid = uuid;
            this.name_label = name_label;
            this.name_description = name_description;
            this.installation_size = installation_size;
            this.key = key;
            this.after_apply_guidance = after_apply_guidance;
            this.vdi = vdi;
            this.hosts = hosts;
        }
        /// <summary>
        /// Creates a new Pool_update from a Proxy_Pool_update.
        /// </summary>
        /// <param name="proxy">The proxy (XML-RPC wire) object whose fields are copied into this instance.</param>
        public Pool_update(Proxy_Pool_update proxy)
        {
            this.UpdateFromProxy(proxy);
        }
        /// <summary>
        /// Copies every field from <paramref name="update"/> into this instance
        /// (shallow copy; list and ref fields are shared, not cloned).
        /// </summary>
        public override void UpdateFrom(Pool_update update)
        {
            uuid = update.uuid;
            name_label = update.name_label;
            name_description = update.name_description;
            installation_size = update.installation_size;
            key = update.key;
            after_apply_guidance = update.after_apply_guidance;
            vdi = update.vdi;
            hosts = update.hosts;
        }
        /// <summary>
        /// Populates this instance from a proxy object, converting wire-format
        /// values back to typed fields (string -> long, string[] -> enum list,
        /// strings -> XenRefs). Null proxy members map to null, except
        /// installation_size which defaults to 0.
        /// </summary>
        internal void UpdateFromProxy(Proxy_Pool_update proxy)
        {
            uuid = proxy.uuid == null ? null : (string)proxy.uuid;
            name_label = proxy.name_label == null ? null : (string)proxy.name_label;
            name_description = proxy.name_description == null ? null : (string)proxy.name_description;
            installation_size = proxy.installation_size == null ? 0 : long.Parse((string)proxy.installation_size);
            key = proxy.key == null ? null : (string)proxy.key;
            after_apply_guidance = proxy.after_apply_guidance == null ? null : Helper.StringArrayToEnumList<update_after_apply_guidance>(proxy.after_apply_guidance);
            vdi = proxy.vdi == null ? null : XenRef<VDI>.Create(proxy.vdi);
            hosts = proxy.hosts == null ? null : XenRef<Host>.Create(proxy.hosts);
        }
public Proxy_Pool_update ToProxy()
{
Proxy_Pool_update result_ = new Proxy_Pool_update();
result_.uuid = (uuid != null) ? uuid : "";
result_.name_label = (name_label != null) ? name_label : "";
result_.name_description = (name_description != null) ? name_description : "";
result_.installation_size = installation_size.ToString();
result_.key = (key != null) ? key : "";
result_.after_apply_guidance = (after_apply_guidance != null) ? Helper.ObjectListToStringArray(after_apply_guidance) : new string[] {};
result_.vdi = (vdi != null) ? vdi : "";
result_.hosts = (hosts != null) ? Helper.RefListToStringArray(hosts) : new string[] {};
return result_;
}
/// <summary>
/// Creates a new Pool_update from a Hashtable.
/// </summary>
/// <param name="table"></param>
public Pool_update(Hashtable table)
{
uuid = Marshalling.ParseString(table, "uuid");
name_label = Marshalling.ParseString(table, "name_label");
name_description = Marshalling.ParseString(table, "name_description");
installation_size = Marshalling.ParseLong(table, "installation_size");
key = Marshalling.ParseString(table, "key");
after_apply_guidance = Helper.StringArrayToEnumList<update_after_apply_guidance>(Marshalling.ParseStringArray(table, "after_apply_guidance"));
vdi = Marshalling.ParseRef<VDI>(table, "vdi");
hosts = Marshalling.ParseSetRef<Host>(table, "hosts");
}
public bool DeepEquals(Pool_update other)
{
if (ReferenceEquals(null, other))
return false;
if (ReferenceEquals(this, other))
return true;
return Helper.AreEqual2(this._uuid, other._uuid) &&
Helper.AreEqual2(this._name_label, other._name_label) &&
Helper.AreEqual2(this._name_description, other._name_description) &&
Helper.AreEqual2(this._installation_size, other._installation_size) &&
Helper.AreEqual2(this._key, other._key) &&
Helper.AreEqual2(this._after_apply_guidance, other._after_apply_guidance) &&
Helper.AreEqual2(this._vdi, other._vdi) &&
Helper.AreEqual2(this._hosts, other._hosts);
}
public override string SaveChanges(Session session, string opaqueRef, Pool_update server)
{
if (opaqueRef == null)
{
System.Diagnostics.Debug.Assert(false, "Cannot create instances of this type on the server");
return "";
}
else
{
throw new InvalidOperationException("This type has no read/write properties");
}
}
/// <summary>
/// Get a record containing the current state of the given pool_update.
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static Pool_update get_record(Session session, string _pool_update)
{
return new Pool_update((Proxy_Pool_update)session.proxy.pool_update_get_record(session.uuid, (_pool_update != null) ? _pool_update : "").parse());
}
/// <summary>
/// Get a reference to the pool_update instance with the specified UUID.
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_uuid">UUID of object to return</param>
public static XenRef<Pool_update> get_by_uuid(Session session, string _uuid)
{
return XenRef<Pool_update>.Create(session.proxy.pool_update_get_by_uuid(session.uuid, (_uuid != null) ? _uuid : "").parse());
}
/// <summary>
/// Get all the pool_update instances with the given label.
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_label">label of object to return</param>
public static List<XenRef<Pool_update>> get_by_name_label(Session session, string _label)
{
return XenRef<Pool_update>.Create(session.proxy.pool_update_get_by_name_label(session.uuid, (_label != null) ? _label : "").parse());
}
/// <summary>
/// Get the uuid field of the given pool_update.
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static string get_uuid(Session session, string _pool_update)
{
return (string)session.proxy.pool_update_get_uuid(session.uuid, (_pool_update != null) ? _pool_update : "").parse();
}
/// <summary>
/// Get the name/label field of the given pool_update.
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static string get_name_label(Session session, string _pool_update)
{
return (string)session.proxy.pool_update_get_name_label(session.uuid, (_pool_update != null) ? _pool_update : "").parse();
}
/// <summary>
/// Get the name/description field of the given pool_update.
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static string get_name_description(Session session, string _pool_update)
{
return (string)session.proxy.pool_update_get_name_description(session.uuid, (_pool_update != null) ? _pool_update : "").parse();
}
/// <summary>
/// Get the installation_size field of the given pool_update.
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static long get_installation_size(Session session, string _pool_update)
{
return long.Parse((string)session.proxy.pool_update_get_installation_size(session.uuid, (_pool_update != null) ? _pool_update : "").parse());
}
/// <summary>
/// Get the key field of the given pool_update.
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static string get_key(Session session, string _pool_update)
{
return (string)session.proxy.pool_update_get_key(session.uuid, (_pool_update != null) ? _pool_update : "").parse();
}
/// <summary>
/// Get the after_apply_guidance field of the given pool_update.
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static List<update_after_apply_guidance> get_after_apply_guidance(Session session, string _pool_update)
{
return Helper.StringArrayToEnumList<update_after_apply_guidance>(session.proxy.pool_update_get_after_apply_guidance(session.uuid, (_pool_update != null) ? _pool_update : "").parse());
}
/// <summary>
/// Get the vdi field of the given pool_update.
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static XenRef<VDI> get_vdi(Session session, string _pool_update)
{
return XenRef<VDI>.Create(session.proxy.pool_update_get_vdi(session.uuid, (_pool_update != null) ? _pool_update : "").parse());
}
/// <summary>
/// Get the hosts field of the given pool_update.
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static List<XenRef<Host>> get_hosts(Session session, string _pool_update)
{
return XenRef<Host>.Create(session.proxy.pool_update_get_hosts(session.uuid, (_pool_update != null) ? _pool_update : "").parse());
}
/// <summary>
/// Introduce update VDI
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vdi">The VDI which contains a software update.</param>
public static XenRef<Pool_update> introduce(Session session, string _vdi)
{
return XenRef<Pool_update>.Create(session.proxy.pool_update_introduce(session.uuid, (_vdi != null) ? _vdi : "").parse());
}
/// <summary>
/// Introduce update VDI
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vdi">The VDI which contains a software update.</param>
public static XenRef<Task> async_introduce(Session session, string _vdi)
{
return XenRef<Task>.Create(session.proxy.async_pool_update_introduce(session.uuid, (_vdi != null) ? _vdi : "").parse());
}
/// <summary>
/// Execute the precheck stage of the selected update on a host
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
/// <param name="_host">The host to run the prechecks on.</param>
public static livepatch_status precheck(Session session, string _pool_update, string _host)
{
return (livepatch_status)Helper.EnumParseDefault(typeof(livepatch_status), (string)session.proxy.pool_update_precheck(session.uuid, (_pool_update != null) ? _pool_update : "", (_host != null) ? _host : "").parse());
}
/// <summary>
/// Execute the precheck stage of the selected update on a host
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
/// <param name="_host">The host to run the prechecks on.</param>
public static XenRef<Task> async_precheck(Session session, string _pool_update, string _host)
{
return XenRef<Task>.Create(session.proxy.async_pool_update_precheck(session.uuid, (_pool_update != null) ? _pool_update : "", (_host != null) ? _host : "").parse());
}
/// <summary>
/// Apply the selected update to a host
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
/// <param name="_host">The host to apply the update to.</param>
public static void apply(Session session, string _pool_update, string _host)
{
session.proxy.pool_update_apply(session.uuid, (_pool_update != null) ? _pool_update : "", (_host != null) ? _host : "").parse();
}
/// <summary>
/// Apply the selected update to a host
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
/// <param name="_host">The host to apply the update to.</param>
public static XenRef<Task> async_apply(Session session, string _pool_update, string _host)
{
return XenRef<Task>.Create(session.proxy.async_pool_update_apply(session.uuid, (_pool_update != null) ? _pool_update : "", (_host != null) ? _host : "").parse());
}
/// <summary>
/// Apply the selected update to all hosts in the pool
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static void pool_apply(Session session, string _pool_update)
{
session.proxy.pool_update_pool_apply(session.uuid, (_pool_update != null) ? _pool_update : "").parse();
}
/// <summary>
/// Apply the selected update to all hosts in the pool
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static XenRef<Task> async_pool_apply(Session session, string _pool_update)
{
return XenRef<Task>.Create(session.proxy.async_pool_update_pool_apply(session.uuid, (_pool_update != null) ? _pool_update : "").parse());
}
/// <summary>
/// Removes the update's files from all hosts in the pool, but does not revert the update
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static void pool_clean(Session session, string _pool_update)
{
session.proxy.pool_update_pool_clean(session.uuid, (_pool_update != null) ? _pool_update : "").parse();
}
/// <summary>
/// Removes the update's files from all hosts in the pool, but does not revert the update
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static XenRef<Task> async_pool_clean(Session session, string _pool_update)
{
return XenRef<Task>.Create(session.proxy.async_pool_update_pool_clean(session.uuid, (_pool_update != null) ? _pool_update : "").parse());
}
/// <summary>
/// Removes the database entry. Only works on unapplied update.
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static void destroy(Session session, string _pool_update)
{
session.proxy.pool_update_destroy(session.uuid, (_pool_update != null) ? _pool_update : "").parse();
}
/// <summary>
/// Removes the database entry. Only works on unapplied update.
/// First published in .
/// </summary>
/// <param name="session">The session</param>
/// <param name="_pool_update">The opaque_ref of the given pool_update</param>
public static XenRef<Task> async_destroy(Session session, string _pool_update)
{
return XenRef<Task>.Create(session.proxy.async_pool_update_destroy(session.uuid, (_pool_update != null) ? _pool_update : "").parse());
}
/// <summary>
/// Return a list of all the pool_updates known to the system.
/// First published in .
/// </summary>
/// <param name="session">The session</param>
public static List<XenRef<Pool_update>> get_all(Session session)
{
return XenRef<Pool_update>.Create(session.proxy.pool_update_get_all(session.uuid).parse());
}
/// <summary>
/// Get all the pool_update Records at once, in a single XML RPC call
/// First published in .
/// </summary>
/// <param name="session">The session</param>
public static Dictionary<XenRef<Pool_update>, Pool_update> get_all_records(Session session)
{
return XenRef<Pool_update>.Create<Proxy_Pool_update>(session.proxy.pool_update_get_all_records(session.uuid).parse());
}
/// <summary>
/// Unique identifier/object reference
/// </summary>
public virtual string uuid
{
get { return _uuid; }
set
{
if (!Helper.AreEqual(value, _uuid))
{
_uuid = value;
Changed = true;
NotifyPropertyChanged("uuid");
}
}
}
private string _uuid;
/// <summary>
/// a human-readable name
/// </summary>
public virtual string name_label
{
get { return _name_label; }
set
{
if (!Helper.AreEqual(value, _name_label))
{
_name_label = value;
Changed = true;
NotifyPropertyChanged("name_label");
}
}
}
private string _name_label;
/// <summary>
/// a notes field containing human-readable description
/// </summary>
public virtual string name_description
{
get { return _name_description; }
set
{
if (!Helper.AreEqual(value, _name_description))
{
_name_description = value;
Changed = true;
NotifyPropertyChanged("name_description");
}
}
}
private string _name_description;
/// <summary>
/// Size of the update in bytes
/// </summary>
public virtual long installation_size
{
get { return _installation_size; }
set
{
if (!Helper.AreEqual(value, _installation_size))
{
_installation_size = value;
Changed = true;
NotifyPropertyChanged("installation_size");
}
}
}
private long _installation_size;
/// <summary>
/// GPG key of the update
/// </summary>
public virtual string key
{
get { return _key; }
set
{
if (!Helper.AreEqual(value, _key))
{
_key = value;
Changed = true;
NotifyPropertyChanged("key");
}
}
}
private string _key;
/// <summary>
/// What the client should do after this update has been applied.
/// </summary>
public virtual List<update_after_apply_guidance> after_apply_guidance
{
get { return _after_apply_guidance; }
set
{
if (!Helper.AreEqual(value, _after_apply_guidance))
{
_after_apply_guidance = value;
Changed = true;
NotifyPropertyChanged("after_apply_guidance");
}
}
}
private List<update_after_apply_guidance> _after_apply_guidance;
/// <summary>
/// VDI the update was uploaded to
/// </summary>
public virtual XenRef<VDI> vdi
{
get { return _vdi; }
set
{
if (!Helper.AreEqual(value, _vdi))
{
_vdi = value;
Changed = true;
NotifyPropertyChanged("vdi");
}
}
}
private XenRef<VDI> _vdi;
/// <summary>
/// The hosts that have applied this update.
/// </summary>
public virtual List<XenRef<Host>> hosts
{
get { return _hosts; }
set
{
if (!Helper.AreEqual(value, _hosts))
{
_hosts = value;
Changed = true;
NotifyPropertyChanged("hosts");
}
}
}
private List<XenRef<Host>> _hosts;
}
}
| |
// Copyright (c) Charlie Poole, Rob Prouse and Contributors. MIT License - see LICENSE.txt
#nullable enable
using System;
namespace NUnit.Framework
{
public abstract partial class Assert
{
#region IsAssignableFrom
/// <summary>
/// Asserts that an object may be assigned a value of a given Type. Returns without throwing an exception when
/// inside a multiple assert block.
/// </summary>
/// <param name="expected">The expected Type.</param>
/// <param name="actual">The object under examination</param>
/// <param name="message">The message to display in case of failure</param>
/// <param name="args">Array of objects to be used in formatting the message</param>
public static void IsAssignableFrom(Type expected, object? actual, string? message, params object?[]? args)
{
Assert.That(actual, Is.AssignableFrom(expected) ,message, args);
}
/// <summary>
/// Asserts that an object may be assigned a value of a given Type. Returns without throwing an exception when
/// inside a multiple assert block.
/// </summary>
/// <param name="expected">The expected Type.</param>
/// <param name="actual">The object under examination</param>
public static void IsAssignableFrom(Type expected, object? actual)
{
Assert.That(actual, Is.AssignableFrom(expected) ,null, null);
}
#endregion
#region IsAssignableFrom<TExpected>
/// <summary>
/// Asserts that an object may be assigned a value of a given Type. Returns without throwing an exception when
/// inside a multiple assert block.
/// </summary>
/// <typeparam name="TExpected">The expected Type.</typeparam>
/// <param name="actual">The object under examination</param>
/// <param name="message">The message to display in case of failure</param>
/// <param name="args">Array of objects to be used in formatting the message</param>
public static void IsAssignableFrom<TExpected>(object? actual, string? message, params object?[]? args)
{
Assert.That(actual, Is.AssignableFrom(typeof(TExpected)) ,message, args);
}
/// <summary>
/// Asserts that an object may be assigned a value of a given Type. Returns without throwing an exception when
/// inside a multiple assert block.
/// </summary>
/// <typeparam name="TExpected">The expected Type.</typeparam>
/// <param name="actual">The object under examination</param>
public static void IsAssignableFrom<TExpected>(object? actual)
{
Assert.That(actual, Is.AssignableFrom(typeof(TExpected)) ,null, null);
}
#endregion
#region IsNotAssignableFrom
/// <summary>
/// Asserts that an object may not be assigned a value of a given Type. Returns without throwing an exception
/// when inside a multiple assert block.
/// </summary>
/// <param name="expected">The expected Type.</param>
/// <param name="actual">The object under examination</param>
/// <param name="message">The message to display in case of failure</param>
/// <param name="args">Array of objects to be used in formatting the message</param>
public static void IsNotAssignableFrom(Type expected, object? actual, string? message, params object?[]? args)
{
Assert.That(actual, Is.Not.AssignableFrom(expected) ,message, args);
}
/// <summary>
/// Asserts that an object may not be assigned a value of a given Type. Returns without throwing an exception
/// when inside a multiple assert block.
/// </summary>
/// <param name="expected">The expected Type.</param>
/// <param name="actual">The object under examination</param>
public static void IsNotAssignableFrom(Type expected, object? actual)
{
Assert.That(actual, Is.Not.AssignableFrom(expected) ,null, null);
}
#endregion
#region IsNotAssignableFrom<TExpected>
/// <summary>
/// Asserts that an object may not be assigned a value of a given Type. Returns without throwing an exception
/// when inside a multiple assert block.
/// </summary>
/// <typeparam name="TExpected">The expected Type.</typeparam>
/// <param name="actual">The object under examination</param>
/// <param name="message">The message to display in case of failure</param>
/// <param name="args">Array of objects to be used in formatting the message</param>
public static void IsNotAssignableFrom<TExpected>(object? actual, string? message, params object?[]? args)
{
Assert.That(actual, Is.Not.AssignableFrom(typeof(TExpected)) ,message, args);
}
/// <summary>
/// Asserts that an object may not be assigned a value of a given Type. Returns without throwing an exception
/// when inside a multiple assert block.
/// </summary>
/// <typeparam name="TExpected">The expected Type.</typeparam>
/// <param name="actual">The object under examination</param>
public static void IsNotAssignableFrom<TExpected>(object? actual)
{
Assert.That(actual, Is.Not.AssignableFrom(typeof(TExpected)) ,null, null);
}
#endregion
#region IsInstanceOf
/// <summary>
/// Asserts that an object is an instance of a given type. Returns without throwing an exception when inside a
/// multiple assert block.
/// </summary>
/// <param name="expected">The expected Type</param>
/// <param name="actual">The object being examined</param>
/// <param name="message">The message to display in case of failure</param>
/// <param name="args">Array of objects to be used in formatting the message</param>
public static void IsInstanceOf(Type expected, object? actual, string? message, params object?[]? args)
{
Assert.That(actual, Is.InstanceOf(expected) ,message, args);
}
/// <summary>
/// Asserts that an object is an instance of a given type. Returns without throwing an exception when inside a
/// multiple assert block.
/// </summary>
/// <param name="expected">The expected Type</param>
/// <param name="actual">The object being examined</param>
public static void IsInstanceOf(Type expected, object? actual)
{
Assert.That(actual, Is.InstanceOf(expected) ,null, null);
}
#endregion
#region IsInstanceOf<TExpected>
/// <summary>
/// Asserts that an object is an instance of a given type. Returns without throwing an exception when inside a
/// multiple assert block.
/// </summary>
/// <typeparam name="TExpected">The expected Type</typeparam>
/// <param name="actual">The object being examined</param>
/// <param name="message">The message to display in case of failure</param>
/// <param name="args">Array of objects to be used in formatting the message</param>
public static void IsInstanceOf<TExpected>(object? actual, string? message, params object?[]? args)
{
Assert.That(actual, Is.InstanceOf(typeof(TExpected)) ,message, args);
}
/// <summary>
/// Asserts that an object is an instance of a given type. Returns without throwing an exception when inside a
/// multiple assert block.
/// </summary>
/// <typeparam name="TExpected">The expected Type</typeparam>
/// <param name="actual">The object being examined</param>
public static void IsInstanceOf<TExpected>(object? actual)
{
Assert.That(actual, Is.InstanceOf(typeof(TExpected)) ,null, null);
}
#endregion
#region IsNotInstanceOf
/// <summary>
/// Asserts that an object is not an instance of a given type. Returns without throwing an exception when inside
/// a multiple assert block.
/// </summary>
/// <param name="expected">The expected Type</param>
/// <param name="actual">The object being examined</param>
/// <param name="message">The message to display in case of failure</param>
/// <param name="args">Array of objects to be used in formatting the message</param>
public static void IsNotInstanceOf(Type expected, object? actual, string? message, params object?[]? args)
{
Assert.That(actual, Is.Not.InstanceOf(expected) ,message, args);
}
/// <summary>
/// Asserts that an object is not an instance of a given type. Returns without throwing an exception when inside
/// a multiple assert block.
/// </summary>
/// <param name="expected">The expected Type</param>
/// <param name="actual">The object being examined</param>
public static void IsNotInstanceOf(Type expected, object? actual)
{
Assert.That(actual, Is.Not.InstanceOf(expected) ,null, null);
}
#endregion
#region IsNotInstanceOf<TExpected>
/// <summary>
/// Asserts that an object is not an instance of a given type. Returns without throwing an exception when inside
/// a multiple assert block.
/// </summary>
/// <typeparam name="TExpected">The expected Type</typeparam>
/// <param name="actual">The object being examined</param>
/// <param name="message">The message to display in case of failure</param>
/// <param name="args">Array of objects to be used in formatting the message</param>
public static void IsNotInstanceOf<TExpected>(object? actual, string? message, params object?[]? args)
{
Assert.That(actual, Is.Not.InstanceOf(typeof(TExpected)) ,message, args);
}
/// <summary>
/// Asserts that an object is not an instance of a given type. Returns without throwing an exception when inside
/// a multiple assert block.
/// </summary>
/// <typeparam name="TExpected">The expected Type</typeparam>
/// <param name="actual">The object being examined</param>
public static void IsNotInstanceOf<TExpected>(object? actual)
{
Assert.That(actual, Is.Not.InstanceOf(typeof(TExpected)) ,null, null);
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using Foundation;
using ObjCRuntime;
using FM;
using FM.IceLink;
using FM.IceLink.WebRTC;
using Xamarin.iOS.VP8;
namespace Xamarin.Forms.Conference.WebRTC.iOS.VP8
{
/// <summary>
/// VP8 codec wrapper for Xamarin.iOS.
/// </summary>
    class Vp8Codec : VideoCodec
    {
        // Native encoder/decoder wrappers; created lazily on first Encode/Decode
        // and released in Destroy().
        private CocoaVp8Encoder _Encoder;
        private CocoaVp8Decoder _Decoder;

        // Handles VP8 RTP packetization/depacketization and tracks sequence
        // numbering so the decoder can request a keyframe after packet loss.
        private Vp8Padep _Padep;

        /// <summary>
        /// Initializes a new instance of the <see cref="Vp8Codec"/> class.
        /// </summary>
        public Vp8Codec()
        {
            _Padep = new Vp8Padep();
        }

        /// <summary>
        /// Encodes a frame.
        /// </summary>
        /// <param name="frame">The frame.</param>
        /// <returns></returns>
        public override byte[] Encode(VideoBuffer frame)
        {
            // Lazily create the encoder with default quality/bitrate.
            if (_Encoder == null)
            {
                _Encoder = new CocoaVp8Encoder();
                _Encoder.Quality = 0.5;
                _Encoder.Bitrate = 320;
                //_Encoder.Scale = 1.0;
            }
            if (frame.ResetKeyFrame)
            {
                _Encoder.SendKeyframe();
            }
            // Autorelease pool so native NSData objects created below are
            // reclaimed as soon as encoding of this frame finishes.
            using (var pool = new NSAutoreleasePool())
            {
                VideoPlane planeY = frame.Planes[0];
                VideoPlane planeU = frame.Planes[1];
                VideoPlane planeV = frame.Planes[2];
                // Pin the managed plane arrays so FromBytesNoCopy can wrap the
                // memory without copying; the handles are released in finally.
                GCHandle planeYDataHandle = GCHandle.Alloc(planeY.Data, GCHandleType.Pinned);
                GCHandle planeUDataHandle = GCHandle.Alloc(planeU.Data, GCHandleType.Pinned);
                GCHandle planeVDataHandle = GCHandle.Alloc(planeV.Data, GCHandleType.Pinned);
                try
                {
                    IntPtr planeYDataPointer = planeYDataHandle.AddrOfPinnedObject();
                    IntPtr planeUDataPointer = planeUDataHandle.AddrOfPinnedObject();
                    IntPtr planeVDataPointer = planeVDataHandle.AddrOfPinnedObject();
                    //TODO: index/length
                    // freeWhenDone: false — the memory belongs to the pinned
                    // managed arrays, so NSData must not free it.
                    using (var buffer = new CocoaVp8Buffer {
                        PlaneY = NSData.FromBytesNoCopy(planeYDataPointer, (uint)planeY.Data.Length, false),
                        PlaneU = NSData.FromBytesNoCopy(planeUDataPointer, (uint)planeU.Data.Length, false),
                        PlaneV = NSData.FromBytesNoCopy(planeVDataPointer, (uint)planeV.Data.Length, false),
                        StrideY = planeY.Stride,
                        StrideU = planeU.Stride,
                        StrideV = planeV.Stride,
                        Width = frame.Width,
                        Height = frame.Height
                    })
                    {
                        using (var encodedFrame = new NSMutableData())
                        {
                            if (_Encoder.EncodeBuffer(buffer, encodedFrame))
                            {
                                return encodedFrame.ToArray();
                            }
                            // Encoding failed for this frame; caller gets null.
                            return null;
                        }
                    }
                }
                finally
                {
                    planeYDataHandle.Free();
                    planeUDataHandle.Free();
                    planeVDataHandle.Free();
                }
            }
        }

        /// <summary>
        /// Decodes an encoded frame.
        /// </summary>
        /// <param name="encodedFrame">The encoded frame.</param>
        /// <returns></returns>
        public override VideoBuffer Decode(byte[] encodedFrame)
        {
            if (_Decoder == null)
            {
                _Decoder = new CocoaVp8Decoder();
            }
            // If RTP sequence numbering was violated (packet loss), skip this
            // frame and ask the remote encoder for a fresh keyframe instead.
            if (_Padep.SequenceNumberingViolated)
            {
                _Decoder.NeedsKeyFrame = true;
                return null;
            }
            using (var pool = new NSAutoreleasePool())
            {
                // Pin the encoded bytes so NSData can wrap them without a copy.
                GCHandle encodedFrameHandle = GCHandle.Alloc(encodedFrame, GCHandleType.Pinned);
                try
                {
                    IntPtr encodedFramePointer = encodedFrameHandle.AddrOfPinnedObject();
                    using (var encodedFrameData = NSData.FromBytesNoCopy(encodedFramePointer, (uint)encodedFrame.Length, false))
                    {
                        using (var buffer = new CocoaVp8Buffer())
                        {
                            if (_Decoder.DecodeFrame(encodedFrameData, buffer))
                            {
                                // Copy the decoded planes out of the native
                                // buffers into managed arrays before the pool
                                // and buffer are disposed.
                                var planeYData = new byte[buffer.PlaneY.Length];
                                var planeUData = new byte[buffer.PlaneU.Length];
                                var planeVData = new byte[buffer.PlaneV.Length];
                                Marshal.Copy(buffer.PlaneY.Bytes, planeYData, 0, (int)buffer.PlaneY.Length);
                                Marshal.Copy(buffer.PlaneU.Bytes, planeUData, 0, (int)buffer.PlaneU.Length);
                                Marshal.Copy(buffer.PlaneV.Bytes, planeVData, 0, (int)buffer.PlaneV.Length);
                                return new VideoBuffer(buffer.Width, buffer.Height, new[] {
                                    new VideoPlane(planeYData, buffer.StrideY),
                                    new VideoPlane(planeUData, buffer.StrideU),
                                    new VideoPlane(planeVData, buffer.StrideV)
                                }, VideoFormat.I420);
                            }
                            // Decode failure; DecoderNeedsKeyFrame() is checked next.
                            return null;
                        }
                    }
                }
                finally
                {
                    encodedFrameHandle.Free();
                }
            }
        }

        /// <summary>
        /// Gets whether the decoder needs a keyframe. This
        /// is checked after every failed Decode operation.
        /// </summary>
        /// <returns></returns>
        public override bool DecoderNeedsKeyFrame()
        {
            if (_Decoder == null)
            {
                return false;
            }
            return _Decoder.NeedsKeyFrame;
        }

        /// <summary>
        /// Packetizes an encoded frame.
        /// </summary>
        /// <param name="encodedFrame">The encoded frame.</param>
        /// <returns></returns>
        public override RTPPacket[] Packetize(byte[] encodedFrame)
        {
            return _Padep.Packetize(encodedFrame, ClockRate);
        }

        /// <summary>
        /// Depacketizes a packet.
        /// </summary>
        /// <param name="packet">The packet.</param>
        /// <returns></returns>
        public override byte[] Depacketize(RTPPacket packet)
        {
            return _Padep.Depacketize(packet);
        }

        // Counters of consecutive lossy/lossless RTCP reports, used below to
        // adapt encoder quality and bitrate.
        private int _LossyCount;
        private int _LosslessCount;

        /// <summary>
        /// Processes RTCP packets. A PLI forces a keyframe; report blocks drive
        /// a simple adaptation scheme: after more than five consecutive lossy
        /// reports quality/bitrate are stepped down, and after more than five
        /// consecutive lossless reports they are stepped back up.
        /// </summary>
        /// <param name="packets">The packets to process.</param>
        public override void ProcessRTCP(RTCPPacket[] packets)
        {
            if (_Encoder != null)
            {
                foreach (var packet in packets)
                {
                    if (packet is RTCPPliPacket)
                    {
                        Log.Info("Received PLI for video stream.");
                        _Encoder.SendKeyframe();
                    }
                    else if (packet is RTCPReportPacket)
                    {
                        var report = (RTCPReportPacket)packet;
                        foreach (var block in report.ReportBlocks)
                        {
                            Log.DebugFormat("VP8 report: {0} packet loss ({1} cumulative packets lost)", block.PercentLost.ToString("P2"), block.CumulativeNumberOfPacketsLost.ToString());
                            if (block.PercentLost > 0)
                            {
                                _LosslessCount = 0;
                                _LossyCount++;
                                // Step down (quality -0.1, bitrate -64) but never
                                // below quality 0.0 / bitrate 64.
                                if (_LossyCount > 5 && (_Encoder.Quality > 0.0 || _Encoder.Bitrate > 64 /* || _Encoder.Scale > 0.2 */))
                                {
                                    _LossyCount = 0;
                                    if (_Encoder.Quality > 0.0)
                                    {
                                        _Encoder.Quality = MathAssistant.Max(0.0, _Encoder.Quality - 0.1);
                                        Log.InfoFormat("Decreasing VP8 encoder quality to {0}.", _Encoder.Quality.ToString("P2"));
                                    }
                                    if (_Encoder.Bitrate > 64)
                                    {
                                        _Encoder.Bitrate = MathAssistant.Max(64, _Encoder.Bitrate - 64);
                                        Log.InfoFormat("Decreasing VP8 encoder bitrate to {0}.", _Encoder.Bitrate.ToString());
                                    }
                                    /*if (_Encoder.Scale > 0.2)
                                    {
                                        _Encoder.Scale = MathAssistant.Max(0.2, _Encoder.Scale - 0.2);
                                        Log.InfoFormat("Decreasing VP8 encoder scale to {0}.", _Encoder.Scale.ToString("P2"));
                                    }*/
                                }
                            }
                            else
                            {
                                _LossyCount = 0;
                                _LosslessCount++;
                                // Step back up (quality +0.1, bitrate +64) but
                                // never above quality 1.0 / bitrate 640.
                                if (_LosslessCount > 5 && (_Encoder.Quality < 1.0 || _Encoder.Bitrate < 640 /* || _Encoder.Scale < 1.0 */))
                                {
                                    _LosslessCount = 0;
                                    if (_Encoder.Quality < 1.0)
                                    {
                                        _Encoder.Quality = MathAssistant.Min(1.0, _Encoder.Quality + 0.1);
                                        Log.InfoFormat("Increasing VP8 encoder quality to {0}.", _Encoder.Quality.ToString("P2"));
                                    }
                                    if (_Encoder.Bitrate < 640)
                                    {
                                        _Encoder.Bitrate = MathAssistant.Min(640, _Encoder.Bitrate + 64);
                                        Log.InfoFormat("Increasing VP8 encoder bitrate to {0}.", _Encoder.Bitrate.ToString());
                                    }
                                    /*if (_Encoder.Scale < 1.0)
                                    {
                                        _Encoder.Scale = MathAssistant.Min(1.0, _Encoder.Scale + 0.2);
                                        Log.InfoFormat("Increasing VP8 encoder scale to {0}.", _Encoder.Scale.ToString("P2"));
                                    }*/
                                }
                            }
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Destroys the codec, releasing the native encoder and decoder.
        /// </summary>
        public override void Destroy()
        {
            if (_Encoder != null)
            {
                _Encoder.Destroy();
                _Encoder.Dispose();
                _Encoder = null;
            }
            if (_Decoder != null)
            {
                _Decoder.Destroy();
                _Decoder.Dispose();
                _Decoder = null;
            }
        }
    }
}
| |
///
// This file contains utility classes to access easily OpenKID repositories
// in order to extract information from them
//
// LICENCE:
// Copyright (c) 2016 Fairmat SRL.
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files
// (the "Software"), to deal in the Software without restriction,
// including without limitation the rights to use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of the Software,
// and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
// <copyright>Copyright (c) 2016 Fairmat SRL (http://www.fairmat.com)</copyright>
// <license>MIT.</license>
// <version>0.1</version>
///
///
// An implementation of several utils to access OpenKID repositories.
//
// <copyright>Copyright (c) 2016 Fairmat SRL (http://www.fairmat.com)</copyright>
// <license>MIT.</license>
// <version>0.1</version>
///
using openKid.Model;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Threading.Tasks;
using System.Xml;
using System.Xml.Linq;
namespace openKid.Util
{
public static class OpenKidUtils
{
/// <summary>
/// Inner function that loads an XML file from an url.
/// </summary>
/// <param name="xmlUri">
/// The position of the xml file to open.
/// </param>
/// <returns>
/// A parsed XML file object.
/// </returns>
private static async Task<XDocument> LoadXML(Uri xmlUri)
{
using (var client = new HttpClient())
{
XDocument xmlDocument = null;
client.BaseAddress = xmlUri;
client.DefaultRequestHeaders.Accept.Clear();
var response = await client.GetAsync(xmlUri);
if (response.IsSuccessStatusCode)
{
var responseStream = await response.Content.ReadAsStringAsync();
xmlDocument = XDocumentTryParse(responseStream);
}
return xmlDocument;
}
}
private static XDocument XDocumentTryParse(string xmlFile)
{
try
{
return XDocument.Parse(xmlFile);
}
catch (XmlException)
{
return null;
}
}
/// <summary>
/// Gets the last time the repository was updated (by checking the last updated field of the index file)
/// </summary>
/// <param name="repoUrl">
/// The URL of the repository to check for KIDs (check repo-example for an idea of its structure).
/// </param>
/// <returns>
/// A DateTime object with the last update date in it.
/// </returns>
public static async Task<DateTime?> GetLastUpdate(string repoUrl)
{
var xmlDoc = await LoadXML(CombineUri(repoUrl, "kidsindex.xml"));
return ConvertToDate(xmlDoc.Root.Element("LastUpdated")?.Value);
}
/// <summary>
/// Gets a list of all KIDs in the index of the provided repository url.
/// </summary>
/// <param name="repoUrl">
/// The URL of the repository to check for KIDs (check repo-example for an idea of its structure).
/// </param>
/// <returns>
/// A list of all KIDs entry.
/// </returns>
public static async Task<IList<KidEntry>> GetAllAvailableKIDs(string repoUrl)
{
IList<KidEntry> kidEntryList = new List<KidEntry>();
var xmlDoc = await LoadXML(CombineUri(repoUrl, "kidsindex.xml"));
if (xmlDoc != null)
{
var entries = from e in xmlDoc.Root.Elements("KIDS").Elements("KIDEntry")
select e;
foreach (var entry in entries)
{
kidEntryList.Add(ParseSingleKIDEntry(entry));
}
}
return kidEntryList;
}
/// <summary>
/// Inner function which extracts a single KID entry from the index file.
/// </summary>
/// <param name="entry">
/// A XML element pointing related to a <KIDEntry>
/// </param>
/// <returns>
/// A KID entry
/// </returns>
private static KidEntry ParseSingleKIDEntry(XElement entry)
{
var kidEntry = new KidEntry()
{
Issuer = entry.Element("Issuer")?.Value,
Isin = entry.Element("ISIN")?.Value,
ProductName = entry.Element("ProductName")?.Value,
FirstPublished = ConvertToDate(entry.Element("FirstPublished")?.Value),
LastUpdated = ConvertToDate(entry.Element("LastUpdated")?.Value),
HistoryUrl = entry.Element("History")?.Value,
};
kidEntry.Versions = ParseVersionsEntry(entry);
return kidEntry;
}
/// <summary>
/// Inner function used to parse <Versions>...</Versions> data as the format is shared between the history and main index files.
/// </summary>
/// <param name="entry">
/// A XML element pointing related to a <KIDEntry>
/// </param>
/// <returns>
/// A list of all versions
/// </returns>
private static IList<KidVersion> ParseVersionsEntry(XElement entry)
{
var kidVersionList = new List<KidVersion>();
var versions = from v in entry.Elements("Versions").Elements("Version")
select v;
foreach (var version in versions)
{
var kidVersion = new KidVersion()
{
Language = version.Element("Language").Value,
Url = version.Element("Url").Value
};
kidVersionList.Add(kidVersion);
}
return kidVersionList;
}
/// <summary>
/// Gets a list of all KIDs in the index of the provided repository url, which have been updated or added since the provided date.
/// </summary>
/// <param name="repoUrl">
/// The URL of the repository to check for KIDs (check repo-example for an idea of its structure).
/// </param>
/// <param name="minDate">
/// The date to check against to filter out older KIDs.
/// </param>
/// <returns>
/// A list of all KIDs entry updated or added after the specified date
/// </returns>
public static async Task<IList<KidEntry>> GetAllAvailableKIDsAfterDate(string repoUrl, DateTime minDate)
{
var kidList = await GetAllAvailableKIDs(repoUrl);
var filteredKidList = from f in kidList
where (f.FirstPublished ?? DateTime.MinValue) >= minDate || (f.LastUpdated ?? DateTime.MinValue) >= minDate
select f;
return filteredKidList.ToList();
}
/// <summary>
/// Gets the publishing history of a specific ISIN from the history file associated to it.
/// This is done by first checking for the index file to contain the ISIN of the product we are interested into, then checking the
/// reference within the history folder of the repository. That file is then parsed to provide the full history of the products we are interested into.
/// </summary>
/// <param name="repoUrl">
/// The URL of the repository to check for KIDs (check repo-example for an idea of its structure).
/// </param>
/// <param name="isin">
/// The product ISIN we are interested into.
/// </param>
/// <returns>
/// A list of history entries provided by the correlated history file.
/// </returns>
public static async Task<IList<KidUpdate>> GetProductPublishingHistory(string repoUrl, string isin)
{
IList<KidUpdate> kidUpdateList = new List<KidUpdate>();
var xmlDoc = await LoadXML(CombineUri(repoUrl, "kidsindex.xml"));
if (xmlDoc != null)
{
var entry = (
from e in xmlDoc.Root.Elements("KIDS").Elements("KIDEntry")
where isin.Equals(e.Element("ISIN").Value)
select e
).FirstOrDefault();
if (entry != null)
{
var historyUrl = entry.Element("History")?.Value;
if (historyUrl != null)
{
var xmlHistoryDoc = await LoadXML(CombineUri(repoUrl, historyUrl));
var updateEntries = from e in xmlHistoryDoc.Root.Elements("Update")
select e;
foreach (var updateEntry in updateEntries)
{
var kidUpdate = new KidUpdate()
{
PublishDate = ConvertToDate(updateEntry.Element("PublishDate")?.Value),
};
kidUpdate.Versions = ParseVersionsEntry(updateEntry);
kidUpdateList.Add(kidUpdate);
}
}
}
}
return kidUpdateList;
}
private static Uri CombineUri(string baseUri, string relativeOrAbsoluteUri)
{
return new Uri(new Uri(baseUri), relativeOrAbsoluteUri);
}
private static DateTime? ConvertToDate(string dateString)
{
if (string.IsNullOrEmpty(dateString))
{
return null;
}
return DateTime.ParseExact(dateString, "yyyy-MM-dd", CultureInfo.InvariantCulture);
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// IntRangePartitionerTests.cs - Tests for range partitioner for integer range.
//
// PLEASE NOTE !! - For tests that need to iterate the elements inside the partitions more
// than once, we need to call GetPartitions for the second time. Iterating a second times
// over the first enumerable<tuples> / IList<IEnumerator<tuples> will yield no elements
//
// PLEASE NOTE!! - we use lazy evaluation wherever possible to allow for more than Int32.MaxValue
// elements. ToArray / toList will result in an OOM
//
// Taken from:
// \qa\clr\testsrc\pfx\Functional\Common\Partitioner\YetiTests\RangePartitioner\IntRangePartitionerTests.cs
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using Xunit;
namespace System.Collections.Concurrent.Tests
{
public class IntRangePartitionerTests
{
/// <summary>
/// Ensure that the partitioner returned has properties set correctly
/// </summary>
[Fact]
public static void CheckKeyProperties()
{
var partitioner = Partitioner.Create(0, 1000);
Assert.True(partitioner.KeysOrderedInEachPartition, "Expected KeysOrderedInEachPartition to be set to true");
Assert.False(partitioner.KeysOrderedAcrossPartitions, "KeysOrderedAcrossPartitions to be set to false");
Assert.True(partitioner.KeysNormalized, "Expected KeysNormalized to be set to true");
partitioner = Partitioner.Create(0, 1000, 90);
Assert.True(partitioner.KeysOrderedInEachPartition, "Expected KeysOrderedInEachPartition to be set to true");
Assert.False(partitioner.KeysOrderedAcrossPartitions, "KeysOrderedAcrossPartitions to be set to false");
Assert.True(partitioner.KeysNormalized, "Expected KeysNormalized to be set to true");
}
/// <summary>
/// GetPartitions returns an IList<IEnumerator<Tuple<int, int>>
/// We unroll the tuples and flatten them to a single sequence
/// The single sequence is compared to the original range for verification
/// </summary>
[Fact]
public static void CheckGetPartitions()
{
CheckGetPartitions(0, 1, 1);
CheckGetPartitions(1, 1999, 3);
CheckGetPartitions(2147473647, 9999, 4);
CheckGetPartitions(-1999, 5000, 63);
CheckGetPartitions(-2147483648, 5000, 63);
}
public static void CheckGetPartitions(int from, int count, int dop)
{
int to = from + count;
var partitioner = Partitioner.Create(from, to);
//var elements = dopPartitions.SelectMany(enumerator => enumerator.UnRoll());
IList<int> elements = new List<int>();
foreach (var partition in partitioner.GetPartitions(dop))
{
foreach (var item in partition.UnRoll())
elements.Add(item);
}
Assert.True(elements.CompareSequences<int>(RangePartitionerHelpers.IntEnumerable(from, to)), "GetPartitions element mismatch");
}
/// <summary>
/// CheckGetDynamicPartitions returns an IEnumerable<Tuple<int, int>>
/// We unroll the tuples and flatten them to a single sequence
/// The single sequence is compared to the original range for verification
/// </summary>
/// <param name="from"></param>
/// <param name="count"></param>
[Fact]
public static void CheckGetDynamicPartitions()
{
CheckGetDynamicPartitions(0, 1);
CheckGetDynamicPartitions(1, 1999);
CheckGetDynamicPartitions(2147473647, 9999);
CheckGetDynamicPartitions(-1999, 5000);
CheckGetDynamicPartitions(-2147483648, 5000);
}
public static void CheckGetDynamicPartitions(int from, int count)
{
int to = from + count;
var partitioner = Partitioner.Create(from, to);
//var elements = partitioner.GetDynamicPartitions().SelectMany(tuple => tuple.UnRoll());
IList<int> elements = new List<int>();
foreach (var partition in partitioner.GetDynamicPartitions())
{
foreach (var item in partition.UnRoll())
elements.Add(item);
}
Assert.True(elements.CompareSequences<int>(RangePartitionerHelpers.IntEnumerable(from, to)), "GetDynamicPartitions Element mismatch");
}
/// <summary>
/// GetOrderablePartitions returns an IList<IEnumerator<KeyValuePair<long, Tuple<int, int>>>
/// We unroll the tuples and flatten them to a single sequence
/// The single sequence is compared to the original range for verification
/// Also the indices are extracted to ensure that they are ordered & normalized
/// </summary>
[Fact]
public static void CheckGetOrderablePartitions()
{
CheckGetOrderablePartitions(0, 1, 1);
CheckGetOrderablePartitions(1, 1999, 3);
CheckGetOrderablePartitions(2147473647, 9999, 4);
CheckGetOrderablePartitions(-1999, 5000, 63);
CheckGetOrderablePartitions(-2147483648, 5000, 63);
}
public static void CheckGetOrderablePartitions(int from, int count, int dop)
{
int to = from + count;
var partitioner = Partitioner.Create(from, to);
//var elements = partitioner.GetOrderablePartitions(dop).SelectMany(enumerator => enumerator.UnRoll());
IList<int> elements = new List<int>();
foreach (var partition in partitioner.GetPartitions(dop))
{
foreach (var item in partition.UnRoll())
elements.Add(item);
}
Assert.True(elements.CompareSequences<int>(RangePartitionerHelpers.IntEnumerable(from, to)), "GetOrderablePartitions Element mismatch");
//var keys = partitioner.GetOrderablePartitions(dop).SelectMany(enumerator => enumerator.UnRollIndices()).ToArray();
IList<long> keys = new List<long>();
foreach (var partition in partitioner.GetOrderablePartitions(dop))
{
foreach (var item in partition.UnRollIndices())
keys.Add(item);
}
Assert.True(keys.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(keys[0], keys.Count)), "GetOrderablePartitions key mismatch");
}
/// <summary>
/// GetOrderableDynamicPartitions returns an IEnumerable<KeyValuePair<long, Tuple<int, int>>
/// We unroll the tuples and flatten them to a single sequence
/// The single sequence is compared to the original range for verification
/// Also the indices are extracted to ensure that they are ordered & normalized
/// </summary>
[Fact]
public static void GetOrderableDynamicPartitions()
{
GetOrderableDynamicPartitions(0, 1);
GetOrderableDynamicPartitions(1, 1999);
GetOrderableDynamicPartitions(2147473647, 9999);
GetOrderableDynamicPartitions(-1999, 5000);
GetOrderableDynamicPartitions(-2147483648, 5000);
}
private static void GetOrderableDynamicPartitions(int from, int count)
{
int to = from + count;
var partitioner = Partitioner.Create(from, to);
//var elements = partitioner.GetOrderableDynamicPartitions().SelectMany(tuple => tuple.UnRoll());
IList<int> elements = new List<int>();
foreach (var partition in partitioner.GetOrderableDynamicPartitions())
{
foreach (var item in partition.UnRoll())
elements.Add(item);
}
Assert.True(elements.CompareSequences<int>(RangePartitionerHelpers.IntEnumerable(from, to)), "GetOrderableDynamicPartitions Element mismatch");
//var keys = partitioner.GetOrderableDynamicPartitions().Select(tuple => tuple.Key).ToArray();
IList<long> keys = new List<long>();
foreach (var tuple in partitioner.GetOrderableDynamicPartitions())
{
keys.Add(tuple.Key);
}
Assert.True(keys.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(keys[0], keys.Count)), "GetOrderableDynamicPartitions key mismatch");
}
/// <summary>
/// GetPartitions returns an IList<IEnumerator<Tuple<int, int>>
/// We unroll the tuples and flatten them to a single sequence
/// The single sequence is compared to the original range for verification
/// This method tests the partitioner created with user provided desiredRangeSize
/// The range sizes for individual ranges are checked to see if they are equal to
/// desiredRangeSize. The last range may have less than or equal to desiredRangeSize.
/// </summary>
[Fact]
public static void CheckGetPartitionsWithRange()
{
CheckGetPartitionsWithRange(1999, 1000, 20, 1);
CheckGetPartitionsWithRange(-1999, 1000, 100, 2);
CheckGetPartitionsWithRange(1999, 1, 2000, 3);
CheckGetPartitionsWithRange(2147482647, 999, 600, 4);
CheckGetPartitionsWithRange(-2147483648, 1000, 19, 63);
}
public static void CheckGetPartitionsWithRange(int from, int count, int desiredRangeSize, int dop)
{
int to = from + count;
var partitioner = Partitioner.Create(from, to, desiredRangeSize);
//var elements = partitioner.GetPartitions(dop).SelectMany(enumerator => enumerator.UnRoll());
IList<int> elements = new List<int>();
foreach (var partition in partitioner.GetPartitions(dop))
{
foreach (var item in partition.UnRoll())
elements.Add(item);
}
Assert.True(elements.CompareSequences<int>(RangePartitionerHelpers.IntEnumerable(from, to)), "GetPartitions element mismatch");
//var rangeSizes = partitioner.GetPartitions(dop).SelectMany(enumerator => enumerator.GetRangeSize()).ToArray();
IList<int> rangeSizes = new List<int>();
foreach (var partition in partitioner.GetPartitions(dop))
{
foreach (var item in partition.GetRangeSize())
rangeSizes.Add(item);
}
ValidateRangeSize(desiredRangeSize, rangeSizes);
}
/// <summary>
/// CheckGetDynamicPartitionsWithRange returns an IEnumerable<Tuple<int, int>>
/// We unroll the tuples and flatten them to a single sequence
/// The single sequence is compared to the original range for verification
/// This method tests the partitioner created with user provided desiredRangeSize
/// The range sizes for individual ranges are checked to see if they are equal to
/// desiredRangeSize. The last range may have less than or equal to desiredRangeSize.
/// </summary>
[Fact]
public static void CheckGetDynamicPartitionsWithRange()
{
CheckGetDynamicPartitionsWithRange(1999, 1000, 20);
CheckGetDynamicPartitionsWithRange(-1999, 1000, 100);
CheckGetDynamicPartitionsWithRange(1999, 1, 2000);
CheckGetDynamicPartitionsWithRange(2147482647, 999, 600);
CheckGetDynamicPartitionsWithRange(-2147483648, 1000, 19);
}
public static void CheckGetDynamicPartitionsWithRange(int from, int count, int desiredRangeSize)
{
int to = from + count;
var partitioner = Partitioner.Create(from, to, desiredRangeSize);
//var elements = partitioner.GetDynamicPartitions().SelectMany(tuple => tuple.UnRoll());
IList<int> elements = new List<int>();
foreach (var partition in partitioner.GetDynamicPartitions())
{
foreach (var item in partition.UnRoll())
elements.Add(item);
}
Assert.True(elements.CompareSequences<int>(RangePartitionerHelpers.IntEnumerable(from, to)), "GetDynamicPartitions Element mismatch");
//var rangeSizes = partitioner.GetDynamicPartitions().Select(tuple => tuple.GetRangeSize()).ToArray();
IList<int> rangeSizes = new List<int>();
foreach (var partition in partitioner.GetDynamicPartitions())
{
rangeSizes.Add(partition.GetRangeSize());
}
ValidateRangeSize(desiredRangeSize, rangeSizes);
}
/// <summary>
/// GetOrderablePartitions returns an IList<IEnumerator<KeyValuePair<long, Tuple<int, int>>>
/// We unroll the tuples and flatten them to a single sequence
/// The single sequence is compared to the original range for verification
/// Also the indices are extracted to ensure that they are ordered & normalized
/// This method tests the partitioner created with user provided desiredRangeSize
/// The range sizes for individual ranges are checked to see if they are equal to
/// desiredRangeSize. The last range may have less than or equal to desiredRangeSize.
/// </summary>
[Fact]
public static void CheckGetOrderablePartitionsWithRange()
{
CheckGetOrderablePartitionsWithRange(1999, 1000, 20, 1);
CheckGetOrderablePartitionsWithRange(-1999, 1000, 100, 2);
CheckGetOrderablePartitionsWithRange(1999, 1, 2000, 3);
CheckGetOrderablePartitionsWithRange(2147482647, 999, 600, 4);
CheckGetOrderablePartitionsWithRange(-2147483648, 1000, 19, 63);
}
private static void CheckGetOrderablePartitionsWithRange(int from, int count, int desiredRangeSize, int dop)
{
int to = from + count;
var partitioner = Partitioner.Create(from, to, desiredRangeSize);
//var elements = partitioner.GetOrderablePartitions(dop).SelectMany(enumerator => enumerator.UnRoll());
IList<int> elements = new List<int>();
foreach (var partition in partitioner.GetOrderablePartitions(dop))
{
foreach (var item in partition.UnRoll())
elements.Add(item);
}
Assert.True(elements.CompareSequences<int>(RangePartitionerHelpers.IntEnumerable(from, to)), "GetOrderablePartitions Element mismatch");
//var keys = partitioner.GetOrderablePartitions(dop).SelectMany(enumerator => enumerator.UnRollIndices()).ToArray();
IList<long> keys = new List<long>();
foreach (var partition in partitioner.GetOrderablePartitions(dop))
{
foreach (var item in partition.UnRollIndices())
keys.Add(item);
}
Assert.True(keys.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(keys[0], keys.Count)), "GetOrderablePartitions key mismatch");
//var rangeSizes = partitioner.GetOrderablePartitions(dop).SelectMany(enumerator => enumerator.GetRangeSize()).ToArray();
IList<int> rangeSizes = new List<int>();
foreach (var partition in partitioner.GetOrderablePartitions(dop))
{
foreach (var item in partition.GetRangeSize())
rangeSizes.Add(item);
}
ValidateRangeSize(desiredRangeSize, rangeSizes);
}
/// <summary>
/// GetOrderableDynamicPartitions returns an IEnumerable<KeyValuePair<long, Tuple<int, int>>
/// We unroll the tuples and flatten them to a single sequence
/// The single sequence is compared to the original range for verification
/// Also the indices are extracted to ensure that they are ordered & normalized
/// This method tests the partitioner created with user provided desiredRangeSize
/// The range sizes for individual ranges are checked to see if they are equal to
/// desiredRangeSize. The last range may have less than or equal to desiredRangeSize.
/// </summary>
[Fact]
public static void GetOrderableDynamicPartitionsWithRange()
{
GetOrderableDynamicPartitionsWithRange(1999, 1000, 20);
GetOrderableDynamicPartitionsWithRange(-1999, 1000, 100);
GetOrderableDynamicPartitionsWithRange(1999, 1, 2000);
GetOrderableDynamicPartitionsWithRange(2147482647, 999, 600);
GetOrderableDynamicPartitionsWithRange(-2147483648, 1000, 19);
}
private static void GetOrderableDynamicPartitionsWithRange(int from, int count, int desiredRangeSize)
{
int to = from + count;
var partitioner = Partitioner.Create(from, to, desiredRangeSize);
//var elements = partitioner.GetOrderableDynamicPartitions().SelectMany(tuple => tuple.UnRoll());
IList<int> elements = new List<int>();
foreach (var tuple in partitioner.GetOrderableDynamicPartitions())
{
foreach (var item in tuple.UnRoll())
elements.Add(item);
}
Assert.True(elements.CompareSequences<int>(RangePartitionerHelpers.IntEnumerable(from, to)), "GetOrderableDynamicPartitions Element mismatch");
//var keys = partitioner.GetOrderableDynamicPartitions().Select(tuple => tuple.Key).ToArray();
IList<long> keys = new List<long>();
foreach (var tuple in partitioner.GetOrderableDynamicPartitions())
{
keys.Add(tuple.Key);
}
Assert.True(keys.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(keys[0], keys.Count)), "GetOrderableDynamicPartitions key mismatch");
//var rangeSizes = partitioner.GetOrderableDynamicPartitions().Select(tuple => tuple.GetRangeSize()).ToArray();
IList<int> rangeSizes = new List<int>();
foreach (var partition in partitioner.GetOrderableDynamicPartitions())
{
rangeSizes.Add(partition.GetRangeSize());
}
ValidateRangeSize(desiredRangeSize, rangeSizes);
}
/// <summary>
/// Helper function to validate the the range size of the partitioners match what the user specified
/// (desiredRangeSize).
/// </summary>
/// <param name="desiredRangeSize"></param>
/// <param name="rangeSizes"></param>
public static void ValidateRangeSize(int desiredRangeSize, IList<int> rangeSizes)
{
//var rangesWithDifferentRangeSize = rangeSizes.Take(rangeSizes.Length - 1).Where(r => r != desiredRangeSize).ToArray();
IList<int> rangesWithDifferentRangeSize = new List<int>();
// ensuring that every range, size from the last one is the same.
int numToTake = rangeSizes.Count - 1;
for (int i = 0; i < numToTake; i++)
{
int range = rangeSizes[i];
if (range != desiredRangeSize)
rangesWithDifferentRangeSize.Add(range);
}
if (rangesWithDifferentRangeSize.Count != 0)
{
Console.Write("Invalid Range size: ");
foreach (var r in rangesWithDifferentRangeSize)
Console.Write("{0} ", r);
Console.WriteLine();
Assert.False(true,
String.Format("Expected all ranges (except last) to have size {0}. {1} ranges has different size", desiredRangeSize, rangesWithDifferentRangeSize));
}
var lastRange = rangeSizes[rangeSizes.Count - 1];
Assert.True(desiredRangeSize >= lastRange, String.Format("Expect={0}, Actual={1}", desiredRangeSize, lastRange));
}
[Fact]
public static void RangePartitionerChunking()
{
RangePartitionerChunking(1999, 1000, 10);
RangePartitionerChunking(89, 17823, -1);
}
/// <summary>
/// Ensure that the range partitioner doesnt chunk up elements i.e. uses chunk size = 1
/// </summary>
/// <param name="from"></param>
/// <param name="count"></param>
/// <param name="rangeSize"></param>
public static void RangePartitionerChunking(int from, int count, int rangeSize)
{
int to = from + count;
var partitioner = (rangeSize == -1) ? Partitioner.Create(from, to) : Partitioner.Create(from, to, rangeSize);
// Check static partitions
var partitions = partitioner.GetPartitions(2);
// Initialize the from / to values from the first element
if (!partitions[0].MoveNext()) return;
Assert.Equal(from, partitions[0].Current.Item1);
if (rangeSize == -1)
{
rangeSize = partitions[0].Current.Item2 - partitions[0].Current.Item1;
}
int nextExpectedFrom = partitions[0].Current.Item2;
int nextExpectedTo = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
// Ensure that each partition gets one range only
// we check this by alternating partitions asking for elements and make sure
// that we get ranges in a sequence. If chunking were to happen then we wouldnt see a sequence
int actualCount = partitions[0].Current.Item2 - partitions[0].Current.Item1;
while (true)
{
if (!partitions[0].MoveNext()) break;
Assert.Equal(nextExpectedFrom, partitions[0].Current.Item1);
Assert.Equal(nextExpectedTo, partitions[0].Current.Item2);
nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
actualCount += partitions[0].Current.Item2 - partitions[0].Current.Item1;
if (!partitions[1].MoveNext()) break;
Assert.Equal(nextExpectedFrom, partitions[1].Current.Item1);
Assert.Equal(nextExpectedTo, partitions[1].Current.Item2);
nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
actualCount += partitions[1].Current.Item2 - partitions[1].Current.Item1;
if (!partitions[1].MoveNext()) break;
Assert.Equal(nextExpectedFrom, partitions[1].Current.Item1);
Assert.Equal(nextExpectedTo, partitions[1].Current.Item2);
nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
actualCount += partitions[1].Current.Item2 - partitions[1].Current.Item1;
if (!partitions[0].MoveNext()) break;
Assert.Equal(nextExpectedFrom, partitions[0].Current.Item1);
Assert.Equal(nextExpectedTo, partitions[0].Current.Item2);
nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
actualCount += partitions[0].Current.Item2 - partitions[0].Current.Item1;
}
// Verifying that all items are there
Assert.Equal(count, actualCount);
}
[Fact]
public static void RangePartitionerDynamicChunking()
{
RangePartitionerDynamicChunking(1999, 1000, 10);
RangePartitionerDynamicChunking(1, 884354, -1);
}
/// <summary>
/// Ensure that the range partitioner doesnt chunk up elements i.e. uses chunk size = 1
/// </summary>
/// <param name="from"></param>
/// <param name="count"></param>
/// <param name="rangeSize"></param>
public static void RangePartitionerDynamicChunking(int from, int count, int rangeSize)
{
int to = from + count;
var partitioner = (rangeSize == -1) ? Partitioner.Create(from, to) : Partitioner.Create(from, to, rangeSize);
// Check static partitions
var partitions = partitioner.GetDynamicPartitions();
var partition1 = partitions.GetEnumerator();
var partition2 = partitions.GetEnumerator();
// Initialize the from / to values from the first element
if (!partition1.MoveNext()) return;
Assert.True(from == partition1.Current.Item1);
if (rangeSize == -1)
{
rangeSize = partition1.Current.Item2 - partition1.Current.Item1;
}
int nextExpectedFrom = partition1.Current.Item2;
int nextExpectedTo = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
// Ensure that each partition gets one range only
// we check this by alternating partitions asking for elements and make sure
// that we get ranges in a sequence. If chunking were to happen then we wouldnt see a sequence
int actualCount = partition1.Current.Item2 - partition1.Current.Item1;
while (true)
{
if (!partition1.MoveNext()) break;
Assert.Equal(nextExpectedFrom, partition1.Current.Item1);
Assert.Equal(nextExpectedTo, partition1.Current.Item2);
nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
actualCount += partition1.Current.Item2 - partition1.Current.Item1;
if (!partition2.MoveNext()) break;
Assert.Equal(nextExpectedFrom, partition2.Current.Item1);
Assert.Equal(nextExpectedTo, partition2.Current.Item2);
nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
actualCount += partition2.Current.Item2 - partition2.Current.Item1;
if (!partition2.MoveNext()) break;
Assert.Equal(nextExpectedFrom, partition2.Current.Item1);
Assert.Equal(nextExpectedTo, partition2.Current.Item2);
nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
actualCount += partition2.Current.Item2 - partition2.Current.Item1;
if (!partition1.MoveNext()) break;
Assert.Equal(nextExpectedFrom, partition1.Current.Item1);
Assert.Equal(nextExpectedTo, partition1.Current.Item2);
nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
actualCount += partition1.Current.Item2 - partition1.Current.Item1;
}
// Verifying that all items are there
Assert.Equal(count, actualCount);
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Threading;
using System.Text;
using System.Timers;
using log4net;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Services.Interfaces;
using Mono.Addins;
using PermissionMask = OpenSim.Framework.PermissionMask;
namespace OpenSim.Region.CoreModules.Avatar.AvatarFactory
{
[Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "AvatarFactoryModule")]
public class AvatarFactoryModule : IAvatarFactoryModule, INonSharedRegionModule
{
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
public const string BAKED_TEXTURES_REPORT_FORMAT = "{0,-9} {1}";
private Scene m_scene = null;
private int m_savetime = 5; // seconds to wait before saving changed appearance
private int m_sendtime = 2; // seconds to wait before sending changed appearance
private bool m_reusetextures = false;
private int m_checkTime = 500; // milliseconds to wait between checks for appearance updates
private System.Timers.Timer m_updateTimer = new System.Timers.Timer();
private Dictionary<UUID,long> m_savequeue = new Dictionary<UUID,long>();
private Dictionary<UUID,long> m_sendqueue = new Dictionary<UUID,long>();
private object m_setAppearanceLock = new object();
#region Region Module interface
public void Initialise(IConfigSource config)
{
IConfig appearanceConfig = config.Configs["Appearance"];
if (appearanceConfig != null)
{
m_savetime = Convert.ToInt32(appearanceConfig.GetString("DelayBeforeAppearanceSave",Convert.ToString(m_savetime)));
m_sendtime = Convert.ToInt32(appearanceConfig.GetString("DelayBeforeAppearanceSend",Convert.ToString(m_sendtime)));
m_reusetextures = appearanceConfig.GetBoolean("ReuseTextures",m_reusetextures);
// m_log.InfoFormat("[AVFACTORY] configured for {0} save and {1} send",m_savetime,m_sendtime);
}
}
public void AddRegion(Scene scene)
{
if (m_scene == null)
m_scene = scene;
scene.RegisterModuleInterface<IAvatarFactoryModule>(this);
scene.EventManager.OnNewClient += SubscribeToClientEvents;
}
public void RemoveRegion(Scene scene)
{
if (scene == m_scene)
{
scene.UnregisterModuleInterface<IAvatarFactoryModule>(this);
scene.EventManager.OnNewClient -= SubscribeToClientEvents;
}
m_scene = null;
}
public void RegionLoaded(Scene scene)
{
m_updateTimer.Enabled = false;
m_updateTimer.AutoReset = true;
m_updateTimer.Interval = m_checkTime; // 500 milliseconds wait to start async ops
m_updateTimer.Elapsed += new ElapsedEventHandler(HandleAppearanceUpdateTimer);
}
public void Close()
{
}
public string Name
{
get { return "Default Avatar Factory"; }
}
public bool IsSharedModule
{
get { return false; }
}
public Type ReplaceableInterface
{
get { return null; }
}
private void SubscribeToClientEvents(IClientAPI client)
{
client.OnRequestWearables += Client_OnRequestWearables;
client.OnSetAppearance += Client_OnSetAppearance;
client.OnAvatarNowWearing += Client_OnAvatarNowWearing;
client.OnCachedTextureRequest += Client_OnCachedTextureRequest;
}
#endregion
#region IAvatarFactoryModule
/// </summary>
/// <param name="sp"></param>
/// <param name="texture"></param>
/// <param name="visualParam"></param>
public void SetAppearance(IScenePresence sp, AvatarAppearance appearance)
{
DoSetAppearance(sp, appearance.Texture, appearance.VisualParams, new List<CachedTextureRequestArg>());
}
/// <summary>
/// Set appearance data (texture asset IDs and slider settings)
/// </summary>
/// <param name="sp"></param>
/// <param name="texture"></param>
/// <param name="visualParam"></param>
public void SetAppearance(IScenePresence sp, Primitive.TextureEntry textureEntry, byte[] visualParams)
{
DoSetAppearance(sp, textureEntry, visualParams, new List<CachedTextureRequestArg>());
}
/// <summary>
/// Set appearance data (texture asset IDs and slider settings)
/// </summary>
/// <param name="sp"></param>
/// <param name="texture"></param>
/// <param name="visualParam"></param>
protected void DoSetAppearance(IScenePresence sp, Primitive.TextureEntry textureEntry, byte[] visualParams, List<CachedTextureRequestArg> hashes)
{
// m_log.DebugFormat(
// "[AVFACTORY]: start SetAppearance for {0}, te {1}, visualParams {2}",
// sp.Name, textureEntry, visualParams);
// TODO: This is probably not necessary any longer, just assume the
// textureEntry set implies that the appearance transaction is complete
bool changed = false;
// Process the texture entry transactionally, this doesn't guarantee that Appearance is
// going to be handled correctly but it does serialize the updates to the appearance
lock (m_setAppearanceLock)
{
// Process the visual params, this may change height as well
if (visualParams != null)
{
// string[] visualParamsStrings = new string[visualParams.Length];
// for (int i = 0; i < visualParams.Length; i++)
// visualParamsStrings[i] = visualParams[i].ToString();
// m_log.DebugFormat(
// "[AVFACTORY]: Setting visual params for {0} to {1}",
// client.Name, string.Join(", ", visualParamsStrings));
float oldHeight = sp.Appearance.AvatarHeight;
changed = sp.Appearance.SetVisualParams(visualParams);
if (sp.Appearance.AvatarHeight != oldHeight && sp.Appearance.AvatarHeight > 0)
((ScenePresence)sp).SetHeight(sp.Appearance.AvatarHeight);
}
// Process the baked texture array
if (textureEntry != null)
{
// m_log.DebugFormat("[AVFACTORY]: Received texture update for {0} {1}", sp.Name, sp.UUID);
// WriteBakedTexturesReport(sp, m_log.DebugFormat);
changed = sp.Appearance.SetTextureEntries(textureEntry) || changed;
// WriteBakedTexturesReport(sp, m_log.DebugFormat);
// If bake textures are missing and this is not an NPC, request a rebake from client
if (!ValidateBakedTextureCache(sp) && (((ScenePresence)sp).PresenceType != PresenceType.Npc))
RequestRebake(sp, true);
// Save the wearble hashes in the appearance
sp.Appearance.ResetTextureHashes();
if (m_reusetextures)
{
foreach (CachedTextureRequestArg arg in hashes)
sp.Appearance.SetTextureHash(arg.BakedTextureIndex,arg.WearableHashID);
}
// This appears to be set only in the final stage of the appearance
// update transaction. In theory, we should be able to do an immediate
// appearance send and save here.
}
// NPC should send to clients immediately and skip saving appearance
if (((ScenePresence)sp).PresenceType == PresenceType.Npc)
{
SendAppearance((ScenePresence)sp);
return;
}
// save only if there were changes, send no matter what (doesn't hurt to send twice)
if (changed)
QueueAppearanceSave(sp.ControllingClient.AgentId);
QueueAppearanceSend(sp.ControllingClient.AgentId);
}
// m_log.WarnFormat("[AVFACTORY]: complete SetAppearance for {0}:\n{1}",client.AgentId,sp.Appearance.ToString());
}
private void SendAppearance(ScenePresence sp)
{
// Send the appearance to everyone in the scene
sp.SendAppearanceToAllOtherAgents();
// Send animations back to the avatar as well
sp.Animator.SendAnimPack();
}
public bool SendAppearance(UUID agentId)
{
// m_log.DebugFormat("[AVFACTORY]: Sending appearance for {0}", agentId);
ScenePresence sp = m_scene.GetScenePresence(agentId);
if (sp == null)
{
// This is expected if the user has gone away.
// m_log.DebugFormat("[AVFACTORY]: Agent {0} no longer in the scene", agentId);
return false;
}
SendAppearance(sp);
return true;
}
public Dictionary<BakeType, Primitive.TextureEntryFace> GetBakedTextureFaces(UUID agentId)
{
ScenePresence sp = m_scene.GetScenePresence(agentId);
if (sp == null)
return new Dictionary<BakeType, Primitive.TextureEntryFace>();
return GetBakedTextureFaces(sp);
}
public bool SaveBakedTextures(UUID agentId)
{
ScenePresence sp = m_scene.GetScenePresence(agentId);
if (sp == null)
return false;
m_log.DebugFormat(
"[AV FACTORY]: Permanently saving baked textures for {0} in {1}",
sp.Name, m_scene.RegionInfo.RegionName);
Dictionary<BakeType, Primitive.TextureEntryFace> bakedTextures = GetBakedTextureFaces(sp);
if (bakedTextures.Count == 0)
return false;
foreach (BakeType bakeType in bakedTextures.Keys)
{
Primitive.TextureEntryFace bakedTextureFace = bakedTextures[bakeType];
if (bakedTextureFace == null)
{
// This can happen legitimately, since some baked textures might not exist
//m_log.WarnFormat(
// "[AV FACTORY]: No texture ID set for {0} for {1} in {2} not found when trying to save permanently",
// bakeType, sp.Name, m_scene.RegionInfo.RegionName);
continue;
}
AssetBase asset = m_scene.AssetService.Get(bakedTextureFace.TextureID.ToString());
if (asset != null)
{
// Replace an HG ID with the simple asset ID so that we can persist textures for foreign HG avatars
asset.ID = asset.FullID.ToString();
asset.Temporary = false;
asset.Local = false;
m_scene.AssetService.Store(asset);
}
else
{
m_log.WarnFormat(
"[AV FACTORY]: Baked texture id {0} not found for bake {1} for avatar {2} in {3} when trying to save permanently",
bakedTextureFace.TextureID, bakeType, sp.Name, m_scene.RegionInfo.RegionName);
}
}
return true;
}
/// <summary>
/// Queue up a request to send appearance.
/// </summary>
/// <remarks>
/// Makes it possible to accumulate changes without sending out each one separately.
/// </remarks>
/// <param name="agentId"></param>
public void QueueAppearanceSend(UUID agentid)
{
// m_log.DebugFormat("[AVFACTORY]: Queue appearance send for {0}", agentid);
// 10000 ticks per millisecond, 1000 milliseconds per second
long timestamp = DateTime.Now.Ticks + Convert.ToInt64(m_sendtime * 1000 * 10000);
lock (m_sendqueue)
{
m_sendqueue[agentid] = timestamp;
m_updateTimer.Start();
}
}
public void QueueAppearanceSave(UUID agentid)
{
// m_log.DebugFormat("[AVFACTORY]: Queueing appearance save for {0}", agentid);
// 10000 ticks per millisecond, 1000 milliseconds per second
long timestamp = DateTime.Now.Ticks + Convert.ToInt64(m_savetime * 1000 * 10000);
lock (m_savequeue)
{
m_savequeue[agentid] = timestamp;
m_updateTimer.Start();
}
}
public bool ValidateBakedTextureCache(IScenePresence sp)
{
bool defonly = true; // are we only using default textures
// Process the texture entry
for (int i = 0; i < AvatarAppearance.BAKE_INDICES.Length; i++)
{
int idx = AvatarAppearance.BAKE_INDICES[i];
Primitive.TextureEntryFace face = sp.Appearance.Texture.FaceTextures[idx];
// if there is no texture entry, skip it
if (face == null)
continue;
// m_log.DebugFormat(
// "[AVFACTORY]: Looking for texture {0}, id {1} for {2} {3}",
// face.TextureID, idx, client.Name, client.AgentId);
// if the texture is one of the "defaults" then skip it
// this should probably be more intelligent (skirt texture doesnt matter
// if the avatar isnt wearing a skirt) but if any of the main baked
// textures is default then the rest should be as well
if (face.TextureID == UUID.Zero || face.TextureID == AppearanceManager.DEFAULT_AVATAR_TEXTURE)
continue;
defonly = false; // found a non-default texture reference
if (m_scene.AssetService.Get(face.TextureID.ToString()) == null)
return false;
}
// m_log.DebugFormat("[AVFACTORY]: Completed texture check for {0} {1}", sp.Name, sp.UUID);
// If we only found default textures, then the appearance is not cached
return (defonly ? false : true);
}
public int RequestRebake(IScenePresence sp, bool missingTexturesOnly)
{
int texturesRebaked = 0;
for (int i = 0; i < AvatarAppearance.BAKE_INDICES.Length; i++)
{
int idx = AvatarAppearance.BAKE_INDICES[i];
Primitive.TextureEntryFace face = sp.Appearance.Texture.FaceTextures[idx];
// if there is no texture entry, skip it
if (face == null)
continue;
// m_log.DebugFormat(
// "[AVFACTORY]: Looking for texture {0}, id {1} for {2} {3}",
// face.TextureID, idx, client.Name, client.AgentId);
// if the texture is one of the "defaults" then skip it
// this should probably be more intelligent (skirt texture doesnt matter
// if the avatar isnt wearing a skirt) but if any of the main baked
// textures is default then the rest should be as well
if (face.TextureID == UUID.Zero || face.TextureID == AppearanceManager.DEFAULT_AVATAR_TEXTURE)
continue;
if (missingTexturesOnly)
{
if (m_scene.AssetService.Get(face.TextureID.ToString()) != null)
{
continue;
}
else
{
// On inter-simulator teleports, this occurs if baked textures are not being stored by the
// grid asset service (which means that they are not available to the new region and so have
// to be re-requested from the client).
//
// The only available core OpenSimulator behaviour right now
// is not to store these textures, temporarily or otherwise.
m_log.DebugFormat(
"[AVFACTORY]: Missing baked texture {0} ({1}) for {2}, requesting rebake.",
face.TextureID, idx, sp.Name);
}
}
else
{
m_log.DebugFormat(
"[AVFACTORY]: Requesting rebake of {0} ({1}) for {2}.",
face.TextureID, idx, sp.Name);
}
texturesRebaked++;
sp.ControllingClient.SendRebakeAvatarTextures(face.TextureID);
}
return texturesRebaked;
}
#endregion
#region AvatarFactoryModule private methods
private Dictionary<BakeType, Primitive.TextureEntryFace> GetBakedTextureFaces(ScenePresence sp)
{
if (sp.IsChildAgent)
return new Dictionary<BakeType, Primitive.TextureEntryFace>();
Dictionary<BakeType, Primitive.TextureEntryFace> bakedTextures
= new Dictionary<BakeType, Primitive.TextureEntryFace>();
AvatarAppearance appearance = sp.Appearance;
Primitive.TextureEntryFace[] faceTextures = appearance.Texture.FaceTextures;
foreach (int i in Enum.GetValues(typeof(BakeType)))
{
BakeType bakeType = (BakeType)i;
if (bakeType == BakeType.Unknown)
continue;
// m_log.DebugFormat(
// "[AVFACTORY]: NPC avatar {0} has texture id {1} : {2}",
// acd.AgentID, i, acd.Appearance.Texture.FaceTextures[i]);
int ftIndex = (int)AppearanceManager.BakeTypeToAgentTextureIndex(bakeType);
Primitive.TextureEntryFace texture = faceTextures[ftIndex]; // this will be null if there's no such baked texture
bakedTextures[bakeType] = texture;
}
return bakedTextures;
}
private void HandleAppearanceUpdateTimer(object sender, EventArgs ea)
{
long now = DateTime.Now.Ticks;
lock (m_sendqueue)
{
Dictionary<UUID, long> sends = new Dictionary<UUID, long>(m_sendqueue);
foreach (KeyValuePair<UUID, long> kvp in sends)
{
// We have to load the key and value into local parameters to avoid a race condition if we loop
// around and load kvp with a different value before FireAndForget has launched its thread.
UUID avatarID = kvp.Key;
long sendTime = kvp.Value;
// m_log.DebugFormat("[AVFACTORY]: Handling queued appearance updates for {0}, update delta to now is {1}", avatarID, sendTime - now);
if (sendTime < now)
{
Util.FireAndForget(o => SendAppearance(avatarID));
m_sendqueue.Remove(avatarID);
}
}
}
lock (m_savequeue)
{
Dictionary<UUID, long> saves = new Dictionary<UUID, long>(m_savequeue);
foreach (KeyValuePair<UUID, long> kvp in saves)
{
// We have to load the key and value into local parameters to avoid a race condition if we loop
// around and load kvp with a different value before FireAndForget has launched its thread.
UUID avatarID = kvp.Key;
long sendTime = kvp.Value;
if (sendTime < now)
{
Util.FireAndForget(o => SaveAppearance(avatarID));
m_savequeue.Remove(avatarID);
}
}
// We must lock both queues here so that QueueAppearanceSave() or *Send() don't m_updateTimer.Start() on
// another thread inbetween the first count calls and m_updateTimer.Stop() on this thread.
lock (m_sendqueue)
if (m_savequeue.Count == 0 && m_sendqueue.Count == 0)
m_updateTimer.Stop();
}
}
private void SaveAppearance(UUID agentid)
{
// We must set appearance parameters in the en_US culture in order to avoid issues where values are saved
// in a culture where decimal points are commas and then reloaded in a culture which just treats them as
// number seperators.
Culture.SetCurrentCulture();
ScenePresence sp = m_scene.GetScenePresence(agentid);
if (sp == null)
{
// This is expected if the user has gone away.
// m_log.DebugFormat("[AVFACTORY]: Agent {0} no longer in the scene", agentid);
return;
}
// m_log.DebugFormat("[AVFACTORY]: Saving appearance for avatar {0}", agentid);
// This could take awhile since it needs to pull inventory
// We need to do it at the point of save so that there is a sufficient delay for any upload of new body part/shape
// assets and item asset id changes to complete.
// I don't think we need to worry about doing this within m_setAppearanceLock since the queueing avoids
// multiple save requests.
SetAppearanceAssets(sp.UUID, sp.Appearance);
// List<AvatarAttachment> attachments = sp.Appearance.GetAttachments();
// foreach (AvatarAttachment att in attachments)
// {
// m_log.DebugFormat(
// "[AVFACTORY]: For {0} saving attachment {1} at point {2}",
// sp.Name, att.ItemID, att.AttachPoint);
// }
m_scene.AvatarService.SetAppearance(agentid, sp.Appearance);
// Trigger this here because it's the final step in the set/queue/save process for appearance setting.
// Everything has been updated and stored. Ensures bakes have been persisted (if option is set to persist bakes).
m_scene.EventManager.TriggerAvatarAppearanceChanged(sp);
}
private void SetAppearanceAssets(UUID userID, AvatarAppearance appearance)
{
IInventoryService invService = m_scene.InventoryService;
if (invService.GetRootFolder(userID) != null)
{
for (int i = 0; i < AvatarWearable.MAX_WEARABLES; i++)
{
for (int j = 0; j < appearance.Wearables[i].Count; j++)
{
if (appearance.Wearables[i][j].ItemID == UUID.Zero)
continue;
// Ignore ruth's assets
if (appearance.Wearables[i][j].ItemID == AvatarWearable.DefaultWearables[i][0].ItemID)
continue;
InventoryItemBase baseItem = new InventoryItemBase(appearance.Wearables[i][j].ItemID, userID);
baseItem = invService.GetItem(baseItem);
if (baseItem != null)
{
appearance.Wearables[i].Add(appearance.Wearables[i][j].ItemID, baseItem.AssetID);
}
else
{
m_log.ErrorFormat(
"[AVFACTORY]: Can't find inventory item {0} for {1}, setting to default",
appearance.Wearables[i][j].ItemID, (WearableType)i);
appearance.Wearables[i].RemoveItem(appearance.Wearables[i][j].ItemID);
}
}
}
}
else
{
m_log.WarnFormat("[AVFACTORY]: user {0} has no inventory, appearance isn't going to work", userID);
}
}
#endregion
#region Client Event Handlers
/// <summary>
/// Tell the client for this scene presence what items it should be wearing now
/// </summary>
/// <param name="client"></param>
private void Client_OnRequestWearables(IClientAPI client)
{
// m_log.DebugFormat("[AVFACTORY]: Client_OnRequestWearables called for {0} ({1})", client.Name, client.AgentId);
ScenePresence sp = m_scene.GetScenePresence(client.AgentId);
if (sp != null)
client.SendWearables(sp.Appearance.Wearables, sp.Appearance.Serial++);
else
m_log.WarnFormat("[AVFACTORY]: Client_OnRequestWearables unable to find presence for {0}", client.AgentId);
}
/// <summary>
/// Set appearance data (texture asset IDs and slider settings) received from a client
/// </summary>
/// <param name="client"></param>
/// <param name="texture"></param>
/// <param name="visualParam"></param>
private void Client_OnSetAppearance(IClientAPI client, Primitive.TextureEntry textureEntry, byte[] visualParams, List<CachedTextureRequestArg> hashes)
{
// m_log.WarnFormat("[AVFACTORY]: Client_OnSetAppearance called for {0} ({1})", client.Name, client.AgentId);
ScenePresence sp = m_scene.GetScenePresence(client.AgentId);
if (sp != null)
DoSetAppearance(sp, textureEntry, visualParams, hashes);
else
m_log.WarnFormat("[AVFACTORY]: Client_OnSetAppearance unable to find presence for {0}", client.AgentId);
}
/// <summary>
/// Update what the avatar is wearing using an item from their inventory.
/// </summary>
/// <param name="client"></param>
/// <param name="e"></param>
private void Client_OnAvatarNowWearing(IClientAPI client, AvatarWearingArgs e)
{
// m_log.WarnFormat("[AVFACTORY]: Client_OnAvatarNowWearing called for {0} ({1})", client.Name, client.AgentId);
ScenePresence sp = m_scene.GetScenePresence(client.AgentId);
if (sp == null)
{
m_log.WarnFormat("[AVFACTORY]: Client_OnAvatarNowWearing unable to find presence for {0}", client.AgentId);
return;
}
// we need to clean out the existing textures
sp.Appearance.ResetAppearance();
// operate on a copy of the appearance so we don't have to lock anything yet
AvatarAppearance avatAppearance = new AvatarAppearance(sp.Appearance, false);
foreach (AvatarWearingArgs.Wearable wear in e.NowWearing)
{
if (wear.Type < AvatarWearable.MAX_WEARABLES)
avatAppearance.Wearables[wear.Type].Add(wear.ItemID, UUID.Zero);
}
avatAppearance.GetAssetsFrom(sp.Appearance);
lock (m_setAppearanceLock)
{
// Update only those fields that we have changed. This is important because the viewer
// often sends AvatarIsWearing and SetAppearance packets at once, and AvatarIsWearing
// shouldn't overwrite the changes made in SetAppearance.
sp.Appearance.Wearables = avatAppearance.Wearables;
sp.Appearance.Texture = avatAppearance.Texture;
// We don't need to send the appearance here since the "iswearing" will trigger a new set
// of visual param and baked texture changes. When those complete, the new appearance will be sent
QueueAppearanceSave(client.AgentId);
}
}
/// <summary>
/// Respond to the cached textures request from the client
/// </summary>
/// <param name="client"></param>
/// <param name="serial"></param>
/// <param name="cachedTextureRequest"></param>
private void Client_OnCachedTextureRequest(IClientAPI client, int serial, List<CachedTextureRequestArg> cachedTextureRequest)
{
// m_log.DebugFormat("[AVFACTORY]: Client_OnCachedTextureRequest called for {0} ({1})", client.Name, client.AgentId);
ScenePresence sp = m_scene.GetScenePresence(client.AgentId);
List<CachedTextureResponseArg> cachedTextureResponse = new List<CachedTextureResponseArg>();
foreach (CachedTextureRequestArg request in cachedTextureRequest)
{
UUID texture = UUID.Zero;
int index = request.BakedTextureIndex;
if (m_reusetextures)
{
if (sp.Appearance.GetTextureHash(index) == request.WearableHashID)
{
Primitive.TextureEntryFace face = sp.Appearance.Texture.FaceTextures[index];
if (face != null)
texture = face.TextureID;
}
else
{
// We know that that hash is wrong, null it out
// and wait for the setappearance call
sp.Appearance.SetTextureHash(index,UUID.Zero);
}
// m_log.WarnFormat("[AVFACTORY]: use texture {0} for index {1}; hash={2}",texture,index,request.WearableHashID);
}
CachedTextureResponseArg response = new CachedTextureResponseArg();
response.BakedTextureIndex = index;
response.BakedTextureID = texture;
response.HostName = null;
cachedTextureResponse.Add(response);
}
// m_log.WarnFormat("[AVFACTORY]: serial is {0}",serial);
// The serial number appears to be used to match requests and responses
// in the texture transaction. We just send back the serial number
// that was provided in the request. The viewer bumps this for us.
client.SendCachedTextureResponse(sp, serial, cachedTextureResponse);
}
#endregion
public void WriteBakedTexturesReport(IScenePresence sp, ReportOutputAction outputAction)
{
outputAction("For {0} in {1}", sp.Name, m_scene.RegionInfo.RegionName);
outputAction(BAKED_TEXTURES_REPORT_FORMAT, "Bake Type", "UUID");
Dictionary<BakeType, Primitive.TextureEntryFace> bakedTextures = GetBakedTextureFaces(sp.UUID);
foreach (BakeType bt in bakedTextures.Keys)
{
string rawTextureID;
if (bakedTextures[bt] == null)
{
rawTextureID = "not set";
}
else
{
rawTextureID = bakedTextures[bt].TextureID.ToString();
if (m_scene.AssetService.Get(rawTextureID) == null)
rawTextureID += " (not found)";
else
rawTextureID += " (uploaded)";
}
outputAction(BAKED_TEXTURES_REPORT_FORMAT, bt, rawTextureID);
}
bool bakedTextureValid = m_scene.AvatarFactory.ValidateBakedTextureCache(sp);
outputAction("{0} baked appearance texture is {1}", sp.Name, bakedTextureValid ? "OK" : "incomplete");
}
}
}
| |
// The MIT License (MIT)
//
// Copyright (c) 2014-2016, Institute for Software & Systems Engineering
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
namespace SafetySharp.Runtime.Serialization
{
using System.IO;
using System.Linq;
using System.Text;
using Analysis;
using Modeling;
using Utilities;
/// <summary>
/// Serializes a <see cref="RuntimeModel" /> instance into a <see cref="Stream" />.
/// </summary>
	internal class RuntimeModelSerializer
	{
		// Guards _serializedModel, _stateVector and _deserializer, which may be
		// accessed from multiple threads sharing one serializer instance.
		private readonly object _syncObject = new object();

		// Cached open-instance deserialization delegate; reset by SerializeModel()
		// and rebuilt lazily in DeserializeModel().
		private OpenSerializationDelegate _deserializer;

		// Raw serialized model bytes, produced by Serialize() or injected directly
		// by LoadSerializedData(byte[]).
		private byte[] _serializedModel;

		// Layout of the model's state vector; computed during serialization and
		// recomputed on demand during deserialization.
		private StateVectorLayout _stateVector;

		#region Serialization

		/// <summary>
		///   Serializes the <paramref name="model" /> and the <paramref name="formulas" />.
		/// </summary>
		/// <param name="model">The model that should be serialized.</param>
		/// <param name="formulas">The formulas that should be serialized.</param>
		public void Serialize(ModelBase model, params Formula[] formulas)
		{
			Requires.NotNull(model, nameof(model));
			Requires.NotNull(formulas, nameof(formulas));

			using (var buffer = new MemoryStream())
			using (var writer = new BinaryWriter(buffer, Encoding.UTF8, leaveOpen: true))
			{
				SerializeModel(writer, model, formulas);

				// Publish the result under the lock so concurrent readers never see
				// a partially-assigned buffer.
				lock (_syncObject)
					_serializedModel = buffer.ToArray();
			}
		}

		/// <summary>
		///   Returns the serialized <paramref name="model" /> and the <paramref name="formulas" />.
		/// </summary>
		/// <param name="model">The model that should be serialized.</param>
		/// <param name="formulas">The formulas that should be serialized.</param>
		public static byte[] Save(ModelBase model, params Formula[] formulas)
		{
			var serializer = new RuntimeModelSerializer();
			serializer.Serialize(model, formulas);

			lock (serializer._syncObject)
				return serializer._serializedModel;
		}

		/// <summary>
		///   Serializes the <paramref name="model" />.
		/// </summary>
		private unsafe void SerializeModel(BinaryWriter writer, ModelBase model, Formula[] formulas)
		{
			// Collect all objects contained in the model
			var objectTable = CreateObjectTable(model, formulas);

			// Prepare the serialization of the model's initial state; invalidate the
			// cached deserializer since the layout may have changed.
			lock (_syncObject)
			{
				_stateVector = SerializationRegistry.Default.GetStateVectorLayout(model, objectTable, SerializationMode.Full);
				_deserializer = null;
			}

			var stateVectorSize = _stateVector.SizeInBytes;
			var serializer = _stateVector.CreateSerializer(objectTable);

			// Serialize the object table
			SerializeObjectTable(objectTable, writer);

			// Serialize the object identifier of the model itself and the formulas
			writer.Write(objectTable.GetObjectIdentifier(model));
			writer.Write(formulas.Length);
			foreach (var formula in formulas)
				writer.Write(objectTable.GetObjectIdentifier(formula));

			// Serialize the initial state into a stack-allocated scratch buffer
			// (assumes the state vector is small enough for the stack — the same
			// assumption DeserializeModel() makes).
			var serializedState = stackalloc byte[stateVectorSize];
			serializer(serializedState);

			// Copy the serialized state to the stream
			writer.Write(stateVectorSize);
			for (var i = 0; i < stateVectorSize; ++i)
				writer.Write(serializedState[i]);
		}

		/// <summary>
		///   Creates the object table for the <paramref name="model" /> and <paramref name="formulas" />.
		/// </summary>
		private static ObjectTable CreateObjectTable(ModelBase model, Formula[] formulas)
		{
			// The model instance itself is part of the table so its identifier can be
			// written alongside the roots and formulas.
			var objects = model.Roots.Cast<object>().Concat(formulas).Concat(new[] { model });
			return new ObjectTable(SerializationRegistry.Default.GetReferencedObjects(objects.ToArray(), SerializationMode.Full));
		}

		/// <summary>
		///   Serializes the <paramref name="objectTable" /> using the <paramref name="writer" />.
		/// </summary>
		/// <param name="objectTable">The object table that should be serialized.</param>
		/// <param name="writer">The writer the serialized information should be written to.</param>
		private static void SerializeObjectTable(ObjectTable objectTable, BinaryWriter writer)
		{
			Requires.NotNull(objectTable, nameof(objectTable));
			Requires.NotNull(writer, nameof(writer));

			// Serialize the objects contained in the table; each entry records which
			// serializer produced it so deserialization can pick the matching one.
			writer.Write(objectTable.Count);
			foreach (var obj in objectTable)
			{
				var serializerIndex = SerializationRegistry.Default.GetSerializerIndex(obj);
				writer.Write(serializerIndex);
				SerializationRegistry.Default.GetSerializer(serializerIndex).SerializeType(obj, writer);
			}
		}

		#endregion

		#region Deserialization

		/// <summary>
		///   Loads a <see cref="SerializedRuntimeModel" /> from the <paramref name="serializedModel" />.
		/// </summary>
		/// <param name="serializedModel">The serialized model that should be loaded.</param>
		public static SerializedRuntimeModel LoadSerializedData(byte[] serializedModel)
		{
			Requires.NotNull(serializedModel, nameof(serializedModel));

			var serializer = new RuntimeModelSerializer { _serializedModel = serializedModel };
			return serializer.LoadSerializedData();
		}

		/// <summary>
		///   Loads a <see cref="SerializedRuntimeModel" /> instance.
		/// </summary>
		public SerializedRuntimeModel LoadSerializedData()
		{
			Requires.That(_serializedModel != null, "No model is loaded that could be serialized.");

			using (var reader = new BinaryReader(new MemoryStream(_serializedModel), Encoding.UTF8, leaveOpen: true))
				return DeserializeModel(_serializedModel, reader);
		}

		/// <summary>
		///   Loads a <see cref="RuntimeModel" /> instance.
		/// </summary>
		public RuntimeModel Load()
		{
			return new RuntimeModel(LoadSerializedData());
		}

		/// <summary>
		///   Deserializes a <see cref="RuntimeModel" /> from the <paramref name="reader" />.
		/// </summary>
		private unsafe SerializedRuntimeModel DeserializeModel(byte[] buffer, BinaryReader reader)
		{
			// Deserialize the object table
			var objectTable = DeserializeObjectTable(reader);

			// Deserialize the object identifiers of the model itself and the root formulas
			// (object identifiers are 16-bit — see ReadUInt16 — matching what
			// GetObjectIdentifier wrote during serialization)
			var model = (ModelBase)objectTable.GetObject(reader.ReadUInt16());
			var formulas = new Formula[reader.ReadInt32()];

			for (var i = 0; i < formulas.Length; ++i)
				formulas[i] = (Formula)objectTable.GetObject(reader.ReadUInt16());

			// Copy the serialized initial state from the stream
			var stateVectorSize = reader.ReadInt32();
			var serializedState = stackalloc byte[stateVectorSize];

			for (var i = 0; i < stateVectorSize; ++i)
				serializedState[i] = reader.ReadByte();

			// Deserialize the model's initial state; lazily (re)build the layout and
			// the deserialization delegate under the lock, then run it outside.
			OpenSerializationDelegate deserializer;
			lock (_syncObject)
			{
				if (_stateVector == null)
					_stateVector = SerializationRegistry.Default.GetStateVectorLayout(model, objectTable, SerializationMode.Full);

				if (_deserializer == null)
					_deserializer = _stateVector.CreateDeserializer();

				deserializer = _deserializer;
			}
			deserializer(objectTable, serializedState);

			// We instantiate the runtime type for each component and replace the original component
			// instance with the new runtime instance; we also replace all of the component's fault effects
			// with that instance and deserialize the initial state again. Afterwards, we have completely
			// replaced the original instance with its runtime instance, taking over all serialized data
			objectTable.SubstituteRuntimeInstances();
			deserializer(objectTable, serializedState);

			// We substitute the dummy delegate objects with the actual instances obtained from the DelegateMetadata instances
			objectTable.SubstituteDelegates();
			deserializer(objectTable, serializedState);

			// Return the serialized model data
			return new SerializedRuntimeModel(model, buffer, objectTable, formulas);
		}

		/// <summary>
		///   Deserializes the <see cref="ObjectTable" /> from the <paramref name="reader" />.
		/// </summary>
		/// <param name="reader">The reader the objects should be deserialized from.</param>
		private static ObjectTable DeserializeObjectTable(BinaryReader reader)
		{
			Requires.NotNull(reader, nameof(reader));

			// Deserialize the objects contained in the table, dispatching each entry
			// to the serializer index recorded by SerializeObjectTable().
			var objects = new object[reader.ReadInt32()];

			for (var i = 0; i < objects.Length; ++i)
			{
				var serializer = SerializationRegistry.Default.GetSerializer(reader.ReadInt32());
				objects[i] = serializer.InstantiateType(reader);
			}

			return new ObjectTable(objects);
		}

		#endregion
	}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
namespace NStorage.FileSystem
{
using System;
using System.IO;
using NStorage.Utility;
using System.Text;
/**
* Represents an Ole10Native record which is wrapped around certain binary
* files being embedded in OLE2 documents.
*
* @author Rainer Schwarze
*/
public class Ole10Native
{
// Name of the OLE2 stream that holds the Ole10Native record.
public static String OLE10_NATIVE = "\x0001Ole10Native";
// Encoding used for the ASCIIZ label/filename/command fields.
protected static String ISO1 = "ISO-8859-1";

// (the fields as they appear in the raw record:)
private int totalSize;      // 4 bytes, total size of record not including this field
private short flags1 = 2;   // 2 bytes, unknown, mostly [02 00]
private String label;       // ASCIIZ, stored in this field without the terminating zero
private String fileName;    // ASCIIZ, stored in this field without the terminating zero
private short flags2 = 0;   // 2 bytes, unknown, mostly [00 00]
private short unknown1 = 3; // see below
private String command;     // ASCIIZ, stored in this field without the terminating zero
private byte[] dataBuffer;  // varying size, the actual native data
private short flags3 = 0;   // some final flags? or zero terminators?, sometimes not there

/// <summary>
/// The field encoding mode - merely a try-and-error guess ...
/// </summary>
private enum EncodingMode
{
    /// <summary>the data is stored in parsed format - including label, command, etc.</summary>
    parsed,
    /// <summary>the data is stored raw after the length field</summary>
    unparsed,
    /// <summary>the data is stored raw after the length field and the flags1 field</summary>
    compact
}

// Layout detected while reading (or implied by the constructor used); drives WriteOut.
private EncodingMode mode;
/// <summary>
/// Creates an instance of this class from an embedded OLE Object. The OLE Object is expected
/// to include a stream "{01}Ole10Native" which Contains the actual
/// data relevant for this class.
/// </summary>
/// <param name="poifs">poifs POI Filesystem object</param>
/// <returns>Returns an instance of this class</returns>
public static Ole10Native CreateFromEmbeddedOleObject(POIFSFileSystem poifs)
{
    // Convenience overload: delegates to the DirectoryNode-based factory using the filesystem root.
    return CreateFromEmbeddedOleObject(poifs.Root);
}
/// <summary>
/// Creates an instance of this class from an embedded OLE Object. The OLE Object is expected
/// to include a stream "{01}Ole10Native" which contains the actual
/// data relevant for this class.
/// </summary>
/// <param name="directory">directory POI Filesystem object</param>
/// <returns>Returns an instance of this class</returns>
public static Ole10Native CreateFromEmbeddedOleObject(DirectoryNode directory)
{
    DocumentEntry nativeEntry = (DocumentEntry)directory.GetEntry(OLE10_NATIVE);
    byte[] data = new byte[nativeEntry.Size];

    // Dispose the document stream once the bytes are copied out (previously leaked), and
    // loop until the buffer is full: Stream.Read is not guaranteed to fill it in one call.
    using (Stream stream = directory.CreateDocumentInputStream(nativeEntry))
    {
        int read = 0;
        while (read < data.Length)
        {
            int count = stream.Read(data, read, data.Length - read);
            if (count <= 0)
                throw new IOException("Unexpected end of the " + OLE10_NATIVE + " stream");
            read += count;
        }
    }

    return new Ole10Native(data, 0);
}
/// <summary>
/// Creates an instance whose fields are populated directly from the supplied values;
/// the record is marked as using the fully parsed layout.
/// </summary>
public Ole10Native(String label, String filename, String command, byte[] data)
{
    Label = label;
    FileName = filename;
    Command = command;
    DataBuffer = data;
    mode = EncodingMode.parsed;
}
/// <summary>
/// Creates an instance and fills the fields based on the data in the given buffer.
/// </summary>
/// <param name="data">The buffer containing the Ole10Native record</param>
/// <param name="offset">The start offset of the record in the buffer</param>
/// <param name="plain">as of POI 3.11 this parameter is ignored</param>
/// <exception cref="Ole10NativeException">on invalid or unexpected data format</exception>
[Obsolete("parameter plain is ignored, use {@link #Ole10Native(byte[],int)}")]
public Ole10Native(byte[] data, int offset, bool plain)
    : this(data, offset)
{
}
/// <summary>
/// Creates an instance and fills the fields based on the data in the given buffer.
/// </summary>
/// <param name="data">The buffer containing the Ole10Native record</param>
/// <param name="offset">The start offset of the record in the buffer</param>
/// <exception cref="Ole10NativeException">on invalid or unexpected data format</exception>
public Ole10Native(byte[] data, int offset)
{
    int ofs = offset; // current offset, initialized to start

    if (data.Length < offset + 2)
    {
        throw new Ole10NativeException("data is too small");
    }

    totalSize = LittleEndian.GetInt(data, ofs);
    ofs += LittleEndianConsts.INT_SIZE;

    // Guess the layout: flags1 == 2 usually indicates the fully parsed format,
    // but some embeddings (e.g. equations) store raw data right after the flags.
    mode = EncodingMode.unparsed;
    if (LittleEndian.GetShort(data, ofs) == 2)
    {
        // some files like equations don't have a valid filename,
        // but somehow encode the formula right away in the ole10 header
        if (char.IsControl((char)data[ofs + LittleEndianConsts.SHORT_SIZE]))
        {
            mode = EncodingMode.compact;
        }
        else
        {
            mode = EncodingMode.parsed;
        }
    }

    int dataSize = 0;
    switch (mode)
    {
        case EncodingMode.parsed:
            flags1 = LittleEndian.GetShort(data, ofs);
            ofs += LittleEndianConsts.SHORT_SIZE;

            // label, fileName and command are ASCIIZ; GetStringLength includes the terminator.
            int len = GetStringLength(data, ofs);
            label = StringUtil.GetFromCompressedUnicode(data, ofs, len - 1);
            ofs += len;

            len = GetStringLength(data, ofs);
            fileName = StringUtil.GetFromCompressedUnicode(data, ofs, len - 1);
            ofs += len;

            flags2 = LittleEndian.GetShort(data, ofs);
            ofs += LittleEndianConsts.SHORT_SIZE;

            unknown1 = LittleEndian.GetShort(data, ofs);
            ofs += LittleEndianConsts.SHORT_SIZE;

            // The command is length-prefixed rather than scanned for a terminator.
            len = LittleEndian.GetInt(data, ofs);
            ofs += LittleEndianConsts.INT_SIZE;
            command = StringUtil.GetFromCompressedUnicode(data, ofs, len - 1);
            ofs += len;

            if (totalSize < ofs)
            {
                throw new Ole10NativeException("Invalid Ole10Native");
            }

            dataSize = LittleEndian.GetInt(data, ofs);
            ofs += LittleEndianConsts.INT_SIZE;

            if (dataSize < 0 || totalSize - (ofs - LittleEndianConsts.INT_SIZE) < dataSize)
            {
                throw new Ole10NativeException("Invalid Ole10Native");
            }
            break;

        case EncodingMode.compact:
            flags1 = LittleEndian.GetShort(data, ofs);
            ofs += LittleEndianConsts.SHORT_SIZE;
            dataSize = totalSize - LittleEndianConsts.SHORT_SIZE;
            break;

        case EncodingMode.unparsed:
            dataSize = totalSize;
            break;
    }

    // Reject sizes that do not fit the remaining buffer. Previously the compact and
    // unparsed sizes were copied without any validation, so a corrupt totalSize made
    // Array.Copy throw a generic ArgumentException instead of Ole10NativeException.
    if (dataSize < 0 || dataSize > data.Length - ofs)
    {
        throw new Ole10NativeException("Invalid Ole10Native");
    }

    dataBuffer = new byte[dataSize];
    Array.Copy(data, ofs, dataBuffer, 0, dataSize);
    ofs += dataSize;
}
/*
 * Helper - determine length of zero terminated string (ASCIIZ),
 * including one byte for the terminator (even when the buffer ends
 * before a terminator is found).
 */
private static int GetStringLength(byte[] data, int ofs)
{
    // Scan forward until the terminating zero byte or the end of the buffer.
    int position = ofs;
    while (position < data.Length && data[position] != 0)
    {
        position++;
    }
    return position - ofs + 1;
}
/// <summary>
/// The value of the totalSize field - the total length of the structure
/// is totalSize + 4 (value of this field + size of this field).
/// </summary>
public int TotalSize
{
    get { return totalSize; }
}

/// <summary>flags1 - currently unknown - usually 0x0002.</summary>
public short Flags1
{
    get { return flags1; }
    set { flags1 = value; }
}

/// <summary>
/// The label field - usually the name of the file (without directory) but
/// probably may be any name specified during packaging/embedding the data.
/// </summary>
public String Label
{
    get { return label; }
    set { label = value; }
}

/// <summary>
/// The fileName field - usually the name of the file being embedded
/// including the full path.
/// </summary>
public String FileName
{
    get { return fileName; }
    set { fileName = value; }
}

/// <summary>flags2 - currently unknown - mostly 0x0000.</summary>
public short Flags2
{
    get { return flags2; }
    set { flags2 = value; }
}

/// <summary>unknown1 field - currently unknown.</summary>
public short Unknown1
{
    get { return unknown1; }
    set { unknown1 = value; }
}

/// <summary>
/// The command field - usually the name of the file being embedded
/// including the full path, may be a command specified during embedding the file.
/// </summary>
public String Command
{
    get { return command; }
    set { command = value; }
}

/// <summary>
/// The size of the embedded file. If the size is 0 (zero), no data has been
/// embedded. To be sure that no data has been embedded, check whether
/// <see cref="DataBuffer"/> returns <c>null</c>.
/// </summary>
public int DataSize
{
    get { return dataBuffer.Length; }
}

/// <summary>
/// The buffer containing the embedded file's data, or <c>null</c>
/// if no data was embedded. Note that an embedding may provide information about
/// the data, but the actual data is not included. (So label, filename etc. are
/// available, but this property returns <c>null</c>.)
/// </summary>
public byte[] DataBuffer
{
    get { return dataBuffer; }
    set { dataBuffer = value; }
}

/// <summary>flags3 - currently unknown.</summary>
public short Flags3
{
    get { return flags3; }
    set { flags3 = value; }
}
/// <summary>
/// Have the contents printed out into an OutputStream, used when writing a
/// file back out to disk (Normally, atom classes will keep their bytes
/// around, but non atom classes will just request the bytes from their
/// children, then chuck on their header and return).
/// </summary>
/// <param name="out1">The stream the record is written to.</param>
public void WriteOut(Stream out1)
{
    // NOTE: the previous revision allocated intbuf/shortbuf/zerobuf scratch arrays
    // here that were never used; they have been removed.
    LittleEndianOutputStream leosOut = new LittleEndianOutputStream(out1);

    switch (mode)
    {
        case EncodingMode.parsed:
        {
            // Serialize the parsed layout into a temporary buffer first, because the
            // leading total-size field can only be written once the length is known.
            MemoryStream bos = new MemoryStream();
            LittleEndianOutputStream leos = new LittleEndianOutputStream(bos);

            // total size, will be determined later ..
            leos.WriteShort(Flags1);
            leos.Write(Encoding.GetEncoding(ISO1).GetBytes(Label));
            leos.WriteByte(0);
            leos.Write(Encoding.GetEncoding(ISO1).GetBytes(FileName));
            leos.WriteByte(0);
            leos.WriteShort(Flags2);
            leos.WriteShort(Unknown1);
            leos.WriteInt(Command.Length + 1);
            leos.Write(Encoding.GetEncoding(ISO1).GetBytes(Command));
            leos.WriteByte(0);
            leos.WriteInt(DataSize);
            leos.Write(DataBuffer);
            leos.WriteShort(Flags3);
            //leos.Close(); // satisfy compiler ...

            leosOut.WriteInt((int)bos.Length); // total size
            bos.WriteTo(out1);
            break;
        }
        case EncodingMode.compact:
            leosOut.WriteInt(DataSize + LittleEndianConsts.SHORT_SIZE);
            leosOut.WriteShort(Flags1);
            out1.Write(DataBuffer, 0, DataBuffer.Length);
            break;
        default:
        case EncodingMode.unparsed:
            leosOut.WriteInt(DataSize);
            out1.Write(DataBuffer, 0, DataBuffer.Length);
            break;
    }
}
}
}
| |
using System;
using ICSharpCode.SharpZipLib.Checksum;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// Strategies for deflater
/// </summary>
public enum DeflateStrategy
{
    /// <summary>
    /// The default strategy
    /// </summary>
    Default = 0,

    /// <summary>
    /// This strategy will only allow longer string repetitions. It is
    /// useful for random data with a small character set.
    /// </summary>
    Filtered = 1,

    /// <summary>
    /// This strategy will not look for string repetitions at all. It
    /// only encodes with Huffman trees (which means that more common
    /// characters get a smaller encoding).
    /// </summary>
    HuffmanOnly = 2
}
// DEFLATE ALGORITHM:
//
// The uncompressed stream is inserted into the window array. When
// the window array is full the first half is thrown away and the
// second half is copied to the beginning.
//
// The head array is a hash table. Three characters build a hash value
// and they the value points to the corresponding index in window of
// the last string with this hash. The prev array implements a
// linked list of matches with the same hash: prev[index & WMASK] points
// to the previous index with the same hash.
//
/// <summary>
/// Low level compression engine for deflate algorithm which uses a 32K sliding window
/// with secondary compression from Huffman/Shannon-Fano codes.
/// </summary>
public class DeflaterEngine
{
#region Constants
const int TooFar = 4096;
#endregion
#region Constructors
/// <summary>
/// Construct instance with pending buffer
/// </summary>
/// <param name="pending">
/// Pending buffer to use
/// </param>
public DeflaterEngine(DeflaterPending pending)
{
    this.pending = pending;
    huffman = new DeflaterHuffman(pending);
    adler = new Adler32();

    // The window is twice WSIZE so matches can extend past the WSIZE boundary
    // before the upper half is slid down (see SlideWindow).
    window = new byte[2 * DeflaterConstants.WSIZE];
    head = new short[DeflaterConstants.HASH_SIZE];
    prev = new short[DeflaterConstants.WSIZE];

    // We start at index 1, to avoid an implementation deficiency, that
    // we cannot build a repeat pattern at index 0.
    blockStart = strstart = 1;
}
#endregion
/// <summary>
/// Deflate drives actual compression of data
/// </summary>
/// <param name="flush">True to flush input buffers</param>
/// <param name="finish">Finish deflation with the current input.</param>
/// <returns>Returns true if progress has been made.</returns>
/// <exception cref="InvalidOperationException">Thrown when the configured compression function is unknown.</exception>
public bool Deflate(bool flush, bool finish)
{
    bool progress;
    do {
        FillWindow();

        // Only forward the flush request once all supplied input has been consumed.
        bool canFlush = flush && (inputOff == inputEnd);

#if DebugDeflation
        if (DeflaterConstants.DEBUGGING) {
            Console.WriteLine("window: [" + blockStart + "," + strstart + ","
                + lookahead + "], " + compressionFunction + "," + canFlush);
        }
#endif
        // Dispatch to the strategy selected by SetLevel (stored/fast/slow).
        switch (compressionFunction) {
            case DeflaterConstants.DEFLATE_STORED:
                progress = DeflateStored(canFlush, finish);
                break;
            case DeflaterConstants.DEFLATE_FAST:
                progress = DeflateFast(canFlush, finish);
                break;
            case DeflaterConstants.DEFLATE_SLOW:
                progress = DeflateSlow(canFlush, finish);
                break;
            default:
                throw new InvalidOperationException("unknown compressionFunction");
        }
    } while (pending.IsFlushed && progress); // repeat while we have no pending output and progress was made
    return progress;
}
/// <summary>
/// Sets input data to be deflated. Should only be called when <code>NeedsInput()</code>
/// returns true
/// </summary>
/// <param name="buffer">The buffer containing input data.</param>
/// <param name="offset">The offset of the first byte of data.</param>
/// <param name="count">The number of bytes of data to use as input.</param>
public void SetInput(byte[] buffer, int offset, int count)
{
    // Argument validation, performed in the same order as before so callers
    // observe identical exceptions for identical bad input.
    if (buffer == null)
    {
        throw new ArgumentNullException(nameof(buffer));
    }

    if (offset < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(offset));
    }

    if (count < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(count));
    }

    if (inputOff < inputEnd)
    {
        throw new InvalidOperationException("Old input was not completely processed");
    }

    int inputLimit = offset + count;

    /* We want to throw an ArrayIndexOutOfBoundsException early. The
     * check is very tricky: it also handles integer wrap around.
     */
    if ((offset > inputLimit) || (inputLimit > buffer.Length))
    {
        throw new ArgumentOutOfRangeException(nameof(count));
    }

    inputBuf = buffer;
    inputOff = offset;
    inputEnd = inputLimit;
}
/// <summary>
/// Determines if more <see cref="SetInput">input</see> is needed.
/// </summary>
/// <returns>Return true if input is needed via <see cref="SetInput">SetInput</see></returns>
public bool NeedsInput()
{
    // All previously supplied input has been consumed once the offset reaches the end.
    return inputOff == inputEnd;
}
/// <summary>
/// Set compression dictionary
/// </summary>
/// <param name="buffer">The buffer containing the dictionary data</param>
/// <param name="offset">The offset in the buffer for the first byte of data</param>
/// <param name="length">The length of the dictionary data.</param>
public void SetDictionary(byte[] buffer, int offset, int length)
{
#if DebugDeflation
    if (DeflaterConstants.DEBUGGING && (strstart != 1) )
    {
        throw new InvalidOperationException("strstart not 1");
    }
#endif
    adler.Update(buffer, offset, length);

    // A dictionary shorter than MIN_MATCH cannot seed any matches.
    if (length < DeflaterConstants.MIN_MATCH) {
        return;
    }

    // Only the trailing MAX_DIST bytes are reachable as match distances.
    if (length > DeflaterConstants.MAX_DIST) {
        offset += length - DeflaterConstants.MAX_DIST;
        length = DeflaterConstants.MAX_DIST;
    }

    System.Array.Copy(buffer, offset, window, strstart, length);

    // Insert all dictionary positions into the hash chains. The final two
    // positions are intentionally skipped (hence the pre-decrement and the
    // trailing += 2): a full MIN_MATCH of bytes is required to hash a position.
    UpdateHash();
    --length;
    while (--length > 0) {
        InsertString();
        strstart++;
    }
    strstart += 2;
    blockStart = strstart;
}
/// <summary>
/// Reset internal state so the engine can be reused for a new stream.
/// </summary>
public void Reset()
{
    huffman.Reset();
    adler.Reset();
    blockStart = strstart = 1;
    lookahead = 0;
    totalIn = 0;
    prevAvailable = false;
    matchLen = DeflaterConstants.MIN_MATCH - 1;

    // Clear the hash chains in bulk rather than element by element
    // (head/prev are allocated with these exact lengths in the constructor).
    Array.Clear(head, 0, head.Length);
    Array.Clear(prev, 0, prev.Length);
}
/// <summary>
/// Reset Adler checksum
/// </summary>
public void ResetAdler()
{
    // Resets only the checksum; window/hash state is left untouched (see Reset()).
    adler.Reset();
}
/// <summary>
/// Get current value of Adler checksum
/// </summary>
public int Adler {
    get {
        // The 32-bit checksum is held in a long; narrow without overflow checking.
        return unchecked((int)adler.Value);
    }
}

/// <summary>
/// Total data processed
/// </summary>
public long TotalIn {
    get {
        return totalIn;
    }
}

/// <summary>
/// Get/set the <see cref="DeflateStrategy">deflate strategy</see>
/// </summary>
public DeflateStrategy Strategy {
    get {
        return strategy;
    }
    set {
        strategy = value;
    }
}
/// <summary>
/// Set the deflate level (0-9)
/// </summary>
/// <param name="level">The value to set the level to.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when the level is outside the range 0..9.</exception>
public void SetLevel(int level)
{
    if ((level < 0) || (level > 9)) {
        throw new ArgumentOutOfRangeException(nameof(level));
    }

    goodLength = DeflaterConstants.GOOD_LENGTH[level];
    max_lazy = DeflaterConstants.MAX_LAZY[level];
    niceLength = DeflaterConstants.NICE_LENGTH[level];
    max_chain = DeflaterConstants.MAX_CHAIN[level];

    if (DeflaterConstants.COMPR_FUNC[level] != compressionFunction) {

#if DebugDeflation
        if (DeflaterConstants.DEBUGGING) {
            Console.WriteLine("Change from " + compressionFunction + " to "
                + DeflaterConstants.COMPR_FUNC[level]);
        }
#endif
        // When the compression mode changes mid-stream, flush whatever the old
        // mode has accumulated so the new mode starts from a clean block.
        switch (compressionFunction) {
            case DeflaterConstants.DEFLATE_STORED:
                if (strstart > blockStart) {
                    huffman.FlushStoredBlock(window, blockStart,
                        strstart - blockStart, false);
                    blockStart = strstart;
                }
                UpdateHash();
                break;

            case DeflaterConstants.DEFLATE_FAST:
                if (strstart > blockStart) {
                    huffman.FlushBlock(window, blockStart, strstart - blockStart,
                        false);
                    blockStart = strstart;
                }
                break;

            case DeflaterConstants.DEFLATE_SLOW:
                // The lazy matcher may still hold one pending literal.
                if (prevAvailable) {
                    huffman.TallyLit(window[strstart - 1] & 0xff);
                }
                if (strstart > blockStart) {
                    huffman.FlushBlock(window, blockStart, strstart - blockStart, false);
                    blockStart = strstart;
                }
                prevAvailable = false;
                matchLen = DeflaterConstants.MIN_MATCH - 1;
                break;
        }
        compressionFunction = DeflaterConstants.COMPR_FUNC[level];
    }
}
/// <summary>
/// Fill the window
/// </summary>
public void FillWindow()
{
    /* If the window is almost full and there is insufficient lookahead,
     * move the upper half to the lower one to make room in the upper half.
     */
    if (strstart >= DeflaterConstants.WSIZE + DeflaterConstants.MAX_DIST) {
        SlideWindow();
    }

    /* If there is not enough lookahead, but still some input left,
     * read in the input
     */
    if (lookahead < DeflaterConstants.MIN_LOOKAHEAD && inputOff < inputEnd) {
        int more = 2 * DeflaterConstants.WSIZE - lookahead - strstart;

        if (more > inputEnd - inputOff) {
            more = inputEnd - inputOff;
        }

        System.Array.Copy(inputBuf, inputOff, window, strstart + lookahead, more);
        // The Adler checksum is maintained over all consumed input bytes.
        adler.Update(inputBuf, inputOff, more);

        inputOff += more;
        totalIn += more;
        lookahead += more;
    }

    if (lookahead >= DeflaterConstants.MIN_MATCH) {
        // Enough bytes are now available to seed the rolling hash.
        UpdateHash();
    }
}
// Seeds the rolling hash (ins_h) from the two bytes at strstart; the third
// byte of each position is folded in later by InsertString.
void UpdateHash()
{
    /*
    if (DEBUGGING) {
        Console.WriteLine("updateHash: "+strstart);
    }
    */
    ins_h = (window[strstart] << DeflaterConstants.HASH_SHIFT) ^ window[strstart + 1];
}
/// <summary>
/// Inserts the current string in the head hash and returns the previous
/// value for this hash.
/// </summary>
/// <returns>The previous hash value</returns>
int InsertString()
{
    short match;

    // Roll the third byte of the current string into the running hash.
    int hash = ((ins_h << DeflaterConstants.HASH_SHIFT) ^ window[strstart + (DeflaterConstants.MIN_MATCH - 1)]) & DeflaterConstants.HASH_MASK;

#if DebugDeflation
    if (DeflaterConstants.DEBUGGING)
    {
        if (hash != (((window[strstart] << (2*HASH_SHIFT)) ^
                (window[strstart + 1] << HASH_SHIFT) ^
                (window[strstart + 2])) & HASH_MASK)) {
            throw new SharpZipBaseException("hash inconsistent: " + hash + "/"
                +window[strstart] + ","
                +window[strstart + 1] + ","
                +window[strstart + 2] + "," + HASH_SHIFT);
        }
    }
#endif
    // Link this position into the chain for its hash and make it the chain head.
    prev[strstart & DeflaterConstants.WMASK] = match = head[hash];
    head[hash] = unchecked((short)strstart);
    ins_h = hash;
    // Mask to 16 bits: head/prev really hold unsigned shorts.
    return match & 0xffff;
}
// Moves the upper half of the window down to the lower half and rebases all
// window-relative positions (match/scan/block starts and both hash tables).
void SlideWindow()
{
    Array.Copy(window, DeflaterConstants.WSIZE, window, 0, DeflaterConstants.WSIZE);
    matchStart -= DeflaterConstants.WSIZE;
    strstart -= DeflaterConstants.WSIZE;
    blockStart -= DeflaterConstants.WSIZE;

    // Slide the hash table (could be avoided with 32 bit values
    // at the expense of memory usage).
    for (int i = 0; i < DeflaterConstants.HASH_SIZE; ++i) {
        int m = head[i] & 0xffff;
        head[i] = (short)(m >= DeflaterConstants.WSIZE ? (m - DeflaterConstants.WSIZE) : 0);
    }

    // Slide the prev table.
    for (int i = 0; i < DeflaterConstants.WSIZE; i++) {
        int m = prev[i] & 0xffff;
        prev[i] = (short)(m >= DeflaterConstants.WSIZE ? (m - DeflaterConstants.WSIZE) : 0);
    }
}
/// <summary>
/// Find the best (longest) string in the window matching the
/// string starting at strstart.
///
/// Preconditions:
/// <code>
/// strstart + DeflaterConstants.MAX_MATCH &lt;= window.length.</code>
/// </summary>
/// <param name="curMatch">Window index of the head of the hash chain to search.</param>
/// <returns>True if a match greater than the minimum length is found</returns>
bool FindLongestMatch( int curMatch )
{
    int match;
    int scan = strstart;
    // scanMax is the highest position that we can look at
    int scanMax = scan + Math.Min( DeflaterConstants.MAX_MATCH, lookahead ) - 1;
    // Matches further back than MAX_DIST are not encodable.
    int limit = Math.Max( scan - DeflaterConstants.MAX_DIST, 0 );

    // Local copies to reduce field accesses inside the hot loop.
    byte[] window = this.window;
    short[] prev = this.prev;
    int chainLength = this.max_chain;
    int niceLength = Math.Min( this.niceLength, lookahead );

    matchLen = Math.Max( matchLen, DeflaterConstants.MIN_MATCH - 1 );

    if (scan + matchLen > scanMax) return false;

    // Bytes at the current best-match end, used to reject candidates cheaply.
    byte scan_end1 = window[scan + matchLen - 1];
    byte scan_end = window[scan + matchLen];

    // Do not waste too much time if we already have a good match:
    if (matchLen >= this.goodLength) chainLength >>= 2;

    do
    {
        match = curMatch;
        scan = strstart;

        // Quick rejection: a longer match must at least extend the current one,
        // so compare the would-be end bytes and the first two bytes first.
        if (window[match + matchLen] != scan_end
            || window[match + matchLen - 1] != scan_end1
            || window[match] != window[scan]
            || window[++match] != window[++scan])
        {
            continue;
        }

        // scan is set to strstart+1 and the comparison passed, so
        // scanMax - scan is the maximum number of bytes we can compare.
        // below we compare 8 bytes at a time, so first we compare
        // (scanMax - scan) % 8 bytes, so the remainder is a multiple of 8
        switch( (scanMax - scan) % 8 )
        {
            case 1: if (window[++scan] == window[++match]) break;
                break;
            case 2: if (window[++scan] == window[++match]
                && window[++scan] == window[++match]) break;
                break;
            case 3: if (window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]) break;
                break;
            case 4: if (window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]) break;
                break;
            case 5: if (window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]) break;
                break;
            case 6: if (window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]) break;
                break;
            case 7: if (window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]) break;
                break;
        }

        if (window[scan] == window[match])
        {
            /* We check for insufficient lookahead only every 8th comparison;
             * the 256th check will be made at strstart + 258 unless lookahead is
             * exhausted first.
             */
            do
            {
                if (scan == scanMax)
                {
                    ++scan;     // advance to first position not matched
                    ++match;

                    break;
                }
            }
            while (window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]
                && window[++scan] == window[++match]);
        }

        if (scan - strstart > matchLen)
        {
#if DebugDeflation
            if (DeflaterConstants.DEBUGGING && (ins_h == 0) )
                Console.Error.WriteLine("Found match: " + curMatch + "-" + (scan - strstart));
#endif
            // New best match; remember it and update the rejection bytes.
            matchStart = curMatch;
            matchLen = scan - strstart;

            if (matchLen >= niceLength)
                break;

            scan_end1 = window[scan - 1];
            scan_end = window[scan];
        }
    } while ((curMatch = (prev[curMatch & DeflaterConstants.WMASK] & 0xffff)) > limit && 0 != --chainLength );

    return matchLen >= DeflaterConstants.MIN_MATCH;
}
// Stored (uncompressed) mode: input is passed through in raw blocks without
// any match searching or Huffman coding.
bool DeflateStored(bool flush, bool finish)
{
    if (!flush && (lookahead == 0)) {
        return false;
    }

    strstart += lookahead;
    lookahead = 0;

    int storedLength = strstart - blockStart;

    if ((storedLength >= DeflaterConstants.MAX_BLOCK_SIZE) || // Block is full
        (blockStart < DeflaterConstants.WSIZE && storedLength >= DeflaterConstants.MAX_DIST) || // Block may move out of window
        flush) {
        bool lastBlock = finish;
        if (storedLength > DeflaterConstants.MAX_BLOCK_SIZE) {
            // Too large for a single stored block; more must follow.
            storedLength = DeflaterConstants.MAX_BLOCK_SIZE;
            lastBlock = false;
        }

#if DebugDeflation
        if (DeflaterConstants.DEBUGGING)
        {
            Console.WriteLine("storedBlock[" + storedLength + "," + lastBlock + "]");
        }
#endif

        huffman.FlushStoredBlock(window, blockStart, storedLength, lastBlock);
        blockStart += storedLength;
        return !lastBlock;
    }
    return true;
}
// Fast mode: emit a match as soon as one is found, without checking whether
// the next input position would yield a longer one (no lazy evaluation).
bool DeflateFast(bool flush, bool finish)
{
    if (lookahead < DeflaterConstants.MIN_LOOKAHEAD && !flush) {
        return false;
    }

    while (lookahead >= DeflaterConstants.MIN_LOOKAHEAD || flush) {
        if (lookahead == 0) {
            // We are flushing everything
            huffman.FlushBlock(window, blockStart, strstart - blockStart, finish);
            blockStart = strstart;
            return false;
        }

        if (strstart > 2 * DeflaterConstants.WSIZE - DeflaterConstants.MIN_LOOKAHEAD) {
            /* slide window, as FindLongestMatch needs this.
             * This should only happen when flushing and the window
             * is almost full.
             */
            SlideWindow();
        }

        int hashHead;
        if (lookahead >= DeflaterConstants.MIN_MATCH &&
            (hashHead = InsertString()) != 0 &&
            strategy != DeflateStrategy.HuffmanOnly &&
            strstart - hashHead <= DeflaterConstants.MAX_DIST &&
            FindLongestMatch(hashHead)) {
            // longestMatch sets matchStart and matchLen
#if DebugDeflation
            if (DeflaterConstants.DEBUGGING)
            {
                for (int i = 0 ; i < matchLen; i++) {
                    if (window[strstart + i] != window[matchStart + i]) {
                        throw new SharpZipBaseException("Match failure");
                    }
                }
            }
#endif

            bool full = huffman.TallyDist(strstart - matchStart, matchLen);

            lookahead -= matchLen;
            if (matchLen <= max_lazy && lookahead >= DeflaterConstants.MIN_MATCH) {
                // Short match: hash every covered position so later matches can find them.
                while (--matchLen > 0) {
                    ++strstart;
                    InsertString();
                }
                ++strstart;
            } else {
                // Long match: skipping the per-position inserts is cheaper;
                // just reseed the rolling hash at the new position.
                strstart += matchLen;
                if (lookahead >= DeflaterConstants.MIN_MATCH - 1) {
                    UpdateHash();
                }
            }
            matchLen = DeflaterConstants.MIN_MATCH - 1;
            if (!full) {
                continue;
            }
        } else {
            // No match found
            huffman.TallyLit(window[strstart] & 0xff);
            ++strstart;
            --lookahead;
        }

        if (huffman.IsFull()) {
            bool lastBlock = finish && (lookahead == 0);
            huffman.FlushBlock(window, blockStart, strstart - blockStart, lastBlock);
            blockStart = strstart;
            return !lastBlock;
        }
    }
    return true;
}
// Slow (lazy) mode: before emitting a match, check whether the next input
// position yields a longer one; if so, emit a literal and keep looking.
bool DeflateSlow(bool flush, bool finish)
{
    if (lookahead < DeflaterConstants.MIN_LOOKAHEAD && !flush) {
        return false;
    }

    while (lookahead >= DeflaterConstants.MIN_LOOKAHEAD || flush) {
        if (lookahead == 0) {
            // Emit the literal still pending from the lazy evaluation, if any.
            if (prevAvailable) {
                huffman.TallyLit(window[strstart - 1] & 0xff);
            }
            prevAvailable = false;

            // We are flushing everything
#if DebugDeflation
            if (DeflaterConstants.DEBUGGING && !flush)
            {
                throw new SharpZipBaseException("Not flushing, but no lookahead");
            }
#endif
            huffman.FlushBlock(window, blockStart, strstart - blockStart,
                finish);
            blockStart = strstart;
            return false;
        }

        if (strstart >= 2 * DeflaterConstants.WSIZE - DeflaterConstants.MIN_LOOKAHEAD) {
            /* slide window, as FindLongestMatch needs this.
             * This should only happen when flushing and the window
             * is almost full.
             */
            SlideWindow();
        }

        // Remember the match found at the previous position before searching here.
        int prevMatch = matchStart;
        int prevLen = matchLen;
        if (lookahead >= DeflaterConstants.MIN_MATCH) {
            int hashHead = InsertString();

            if (strategy != DeflateStrategy.HuffmanOnly &&
                hashHead != 0 &&
                strstart - hashHead <= DeflaterConstants.MAX_DIST &&
                FindLongestMatch(hashHead)) {
                // longestMatch sets matchStart and matchLen

                // Discard match if too small and too far away
                if (matchLen <= 5 && (strategy == DeflateStrategy.Filtered || (matchLen == DeflaterConstants.MIN_MATCH && strstart - matchStart > TooFar))) {
                    matchLen = DeflaterConstants.MIN_MATCH - 1;
                }
            }
        }

        // previous match was better
        if ((prevLen >= DeflaterConstants.MIN_MATCH) && (matchLen <= prevLen)) {
#if DebugDeflation
            if (DeflaterConstants.DEBUGGING)
            {
                for (int i = 0 ; i < matchLen; i++) {
                    if (window[strstart-1+i] != window[prevMatch + i])
                        throw new SharpZipBaseException();
                }
            }
#endif
            huffman.TallyDist(strstart - 1 - prevMatch, prevLen);

            // Two positions are already accounted for (previous and current), so
            // insert the remaining prevLen - 2 covered positions into the hash.
            prevLen -= 2;
            do {
                strstart++;
                lookahead--;
                if (lookahead >= DeflaterConstants.MIN_MATCH) {
                    InsertString();
                }
            } while (--prevLen > 0);

            strstart++;
            lookahead--;
            prevAvailable = false;
            matchLen = DeflaterConstants.MIN_MATCH - 1;
        } else {
            if (prevAvailable) {
                // The current match beat the previous one; the previous byte is a literal.
                huffman.TallyLit(window[strstart - 1] & 0xff);
            }
            prevAvailable = true;
            strstart++;
            lookahead--;
        }

        if (huffman.IsFull()) {
            int len = strstart - blockStart;
            if (prevAvailable) {
                // Do not include the still-pending position in this block.
                len--;
            }
            bool lastBlock = (finish && (lookahead == 0) && !prevAvailable);
            huffman.FlushBlock(window, blockStart, len, lastBlock);
            blockStart += len;
            return !lastBlock;
        }
    }
    return true;
}
#region Instance Fields
// Hash index of string to be inserted
int ins_h;

/// <summary>
/// Hashtable, hashing three characters to an index for window, so
/// that window[index]..window[index+2] have this hash code.
/// Note that the array should really be unsigned short, so you need
/// to and the values with 0xffff.
/// </summary>
short[] head;

/// <summary>
/// <code>prev[index &amp; WMASK]</code> points to the previous index that has the
/// same hash code as the string starting at index. This way
/// entries with the same hash code are in a linked list.
/// Note that the array should really be unsigned short, so you need
/// to and the values with 0xffff.
/// </summary>
short[] prev;

// Window position where the current best match starts (set by FindLongestMatch)
int matchStart;

// Length of best match
int matchLen;

// Set if previous match exists
bool prevAvailable;

// Start of the data that has not yet been flushed as a block
int blockStart;

/// <summary>
/// Points to the current character in the window.
/// </summary>
int strstart;

/// <summary>
/// lookahead is the number of characters starting at strstart in
/// window that are valid.
/// So window[strstart] until window[strstart+lookahead-1] are valid
/// characters.
/// </summary>
int lookahead;

/// <summary>
/// This array contains the part of the uncompressed stream that
/// is of relevance. The current character is indexed by strstart.
/// </summary>
byte[] window;

// Current deflate strategy (see DeflateStrategy)
DeflateStrategy strategy;

// Per-level tuning parameters, copied from DeflaterConstants in SetLevel
int max_chain, max_lazy, niceLength, goodLength;

/// <summary>
/// The current compression function.
/// </summary>
int compressionFunction;

/// <summary>
/// The input data for compression.
/// </summary>
byte[] inputBuf;

/// <summary>
/// The total bytes of input read.
/// </summary>
long totalIn;

/// <summary>
/// The offset into inputBuf, where input data starts.
/// </summary>
int inputOff;

/// <summary>
/// The end offset of the input data.
/// </summary>
int inputEnd;

// Output buffer and Huffman encoder shared with the owning Deflater
DeflaterPending pending;
DeflaterHuffman huffman;

/// <summary>
/// The adler checksum
/// </summary>
Adler32 adler;
#endregion
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Compute
{
using Microsoft.Azure;
using Microsoft.Azure.Management;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Microsoft.Rest.Azure.OData;
using Models;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Extension methods for VirtualMachineImagesOperations.
/// </summary>
public static partial class VirtualMachineImagesOperationsExtensions
{
/// <summary>
/// Gets a virtual machine image.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='location'>
/// The name of a supported Azure region.
/// </param>
/// <param name='publisherName'>
/// A valid image publisher.
/// </param>
/// <param name='offer'>
/// A valid image publisher offer.
/// </param>
/// <param name='skus'>
/// A valid image SKU.
/// </param>
/// <param name='version'>
/// A valid image SKU version.
/// </param>
public static VirtualMachineImage Get(this IVirtualMachineImagesOperations operations, string location, string publisherName, string offer, string skus, string version)
{
return operations.GetAsync(location, publisherName, offer, skus, version).GetAwaiter().GetResult();
}
/// <summary>
/// Gets a virtual machine image.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='location'>
/// The name of a supported Azure region.
/// </param>
/// <param name='publisherName'>
/// A valid image publisher.
/// </param>
/// <param name='offer'>
/// A valid image publisher offer.
/// </param>
/// <param name='skus'>
/// A valid image SKU.
/// </param>
/// <param name='version'>
/// A valid image SKU version.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<VirtualMachineImage> GetAsync(this IVirtualMachineImagesOperations operations, string location, string publisherName, string offer, string skus, string version, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetWithHttpMessagesAsync(location, publisherName, offer, skus, version, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Gets a list of all virtual machine image versions for the specified
/// location, publisher, offer, and SKU.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='location'>
/// The name of a supported Azure region.
/// </param>
/// <param name='publisherName'>
/// A valid image publisher.
/// </param>
/// <param name='offer'>
/// A valid image publisher offer.
/// </param>
/// <param name='skus'>
/// A valid image SKU.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
public static IList<VirtualMachineImageResource> List(this IVirtualMachineImagesOperations operations, string location, string publisherName, string offer, string skus, ODataQuery<VirtualMachineImageResource> odataQuery = default(ODataQuery<VirtualMachineImageResource>))
{
return ((IVirtualMachineImagesOperations)operations).ListAsync(location, publisherName, offer, skus, odataQuery).GetAwaiter().GetResult();
}
/// <summary>
/// Gets a list of all virtual machine image versions for the specified
/// location, publisher, offer, and SKU.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='location'>
/// The name of a supported Azure region.
/// </param>
/// <param name='publisherName'>
/// A valid image publisher.
/// </param>
/// <param name='offer'>
/// A valid image publisher offer.
/// </param>
/// <param name='skus'>
/// A valid image SKU.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IList<VirtualMachineImageResource>> ListAsync(this IVirtualMachineImagesOperations operations, string location, string publisherName, string offer, string skus, ODataQuery<VirtualMachineImageResource> odataQuery = default(ODataQuery<VirtualMachineImageResource>), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListWithHttpMessagesAsync(location, publisherName, offer, skus, odataQuery, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Gets a list of virtual machine image offers for the specified location and
/// publisher.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='location'>
/// The name of a supported Azure region.
/// </param>
/// <param name='publisherName'>
/// A valid image publisher.
/// </param>
public static IList<VirtualMachineImageResource> ListOffers(this IVirtualMachineImagesOperations operations, string location, string publisherName)
{
return operations.ListOffersAsync(location, publisherName).GetAwaiter().GetResult();
}
/// <summary>
/// Gets a list of virtual machine image offers for the specified location and
/// publisher.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='location'>
/// The name of a supported Azure region.
/// </param>
/// <param name='publisherName'>
/// A valid image publisher.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IList<VirtualMachineImageResource>> ListOffersAsync(this IVirtualMachineImagesOperations operations, string location, string publisherName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListOffersWithHttpMessagesAsync(location, publisherName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Gets a list of virtual machine image publishers for the specified Azure
/// location.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='location'>
/// The name of a supported Azure region.
/// </param>
public static IList<VirtualMachineImageResource> ListPublishers(this IVirtualMachineImagesOperations operations, string location)
{
return operations.ListPublishersAsync(location).GetAwaiter().GetResult();
}
/// <summary>
/// Gets a list of virtual machine image publishers for the specified Azure
/// location.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='location'>
/// The name of a supported Azure region.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IList<VirtualMachineImageResource>> ListPublishersAsync(this IVirtualMachineImagesOperations operations, string location, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListPublishersWithHttpMessagesAsync(location, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Gets a list of virtual machine image SKUs for the specified location,
/// publisher, and offer.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='location'>
/// The name of a supported Azure region.
/// </param>
/// <param name='publisherName'>
/// A valid image publisher.
/// </param>
/// <param name='offer'>
/// A valid image publisher offer.
/// </param>
public static IList<VirtualMachineImageResource> ListSkus(this IVirtualMachineImagesOperations operations, string location, string publisherName, string offer)
{
return operations.ListSkusAsync(location, publisherName, offer).GetAwaiter().GetResult();
}
/// <summary>
/// Gets a list of virtual machine image SKUs for the specified location,
/// publisher, and offer.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='location'>
/// The name of a supported Azure region.
/// </param>
/// <param name='publisherName'>
/// A valid image publisher.
/// </param>
/// <param name='offer'>
/// A valid image publisher offer.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IList<VirtualMachineImageResource>> ListSkusAsync(this IVirtualMachineImagesOperations operations, string location, string publisherName, string offer, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListSkusWithHttpMessagesAsync(location, publisherName, offer, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
namespace ParquetSharp.Column.Values.Dictionary
{
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using ParquetSharp.Bytes;
using ParquetSharp.Column.Page;
using ParquetSharp.Column.Values.Plain;
using ParquetSharp.Column.Values.Rle;
using ParquetSharp.IO;
using ParquetSharp.IO.Api;
/**
* Will attempt to encode values using a dictionary and fall back to plain encoding
* if the dictionary gets too big
*
* @author Julien Le Dem
*
*/
    public abstract class DictionaryValuesWriter : ValuesWriter, RequiresFallback
    {
        private static readonly Log LOG = Log.getLog(typeof(DictionaryValuesWriter));
        /* max entries allowed for the dictionary will fail over to plain encoding if reached */
        private static readonly int MAX_DICTIONARY_ENTRIES = int.MaxValue - 1;
        /* lower bound for the first slab allocated by the RLE encoder in getBytes() */
        private static readonly int MIN_INITIAL_SLAB_SIZE = 64;
        /* encoding to label the data page */
        private readonly Encoding encodingForDataPage;
        /* encoding to label the dictionary page */
        protected readonly Encoding encodingForDictionaryPage;
        /* maximum size in bytes allowed for the dictionary will fail over to plain encoding if reached */
        protected readonly int maxDictionaryByteSize;
        /* will become true if the dictionary becomes too big */
        protected bool dictionaryTooBig;
        /* current size in bytes the dictionary will take once serialized */
        protected int dictionaryByteSize;
        /* size in bytes of the dictionary at the end of last dictionary encoded page (in case the current page falls back to PLAIN) */
        protected int lastUsedDictionaryByteSize;
        /* size in items of the dictionary at the end of last dictionary encoded page (in case the current page falls back to PLAIN) */
        protected int lastUsedDictionarySize;
        /* dictionary encoded values */
        protected List<int> encodedValues = new List<int>();
        /** indicates if this is the first page being processed */
        protected bool firstPage = true;
        /* allocator handed to the RLE encoders created in getBytes() */
        protected ByteBufferAllocator allocator;
        /* Track the list of writers used so they can be appropriately closed when necessary
           (currently used for off-heap memory which is not garbage collected) */
        private List<RunLengthBitPackingHybridEncoder> encoders
            = new List<RunLengthBitPackingHybridEncoder>();
        /**
         * @param maxDictionaryByteSize     maximum serialized dictionary size in bytes before falling back to plain encoding
         * @param encodingForDataPage       encoding used to label dictionary-encoded data pages
         * @param encodingForDictionaryPage encoding used to label the dictionary page itself
         * @param allocator                 allocator used for the RLE encoder buffers
         */
        protected DictionaryValuesWriter(int maxDictionaryByteSize, Encoding encodingForDataPage, Encoding encodingForDictionaryPage, ByteBufferAllocator allocator)
        {
            this.allocator = allocator;
            this.maxDictionaryByteSize = maxDictionaryByteSize;
            this.encodingForDataPage = encodingForDataPage;
            this.encodingForDictionaryPage = encodingForDictionaryPage;
        }
        /**
         * Builds the DictionaryPage from the given writer's bytes, sized to the part
         * of the dictionary actually referenced by written pages, then closes the
         * writer to release its buffers.
         */
        protected DictionaryPage dictPage(ValuesWriter dictPageWriter)
        {
            DictionaryPage ret = new DictionaryPage(dictPageWriter.getBytes(), lastUsedDictionarySize, encodingForDictionaryPage);
            dictPageWriter.close();
            return ret;
        }
public bool shouldFallBack()
{
// if the dictionary reaches the max byte size or the values can not be encoded on 4 bytes anymore.
return dictionaryByteSize > maxDictionaryByteSize
|| getDictionarySize() > MAX_DICTIONARY_ENTRIES;
}
public bool isCompressionSatisfying(long rawSize, long encodedSize)
{
return (encodedSize + dictionaryByteSize) < rawSize;
}
        /**
         * Replays every buffered value through the given writer using its encoding.
         * If no page was ever flushed with the dictionary (lastUsedDictionarySize == 0)
         * the dictionary content is freed as well, since it will never be written out.
         */
        public void fallBackAllValuesTo(ValuesWriter writer)
        {
            fallBackDictionaryEncodedData(writer);
            if (lastUsedDictionarySize == 0)
            {
                // if we never used the dictionary
                // we free dictionary encoded data
                clearDictionaryContent();
                dictionaryByteSize = 0;
                encodedValues = new List<int>();
            }
        }
        /* type-specific hook: re-encode the buffered ids through the fallback writer */
        abstract protected void fallBackDictionaryEncodedData(ValuesWriter writer);
public override long getBufferedSize()
{
return encodedValues.Count * 4;
}
public override long getAllocatedSize()
{
// size used in memory
return encodedValues.Count * 4 + dictionaryByteSize;
}
        /**
         * Serializes the buffered dictionary ids with the RLE/bit-packing hybrid
         * encoding, prefixed by a single byte holding the bit width. Also records
         * the dictionary size at this point so that a later fallback page does not
         * drop entries already referenced by flushed pages.
         */
        public override BytesInput getBytes()
        {
            int maxDicId = getDictionarySize() - 1;
            if (Log.DEBUG) LOG.debug("max dic id " + maxDicId);
            // bit width needed to represent the largest dictionary id
            int bitWidth = BytesUtils.getWidthFromMaxInt(maxDicId);
            int initialSlabSize =
                CapacityByteArrayOutputStream.initialSlabSizeHeuristic(MIN_INITIAL_SLAB_SIZE, maxDictionaryByteSize, 10);
            // encoder is kept in `encoders` so close() can release its buffers later
            RunLengthBitPackingHybridEncoder encoder =
                new RunLengthBitPackingHybridEncoder(bitWidth, initialSlabSize, maxDictionaryByteSize, this.allocator);
            encoders.Add(encoder);
            try
            {
                foreach (int value in encodedValues)
                {
                    encoder.writeInt(value);
                }
                // encodes the bit width
                byte[] bytesHeader = new byte[] { (byte)bitWidth };
                BytesInput rleEncodedBytes = encoder.toBytes();
                if (Log.DEBUG) LOG.debug("rle encoded bytes " + rleEncodedBytes.size());
                BytesInput bytes = BytesInput.concat(BytesInput.from(bytesHeader), rleEncodedBytes);
                // remember size of dictionary when we last wrote a page
                lastUsedDictionarySize = getDictionarySize();
                lastUsedDictionaryByteSize = dictionaryByteSize;
                return bytes;
            }
            catch (IOException e)
            {
                throw new ParquetEncodingException("could not encode the values", e);
            }
        }
        /* encoding used to label data pages produced by this writer */
        public override Encoding getEncoding()
        {
            return encodingForDataPage;
        }
        /**
         * Discards all buffered values. close() releases the RLE encoders and nulls
         * encodedValues, so a fresh list is installed to keep the writer reusable.
         */
        public override void reset()
        {
            close();
            encodedValues = new List<int>();
        }
public override void close()
{
encodedValues = null;
foreach (RunLengthBitPackingHybridEncoder encoder in encoders)
{
encoder.close();
}
encoders.Clear();
}
public override void resetDictionary()
{
lastUsedDictionaryByteSize = 0;
lastUsedDictionarySize = 0;
dictionaryTooBig = false;
clearDictionaryContent();
}
        /**
         * Clears/frees the underlying type-specific dictionary content.
         */
        protected abstract void clearDictionaryContent();
        /**
         * @return the number of distinct entries currently in the dictionary (size in items)
         */
        protected internal abstract int getDictionarySize();
public override string memUsageString(string prefix)
{
return string.Format(
CultureInfo.InvariantCulture,
"{0} DictionaryValuesWriter{{\n"
+ "{1}\n"
+ "{2}\n"
+ "{3}}}\n",
prefix,
prefix + " dict:" + dictionaryByteSize,
prefix + " values:" + (encodedValues.Count * 4).ToString(CultureInfo.InvariantCulture),
prefix
);
}
        /**
         * Dictionary writer for variable-length binary values. Each new dictionary
         * entry costs 4 bytes (stored length prefix) plus the value bytes.
         */
        public class PlainBinaryDictionaryValuesWriter : DictionaryValuesWriter
        {
            /* type specific dictionary content */
            protected Dictionary<Binary, int> binaryDictionaryContent = new Dictionary<Binary, int>();
            /**
             * @param maxDictionaryByteSize maximum serialized dictionary size in bytes before falling back to plain encoding
             */
            public PlainBinaryDictionaryValuesWriter(int maxDictionaryByteSize, Encoding encodingForDataPage, Encoding encodingForDictionaryPage, ByteBufferAllocator allocator)
                : base(maxDictionaryByteSize, encodingForDataPage, encodingForDictionaryPage, allocator)
            {
            }
            /* buffers the id for v, assigning a new id (and copying v) on first sight */
            public override void writeBytes(Binary v)
            {
                int id;
                if (!binaryDictionaryContent.TryGetValue(v, out id))
                {
                    id = binaryDictionaryContent.Count;
                    // copy() so the dictionary does not retain a caller-reused buffer
                    binaryDictionaryContent.Add(v.copy(), id);
                    // length as int (4 bytes) + actual bytes
                    dictionaryByteSize += 4 + v.length();
                }
                encodedValues.Add(id);
            }
            public override DictionaryPage toDictPageAndClose()
            {
                if (lastUsedDictionarySize > 0)
                {
                    // return a dictionary only if we actually used it
                    PlainValuesWriter dictionaryEncoder = new PlainValuesWriter(lastUsedDictionaryByteSize, maxDictionaryByteSize, allocator);
                    Binary[] reverseDictionary = GetDictionaryValues(lastUsedDictionarySize);
                    // write only the part of the dict that we used
                    for (int i = 0; i < lastUsedDictionarySize; i++)
                    {
                        dictionaryEncoder.writeBytes(reverseDictionary[i]);
                    }
                    return dictPage(dictionaryEncoder);
                }
                return null;
            }
            protected internal override int getDictionarySize()
            {
                return binaryDictionaryContent.Count;
            }
            protected override void clearDictionaryContent()
            {
                binaryDictionaryContent.Clear();
            }
            protected override void fallBackDictionaryEncodedData(ValuesWriter writer)
            {
                //build reverse dictionary
                Binary[] reverseDictionary = GetDictionaryValues(getDictionarySize());
                //fall back to plain encoding
                foreach (int id in encodedValues)
                {
                    writer.writeBytes(reverseDictionary[id]);
                }
            }
            /* inverts the value->id map into an id-indexed array of the first `count` entries */
            protected Binary[] GetDictionaryValues(int count)
            {
                Binary[] result = new Binary[count];
                foreach (KeyValuePair<Binary, int> entry in binaryDictionaryContent)
                {
                    if (entry.Value < count)
                    {
                        result[entry.Value] = entry.Key;
                    }
                }
                return result;
            }
        }
        /**
         * Dictionary writer for fixed-length byte arrays. Each dictionary entry
         * costs exactly `length` bytes: no length prefix is stored.
         */
        public class PlainFixedLenArrayDictionaryValuesWriter : PlainBinaryDictionaryValuesWriter
        {
            /* fixed size in bytes of every value written through this writer */
            private readonly int length;
            /**
             * @param maxDictionaryByteSize maximum serialized dictionary size in bytes before falling back to plain encoding
             */
            public PlainFixedLenArrayDictionaryValuesWriter(int maxDictionaryByteSize, int length, Encoding encodingForDataPage, Encoding encodingForDictionaryPage, ByteBufferAllocator allocator)
                : base(maxDictionaryByteSize, encodingForDataPage, encodingForDictionaryPage, allocator)
            {
                this.length = length;
            }
            /* same as the base writer but accounts only the fixed length per entry */
            public override void writeBytes(Binary value)
            {
                int id;
                if (!binaryDictionaryContent.TryGetValue(value, out id))
                {
                    id = binaryDictionaryContent.Count;
                    binaryDictionaryContent.Add(value.copy(), id);
                    dictionaryByteSize += length;
                }
                encodedValues.Add(id);
            }
            public override DictionaryPage toDictPageAndClose()
            {
                if (lastUsedDictionarySize > 0)
                {
                    // return a dictionary only if we actually used it
                    FixedLenByteArrayPlainValuesWriter dictionaryEncoder = new FixedLenByteArrayPlainValuesWriter(length, lastUsedDictionaryByteSize, maxDictionaryByteSize, allocator);
                    Binary[] reverseDictionary = GetDictionaryValues(lastUsedDictionarySize);
                    // write only the part of the dict that we used
                    for (int i = 0; i < lastUsedDictionarySize; i++)
                    {
                        dictionaryEncoder.writeBytes(reverseDictionary[i]);
                    }
                    return dictPage(dictionaryEncoder);
                }
                return null;
            }
        }
        /**
         * Dictionary writer for 64-bit integer values; each entry costs 8 bytes.
         */
        public class PlainLongDictionaryValuesWriter : DictionaryValuesWriter
        {
            /* type specific dictionary content */
            private Dictionary<long, int> longDictionaryContent = new Dictionary<long, int>();
            /**
             * @param maxDictionaryByteSize maximum serialized dictionary size in bytes before falling back to plain encoding
             */
            public PlainLongDictionaryValuesWriter(int maxDictionaryByteSize, Encoding encodingForDataPage, Encoding encodingForDictionaryPage, ByteBufferAllocator allocator)
                : base(maxDictionaryByteSize, encodingForDataPage, encodingForDictionaryPage, allocator)
            {
            }
            /* buffers the id for v, assigning a new id on first sight */
            public override void writeLong(long v)
            {
                int id;
                if (!longDictionaryContent.TryGetValue(v, out id))
                {
                    id = longDictionaryContent.Count;
                    longDictionaryContent.Add(v, id);
                    dictionaryByteSize += 8;
                }
                encodedValues.Add(id);
            }
            public override DictionaryPage toDictPageAndClose()
            {
                if (lastUsedDictionarySize > 0)
                {
                    // return a dictionary only if we actually used it
                    PlainValuesWriter dictionaryEncoder = new PlainValuesWriter(lastUsedDictionaryByteSize, maxDictionaryByteSize, allocator);
                    long[] reverseDictionary = GetDictionaryValues(lastUsedDictionarySize);
                    // write only the part of the dict that we used
                    for (int i = 0; i < lastUsedDictionarySize; i++)
                    {
                        dictionaryEncoder.writeLong(reverseDictionary[i]);
                    }
                    return dictPage(dictionaryEncoder);
                }
                return null;
            }
            protected internal override int getDictionarySize()
            {
                return longDictionaryContent.Count;
            }
            protected override void clearDictionaryContent()
            {
                longDictionaryContent.Clear();
            }
            protected override void fallBackDictionaryEncodedData(ValuesWriter writer)
            {
                //build reverse dictionary
                long[] reverseDictionary = GetDictionaryValues(getDictionarySize());
                //fall back to plain encoding
                foreach (int id in encodedValues)
                {
                    writer.writeLong(reverseDictionary[id]);
                }
            }
            /* inverts the value->id map into an id-indexed array of the first `count` entries */
            protected long[] GetDictionaryValues(int count)
            {
                long[] result = new long[count];
                foreach (KeyValuePair<long, int> entry in longDictionaryContent)
                {
                    if (entry.Value < count)
                    {
                        result[entry.Value] = entry.Key;
                    }
                }
                return result;
            }
        }
        /**
         * Dictionary writer for 64-bit floating point values; each entry costs 8 bytes.
         * NOTE(review): Dictionary&lt;double,int&gt; uses numeric equality, so +0.0 and -0.0
         * collapse into one entry (the first seen wins) — verify against the reference
         * implementation, which keys on the raw bit pattern.
         */
        public class PlainDoubleDictionaryValuesWriter : DictionaryValuesWriter
        {
            /* type specific dictionary content */
            private Dictionary<double, int> doubleDictionaryContent = new Dictionary<double, int>();
            /**
             * @param maxDictionaryByteSize maximum serialized dictionary size in bytes before falling back to plain encoding
             */
            public PlainDoubleDictionaryValuesWriter(int maxDictionaryByteSize, Encoding encodingForDataPage, Encoding encodingForDictionaryPage, ByteBufferAllocator allocator)
                : base(maxDictionaryByteSize, encodingForDataPage, encodingForDictionaryPage, allocator)
            {
            }
            /* buffers the id for v, assigning a new id on first sight */
            public override void writeDouble(double v)
            {
                int id;
                if (!doubleDictionaryContent.TryGetValue(v, out id))
                {
                    id = doubleDictionaryContent.Count;
                    doubleDictionaryContent.Add(v, id);
                    dictionaryByteSize += 8;
                }
                encodedValues.Add(id);
            }
            public override DictionaryPage toDictPageAndClose()
            {
                if (lastUsedDictionarySize > 0)
                {
                    // return a dictionary only if we actually used it
                    PlainValuesWriter dictionaryEncoder = new PlainValuesWriter(lastUsedDictionaryByteSize, maxDictionaryByteSize, allocator);
                    double[] reverseDictionary = GetDictionaryValues(lastUsedDictionarySize);
                    // write only the part of the dict that we used
                    for (int i = 0; i < lastUsedDictionarySize; i++)
                    {
                        dictionaryEncoder.writeDouble(reverseDictionary[i]);
                    }
                    return dictPage(dictionaryEncoder);
                }
                return null;
            }
            protected internal override int getDictionarySize()
            {
                return doubleDictionaryContent.Count;
            }
            protected override void clearDictionaryContent()
            {
                doubleDictionaryContent.Clear();
            }
            protected override void fallBackDictionaryEncodedData(ValuesWriter writer)
            {
                //build reverse dictionary
                double[] reverseDictionary = GetDictionaryValues(getDictionarySize());
                //fall back to plain encoding
                foreach (int id in encodedValues)
                {
                    writer.writeDouble(reverseDictionary[id]);
                }
            }
            /* inverts the value->id map into an id-indexed array of the first `count` entries */
            private double[] GetDictionaryValues(int count)
            {
                double[] result = new double[count];
                foreach (KeyValuePair<double, int> entry in doubleDictionaryContent)
                {
                    if (entry.Value < count)
                    {
                        result[entry.Value] = entry.Key;
                    }
                }
                return result;
            }
        }
        /**
         * Dictionary writer for 32-bit integer values; each entry costs 4 bytes.
         */
        public class PlainIntegerDictionaryValuesWriter : DictionaryValuesWriter
        {
            /* type specific dictionary content */
            private Dictionary<int, int> intDictionaryContent = new Dictionary<int, int>();
            /**
             * @param maxDictionaryByteSize maximum serialized dictionary size in bytes before falling back to plain encoding
             */
            public PlainIntegerDictionaryValuesWriter(int maxDictionaryByteSize, Encoding encodingForDataPage, Encoding encodingForDictionaryPage, ByteBufferAllocator allocator)
                : base(maxDictionaryByteSize, encodingForDataPage, encodingForDictionaryPage, allocator)
            {
            }
            /* buffers the id for v, assigning a new id on first sight */
            public override void writeInteger(int v)
            {
                int id;
                if (!intDictionaryContent.TryGetValue(v, out id))
                {
                    id = intDictionaryContent.Count;
                    intDictionaryContent.Add(v, id);
                    dictionaryByteSize += 4;
                }
                encodedValues.Add(id);
            }
            public override DictionaryPage toDictPageAndClose()
            {
                if (lastUsedDictionarySize > 0)
                {
                    // return a dictionary only if we actually used it
                    PlainValuesWriter dictionaryEncoder = new PlainValuesWriter(lastUsedDictionaryByteSize, maxDictionaryByteSize, allocator);
                    int[] reverseDictionary = GetDictionaryValues(lastUsedDictionarySize);
                    // write only the part of the dict that we used
                    for (int i = 0; i < lastUsedDictionarySize; i++)
                    {
                        dictionaryEncoder.writeInteger(reverseDictionary[i]);
                    }
                    return dictPage(dictionaryEncoder);
                }
                return null;
            }
            protected internal override int getDictionarySize()
            {
                return intDictionaryContent.Count;
            }
            protected override void clearDictionaryContent()
            {
                intDictionaryContent.Clear();
            }
            protected override void fallBackDictionaryEncodedData(ValuesWriter writer)
            {
                //build reverse dictionary
                int[] reverseDictionary = GetDictionaryValues(getDictionarySize());
                //fall back to plain encoding
                foreach (int id in encodedValues)
                {
                    writer.writeInteger(reverseDictionary[id]);
                }
            }
            /* inverts the value->id map into an id-indexed array of the first `count` entries */
            private int[] GetDictionaryValues(int count)
            {
                int[] result = new int[count];
                foreach (KeyValuePair<int, int> entry in intDictionaryContent)
                {
                    if (entry.Value < count)
                    {
                        result[entry.Value] = entry.Key;
                    }
                }
                return result;
            }
        }
        /**
         * Dictionary writer for 32-bit floating point values; each entry costs 4 bytes.
         * NOTE(review): Dictionary&lt;float,int&gt; uses numeric equality, so +0.0f and -0.0f
         * collapse into one entry (the first seen wins) — verify against the reference
         * implementation, which keys on the raw bit pattern.
         */
        public class PlainFloatDictionaryValuesWriter : DictionaryValuesWriter
        {
            /* type specific dictionary content */
            private Dictionary<float, int> floatDictionaryContent = new Dictionary<float, int>();
            /**
             * @param maxDictionaryByteSize maximum serialized dictionary size in bytes before falling back to plain encoding
             */
            public PlainFloatDictionaryValuesWriter(int maxDictionaryByteSize, Encoding encodingForDataPage, Encoding encodingForDictionaryPage, ByteBufferAllocator allocator)
                : base(maxDictionaryByteSize, encodingForDataPage, encodingForDictionaryPage, allocator)
            {
            }
            /* buffers the id for v, assigning a new id on first sight */
            public override void writeFloat(float v)
            {
                int id;
                if (!floatDictionaryContent.TryGetValue(v, out id))
                {
                    id = floatDictionaryContent.Count;
                    floatDictionaryContent.Add(v, id);
                    dictionaryByteSize += 4;
                }
                encodedValues.Add(id);
            }
            public override DictionaryPage toDictPageAndClose()
            {
                if (lastUsedDictionarySize > 0)
                {
                    // return a dictionary only if we actually used it
                    PlainValuesWriter dictionaryEncoder = new PlainValuesWriter(lastUsedDictionaryByteSize, maxDictionaryByteSize, allocator);
                    float[] reverseDictionary = GetDictionaryValues(lastUsedDictionarySize);
                    // write only the part of the dict that we used
                    for (int i = 0; i < lastUsedDictionarySize; i++)
                    {
                        dictionaryEncoder.writeFloat(reverseDictionary[i]);
                    }
                    return dictPage(dictionaryEncoder);
                }
                return null;
            }
            protected internal override int getDictionarySize()
            {
                return floatDictionaryContent.Count;
            }
            protected override void clearDictionaryContent()
            {
                floatDictionaryContent.Clear();
            }
            protected override void fallBackDictionaryEncodedData(ValuesWriter writer)
            {
                //build reverse dictionary
                float[] reverseDictionary = GetDictionaryValues(getDictionarySize());
                //fall back to plain encoding
                foreach (int id in encodedValues)
                {
                    writer.writeFloat(reverseDictionary[id]);
                }
            }
            /* inverts the value->id map into an id-indexed array of the first `count` entries */
            private float[] GetDictionaryValues(int count)
            {
                float[] result = new float[count];
                foreach (KeyValuePair<float, int> entry in floatDictionaryContent)
                {
                    if (entry.Value < count)
                    {
                        result[entry.Value] = entry.Key;
                    }
                }
                return result;
            }
        }
}
}
| |
// ****************************************************************
// This is free software licensed under the NUnit license. You
// may obtain a copy of the license as well as information regarding
// copyright ownership at http://nunit.org.
// ****************************************************************
using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using NUnit.Util;
namespace NUnit.UiKit
{
/// <summary>
/// Displays a dialog for creation of a new configuration.
/// The dialog collects and validates the name and the
/// name of a configuration to be copied and then adds the
/// new configuration to the project.
///
/// A DialogResult of DialogResult.OK indicates that the
/// configuration was added successfully.
/// </summary>
public class AddConfigurationDialog : System.Windows.Forms.Form
{
#region Instance variables
private NUnitProject project;
private string configurationName;
private string copyConfigurationName;
private System.Windows.Forms.Button okButton;
private System.Windows.Forms.Button cancelButton;
private System.Windows.Forms.TextBox configurationNameTextBox;
private System.Windows.Forms.Label label1;
private System.Windows.Forms.Label label2;
private System.Windows.Forms.ComboBox configurationComboBox;
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.Container components = null;
#endregion
#region Construction and Disposal
public AddConfigurationDialog( NUnitProject project )
{
InitializeComponent();
this.project = project;
}
/// <summary>
/// Clean up any resources being used.
/// </summary>
protected override void Dispose( bool disposing )
{
if( disposing )
{
if(components != null)
{
components.Dispose();
}
}
base.Dispose( disposing );
}
#endregion
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.configurationNameTextBox = new System.Windows.Forms.TextBox();
this.okButton = new System.Windows.Forms.Button();
this.cancelButton = new System.Windows.Forms.Button();
this.configurationComboBox = new System.Windows.Forms.ComboBox();
this.label1 = new System.Windows.Forms.Label();
this.label2 = new System.Windows.Forms.Label();
this.SuspendLayout();
//
// configurationNameTextBox
//
this.configurationNameTextBox.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.configurationNameTextBox.Location = new System.Drawing.Point(16, 24);
this.configurationNameTextBox.Name = "configurationNameTextBox";
this.configurationNameTextBox.Size = new System.Drawing.Size(254, 22);
this.configurationNameTextBox.TabIndex = 0;
this.configurationNameTextBox.Text = "";
//
// okButton
//
this.okButton.Anchor = System.Windows.Forms.AnchorStyles.Bottom;
this.okButton.Location = new System.Drawing.Point(50, 120);
this.okButton.Name = "okButton";
this.okButton.Size = new System.Drawing.Size(76, 23);
this.okButton.TabIndex = 1;
this.okButton.Text = "OK";
this.okButton.Click += new System.EventHandler(this.okButton_Click);
//
// cancelButton
//
this.cancelButton.Anchor = System.Windows.Forms.AnchorStyles.Bottom;
this.cancelButton.DialogResult = System.Windows.Forms.DialogResult.Cancel;
this.cancelButton.Location = new System.Drawing.Point(155, 120);
this.cancelButton.Name = "cancelButton";
this.cancelButton.TabIndex = 2;
this.cancelButton.Text = "Cancel";
//
// configurationComboBox
//
this.configurationComboBox.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.configurationComboBox.ItemHeight = 16;
this.configurationComboBox.Location = new System.Drawing.Point(16, 80);
this.configurationComboBox.Name = "configurationComboBox";
this.configurationComboBox.Size = new System.Drawing.Size(256, 24);
this.configurationComboBox.TabIndex = 3;
//
// label1
//
this.label1.Location = new System.Drawing.Point(16, 8);
this.label1.Name = "label1";
this.label1.Size = new System.Drawing.Size(248, 16);
this.label1.TabIndex = 4;
this.label1.Text = "Configuration Name:";
//
// label2
//
this.label2.Location = new System.Drawing.Point(16, 63);
this.label2.Name = "label2";
this.label2.Size = new System.Drawing.Size(240, 17);
this.label2.TabIndex = 5;
this.label2.Text = "Copy Settings From:";
//
// AddConfigurationDialog
//
this.AcceptButton = this.okButton;
this.CancelButton = this.cancelButton;
this.ClientSize = new System.Drawing.Size(280, 149);
this.Controls.Add(this.label2);
this.Controls.Add(this.label1);
this.Controls.Add(this.configurationComboBox);
this.Controls.Add(this.cancelButton);
this.Controls.Add(this.okButton);
this.Controls.Add(this.configurationNameTextBox);
this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedToolWindow;
this.Name = "AddConfigurationDialog";
this.ShowInTaskbar = false;
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
this.Text = "New Configuration";
this.Load += new System.EventHandler(this.ConfigurationNameDialog_Load);
this.ResumeLayout(false);
}
#endregion
#region Properties
/// <summary>The configuration name entered by the user.</summary>
public string ConfigurationName => configurationName;
/// <summary>Name of the configuration whose settings are copied, or null for none.</summary>
public string CopyConfigurationName => copyConfigurationName;
#endregion
#region Methods
/// <summary>
/// Populates the "copy settings from" combo box with every existing project
/// configuration, preselecting the active one (or "&lt;none&gt;" by default).
/// </summary>
private void ConfigurationNameDialog_Load(object sender, System.EventArgs e)
{
    // "<none>" is always the first entry and the fallback selection.
    configurationComboBox.Items.Add( "<none>" );
    configurationComboBox.SelectedIndex = 0;

    foreach (ProjectConfig cfg in project.Configs)
    {
        int idx = configurationComboBox.Items.Add(cfg.Name);
        if (cfg.Name == project.ActiveConfigName)
            configurationComboBox.SelectedIndex = idx;
    }
}
/// <summary>
/// Validates the entered name, optionally copies assemblies from an existing
/// configuration, adds the new configuration to the project and closes the dialog.
/// </summary>
private void okButton_Click(object sender, System.EventArgs e)
{
    configurationName = configurationNameTextBox.Text;

    // Reject an empty name outright.
    if (configurationName == string.Empty)
    {
        UserMessage.Display( "No configuration name provided", "Configuration Name Error" );
        return;
    }

    // Reject duplicates.
    if (project.Configs.Contains(configurationName))
    {
        UserMessage.Display( "A configuration with that name already exists", "Configuration Name Error" );
        return;
    }

    // ToDo: Move more of this to project
    ProjectConfig created = new ProjectConfig(configurationName);

    // Index 0 is the "<none>" placeholder; anything above it names a source config.
    copyConfigurationName = null;
    if (configurationComboBox.SelectedIndex > 0)
    {
        copyConfigurationName = (string)configurationComboBox.SelectedItem;
        ProjectConfig sourceConfig = project.Configs[copyConfigurationName];
        if (sourceConfig != null)
        {
            foreach (string assembly in sourceConfig.Assemblies)
                created.Assemblies.Add(assembly);
        }
    }

    project.Configs.Add(created);

    DialogResult = DialogResult.OK;
    Close();
}
#endregion
}
}
| |
//
// MassStorageDevice.cs
//
// Author:
// Aaron Bockover <[email protected]>
//
// Copyright (C) 2008 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.IO;
using System.Linq;
using System.Collections.Generic;
using Mono.Unix;
using Hyena;
using Banshee.Base;
using Banshee.Hardware;
using Banshee.Collection;
using Banshee.Collection.Database;
namespace Banshee.Dap.MassStorage
{
// Describes the media capabilities of a USB mass-storage music player,
// optionally configured by an ".is_audio_player" file at the volume root.
public class MassStorageDevice : IDeviceMediaCapabilities
{
    private MassStorageSource source;

    // The source this device belongs to; set by the owning MassStorageSource.
    public MassStorageSource Source {
        get { return source; }
        set { source = value; }
    }

    public MassStorageDevice ()
    {
    }

    public MassStorageDevice (MassStorageSource source)
    {
        Source = source;
    }

    // Hook for subclasses to run device-specific setup; default does nothing.
    public virtual void SourceInitialize ()
    {
    }

    // Return false to veto deletion of the given track; default allows it.
    public virtual bool DeleteTrackHook (DatabaseTrackInfo track)
    {
        return true;
    }

    // An ".is_not_audio_player" marker file opts the volume out of DAP support.
    public virtual bool ShouldIgnoreDevice ()
    {
        return File.Exists (IsNotAudioPlayerPath);
    }

    // Returns true when an ".is_audio_player" file exists and has been loaded.
    public virtual bool LoadDeviceConfiguration ()
    {
        string path = IsAudioPlayerPath;

        if (!File.Exists (path)) {
            return false;
        }

        LoadConfig ();
        return true;
    }

    // Parses the ".is_audio_player" file (if present) and applies its values.
    // A parse failure is logged and treated as an empty configuration.
    protected void LoadConfig ()
    {
        IDictionary<string, string[]> config = null;

        if (File.Exists (IsAudioPlayerPath)) {
            try {
                using (var reader = new StreamReader (IsAudioPlayerPath)) {
                    config = new KeyValueParser (reader);
                    has_is_audio_player_file = true;
                }
            } catch (Exception e) {
                Log.Error ("Error parsing " + IsAudioPlayerPath, e);
            }
        }

        LoadConfig (config);
    }

    // Merges parsed configuration values with the subclass-provided defaults.
    protected void LoadConfig (IDictionary<string, string[]> config)
    {
        if (config == null) {
            config = new Dictionary<string, string[]> ();
        }

        name = GetPreferredValue ("name", config, DefaultName);
        cover_art_file_type = GetPreferredValue ("cover_art_file_type", config, DefaultCoverArtFileType);
        cover_art_file_name = GetPreferredValue ("cover_art_file_name", config, DefaultCoverArtFileName);
        cover_art_size = GetPreferredValue ("cover_art_size", config, DefaultCoverArtSize);

        var all_audio_folders = MergeValues ("audio_folders", config, DefaultAudioFolders);
        var all_video_folders = MergeValues ("video_folders", config, DefaultVideoFolders);

        // On a case-insensitive filesystem "Music" and "music" are the same
        // folder, so drop entries that differ only by case.
        if (!FileSystem.IsCaseSensitive (source.Volume)) {
            all_audio_folders = all_audio_folders.Distinct (StringComparer.CurrentCultureIgnoreCase).ToArray ();
            all_video_folders = all_video_folders.Distinct (StringComparer.CurrentCultureIgnoreCase).ToArray ();
        }

        audio_folders = all_audio_folders;
        video_folders = all_video_folders;

        playback_mime_types = MergeValues ("output_formats", config, DefaultPlaybackMimeTypes);
        playlist_formats = MergeValues ("playlist_formats", config, DefaultPlaylistFormats);

        var playlist_path = GetPreferredValue ("playlist_path", config, DefaultPlaylistPath);
        PlaylistPaths = playlist_path != null ? new string [] { playlist_path } : new string [0];

        folder_depth = GetPreferredValue ("folder_depth", config, DefaultFolderDepth);

        string preferred_folder_separator = GetPreferredValue ("folder_separator", config, DefaultFolderSeparator);
        if (preferred_folder_separator == Paths.Folder.DosSeparator.ToString () || preferred_folder_separator == "DOS") {
            folder_separator = Paths.Folder.DosSeparator;
        } else {
            folder_separator = Paths.Folder.UnixSeparator;
        }
    }

    // Config-file values come first so user preferences win over the defaults.
    private string[] MergeValues (string key, IDictionary<string, string[]> config, string[] defaultValues)
    {
        string[] values;
        if (config.TryGetValue (key, out values)) {
            return values.Union (defaultValues).ToArray ();
        }
        return defaultValues;
    }

    // Returns the first value for key parsed as an int, or defaultValue.
    private int GetPreferredValue (string key, IDictionary<string, string[]> config, int defaultValue)
    {
        string[] values;
        int parsedValue;
        if (config.TryGetValue (key, out values) && values.Length > 0
            && int.TryParse (values[0], out parsedValue)) {
            return parsedValue;
        }
        return defaultValue;
    }

    // Returns the first value for key, or defaultValue. Guards against a key
    // present with an empty value array, which previously threw
    // IndexOutOfRangeException.
    private string GetPreferredValue (string key, IDictionary<string, string[]> config, string defaultValue)
    {
        string[] values;
        if (config.TryGetValue (key, out values) && values.Length > 0) {
            return values[0];
        }
        return defaultValue;
    }

    // Subclasses may map a track to an on-device path; default reports no path.
    public virtual bool GetTrackPath (TrackInfo track, out string path)
    {
        path = null;
        return false;
    }

    private bool has_is_audio_player_file;
    // True when an ".is_audio_player" file was found and parsed successfully.
    public bool HasIsAudioPlayerFile {
        get { return has_is_audio_player_file; }
    }

    private string IsAudioPlayerPath {
        get { return System.IO.Path.Combine (source.Volume.MountPoint, ".is_audio_player"); }
    }

    private string IsNotAudioPlayerPath {
        get { return System.IO.Path.Combine (source.Volume.MountPoint, ".is_not_audio_player"); }
    }

    protected virtual string DefaultName {
        get { return source.Volume.Name; }
    }

    private string name;
    public virtual string Name {
        get { return name ?? source.Volume.Name; }
    }

    protected virtual int DefaultCoverArtSize {
        get { return 200; }
    }

    private int cover_art_size;
    public virtual int CoverArtSize {
        get { return cover_art_size; }
    }

    // -1 means "no folder depth restriction".
    protected virtual int DefaultFolderDepth {
        get { return -1; }
    }

    private int folder_depth = -1;
    public virtual int FolderDepth {
        get { return folder_depth; }
    }

    internal virtual int MinimumFolderDepth {
        get { return FolderDepth; }
    }

    internal virtual Uri RootPath {
        get { return null; }
    }

    protected virtual string DefaultFolderSeparator {
        get { return null; }
    }

    private char folder_separator;
    public virtual char FolderSeparator {
        get { return folder_separator; }
    }

    protected virtual string [] DefaultAudioFolders {
        get { return new string[0]; }
    }

    private string[] audio_folders = new string[0];
    public virtual string[] AudioFolders {
        get { return audio_folders; }
    }

    protected virtual string[] DefaultVideoFolders {
        get { return new string[0]; }
    }

    private string[] video_folders = new string[0];
    public virtual string[] VideoFolders {
        get { return video_folders; }
    }

    protected virtual string DefaultCoverArtFileType {
        get { return ""; }
    }

    private string cover_art_file_type;
    public virtual string CoverArtFileType {
        get { return cover_art_file_type; }
    }

    protected virtual string DefaultCoverArtFileName {
        get { return "cover.jpg"; }
    }

    private string cover_art_file_name;
    public virtual string CoverArtFileName {
        get { return cover_art_file_name; }
    }

    protected virtual string[] DefaultPlaylistFormats {
        get { return new string[0]; }
    }

    private string[] playlist_formats;
    public virtual string[] PlaylistFormats {
        get { return playlist_formats; }
    }

    protected virtual string DefaultPlaylistPath {
        get { return null; }
    }

    public virtual string [] PlaylistPaths {
        get; private set;
    }

    protected virtual string[] DefaultPlaybackMimeTypes {
        get { return new string[0]; }
    }

    private string[] playback_mime_types;
    public virtual string[] PlaybackMimeTypes {
        get { return playback_mime_types; }
    }

    public virtual string DeviceType {
        get { return "mass-storage"; }
    }

    public virtual string [] GetIconNames ()
    {
        return null;
    }

    public bool IsType (string type)
    {
        return type == DeviceType;
    }
}
}
| |
//
// PurchasesController.cs
//
// Author:
// Eddy Zavaleta <[email protected]>
// Eduardo Nieto <[email protected]>
//
// Copyright (C) 2011-2017 Eddy Zavaleta, Mictlanix, and contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Castle.ActiveRecord;
using Mictlanix.BE.Model;
using Mictlanix.BE.Web.Models;
using Mictlanix.BE.Web.Mvc;
using Mictlanix.BE.Web.Helpers;
namespace Mictlanix.BE.Web.Controllers.Mvc {
[Authorize]
/// <summary>
/// MVC controller for purchase orders: listing/search, CRUD, line-item
/// editing via AJAX JSON endpoints, and confirmation into inventory receipts.
/// Persistence is Castle ActiveRecord; writes happen inside a TransactionScope.
/// NOTE(review): scopes are disposed without an explicit commit/flush vote —
/// presumably commit-on-dispose is the configured behavior; confirm.
/// </summary>
public class PurchasesController : CustomController {
// GET: first page of purchase orders, after checking an exchange rate exists.
public ActionResult Index ()
{
if (!CashHelpers.ValidateExchangeRate ()) {
return View ("InvalidExchangeRate");
}
var search = SearchPurchaseOrders (new Search<PurchaseOrder> {
Limit = WebConfig.PageSize
});
return View (search);
}
// POST: runs the search; AJAX callers get the "_Index" partial instead of the full view.
[HttpPost]
public ActionResult Index (Search<PurchaseOrder> search)
{
if (ModelState.IsValid) {
search = SearchPurchaseOrders (search);
}
if (Request.IsAjaxRequest ()) {
return PartialView ("_Index", search);
} else {
return View (search);
}
}
// Pages through purchase orders, newest first, optionally filtered by
// supplier-name substring. Fills Total and Results on the search object.
Search<PurchaseOrder> SearchPurchaseOrders (Search<PurchaseOrder> search)
{
IQueryable<PurchaseOrder> qry;
if (search.Pattern == null) {
qry = from x in PurchaseOrder.Queryable
orderby x.Id descending
select x;
} else {
qry = from x in PurchaseOrder.Queryable
where x.Supplier.Name.Contains (search.Pattern)
orderby x.Id descending
select x;
}
search.Total = qry.Count ();
search.Results = qry.Skip (search.Offset).Take (search.Limit).ToList ();
return search;
}
//public ViewResult Print (int id)
//{
// var item = PurchaseOrder.Find (id);
// return View (item);
//}
// Read-only view of a single purchase order.
public ActionResult Details (int id)
{
var item = PurchaseOrder.Find (id);
return View (item);
}
// Returns an empty creation form as a partial view.
public ActionResult Create ()
{
//if (!CashHelpers.ValidateExchangeRate ()) {
// return View ("InvalidExchangeRate");
//}
return PartialView ("_Create", new PurchaseOrder ());
}
// POST: creates a purchase order stamped with the current employee and time.
[HttpPost]
public ActionResult Create (PurchaseOrder item)
{
if (!ModelState.IsValid)
return PartialView ("_Create", item);
item.Supplier = Supplier.Find (item.SupplierId);
item.Creator = CurrentUser.Employee;
item.Updater = item.Creator;
item.CreationTime = DateTime.Now;
item.ModificationTime = item.CreationTime;
using (var scope = new TransactionScope ()) {
item.CreateAndFlush ();
}
return PartialView ("_CreateSuccesful", new PurchaseOrder { Id = item.Id });
}
// GET: edit page; AJAX gets the master partial, and completed/cancelled
// orders are redirected to the read-only details view.
public ActionResult Edit (int id)
{
var item = PurchaseOrder.Find (id);
if (Request.IsAjaxRequest ()) {
return PartialView ("_MasterEditView", item);
}
if (!CashHelpers.ValidateExchangeRate ()) {
return View ("InvalidExchangeRate");
}
if (item.IsCompleted || item.IsCancelled) {
return RedirectToAction ("Details", new {
id = item.Id
});
}
return View (item);
}
// Re-renders the master view from persisted state, discarding unsaved edits.
public ActionResult DiscardChanges (int id)
{
return PartialView ("_MasterView", PurchaseOrder.TryFind (id));
}
// POST: updates the order header (supplier and comment only).
[HttpPost]
public ActionResult Edit (PurchaseOrder item)
{
var entity = PurchaseOrder.Find (item.Id);
entity.Supplier = Supplier.Find (item.SupplierId);
entity.Updater = CurrentUser.Employee;
entity.ModificationTime = DateTime.Now;
entity.Comment = item.Comment;
using (var scope = new TransactionScope ()) {
entity.UpdateAndFlush ();
}
return PartialView ("_MasterView", entity);
}
// Adds a line item for the given product, defaulting the price to the
// product's cost on price list 0 and quantity to 1.
[HttpPost]
public JsonResult AddPurchaseDetail (int movement, int warehouse, int product)
{
var p = Product.Find (product);
var cost = (from x in ProductPrice.Queryable
where x.Product.Id == product && x.List.Id == 0
select x.Value).SingleOrDefault ();
var item = new PurchaseOrderDetail {
Order = PurchaseOrder.Find (movement),
Warehouse = Warehouse.Find (warehouse),
Product = p,
ProductCode = p.Code,
ProductName = p.Name,
Quantity = 1,
TaxRate = p.TaxRate,
IsTaxIncluded = p.IsTaxIncluded,
DiscountRate = 0,
Price = cost,
ExchangeRate = CashHelpers.GetTodayDefaultExchangeRate (),
Currency = WebConfig.DefaultCurrency
};
using (var scope = new TransactionScope ()) {
item.CreateAndFlush ();
}
return Json (new {
id = item.Id
});
}
// Sets a line item's quantity; non-positive values are ignored and the
// stored quantity is echoed back.
[HttpPost]
public JsonResult EditDetailQuantity (int id, decimal value)
{
var detail = PurchaseOrderDetail.Find (id);
if (value > 0) {
detail.Quantity = value;
using (var scope = new TransactionScope ()) {
detail.UpdateAndFlush ();
}
}
return Json (new {
id = id,
value = detail.Quantity,
total = detail.Total.ToString ("c")
});
}
// Sets a line item's unit price, parsed as a currency string; negative or
// unparsable input is ignored.
[HttpPost]
public JsonResult EditDetailPrice (int id, string value)
{
var detail = PurchaseOrderDetail.Find (id);
bool success;
decimal val;
success = decimal.TryParse (value.Trim (),
System.Globalization.NumberStyles.Currency,
null, out val);
if (success && val >= 0) {
detail.Price = val;
using (var scope = new TransactionScope ()) {
detail.UpdateAndFlush ();
}
}
return Json (new {
id = id,
value = detail.Price.ToString ("c"),
total = detail.Total.ToString ("c")
});
}
// Sets a line item's currency; rejects currencies with no exchange rate
// for today (HTTP 400). NOTE(review): GetTodayExchangeRate is called twice
// with the same argument — presumably cheap/cached; confirm.
[HttpPost]
public ActionResult EditDetailCurrency (int id, string value)
{
var detail = PurchaseOrderDetail.Find (id);
CurrencyCode val;
bool success;
success = Enum.TryParse<CurrencyCode> (value.Trim (), out val);
if (success) {
decimal rate = CashHelpers.GetTodayExchangeRate (val);
if (rate == 0) {
Response.StatusCode = 400;
return Content (Resources.Message_InvalidExchangeRate);
}
detail.Currency = val;
detail.ExchangeRate = CashHelpers.GetTodayExchangeRate (val);
using (var scope = new TransactionScope ()) {
detail.Update ();
}
}
return Json (new {
id = id,
value = detail.Currency.ToString (),
rate = detail.ExchangeRate,
total = detail.Total.ToString ("c")
});
}
// Sets a line item's discount from a percentage string ("10 %" -> 0.10);
// only values in [0%, 100%] are accepted.
[HttpPost]
public JsonResult EditDetailDiscount (int id, string value)
{
var detail = PurchaseOrderDetail.Find (id);
bool success;
decimal discount;
success = decimal.TryParse (value.TrimEnd (new char [] { ' ', '%' }), out discount);
discount /= 100m;
if (success && discount >= 0 && discount <= 1) {
detail.DiscountRate = discount;
using (var scope = new TransactionScope ()) {
detail.UpdateAndFlush ();
}
}
return Json (new {
id = id,
value = detail.DiscountRate.ToString ("p"),
total = detail.Total.ToString ("c")
});
}
// Sets a line item's tax rate; refused once the order is completed or cancelled.
[HttpPost]
public ActionResult SetItemTaxRate (int id, string value)
{
var entity = PurchaseOrderDetail.Find (id);
bool success;
decimal val;
if (entity.Order.IsCompleted || entity.Order.IsCancelled) {
Response.StatusCode = 400;
return Content (Resources.ItemAlreadyCompletedOrCancelled);
}
success = decimal.TryParse (value.TrimEnd (new char [] { ' ', '%' }), out val);
// TODO: VAT value range validation
if (success) {
entity.TaxRate = val;
using (var scope = new TransactionScope ()) {
entity.Update ();
}
}
return Json (new {
id = entity.Id,
value = entity.FormattedValueFor (x => x.TaxRate),
total = entity.FormattedValueFor (x => x.Total),
total2 = entity.FormattedValueFor (x => x.TotalEx)
});
}
// Moves a line item to a different receiving warehouse.
[HttpPost]
public JsonResult EditDetailWarehouse (int id, int value)
{
var detail = PurchaseOrderDetail.Find (id);
detail.Warehouse = Warehouse.Find (value);
using (var scope = new TransactionScope ()) {
detail.UpdateAndFlush ();
}
return Json (new {
id = id,
value = detail.Warehouse.Name
});
}
// Re-renders the totals partial for the order.
public ActionResult GetTotals (int id)
{
var order = PurchaseOrder.Find (id);
return PartialView ("_Totals", order);
}
// Returns the edit row partial for one line item.
public ActionResult GetDetail (int id)
{
return PartialView ("_DetailEditView", PurchaseOrderDetail.Find (id));
}
// Deletes a line item.
[HttpPost]
public JsonResult RemoveDetail (int id)
{
var item = PurchaseOrderDetail.Find (id);
using (var scope = new TransactionScope ()) {
item.DeleteAndFlush ();
}
return Json (new {
id = id,
result = true
});
}
// TODO: Remove inventory stuff
// Confirms the order: creates one inventory receipt per warehouse with its
// line items, notifies inventory of the stock change, refreshes each
// product's cost on price list 0, and marks the order completed.
[HttpPost]
public ActionResult Confirm (int id)
{
PurchaseOrder item = PurchaseOrder.Find (id);
// Group line items by receiving warehouse; each group becomes a receipt.
var qry = from x in item.Details
group x by x.Warehouse into g
select new {
Warehouse = g.Key,
Details = g.ToList ()
};
var dt = DateTime.Now;
var employee = CurrentUser.Employee;
using (var scope = new TransactionScope ()) {
foreach (var x in qry) {
var master = new InventoryReceipt {
Order = item,
Warehouse = x.Warehouse,
CreationTime = dt,
ModificationTime = dt,
Creator = employee,
Updater = employee,
Store = x.Warehouse.Store
};
master.Create ();
foreach (var y in x.Details) {
var detail = new InventoryReceiptDetail {
Receipt = master,
Product = y.Product,
QuantityOrdered = y.Quantity,
Quantity = y.Quantity,
ProductCode = y.ProductCode,
ProductName = y.ProductName
};
detail.Create ();
InventoryHelpers.ChangeNotification (TransactionType.PurchaseOrder, item.Id,
item.ModificationTime, x.Warehouse, null, y.Product, y.Quantity);
}
}
// Record each purchased price as the product's cost (price list 0),
// creating the list entry when it does not exist yet.
foreach (var x in item.Details) {
var price = x.Product.Prices.SingleOrDefault (t => t.List.Id == 0);
if (price == null) {
price = new ProductPrice {
List = PriceList.Find (0),
Product = x.Product
};
}
price.Value = x.Price;
price.Save ();
}
item.IsCompleted = true;
item.ModificationTime = DateTime.Now;
item.UpdateAndFlush ();
}
return RedirectToAction ("Index");
}
// Cancels the order. NOTE(review): no guard against cancelling an already
// completed order — confirm that is intended.
[HttpPost]
public ActionResult Cancel (int id)
{
var item = PurchaseOrder.Find (id);
item.IsCancelled = true;
using (var scope = new TransactionScope ()) {
item.UpdateAndFlush ();
}
return RedirectToAction ("Index");
}
}
}
| |
using System;
namespace Server.Items
{
// Common base for all footwear. Equips on the Shoes layer and blocks
// scissoring for resource-based footwear.
public abstract class BaseShoes : BaseClothing
{
    public BaseShoes(int itemID) : this(itemID, 0)
    {
    }

    public BaseShoes(int itemID, int hue) : base(itemID, Layer.Shoes, hue)
    {
    }

    public BaseShoes(Serial serial) : base(serial)
    {
    }

    public override bool Scissor(Mobile from, Scissors scissors)
    {
        // Footwear made from a craft resource cannot be cut up.
        if (DefaultResource != CraftResource.None)
        {
            from.SendLocalizedMessage(502440); // Scissors can not be used on that to produce anything.
            return false;
        }

        return base.Scissor(from, scissors);
    }

    public override void Serialize(GenericWriter writer)
    {
        base.Serialize(writer);

        writer.Write(2); // version
    }

    public override void Deserialize(GenericReader reader)
    {
        base.Deserialize(reader);

        int version = reader.ReadInt();

        switch (version)
        {
            case 2: // current: resource no longer serialized
                break;
            case 1:
                m_Resource = (CraftResource)reader.ReadInt();
                break;
            case 0:
                m_Resource = DefaultResource;
                break;
        }
    }
}
// Fur boots (art 0x2307/0x2308).
[Flipable(0x2307, 0x2308)]
public class FurBoots : BaseShoes
{
    [Constructable]
    public FurBoots() : this(0)
    {
    }

    [Constructable]
    public FurBoots(int hue) : base(0x2307, hue)
    {
        Weight = 3.0;
    }

    public FurBoots(Serial serial) : base(serial)
    {
    }

    public override void Serialize(GenericWriter writer)
    {
        base.Serialize(writer);
        writer.Write(0); // version
    }

    public override void Deserialize(GenericReader reader)
    {
        base.Deserialize(reader);
        int version = reader.ReadInt();
    }
}
// Leather boots (art 0x170B/0x170C).
[Flipable(0x170b, 0x170c)]
public class Boots : BaseShoes
{
    public override CraftResource DefaultResource { get { return CraftResource.RegularLeather; } }

    [Constructable]
    public Boots() : this(0)
    {
    }

    [Constructable]
    public Boots(int hue) : base(0x170B, hue)
    {
        Weight = 3.0;
    }

    public Boots(Serial serial) : base(serial)
    {
    }

    public override void Serialize(GenericWriter writer)
    {
        base.Serialize(writer);
        writer.Write(0); // version
    }

    public override void Deserialize(GenericReader reader)
    {
        base.Deserialize(reader);
        int version = reader.ReadInt();
    }
}
[Flipable]
// Thigh boots, which may also be enchanted as arcane clothing (IArcaneEquip).
// The serialization format is version-sensitive: version 1 appends the arcane
// charge data when present.
public class ThighBoots : BaseShoes, IArcaneEquip
{
#region Arcane Impl
// Charge counters for the arcane enchantment; MaxArcaneCharges > 0 marks
// the item as arcane.
private int m_MaxArcaneCharges, m_CurArcaneCharges;
[CommandProperty( AccessLevel.GameMaster )]
public int MaxArcaneCharges
{
get{ return m_MaxArcaneCharges; }
set{ m_MaxArcaneCharges = value; InvalidateProperties(); Update(); }
}
[CommandProperty( AccessLevel.GameMaster )]
public int CurArcaneCharges
{
get{ return m_CurArcaneCharges; }
set{ m_CurArcaneCharges = value; InvalidateProperties(); Update(); }
}
[CommandProperty( AccessLevel.GameMaster )]
public bool IsArcane
{
get{ return ( m_MaxArcaneCharges > 0 && m_CurArcaneCharges >= 0 ); }
}
public override void OnSingleClick( Mobile from )
{
base.OnSingleClick( from );
// 1061837: "arcane charges: ~1_val~ / ~2_val~"
if ( IsArcane )
LabelTo( from, 1061837, String.Format( "{0}\t{1}", m_CurArcaneCharges, m_MaxArcaneCharges ) );
}
// Keeps the art in sync with the arcane state (0x26AF when arcane,
// 0x1711 otherwise) and clears the hue once charges hit zero.
public void Update()
{
if ( IsArcane )
ItemID = 0x26AF;
else if ( ItemID == 0x26AF )
ItemID = 0x1711;
if ( IsArcane && CurArcaneCharges == 0 )
Hue = 0;
}
public override void GetProperties( ObjectPropertyList list )
{
base.GetProperties( list );
if ( IsArcane )
list.Add( 1061837, "{0}\t{1}", m_CurArcaneCharges, m_MaxArcaneCharges ); // arcane charges: ~1_val~ / ~2_val~
}
// Manual flip between the two non-arcane art IDs.
public void Flip()
{
if ( ItemID == 0x1711 )
ItemID = 0x1712;
else if ( ItemID == 0x1712 )
ItemID = 0x1711;
}
#endregion
public override CraftResource DefaultResource{ get{ return CraftResource.RegularLeather; } }
[Constructable]
public ThighBoots() : this( 0 )
{
}
[Constructable]
public ThighBoots( int hue ) : base( 0x1711, hue )
{
Weight = 4.0;
}
public ThighBoots( Serial serial ) : base( serial )
{
}
// Version 1: a bool flag, then current and max arcane charges if arcane.
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 1 ); // version
if ( IsArcane )
{
writer.Write( true );
writer.Write( (int) m_CurArcaneCharges );
writer.Write( (int) m_MaxArcaneCharges );
}
else
{
writer.Write( false );
}
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
switch ( version )
{
case 1:
{
if ( reader.ReadBool() )
{
m_CurArcaneCharges = reader.ReadInt();
m_MaxArcaneCharges = reader.ReadInt();
// Migrate the old hard-coded arcane hue (2118) to the current default.
if ( Hue == 2118 )
Hue = ArcaneGem.DefaultArcaneHue;
}
break;
}
}
}
}
// Plain leather shoes (art 0x170F/0x1710).
[Flipable(0x170f, 0x1710)]
public class Shoes : BaseShoes
{
    public override CraftResource DefaultResource { get { return CraftResource.RegularLeather; } }

    [Constructable]
    public Shoes() : this(0)
    {
    }

    [Constructable]
    public Shoes(int hue) : base(0x170F, hue)
    {
        Weight = 2.0;
    }

    public Shoes(Serial serial) : base(serial)
    {
    }

    public override void Serialize(GenericWriter writer)
    {
        base.Serialize(writer);
        writer.Write(0); // version
    }

    public override void Deserialize(GenericReader reader)
    {
        base.Deserialize(reader);
        int version = reader.ReadInt();
    }
}
// Sandals (art 0x170D/0x170E). Cannot be dyed.
[Flipable(0x170d, 0x170e)]
public class Sandals : BaseShoes
{
    public override CraftResource DefaultResource { get { return CraftResource.RegularLeather; } }

    [Constructable]
    public Sandals() : this(0)
    {
    }

    [Constructable]
    public Sandals(int hue) : base(0x170D, hue)
    {
        Weight = 1.0;
    }

    public Sandals(Serial serial) : base(serial)
    {
    }

    // Sandals are never dyeable with a dye tub.
    public override bool Dye(Mobile from, DyeTub sender)
    {
        return false;
    }

    public override void Serialize(GenericWriter writer)
    {
        base.Serialize(writer);
        writer.Write(0); // version
    }

    public override void Deserialize(GenericReader reader)
    {
        base.Deserialize(reader);
        int version = reader.ReadInt();
    }
}
// Ninja tabi (art 0x2797/0x27E2).
[Flipable(0x2797, 0x27E2)]
public class NinjaTabi : BaseShoes
{
    [Constructable]
    public NinjaTabi() : this(0)
    {
    }

    [Constructable]
    public NinjaTabi(int hue) : base(0x2797, hue)
    {
        Weight = 2.0;
    }

    public NinjaTabi(Serial serial) : base(serial)
    {
    }

    public override void Serialize(GenericWriter writer)
    {
        base.Serialize(writer);
        writer.Write(0); // version
    }

    public override void Deserialize(GenericReader reader)
    {
        base.Deserialize(reader);
        int version = reader.ReadInt();
    }
}
// Samurai tabi (art 0x2796/0x27E1).
[Flipable(0x2796, 0x27E1)]
public class SamuraiTabi : BaseShoes
{
    [Constructable]
    public SamuraiTabi() : this(0)
    {
    }

    [Constructable]
    public SamuraiTabi(int hue) : base(0x2796, hue)
    {
        Weight = 2.0;
    }

    public SamuraiTabi(Serial serial) : base(serial)
    {
    }

    public override void Serialize(GenericWriter writer)
    {
        base.Serialize(writer);
        writer.Write(0); // version
    }

    public override void Deserialize(GenericReader reader)
    {
        base.Deserialize(reader);
        int version = reader.ReadInt();
    }
}
// Waraji sandals.
// NOTE(review): uses the same art/flip IDs (0x2796/0x27E1) as SamuraiTabi —
// confirm this is intentional.
[Flipable(0x2796, 0x27E1)]
public class Waraji : BaseShoes
{
    [Constructable]
    public Waraji() : this(0)
    {
    }

    [Constructable]
    public Waraji(int hue) : base(0x2796, hue)
    {
        Weight = 2.0;
    }

    public Waraji(Serial serial) : base(serial)
    {
    }

    public override void Serialize(GenericWriter writer)
    {
        base.Serialize(writer);
        writer.Write(0); // version
    }

    public override void Deserialize(GenericReader reader)
    {
        base.Deserialize(reader);
        int version = reader.ReadInt();
    }
}
// Elven boots (art 0x2FC4/0x317A). Not dyeable; uses an encoded-int version.
[Flipable(0x2FC4, 0x317A)]
public class ElvenBoots : BaseShoes
{
    public override CraftResource DefaultResource { get { return CraftResource.RegularLeather; } }

    //public override Race RequiredRace { get { return Race.Elf; } }

    [Constructable]
    public ElvenBoots() : this(0)
    {
    }

    [Constructable]
    public ElvenBoots(int hue) : base(0x2FC4, hue)
    {
        Weight = 2.0;
        Name = "Bottes exotiques";
    }

    public ElvenBoots(Serial serial) : base(serial)
    {
    }

    // Elven boots cannot be dyed with a dye tub.
    public override bool Dye(Mobile from, DyeTub sender)
    {
        return false;
    }

    public override void Serialize(GenericWriter writer)
    {
        base.Serialize(writer);
        writer.WriteEncodedInt(0); // version
    }

    public override void Deserialize(GenericReader reader)
    {
        base.Deserialize(reader);
        int version = reader.ReadEncodedInt();
    }
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using sys = System;
namespace Google.Ads.GoogleAds.V8.Resources
{
/// <summary>Resource name for the <c>AdGroupAd</c> resource.</summary>
public sealed partial class AdGroupAdName : gax::IResourceName, sys::IEquatable<AdGroupAdName>
{
/// <summary>The possible contents of <see cref="AdGroupAdName"/>.</summary>
public enum ResourceNameType
{
/// <summary>An unparsed resource name.</summary>
Unparsed = 0,
/// <summary>
/// A resource name with pattern <c>customers/{customer_id}/adGroupAds/{ad_group_id}~{ad_id}</c>.
/// </summary>
CustomerAdGroupAd = 1,
}
private static gax::PathTemplate s_customerAdGroupAd = new gax::PathTemplate("customers/{customer_id}/adGroupAds/{ad_group_id_ad_id}");
/// <summary>Creates a <see cref="AdGroupAdName"/> containing an unparsed resource name.</summary>
/// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
/// <returns>
/// A new instance of <see cref="AdGroupAdName"/> containing the provided
/// <paramref name="unparsedResourceName"/>.
/// </returns>
public static AdGroupAdName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
new AdGroupAdName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));
/// <summary>
/// Creates a <see cref="AdGroupAdName"/> with the pattern
/// <c>customers/{customer_id}/adGroupAds/{ad_group_id}~{ad_id}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adId">The <c>Ad</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>A new instance of <see cref="AdGroupAdName"/> constructed from the provided ids.</returns>
public static AdGroupAdName FromCustomerAdGroupAd(string customerId, string adGroupId, string adId) =>
new AdGroupAdName(ResourceNameType.CustomerAdGroupAd, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), adGroupId: gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)), adId: gax::GaxPreconditions.CheckNotNullOrEmpty(adId, nameof(adId)));
/// <summary>
/// Formats the IDs into the string representation of this <see cref="AdGroupAdName"/> with pattern
/// <c>customers/{customer_id}/adGroupAds/{ad_group_id}~{ad_id}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adId">The <c>Ad</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="AdGroupAdName"/> with pattern
/// <c>customers/{customer_id}/adGroupAds/{ad_group_id}~{ad_id}</c>.
/// </returns>
public static string Format(string customerId, string adGroupId, string adId) =>
FormatCustomerAdGroupAd(customerId, adGroupId, adId);
/// <summary>
/// Formats the IDs into the string representation of this <see cref="AdGroupAdName"/> with pattern
/// <c>customers/{customer_id}/adGroupAds/{ad_group_id}~{ad_id}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adId">The <c>Ad</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="AdGroupAdName"/> with pattern
/// <c>customers/{customer_id}/adGroupAds/{ad_group_id}~{ad_id}</c>.
/// </returns>
public static string FormatCustomerAdGroupAd(string customerId, string adGroupId, string adId) =>
s_customerAdGroupAd.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), $"{(gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)))}~{(gax::GaxPreconditions.CheckNotNullOrEmpty(adId, nameof(adId)))}");
/// <summary>Parses the given resource name string into a new <see cref="AdGroupAdName"/> instance.</summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>customers/{customer_id}/adGroupAds/{ad_group_id}~{ad_id}</c></description></item>
/// </list>
/// </remarks>
/// <param name="adGroupAdName">The resource name in string form. Must not be <c>null</c>.</param>
/// <returns>The parsed <see cref="AdGroupAdName"/> if successful.</returns>
public static AdGroupAdName Parse(string adGroupAdName) => Parse(adGroupAdName, false);
/// <summary>
/// Parses the given resource name string into a new <see cref="AdGroupAdName"/> instance; optionally allowing
/// an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>customers/{customer_id}/adGroupAds/{ad_group_id}~{ad_id}</c></description></item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="adGroupAdName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <returns>The parsed <see cref="AdGroupAdName"/> if successful.</returns>
public static AdGroupAdName Parse(string adGroupAdName, bool allowUnparsed) =>
TryParse(adGroupAdName, allowUnparsed, out AdGroupAdName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="AdGroupAdName"/> instance.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>customers/{customer_id}/adGroupAds/{ad_group_id}~{ad_id}</c></description></item>
/// </list>
/// </remarks>
/// <param name="adGroupAdName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="result">
/// When this method returns, the parsed <see cref="AdGroupAdName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string adGroupAdName, out AdGroupAdName result) => TryParse(adGroupAdName, false, out result);
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="AdGroupAdName"/> instance; optionally
/// allowing an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>customers/{customer_id}/adGroupAds/{ad_group_id}~{ad_id}</c></description></item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="adGroupAdName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <param name="result">
/// When this method returns, the parsed <see cref="AdGroupAdName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string adGroupAdName, bool allowUnparsed, out AdGroupAdName result)
{
gax::GaxPreconditions.CheckNotNull(adGroupAdName, nameof(adGroupAdName));
gax::TemplatedResourceName resourceName;
if (s_customerAdGroupAd.TryParseName(adGroupAdName, out resourceName))
{
string[] split1 = ParseSplitHelper(resourceName[1], new char[] { '~', });
if (split1 == null)
{
result = null;
return false;
}
result = FromCustomerAdGroupAd(resourceName[0], split1[0], split1[1]);
return true;
}
if (allowUnparsed)
{
if (gax::UnparsedResourceName.TryParse(adGroupAdName, out gax::UnparsedResourceName unparsedResourceName))
{
result = FromUnparsed(unparsedResourceName);
return true;
}
}
result = null;
return false;
}
private static string[] ParseSplitHelper(string s, char[] separators)
{
string[] result = new string[separators.Length + 1];
int i0 = 0;
for (int i = 0; i <= separators.Length; i++)
{
int i1 = i < separators.Length ? s.IndexOf(separators[i], i0) : s.Length;
if (i1 < 0 || i1 == i0)
{
return null;
}
result[i] = s.Substring(i0, i1 - i0);
i0 = i1 + 1;
}
return result;
}
private AdGroupAdName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string adId = null, string adGroupId = null, string customerId = null)
{
Type = type;
UnparsedResource = unparsedResourceName;
AdId = adId;
AdGroupId = adGroupId;
CustomerId = customerId;
}
/// <summary>
/// Constructs a new instance of a <see cref="AdGroupAdName"/> class from the component parts of pattern
/// <c>customers/{customer_id}/adGroupAds/{ad_group_id}~{ad_id}</c>
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adId">The <c>Ad</c> ID. Must not be <c>null</c> or empty.</param>
        public AdGroupAdName(string customerId, string adGroupId, string adId) : this(ResourceNameType.CustomerAdGroupAd, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), adGroupId: gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)), adId: gax::GaxPreconditions.CheckNotNullOrEmpty(adId, nameof(adId)))
        {
            // All validation happens in the chained constructor call above; nothing further to do.
        }
        /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
        public ResourceNameType Type { get; }
        /// <summary>
        /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
        /// unparsed resource name.
        /// </summary>
        public gax::UnparsedResourceName UnparsedResource { get; }
        /// <summary>
        /// The <c>Ad</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string AdId { get; }
        /// <summary>
        /// The <c>AdGroup</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string AdGroupId { get; }
        /// <summary>
        /// The <c>Customer</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string CustomerId { get; }
        /// <summary>Whether this instance contains a resource name with a known pattern.</summary>
        // Any type other than Unparsed corresponds to the single known pattern of this resource.
        public bool IsKnownPattern => Type != ResourceNameType.Unparsed;
/// <summary>The string representation of the resource name.</summary>
/// <returns>The string representation of the resource name.</returns>
public override string ToString()
{
switch (Type)
{
case ResourceNameType.Unparsed: return UnparsedResource.ToString();
case ResourceNameType.CustomerAdGroupAd: return s_customerAdGroupAd.Expand(CustomerId, $"{AdGroupId}~{AdId}");
default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
}
}
        /// <summary>Returns a hash code for this resource name.</summary>
        // Equality and hashing are both defined over the canonical string form.
        public override int GetHashCode() => ToString().GetHashCode();
        /// <inheritdoc/>
        public override bool Equals(object obj) => Equals(obj as AdGroupAdName);
        /// <inheritdoc/>
        public bool Equals(AdGroupAdName other) => ToString() == other?.ToString();
        /// <inheritdoc/>
        // ReferenceEquals covers the both-null case; the null-conditional covers a-null/b-non-null.
        public static bool operator ==(AdGroupAdName a, AdGroupAdName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);
        /// <inheritdoc/>
        public static bool operator !=(AdGroupAdName a, AdGroupAdName b) => !(a == b);
}
public partial class AdGroupAd
{
/// <summary>
/// <see cref="AdGroupAdName"/>-typed view over the <see cref="ResourceName"/> resource name property.
/// </summary>
internal AdGroupAdName ResourceNameAsAdGroupAdName
{
get => string.IsNullOrEmpty(ResourceName) ? null : AdGroupAdName.Parse(ResourceName, allowUnparsed: true);
set => ResourceName = value?.ToString() ?? "";
}
/// <summary>
/// <see cref="AdGroupName"/>-typed view over the <see cref="AdGroup"/> resource name property.
/// </summary>
internal AdGroupName AdGroupAsAdGroupName
{
get => string.IsNullOrEmpty(AdGroup) ? null : AdGroupName.Parse(AdGroup, allowUnparsed: true);
set => AdGroup = value?.ToString() ?? "";
}
/// <summary>
/// <see cref="AdGroupAdLabelName"/>-typed view over the <see cref="Labels"/> resource name property.
/// </summary>
internal gax::ResourceNameList<AdGroupAdLabelName> LabelsAsAdGroupAdLabelNames
{
get => new gax::ResourceNameList<AdGroupAdLabelName>(Labels, s => string.IsNullOrEmpty(s) ? null : AdGroupAdLabelName.Parse(s, allowUnparsed: true));
}
}
}
| |
using Shouldly;
using Veggerby.Algorithm.Calculus;
using Xunit;
namespace Veggerby.Algorithm.Tests.Calculus
{
public class OperandTests
{
[Fact]
public void Should_return_addition()
{
// arrange
var left = ValueConstant.Create(3);
var right = Variable.x;
// act
var actual = left + right;
// assert
actual.ShouldBeOfType<Addition>();
((Addition)actual).Operands.ShouldBe(new Operand[] { left, right });
}
[Fact]
public void Should_return_constant_from_addition()
{
// arrange
var left = ValueConstant.Create(3);
var right = ValueConstant.Create(6);
// act
var actual = left + right;
// assert
actual.ShouldBe(ValueConstant.Create(9));
}
[Fact]
public void Should_return_subtraction()
{
// arrange
var left = ValueConstant.Create(3);
var right = Variable.x;
// act
var actual = left - right;
// assert
actual.ShouldBeOfType<Subtraction>();
((BinaryOperation)actual).Left.ShouldBe(left);
((BinaryOperation)actual).Right.ShouldBe(right);
}
[Fact]
public void Should_return_constant_from_subtraction()
{
// arrange
var left = ValueConstant.Create(3);
var right = ValueConstant.Create(6);
// act
var actual = left - right;
// assert
actual.ShouldBe(ValueConstant.Create(-3));
}
[Fact]
public void Should_return_multiplication()
{
// arrange
var left = ValueConstant.Create(3);
var right = Variable.x;
// act
var actual = left * right;
// assert
actual.ShouldBeOfType<Multiplication>();
((Multiplication)actual).Operands.ShouldBe(new Operand[] { left, right });
}
[Fact]
public void Should_return_constant_from_multiplication()
{
// arrange
var left = ValueConstant.Create(3);
var right = ValueConstant.Create(6);
// act
var actual = left * right;
// assert
actual.ShouldBe(ValueConstant.Create(18));
}
[Fact]
public void Should_return_division()
{
// arrange
var left = ValueConstant.Create(3);
var right = Variable.x;
// act
var actual = left / right;
// assert
actual.ShouldBeOfType<Division>();
((BinaryOperation)actual).Left.ShouldBe(left);
((BinaryOperation)actual).Right.ShouldBe(right);
}
[Fact]
public void Should_return_fraction_from_constants_division()
{
// arrange
var left = ValueConstant.Create(1);
var right = ValueConstant.Create(3);
// act
var actual = Division.Create(left, right);
// assert
actual.ShouldBe(Fraction.Create(1, 3));
}
[Fact]
public void Should_return_power()
{
// arrange
var left = ValueConstant.Create(3);
var right = Variable.x;
// act
var actual = left ^ right;
// assert
actual.ShouldBeOfType<Power>();
((BinaryOperation)actual).Left.ShouldBe(left);
((BinaryOperation)actual).Right.ShouldBe(right);
}
[Fact]
public void Should_return_constant_from_power()
{
// arrange
var left = ValueConstant.Create(2);
var right = ValueConstant.Create(3);
// act
var actual = left ^ right;
// assert
actual.ShouldBe(ValueConstant.Create(8));
}
[Fact]
public void Should_return_one_for_one_as_base()
{
// arrange
var left = ValueConstant.One;
var right = Variable.x;
// act
var actual = left ^ right;
// assert
actual.ShouldBe(ValueConstant.One);
}
[Fact]
public void Should_return_one_for_zero_as_exponent()
{
// arrange
var left = Variable.x;
var right = ValueConstant.Zero;
// act
var actual = left ^ right;
// assert
actual.ShouldBe(ValueConstant.One);
}
[Fact]
public void Should_return_baze_for_one_as_exponent()
{
// arrange
var left = Variable.x;
var right = ValueConstant.One;
// act
var actual = left ^ right;
// assert
actual.ShouldBe(left);
}
[Fact]
public void Should_create_constant_from_int()
{
// arrange
// act
var actual = (ValueConstant)3;
// assert
actual.Value.ShouldBe(3);
}
[Fact]
public void Should_create_constant_from_double()
{
// arrange
// act
var actual = (ValueConstant)3.4;
// assert
actual.Value.ShouldBe(3.4);
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace WebApiSample.Mvvm.WebApi.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
    public class ObjectGenerator
    {
        // Number of elements generated for arrays, collections, dictionaries and queryables.
        internal const int DefaultCollectionSize = 2;
        // Generator for simple framework types (numerics, string, Guid, DateTime, Uri, ...).
        private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
        /// <summary>
        /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
        /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
        /// Complex types: POCO types.
        /// Nullables: <see cref="Nullable{T}"/>.
        /// Arrays: arrays of simple types or complex types.
        /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
        /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
        /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
        /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
        /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type.</returns>
        public object GenerateObject(Type type)
        {
            // Fresh reference map per top-level call; it breaks circular object graphs below.
            return GenerateObject(type, new Dictionary<Type, object>());
        }
        /// <summary>
        /// Core dispatch: routes <paramref name="type"/> to the appropriate specialized generator.
        /// <paramref name="createdObjectReferences"/> tracks complex objects already created so
        /// circular references terminate. Returns <c>null</c> if generation fails for any reason.
        /// </summary>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            try
            {
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return SimpleObjectGenerator.GenerateObject(type);
                }
                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IDictionary))
                {
                    // Non-generic IDictionary is represented by a Hashtable instance.
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }
                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IList) ||
                    type == typeof(IEnumerable) ||
                    type == typeof(ICollection))
                {
                    // Non-generic collection interfaces are represented by an ArrayList instance.
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }
                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }
                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Returns null if anything fails
                return null;
            }
            return null;
        }
        /// <summary>
        /// Handles generic types: nullables, key/value pairs, tuples, generic collection and
        /// dictionary interfaces, and finally public generic POCOs. Returns <c>null</c> when the
        /// generic shape is not recognized and the type is not public.
        /// </summary>
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
        {
            Type genericTypeDefinition = type.GetGenericTypeDefinition();
            if (genericTypeDefinition == typeof(Nullable<>))
            {
                return GenerateNullable(type, createdObjectReferences);
            }
            if (genericTypeDefinition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, createdObjectReferences);
            }
            if (IsTuple(genericTypeDefinition))
            {
                return GenerateTuple(type, createdObjectReferences);
            }
            Type[] genericArguments = type.GetGenericArguments();
            if (genericArguments.Length == 1)
            {
                if (genericTypeDefinition == typeof(IList<>) ||
                    genericTypeDefinition == typeof(IEnumerable<>) ||
                    genericTypeDefinition == typeof(ICollection<>))
                {
                    // Collection interfaces are represented by List<T>.
                    Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                    return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
                }
                if (genericTypeDefinition == typeof(IQueryable<>))
                {
                    return GenerateQueryable(type, collectionSize, createdObjectReferences);
                }
                Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
                if (closedCollectionType.IsAssignableFrom(type))
                {
                    return GenerateCollection(type, collectionSize, createdObjectReferences);
                }
            }
            if (genericArguments.Length == 2)
            {
                if (genericTypeDefinition == typeof(IDictionary<,>))
                {
                    // The dictionary interface is represented by Dictionary<TKey, TValue>.
                    Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                    return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
                }
                Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
                if (closedDictionaryType.IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, collectionSize, createdObjectReferences);
                }
            }
            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }
            return null;
        }
        /// <summary>
        /// Creates a Tuple instance, generating each element; returns <c>null</c> only when
        /// every element failed to generate.
        /// </summary>
        private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = type.GetGenericArguments();
            object[] parameterValues = new object[genericArgs.Length];
            bool failedToCreateTuple = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < genericArgs.Length; i++)
            {
                parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
                failedToCreateTuple &= parameterValues[i] == null;
            }
            if (failedToCreateTuple)
            {
                return null;
            }
            object result = Activator.CreateInstance(type, parameterValues);
            return result;
        }
        /// <summary>Whether the generic type definition is one of the Tuple`1..Tuple`8 family.</summary>
        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>) ||
                genericTypeDefinition == typeof(Tuple<,>) ||
                genericTypeDefinition == typeof(Tuple<,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }
        /// <summary>
        /// Creates a KeyValuePair, generating both key and value; returns <c>null</c> only when
        /// both failed to generate.
        /// </summary>
        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = keyValuePairType.GetGenericArguments();
            Type typeK = genericArgs[0];
            Type typeV = genericArgs[1];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
            if (keyObject == null && valueObject == null)
            {
                // Failed to create key and values
                return null;
            }
            object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
            return result;
        }
        /// <summary>
        /// Creates an array of <paramref name="size"/> generated elements; returns <c>null</c>
        /// when every element came back null.
        /// </summary>
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = arrayType.GetElementType();
            Array result = Array.CreateInstance(type, size);
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                result.SetValue(element, i);
                areAllElementsNull &= element == null;
            }
            if (areAllElementsNull)
            {
                return null;
            }
            return result;
        }
        /// <summary>
        /// Populates a dictionary instance via reflection. Uses Add/TryAdd and Contains/ContainsKey,
        /// whichever the concrete type exposes, so both generic and non-generic dictionaries work.
        /// </summary>
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            // Non-generic dictionaries use object for both key and value.
            Type typeK = typeof(object);
            Type typeV = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] genericArgs = dictionaryType.GetGenericArguments();
                typeK = genericArgs[0];
                typeV = genericArgs[1];
            }
            object result = Activator.CreateInstance(dictionaryType);
            MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
                if (newKey == null)
                {
                    // Cannot generate a valid key
                    return null;
                }
                // Guard against duplicate keys (possible when the key generator repeats values).
                bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
                if (!containsKey)
                {
                    object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                    addMethod.Invoke(result, new object[] { newKey, newValue });
                }
            }
            return result;
        }
        /// <summary>Returns the first declared value of the enum, or <c>null</c> if it has none.</summary>
        private static object GenerateEnum(Type enumType)
        {
            Array possibleValues = Enum.GetValues(enumType);
            if (possibleValues.Length > 0)
            {
                return possibleValues.GetValue(0);
            }
            return null;
        }
        /// <summary>
        /// Builds an IQueryable/IQueryable&lt;T&gt; by generating a backing list (or object[] for
        /// the non-generic case) and wrapping it with Queryable.AsQueryable.
        /// </summary>
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            bool isGeneric = queryableType.IsGenericType;
            object list;
            if (isGeneric)
            {
                Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
                list = GenerateCollection(listType, size, createdObjectReferences);
            }
            else
            {
                list = GenerateArray(typeof(object[]), size, createdObjectReferences);
            }
            if (list == null)
            {
                return null;
            }
            if (isGeneric)
            {
                // Bind the generic AsQueryable(IEnumerable<T>) overload via reflection.
                Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
                return asQueryableMethod.Invoke(null, new[] { list });
            }
            return Queryable.AsQueryable((IEnumerable)list);
        }
        /// <summary>
        /// Populates a collection instance via its Add method; returns <c>null</c> when every
        /// generated element was null.
        /// </summary>
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = collectionType.IsGenericType ?
                collectionType.GetGenericArguments()[0] :
                typeof(object);
            object result = Activator.CreateInstance(collectionType);
            MethodInfo addMethod = collectionType.GetMethod("Add");
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                addMethod.Invoke(result, new object[] { element });
                areAllElementsNull &= element == null;
            }
            if (areAllElementsNull)
            {
                return null;
            }
            return result;
        }
        /// <summary>Generates the underlying value type; boxing makes it a valid Nullable value.</summary>
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = nullableType.GetGenericArguments()[0];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type, createdObjectReferences);
        }
        /// <summary>
        /// Instantiates a POCO via its default constructor and fills its public properties and
        /// fields. The instance is registered in <paramref name="createdObjectReferences"/> before
        /// population so circular references resolve to the same instance.
        /// </summary>
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            object result = null;
            if (createdObjectReferences.TryGetValue(type, out result))
            {
                // The object has been created already, just return it. This will handle the circular reference case.
                return result;
            }
            if (type.IsValueType)
            {
                result = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor
                    return null;
                }
                result = defaultCtor.Invoke(new object[0]);
            }
            createdObjectReferences.Add(type, result);
            SetPublicProperties(type, result, createdObjectReferences);
            SetPublicFields(type, result, createdObjectReferences);
            return result;
        }
        /// <summary>Assigns generated values to every writable public instance property.</summary>
        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (PropertyInfo property in properties)
            {
                if (property.CanWrite)
                {
                    object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                    property.SetValue(obj, propertyValue, null);
                }
            }
        }
        /// <summary>Assigns generated values to every public instance field.</summary>
        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (FieldInfo field in fields)
            {
                object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
                field.SetValue(obj, fieldValue);
            }
        }
        /// <summary>
        /// Table-driven generator for simple framework types. A monotonically increasing index is
        /// threaded through so successive samples differ ("sample string 1", "sample string 2", ...).
        /// </summary>
        private class SimpleTypeObjectGenerator
        {
            // Incremented on every generated value to keep samples distinct.
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();
            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), index => true },
                    { typeof(Byte), index => (Byte)64 },
                    { typeof(Char), index => (Char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(Decimal), index => (Decimal)index },
                    { typeof(Double), index => (Double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                    { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                    { typeof(Int64), index => (Int64)index },
                    { typeof(Object), index => new object() },
                    { typeof(SByte), index => (SByte)64 },
                    { typeof(Single), index => (Single)(index + 0.1) },
                    {
                        typeof(String), index =>
                        {
                            return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                        }
                    },
                    {
                        typeof(TimeSpan), index =>
                        {
                            return TimeSpan.FromTicks(1234567);
                        }
                    },
                    { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                    { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                    { typeof(UInt64), index => (UInt64)index },
                    {
                        typeof(Uri), index =>
                        {
                            return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                        }
                    },
                };
            }
            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }
            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
| |
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.Linq;
namespace ShareDB.RichText
{
    public class Delta
    {
        // Lazily-created shared comparer instance.
        private static readonly Lazy<DeltaEqualityComparer> _equalityComparer = new Lazy<DeltaEqualityComparer>();
        /// <summary>Shared equality comparer for <see cref="Delta"/> instances.</summary>
        public static DeltaEqualityComparer EqualityComparer => _equalityComparer.Value;
        // JSON property names for the three op kinds and the attributes bag.
        public const string InsertType = "insert";
        public const string DeleteType = "delete";
        public const string RetainType = "retain";
        public const string Attributes = "attributes";
        /// <summary>Creates a new, empty delta.</summary>
        public static Delta New()
        {
            return new Delta();
        }
        // Ordered list of operations; each op is a JObject with insert/delete/retain (+ attributes).
        private readonly List<JToken> _ops;
        public Delta()
        {
            _ops = new List<JToken>();
        }
        public Delta(IEnumerable<JToken> ops)
        {
            _ops = ops.ToList();
        }
        /// <summary>Deep-copy constructor; ops are cloned so the two deltas share no tokens.</summary>
        public Delta(Delta delta)
        {
            _ops = delta._ops.Select(op => op.DeepClone()).ToList();
        }
        public IReadOnlyList<JToken> Ops => _ops;
        /// <summary>Appends an insert op. Empty-string inserts are no-ops.</summary>
        public Delta Insert(JToken text, JToken attributes = null)
        {
            if (text.Type == JTokenType.String && ((string) text).Length == 0)
                return this;
            var newOp = new JObject(new JProperty(InsertType, text));
            // An empty attributes object is treated the same as no attributes.
            if (attributes != null && attributes.HasValues)
                newOp[Attributes] = attributes;
            return Add(newOp);
        }
        /// <summary>Appends a delete op. Non-positive lengths are no-ops.</summary>
        public Delta Delete(int length)
        {
            if (length <= 0)
                return this;
            return Add(new JObject(new JProperty(DeleteType, length)));
        }
        /// <summary>Appends a retain op, optionally carrying attributes. Non-positive lengths are no-ops.</summary>
        public Delta Retain(int length, JToken attributes = null)
        {
            if (length <= 0)
                return this;
            var newOp = new JObject(new JProperty(RetainType, length));
            if (attributes != null && attributes.HasValues)
                newOp[Attributes] = attributes;
            return Add(newOp);
        }
        /// <summary>
        /// Removes a trailing attribute-less retain, which has no effect on the document.
        /// </summary>
        public Delta Chop()
        {
            JToken lastOp = _ops.Count == 0 ? null : _ops[_ops.Count - 1];
            if (lastOp != null && lastOp[RetainType] != null && lastOp[Attributes] == null)
                _ops.RemoveAt(_ops.Count - 1);
            return this;
        }
        /// <summary>
        /// Returns a new delta equivalent to applying this delta and then <paramref name="other"/>.
        /// </summary>
        public Delta Compose(Delta other)
        {
            var thisIter = new OpIterator(_ops);
            var otherIter = new OpIterator(other._ops);
            var delta = new Delta();
            while (thisIter.HasNext() || otherIter.HasNext())
            {
                // Inserts from the second delta and deletes from the first pass through unchanged.
                if (otherIter.PeekType() == InsertType)
                {
                    delta.Add(otherIter.Next());
                }
                else if (thisIter.PeekType() == DeleteType)
                {
                    delta.Add(thisIter.Next());
                }
                else
                {
                    // Consume matching-length slices from both sides and combine them.
                    int length = Math.Min(thisIter.PeekLength(), otherIter.PeekLength());
                    JToken thisOp = thisIter.Next(length);
                    JToken otherOp = otherIter.Next(length);
                    if (otherOp.OpType() == RetainType)
                    {
                        // Preserve this delta's op (insert or retain), merging in other's attributes.
                        var newOp = new JObject();
                        if (thisOp.OpType() == RetainType)
                            newOp[RetainType] = length;
                        else
                            newOp[InsertType] = thisOp[InsertType];
                        // keepNull only when both ops are retains: a null attribute must survive
                        // to cancel formatting on the underlying text.
                        JToken attributes = ComposeAttributes(thisOp[Attributes], otherOp[Attributes],
                            thisOp.OpType() == RetainType);
                        if (attributes != null)
                            newOp[Attributes] = attributes;
                        delta.Add(newOp);
                    }
                    else if (otherOp.OpType() == DeleteType && thisOp.OpType() == RetainType)
                    {
                        // other deletes text this delta retained; an insert deleted by other vanishes.
                        delta.Add(otherOp);
                    }
                }
            }
            return delta.Chop();
        }
        public int GetLength()
        {
            return _ops.Sum(op => op.OpLength());
        }
        /// <summary>Structural equality over the op lists.</summary>
        public bool DeepEquals(Delta other)
        {
            return _ops.SequenceEqual(other._ops, JToken.EqualityComparer);
        }
        /// <summary>
        /// Appends an op, normalizing as it goes: consecutive deletes merge, an insert is moved
        /// before an immediately preceding delete, and adjacent inserts/retains with identical
        /// attributes are merged.
        /// </summary>
        private Delta Add(JToken newOp)
        {
            int index = _ops.Count;
            JToken lastOp = _ops.Count == 0 ? null : _ops[_ops.Count - 1];
            // Clone so the stored op is independent of the caller's token.
            newOp = (JObject) newOp.DeepClone();
            if (lastOp != null && lastOp.Type == JTokenType.Object)
            {
                if (newOp.OpType() == DeleteType && lastOp.OpType() == DeleteType)
                {
                    // Merge consecutive deletes into one.
                    int delete = (int) lastOp[DeleteType] + (int) newOp[DeleteType];
                    _ops[index - 1] = new JObject(new JProperty(DeleteType, delete));
                    return this;
                }
                if (lastOp.OpType() == DeleteType && newOp.OpType() == InsertType)
                {
                    // Normalize to insert-before-delete; retarget the merge checks at the op
                    // preceding the delete.
                    index -= 1;
                    lastOp = index == 0 ? null : _ops[index - 1];
                    if (lastOp?.Type != JTokenType.Object)
                    {
                        _ops.Insert(0, newOp);
                        return this;
                    }
                }
                if (JToken.DeepEquals(newOp[Attributes], lastOp[Attributes]))
                {
                    if (newOp[InsertType]?.Type == JTokenType.String && lastOp[InsertType]?.Type == JTokenType.String)
                    {
                        // Merge adjacent string inserts sharing the same attributes.
                        string insert = (string) lastOp[InsertType] + (string) newOp[InsertType];
                        var op = new JObject(new JProperty(InsertType, insert));
                        if (newOp[Attributes]?.Type == JTokenType.Object)
                            op[Attributes] = newOp[Attributes];
                        _ops[index - 1] = op;
                        return this;
                    }
                    else if (newOp.OpType() == RetainType && lastOp.OpType() == RetainType)
                    {
                        // Merge adjacent retains sharing the same attributes.
                        int retain = (int) lastOp[RetainType] + (int) newOp[RetainType];
                        var op = new JObject(new JProperty(RetainType, retain));
                        if (newOp[Attributes]?.Type == JTokenType.Object)
                            op[Attributes] = newOp[Attributes];
                        _ops[index - 1] = op;
                        return this;
                    }
                }
            }
            _ops.Insert(index, newOp);
            return this;
        }
        /// <summary>
        /// Overlays attribute set <paramref name="b"/> on <paramref name="a"/>; <paramref name="keepNull"/>
        /// controls whether explicit nulls in <paramref name="b"/> (attribute removals) survive.
        /// Returns <c>null</c> for an empty result.
        /// </summary>
        private static JToken ComposeAttributes(JToken a, JToken b, bool keepNull)
        {
            JObject aObj = a?.Type == JTokenType.Object ? (JObject) a : new JObject();
            JObject bObj = b?.Type == JTokenType.Object ? (JObject) b : new JObject();
            JObject attributes = (JObject) bObj.DeepClone();
            if (!keepNull)
                attributes = new JObject(attributes.Properties().Where(p => p.Value.Type != JTokenType.Null));
            foreach (JProperty prop in aObj.Properties())
            {
                // Keep attributes from a that b does not override.
                if (aObj[prop.Name] != null && bObj[prop.Name] == null)
                    attributes.Add(prop);
            }
            return attributes.HasValues ? attributes : null;
        }
        /// <summary>
        /// Cursor over an op list that can consume partial ops; past the end it yields an
        /// implicit infinite retain.
        /// </summary>
        private class OpIterator
        {
            private readonly IReadOnlyList<JToken> _ops;
            // Index of the current op, and the offset already consumed within it.
            private int _index;
            private int _offset;
            public OpIterator(IReadOnlyList<JToken> ops)
            {
                _ops = ops;
            }
            public bool HasNext()
            {
                // PeekLength() is int.MaxValue only when past the real ops.
                return PeekLength() < int.MaxValue;
            }
            /// <summary>Consumes up to <paramref name="length"/> units of the current op and returns that slice.</summary>
            public JToken Next(int length = int.MaxValue)
            {
                if (_index >= _ops.Count)
                    return new JObject(new JProperty(RetainType, int.MaxValue));
                JToken nextOp = _ops[_index];
                int offset = _offset;
                int opLength = nextOp.OpLength();
                if (length >= opLength - offset)
                {
                    // Consuming the remainder of this op: advance to the next one.
                    length = opLength - offset;
                    _index++;
                    _offset = 0;
                }
                else
                {
                    _offset += length;
                }
                if (nextOp.OpType() == DeleteType)
                    return new JObject(new JProperty(DeleteType, length));
                var retOp = new JObject();
                if (nextOp[Attributes] != null)
                    retOp[Attributes] = nextOp[Attributes];
                if (nextOp.OpType() == RetainType)
                    retOp[RetainType] = length;
                else if (nextOp[InsertType]?.Type == JTokenType.String)
                    // String inserts are sliced by character range.
                    retOp[InsertType] = ((string) nextOp[InsertType]).Substring(offset, length);
                else
                    // Embed inserts (non-string) are returned whole.
                    retOp[InsertType] = nextOp[InsertType];
                return retOp;
            }
            public JToken Peek()
            {
                return _index >= _ops.Count ? null : _ops[_index];
            }
            public int PeekLength()
            {
                if (_index >= _ops.Count)
                    return int.MaxValue;
                // Remaining length of the current op, accounting for what was already consumed.
                return _ops[_index].OpLength() - _offset;
            }
            public string PeekType()
            {
                if (_index >= _ops.Count)
                    return RetainType;
                JToken nextOp = _ops[_index];
                return nextOp.OpType();
            }
        }
    }
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.ObjectModel;
using System.Diagnostics;
using Microsoft.VisualStudio.Debugger.Clr;
using Microsoft.VisualStudio.Debugger.Evaluation;
using Microsoft.VisualStudio.Debugger.Evaluation.ClrCompilation;
using Type = Microsoft.VisualStudio.Debugger.Metadata.Type;
namespace Microsoft.CodeAnalysis.ExpressionEvaluator
{
/// <summary>
/// Expansion that backs the debugger "Results View" for enumerable values:
/// it instantiates the results-view proxy for an IEnumerable(-of-T) value
/// and exposes the proxy's members as child rows.
/// </summary>
internal sealed class ResultsViewExpansion : Expansion
{
    private const string ResultsFormatSpecifier = "results";

    /// <summary>
    /// Creates an expansion for the value if it is enumerable (per
    /// GetEnumerableType); returns null otherwise.
    /// </summary>
    internal static ResultsViewExpansion CreateExpansion(DkmInspectionContext inspectionContext, DkmClrValue value, Formatter formatter)
    {
        var enumerableType = GetEnumerableType(value);
        if (enumerableType == null)
        {
            return null;
        }
        return CreateExpansion(inspectionContext, value, enumerableType, formatter);
    }

    /// <summary>
    /// Creates a row for an explicit ", results" request. Errors (error
    /// value, thrown exception, non-enumerable, missing System.Core)
    /// are reported as an error row rather than thrown.
    /// </summary>
    internal static EvalResultDataItem CreateResultsOnlyRow(
        DkmInspectionContext inspectionContext,
        string name,
        DkmClrType declaredType,
        DkmClrValue value,
        Formatter formatter)
    {
        string errorMessage;
        if (value.IsError())
        {
            errorMessage = (string)value.HostObjectValue;
        }
        else if (value.HasExceptionThrown(parent: null))
        {
            errorMessage = value.GetExceptionMessage(name, formatter);
        }
        else
        {
            var enumerableType = GetEnumerableType(value);
            if (enumerableType != null)
            {
                var expansion = CreateExpansion(inspectionContext, value, enumerableType, formatter);
                if (expansion != null)
                {
                    return expansion.CreateResultsViewRow(inspectionContext, name, declaredType.GetLmrType(), value, includeResultsFormatSpecifier: true, formatter: formatter);
                }
                // Enumerable, but the proxy could not be instantiated
                // (see CreateExpansion): report missing System.Core.
                errorMessage = Resources.ResultsViewNoSystemCore;
            }
            else
            {
                errorMessage = Resources.ResultsViewNotEnumerable;
            }
        }
        Debug.Assert(errorMessage != null);
        return new EvalResultDataItem(name, errorMessage);
    }

    /// <summary>
    /// Generate a Results Only row if the value is a synthesized
    /// value declared as IEnumerable or IEnumerable<T>.
    /// </summary>
    internal static EvalResultDataItem CreateResultsOnlyRowIfSynthesizedEnumerable(
        DkmInspectionContext inspectionContext,
        string name,
        DkmClrType declaredType,
        DkmClrValue value,
        Formatter formatter)
    {
        if ((value.ValueFlags & DkmClrValueFlags.Synthetic) == 0)
        {
            return null;
        }
        // Must be declared as IEnumerable or IEnumerable<T>, not a derived type.
        var enumerableType = GetEnumerableType(value, declaredType, requireExactInterface: true);
        if (enumerableType == null)
        {
            return null;
        }
        var expansion = CreateExpansion(inspectionContext, value, enumerableType, formatter);
        if (expansion == null)
        {
            return null;
        }
        return expansion.CreateResultsViewRow(inspectionContext, name, declaredType.GetLmrType(), value, includeResultsFormatSpecifier: false, formatter: formatter);
    }

    private static DkmClrType GetEnumerableType(DkmClrValue value)
    {
        return GetEnumerableType(value, value.Type, requireExactInterface: false);
    }

    /// <summary>
    /// Filters out values that never get a Results View: nulls, strings,
    /// and arrays (matching the legacy EE behavior).
    /// </summary>
    private static bool IsEnumerableCandidate(DkmClrValue value)
    {
        Debug.Assert(!value.IsError());
        if (value.IsNull)
        {
            return false;
        }
        // Do not support Results View for strings
        // or arrays. (Matches legacy EE.)
        var type = value.Type.GetLmrType();
        return !type.IsString() && !type.IsArray;
    }

    /// <summary>
    /// Returns the IEnumerable/IEnumerable&lt;T&gt; type to use for the
    /// proxy, or null. With requireExactInterface the declared type must
    /// itself be one of the two interfaces; otherwise any implemented
    /// IEnumerable interface is accepted.
    /// </summary>
    private static DkmClrType GetEnumerableType(DkmClrValue value, DkmClrType valueType, bool requireExactInterface)
    {
        if (!IsEnumerableCandidate(value))
        {
            return null;
        }
        var type = valueType.GetLmrType();
        Type enumerableType;
        if (requireExactInterface)
        {
            if (!type.IsIEnumerable() && !type.IsIEnumerableOfT())
            {
                return null;
            }
            enumerableType = type;
        }
        else
        {
            enumerableType = type.GetIEnumerableImplementationIfAny();
            if (enumerableType == null)
            {
                return null;
            }
        }
        return DkmClrType.Create(valueType.AppDomain, enumerableType);
    }

    /// <summary>
    /// Instantiates the results-view proxy for the value and wraps its
    /// public members in an expansion; null if the proxy is unavailable.
    /// </summary>
    private static ResultsViewExpansion CreateExpansion(DkmInspectionContext inspectionContext, DkmClrValue value, DkmClrType enumerableType, Formatter formatter)
    {
        var proxyValue = value.InstantiateResultsViewProxy(inspectionContext, enumerableType);
        // InstantiateResultsViewProxy may return null
        // (if assembly is missing for instance).
        if (proxyValue == null)
        {
            return null;
        }
        var proxyMembers = MemberExpansion.CreateExpansion(
            inspectionContext,
            proxyValue.Type.GetLmrType(),
            proxyValue,
            ExpansionFlags.None,
            TypeHelpers.IsPublic,
            formatter);
        return new ResultsViewExpansion(proxyValue, proxyMembers);
    }

    private readonly DkmClrValue _proxyValue;
    private readonly Expansion _proxyMembers;

    private ResultsViewExpansion(DkmClrValue proxyValue, Expansion proxyMembers)
    {
        Debug.Assert(proxyValue != null);
        Debug.Assert(proxyMembers != null);
        _proxyValue = proxyValue;
        _proxyMembers = proxyMembers;
    }

    // Contributes a single "Results View" row when it falls inside the
    // requested [startIndex, startIndex + count) window.
    internal override void GetRows(
        ResultProvider resultProvider,
        ArrayBuilder<EvalResultDataItem> rows,
        DkmInspectionContext inspectionContext,
        EvalResultDataItem parent,
        DkmClrValue value,
        int startIndex,
        int count,
        bool visitAll,
        ref int index)
    {
        if (InRange(startIndex, count, index))
        {
            rows.Add(CreateResultsViewRow(inspectionContext, parent, resultProvider.Formatter));
        }
        index++;
    }

    /// <summary>
    /// Builds the read-only "Results View" child row under an expanded
    /// parent, with a full name that re-creates the proxy when evaluated.
    /// </summary>
    private EvalResultDataItem CreateResultsViewRow(DkmInspectionContext inspectionContext, EvalResultDataItem parent, Formatter formatter)
    {
        Debug.Assert(parent != null);
        var proxyType = _proxyValue.Type.GetLmrType();
        var fullName = parent.ChildFullNamePrefix;
        var childFullNamePrefix = (fullName == null) ?
            null :
            formatter.GetObjectCreationExpression(formatter.GetTypeName(proxyType, escapeKeywordIdentifiers: true), fullName);
        return new EvalResultDataItem(
            ExpansionKind.ResultsView,
            Resources.ResultsView,
            typeDeclaringMember: null,
            declaredType: proxyType,
            parent: null,
            value: _proxyValue,
            displayValue: Resources.ResultsViewValueWarning,
            expansion: _proxyMembers,
            childShouldParenthesize: false,
            fullName: fullName,
            childFullNamePrefixOpt: childFullNamePrefix,
            formatSpecifiers: Formatter.AddFormatSpecifier(parent.FormatSpecifiers, ResultsFormatSpecifier),
            category: DkmEvaluationResultCategory.Method,
            flags: DkmEvaluationResultFlags.ReadOnly,
            editableValue: null,
            inspectionContext: inspectionContext);
    }

    /// <summary>
    /// Builds a top-level results-only row (for ", results" requests or
    /// synthesized enumerables); children come from the proxy via
    /// IndirectExpansion.
    /// </summary>
    private EvalResultDataItem CreateResultsViewRow(
        DkmInspectionContext inspectionContext,
        string name,
        Type declaredType,
        DkmClrValue value,
        bool includeResultsFormatSpecifier,
        Formatter formatter)
    {
        var proxyType = _proxyValue.Type.GetLmrType();
        ReadOnlyCollection<string> formatSpecifiers;
        var fullName = formatter.TrimAndGetFormatSpecifiers(name, out formatSpecifiers);
        if (includeResultsFormatSpecifier)
        {
            formatSpecifiers = Formatter.AddFormatSpecifier(formatSpecifiers, ResultsFormatSpecifier);
        }
        var childFullNamePrefix = formatter.GetObjectCreationExpression(formatter.GetTypeName(proxyType, escapeKeywordIdentifiers: true), fullName);
        return new EvalResultDataItem(
            ExpansionKind.Default,
            name,
            typeDeclaringMember: null,
            declaredType: declaredType,
            parent: null,
            value: value,
            displayValue: name,
            expansion: new IndirectExpansion(_proxyValue, _proxyMembers),
            childShouldParenthesize: false,
            fullName: fullName,
            childFullNamePrefixOpt: childFullNamePrefix,
            formatSpecifiers: formatSpecifiers,
            category: DkmEvaluationResultCategory.Method,
            flags: DkmEvaluationResultFlags.ReadOnly,
            editableValue: null,
            inspectionContext: inspectionContext);
    }

    /// <summary>
    /// Delegating expansion that substitutes the proxy value for the
    /// row's own value when producing child rows.
    /// </summary>
    private sealed class IndirectExpansion : Expansion
    {
        private readonly DkmClrValue _proxyValue;
        private readonly Expansion _expansion;

        internal IndirectExpansion(DkmClrValue proxyValue, Expansion expansion)
        {
            _proxyValue = proxyValue;
            _expansion = expansion;
        }

        internal override void GetRows(
            ResultProvider resultProvider,
            ArrayBuilder<EvalResultDataItem> rows,
            DkmInspectionContext inspectionContext,
            EvalResultDataItem parent,
            DkmClrValue value,
            int startIndex,
            int count,
            bool visitAll,
            ref int index)
        {
            _expansion.GetRows(resultProvider, rows, inspectionContext, parent, _proxyValue, startIndex, count, visitAll, ref index);
        }
    }
}
}
| |
using System;
using System.Threading;
using System.Threading.Tasks;
using Robust.Shared.Log;
using Robust.Shared.Timing;
using Robust.Shared.Utility;
namespace Content.Server.GameObjects.EntitySystems.JobQueues
{
/// <summary>
/// CPU-intensive job that can be suspended and resumed on the main thread
/// </summary>
/// <remarks>
/// Implementations should overload <see cref="Process"/>.
/// Inside <see cref="Process"/>, implementations should only await on <see cref="SuspendNow"/>,
/// <see cref="SuspendIfOutOfTime"/>, or <see cref="WaitAsyncTask"/>.
/// </remarks>
/// <typeparam name="T">The type of result this job generates</typeparam>
public abstract class Job<T> : IJob
{
    /// <summary>Current lifecycle state of the job (see <see cref="JobStatus"/>).</summary>
    public JobStatus Status { get; private set; } = JobStatus.Pending;

    /// <summary>
    /// Represents the status of this job as a regular task.
    /// </summary>
    public Task<T> AsTask { get; }

    /// <summary>Result of <see cref="Process"/>; default until the job finishes.</summary>
    public T Result { get; private set; }

    /// <summary>Exception that failed the job, if any (also set on <see cref="AsTask"/>).</summary>
    public Exception Exception { get; private set; }

    protected CancellationToken Cancellation { get; }

    /// <summary>Accumulated wall-clock seconds actually spent running this job.</summary>
    public double DebugTime { get; private set; }

    // Per-Run() time budget in seconds; 0 means "no limit" (see SuspendIfOutOfTime).
    private readonly double _maxTime;
    protected readonly IStopwatch StopWatch;
    // TCS for the Task property.
    private readonly TaskCompletionSource<T> _taskTcs;
    // TCS to call to resume the suspended job.
    private TaskCompletionSource<object> _resume;
    // The async state machine for ProcessWrap(); created lazily on first Run().
    private Task _workInProgress;

    protected Job(double maxTime, CancellationToken cancellation = default)
        : this(maxTime, new Stopwatch(), cancellation)
    {
    }

    protected Job(double maxTime, IStopwatch stopwatch, CancellationToken cancellation = default)
    {
        _maxTime = maxTime;
        StopWatch = stopwatch;
        Cancellation = cancellation;
        _taskTcs = new TaskCompletionSource<T>();
        AsTask = _taskTcs.Task;
    }

    /// <summary>
    /// Suspends the current task immediately, yielding to other running jobs.
    /// </summary>
    /// <remarks>
    /// This does not stop the job queue from un-suspending the current task immediately again,
    /// if there is still time left over.
    /// </remarks>
    protected Task SuspendNow()
    {
        DebugTools.AssertNull(_resume);
        _resume = new TaskCompletionSource<object>();
        Status = JobStatus.Paused;
        DebugTime += StopWatch.Elapsed.TotalSeconds;
        return _resume.Task;
    }

    /// <summary>
    /// Suspends only when this Run() slice has exceeded its time budget;
    /// completes synchronously otherwise.
    /// </summary>
    protected ValueTask SuspendIfOutOfTime()
    {
        DebugTools.AssertNull(_resume);
        // ReSharper disable once CompareOfFloatsByEqualityOperator
        if (StopWatch.Elapsed.TotalSeconds <= _maxTime || _maxTime == 0.0)
        {
            return new ValueTask();
        }
        return new ValueTask(SuspendNow());
    }

    /// <summary>
    /// Wrapper to await on an external task.
    /// </summary>
    protected async Task<TTask> WaitAsyncTask<TTask>(Task<TTask> task)
    {
        DebugTools.AssertNull(_resume);
        Status = JobStatus.Waiting;
        DebugTime += StopWatch.Elapsed.TotalSeconds;
        var result = await task;
        // Immediately block on resume so that everything stays correct.
        Status = JobStatus.Paused;
        _resume = new TaskCompletionSource<object>();
        await _resume.Task;
        return result;
    }

    /// <summary>
    /// Wrapper to safely await on an external task.
    /// </summary>
    protected async Task WaitAsyncTask(Task task)
    {
        DebugTools.AssertNull(_resume);
        Status = JobStatus.Waiting;
        DebugTime += StopWatch.Elapsed.TotalSeconds;
        await task;
        // Immediately block on resume so that everything stays correct.
        _resume = new TaskCompletionSource<object>();
        Status = JobStatus.Paused;
        await _resume.Task;
    }

    /// <summary>
    /// Runs (or resumes) the job on the caller's thread until it suspends,
    /// starts waiting, or finishes. Called by the job queue each tick.
    /// </summary>
    public void Run()
    {
        StopWatch.Restart();
        // First Run() kicks off the state machine, which immediately
        // suspends inside ProcessWrap() and sets _resume.
        _workInProgress ??= ProcessWrap();
        if (Status == JobStatus.Finished)
        {
            return;
        }
        DebugTools.Assert(_resume != null,
            "Run() called without resume. Was this called while the job is in Waiting state?");
        var resume = _resume;
        _resume = null;
        Status = JobStatus.Running;
        if (Cancellation.IsCancellationRequested)
        {
            resume.TrySetCanceled();
        }
        else
        {
            // NOTE: SetResult runs the job's continuation synchronously here,
            // so the job executes inline until it suspends again.
            resume.SetResult(null);
        }
        if (Status != JobStatus.Finished && Status != JobStatus.Waiting)
        {
            DebugTools.Assert(_resume != null,
                "Job suspended without _resume set. Did you await on an external task without using WaitAsyncTask?");
        }
    }

    /// <summary>The job body; implementations await only the Suspend*/WaitAsyncTask helpers.</summary>
    protected abstract Task<T> Process();

    // Drives Process(), forwarding result/cancellation/exception to AsTask
    // and keeping Status/DebugTime consistent.
    private async Task ProcessWrap()
    {
        try
        {
            Cancellation.ThrowIfCancellationRequested();
            // Making sure that the task starts inside the Running block,
            // where the stopwatch is correctly set and such.
            await SuspendNow();
            Result = await Process();
            // TODO: not sure if it makes sense to connect Task directly up
            // to the return value of this method/Process.
            // Maybe?
            _taskTcs.TrySetResult(Result);
        }
        catch (TaskCanceledException)
        {
            _taskTcs.TrySetCanceled();
        }
        catch (Exception e)
        {
            // TODO: Should this be exposed differently?
            // I feel that people might forget to check whether the job failed.
            Logger.ErrorS("job", "Job failed on exception:\n{0}", e);
            Exception = e;
            _taskTcs.TrySetException(e);
        }
        finally
        {
            if (Status != JobStatus.Waiting)
            {
                // If we're blocked on waiting and the waiting task goes cancel/exception,
                // this timing info would not be correct.
                DebugTime += StopWatch.Elapsed.TotalSeconds;
            }
            Status = JobStatus.Finished;
        }
    }
}
public enum JobStatus
{
    /// <summary>
    /// Job has been created and has not been run yet.
    /// </summary>
    Pending,
    /// <summary>
    /// Job is currently (yes, right now!) executing.
    /// </summary>
    Running,
    /// <summary>
    /// Job is paused due to CPU limits.
    /// </summary>
    Paused,
    /// <summary>
    /// Job is paused because of waiting on external task.
    /// </summary>
    Waiting,
    /// <summary>
    /// Job is done.
    /// </summary>
    // TODO: Maybe have a different status code for cancelled/failed on exception?
    Finished,
}
}
| |
#define TEST_DIRECT
using System;
using System.Diagnostics;
using System.Collections.Generic;
using ProtoCore;
namespace DesignScript.Parser.Associative
{
/// <summary>
/// Base class for all associative-language parse nodes in this IDE-oriented
/// parser. The commented-out members below are remnants of an earlier
/// dependency-tracking design and are intentionally left for reference.
/// </summary>
public abstract class AssociativeNode : DesignScript.Parser.Node
{
    //private static int sID = 0;
    //allow the assignment node to be part of dependency struture?
    //this lead to entiely different set of results in optimization
    //protected static bool AssignNodeDependencyEnabled = true;
    //even if the '=' is not a link between LHS and RHS, can we keep it in dependency graph?
    //protected static bool AssignNodeDependencyEnabledLame = true;
    /* public virtual void GenerateDependencyGraph(DependencyTracker tracker)
    {
        tracker.AddNode(this);//get rid of this later
        IEnumerable<Node> contingents = getContingents();
        foreach (Node node in contingents)
        {
            tracker.AddNode(node);
            if (node == null)
                continue;
            tracker.AddDirectContingent(this, node);
            tracker.AddDirectDependent(node, this);
            node.GenerateDependencyGraph(tracker);
        }
    }*/
    //public virtual IEnumerable<Node> getContingents()
    //{
    //    return new List<Node>();
    //}
    //public virtual void ConsolidateNames(ref Dictionary<string, List<Node>> names)
    //{
    //}
    //protected static void Consolidate(ref Dictionary<string, List<Node>> names, ref IdentifierNode node)
    //{
    //    if (null != node.Name)
    //    {
    //        if (names.ContainsKey(node.Name))
    //        {
    //            List<Node> candidates = names[node.Name];
    //            node = candidates[candidates.Count - 1] as IdentifierNode;
    //        }
    //        else
    //        {
    //            //symbol not defined.
    //            //should we ignore this until somebody else defines a symbol?
    //            //or add the symbol?
    //            //throw new KeyNotFoundException();
    //            List<Node> candidates = new List<Node>();
    //            candidates.Add(node);
    //            names.Add(node.Name, candidates);
    //        }
    //    }
    //}
}
/// <summary>
/// Lightweight token node recording the text and position of a single
/// lexical element (punctuation, identifier, keyword, ...) for IDE use.
/// </summary>
public class IDEHelpNode : AssociativeNode
{
    public IDEHelpNode(NodeType _type)
    {
        Type = _type;
        Value = null;
    }

    /// <summary>Lexical category of the recorded token.</summary>
    public enum NodeType { PunctuationNode, IdentNode, TypeNode, KeywordNode, NumberNode, TextNode}

    public NodeType Type { get; set; }

    /// <summary>Raw token text; null until set.</summary>
    public string Value { get; set; }

    /// <summary>Records the token text together with its line/column position.</summary>
    public void SetValue(string _value, int _line, int _col)
    {
        Value = _value;
        Line = _line;
        Col = _col;
    }
}
/// <summary>
/// Parse node for a source comment, preserving its text, position,
/// and whether it was an inline (//) or block (/* */) comment.
/// </summary>
public class CommentNode : AssociativeNode
{
    public enum CommentType { Inline, Block }

    /// <summary>Inline or block.</summary>
    public CommentType Type { get; private set; }

    /// <summary>Raw comment text.</summary>
    public string Value { get; private set; }

    public CommentNode(int col, int line, string value, CommentType type)
    {
        // Position first, then payload — all fields are immutable afterwards.
        Col = col;
        Line = line;
        Value = value;
        Type = type;
    }
}
/// <summary>
/// Parse node for an embedded language block ([Imperative]{...} etc.),
/// recording bracket/brace/keyword token positions for the IDE.
/// </summary>
public class LanguageBlockNode : AssociativeNode
{
    public LanguageBlockNode()
    {
        languageblock = new LanguageCodeBlock();
        openBracket = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        language = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        closeBracket = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        property = new List<IDEHelpNode>();
        propertyValue = new List<StringNode>();
        openBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        closeBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }

    public ProtoCore.LanguageCodeBlock languageblock;

    // The parsed body of the block.
    public Node code { get; set; }

    /// <summary>
    /// Records one "name = value" block property with its separator tokens.
    /// NOTE(review): tokens are appended comma, then '=', then name — the
    /// assign token precedes the name; confirm consumers expect this order.
    /// </summary>
    public void AddProperty(string _comma, int comma_line, int comma_col, string _name, int name_line, int name_col, string _assign, int assign_line, int assign_col, StringNode _value)
    {
        property.Add(new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode) { Value = _comma, Line = comma_line, Col = comma_col });
        property.Add(new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode) { Value = _assign, Line = assign_line, Col = assign_col });
        property.Add(new IDEHelpNode(IDEHelpNode.NodeType.IdentNode) { Value = _name, Line = name_line, Col = name_col });
        propertyValue.Add(_value);
    }

    public IDEHelpNode openBracket { get; set; }
    public IDEHelpNode language { get; set; }
    public IDEHelpNode closeBracket { get; set; }
    public List<IDEHelpNode> property { get; private set; }
    public List<StringNode> propertyValue { get; private set; }
    public IDEHelpNode openBrace { get; set; }
    public IDEHelpNode closeBrace { get; set; }
}
/// <summary>
/// This node will be used by the optimiser
/// </summary>
public class MergeNode : AssociativeNode
{
    // Nodes folded together by the optimiser; populated externally via Add.
    public List<Node> MergedNodes
    {
        get;
        private set;
    }

    public MergeNode()
    {
        MergedNodes = new List<Node>();
    }

    //public override IEnumerable<Node> getContingents()
    //{
    //    return MergedNodes;
    //}
    /* public override void ConsolidateNames(ref Dictionary<string, List<Node>> names)
    {
        foreach (Node node in MergedNodes)
            node.ConsolidateNames(ref(names));
    }*/
}
/// <summary>
/// Parse node for a replication-guide expression (factor&lt;1&gt;&lt;2&gt;...),
/// keeping the bracket/number token positions for the IDE.
/// </summary>
public class ReplicationGuideNode : AssociativeNode
{
    public ReplicationGuideNode()
    {
        // NOTE(review): ReplicationGuides is NOT initialized here (unlike
        // brackets) — callers appear responsible for assigning it; confirm
        // before relying on it being non-null.
        brackets = new List<IDEHelpNode>();
    }

    public List<Node> ReplicationGuides { get; set; }
    public List<IDEHelpNode> brackets { get; set; }

    // The expression the guides apply to.
    public Node factor { get; set; }

    /// <summary>Records one "&lt;N&gt;" guide as open/number/close tokens.</summary>
    public void AddBrackets(string _open, int open_line, int open_col, string _num, int num_line, int num_col, string _close, int close_line, int close_col)
    {
        brackets.Add(new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode) { Value = _open, Line = open_line, Col = open_col });
        brackets.Add(new IDEHelpNode(IDEHelpNode.NodeType.NumberNode) { Value = _num, Line = num_line, Col = num_col });
        brackets.Add(new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode) { Value = _close, Line = close_line, Col = close_col });
    }

    public IDEHelpNode openParen { get; set; }
    public IDEHelpNode closeParen { get; set; }
}
/// <summary>
/// Parse node for an identifier, optionally typed (name : type) and/or
/// indexed (name[dim]); stores the individual tokens for IDE highlighting.
/// </summary>
public class IdentifierNode : AssociativeNode
{
    public IdentifierNode()
    {
        IdentValueReturn = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        IdentValue = new IDEHelpNode(IDEHelpNode.NodeType.IdentNode);
        colon = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        typeName = new IDEHelpNode(IDEHelpNode.NodeType.TypeNode);
        typeName_kw = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
    }

    // The identifier token itself.
    public IDEHelpNode IdentValue { get; set; }
    // Used when the "identifier" is the 'return' keyword.
    public IDEHelpNode IdentValueReturn { get; set; }

    public ArrayNode ArrayDimensions
    {
        get;
        set;
    }

    public IDEHelpNode colon { get; set; }
    // Type annotation as a user-defined type name.
    public IDEHelpNode typeName { get; set; }
    // Type annotation as a built-in type keyword.
    public IDEHelpNode typeName_kw { get; set; }

    /// <summary>Records a built-in type annotation token (keyword-colored).</summary>
    public void BuildInTypeSetValue(string _value, int _line, int _col)
    {
        typeName_kw = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode) { Value = _value, Line = _line, Col = _col };
    }

    /// <summary>Records a user-defined type annotation token (type-colored).</summary>
    public void UserDefinedTypeSetValue(string _value, int _line, int _col)
    {
        typeName = new IDEHelpNode(IDEHelpNode.NodeType.TypeNode) { Value = _value, Line = _line, Col = _col };
    }

    //public override void ConsolidateNames(ref Dictionary<string, List<Node>> names)
    //{
    //    throw new NotImplementedException(); //we should not be here at all. the parent node should take care.
    //    //disabling execption as functioncalls will still need to add the nodes to
    //}
}
/// <summary>
/// Parse node for a dotted identifier chain (left.right), recording the
/// dot token position for the IDE.
/// </summary>
public class IdentifierListNode : AssociativeNode
{
    public IdentifierListNode()
    {
        InitializeIDEHelpNode();
    }

    public Node LeftNode
    {
        get;
        set;
    }

    // The operator joining the two sides (typically member access).
    public ProtoCore.DSASM.Operator Optr
    {
        get;
        set;
    }

    void InitializeIDEHelpNode()
    {
        dot = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }

    public IDEHelpNode dot { get; set; }

    public Node RightNode
    {
        get;
        set;
    }
}
/// <summary>Integer literal node with optional sign token, for IDE use.</summary>
public class IntNode : AssociativeNode
{
    public IntNode()
    {
        sign = new IDEHelpNode(IDEHelpNode.NodeType.NumberNode);
        IDEValue = new IDEHelpNode(IDEHelpNode.NodeType.NumberNode);
    }

    /// <summary>Sign token position info, if a sign was present.</summary>
    public IDEHelpNode sign { get; set; }
    /// <summary>The numeric literal token.</summary>
    public IDEHelpNode IDEValue { get; set; }
    /// <summary>Raw literal text.</summary>
    public string value { get; set; }
}
/// <summary>Floating-point literal node with optional sign token, for IDE use.</summary>
public class DoubleNode : AssociativeNode
{
    public DoubleNode()
    {
        sign = new IDEHelpNode(IDEHelpNode.NodeType.NumberNode);
        IDEValue = new IDEHelpNode(IDEHelpNode.NodeType.NumberNode);
    }

    /// <summary>Sign token position info, if a sign was present.</summary>
    public IDEHelpNode sign { get; set; }
    /// <summary>The numeric literal token.</summary>
    public IDEHelpNode IDEValue { get; set; }
    /// <summary>Raw literal text.</summary>
    public string value { get; set; }
}
/// <summary>Boolean literal node; value holds the raw literal text.</summary>
public class BooleanNode : AssociativeNode
{
    public string value { get; set; }
}
/// <summary>Character literal node, keeping the token position for the IDE.</summary>
public class CharNode : AssociativeNode
{
    public CharNode()
    {
        IDEValue = new IDEHelpNode(IDEHelpNode.NodeType.TextNode);
    }

    // The literal token (text-colored in the IDE).
    public IDEHelpNode IDEValue { get; set; }
    // Raw literal text.
    public string value { get; set; }
}
/// <summary>String literal node, keeping the token position for the IDE.</summary>
public class StringNode : AssociativeNode
{
    public StringNode()
    {
        IDEValue = new IDEHelpNode(IDEHelpNode.NodeType.TextNode);
    }

    // The literal token (text-colored in the IDE).
    public IDEHelpNode IDEValue { get; set; }
    // Raw literal text.
    public string value { get; set; }
}
/// <summary>Null literal node; value holds the raw literal text.</summary>
public class NullNode : AssociativeNode
{
    public string value { get; set; }
}
/// <summary>Return statement node wrapping the returned expression.</summary>
public class ReturnNode : AssociativeNode
{
    public Node ReturnExpr
    {
        get;
        set;
    }
}
/// <summary>
/// Parse node for a function call: the callee, its argument list, and the
/// paren/comma token positions for the IDE.
/// </summary>
public class FunctionCallNode : AssociativeNode
{
    // The callee expression (identifier, member access, ...).
    public Node Function
    {
        get;
        set;
    }

    public List<Node> FormalArguments
    {
        get;
        set;
    }

    public FunctionCallNode()
    {
        FormalArguments = new List<Node>();
        InitializeIDEHelpNode();
    }

    //public override IEnumerable<Node> getContingents()
    //{
    //    List<Node> contingents = new List<Node>(FormalArguments);
    //    contingents.Add(Function);
    //    return contingents;
    //}
    /*
    public override void ConsolidateNames(ref Dictionary<string, List<Node>> names)
    {
        List<Node> newFormalArguments = new List<Node>();
        //replace the names in arguments by current names in calling context
        foreach (Node argument in FormalArguments)
        {
            Node newArgument = argument;
            IdentifierNode terminalNode = newArgument as IdentifierNode;
            if (terminalNode != null)
            {
                Consolidate(ref(names), ref(terminalNode));
                newArgument = terminalNode;
            }
            else
            {
                argument.ConsolidateNames(ref(names));
            }
            newFormalArguments.Add(newArgument);
        }
        FormalArguments = newFormalArguments;
    }*/

    /// <summary>Records an argument-separator comma token.</summary>
    public void AddComma(string _value, int _line, int _col)
    {
        comma.Add(new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode) { Value = _value, Line = _line, Col = _col });
    }

    void InitializeIDEHelpNode()
    {
        openParen = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        closeParen = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        comma = new List<IDEHelpNode>();
    }

    public IDEHelpNode openParen { get; set; }
    public IDEHelpNode closeParen { get; set; }
    public List<IDEHelpNode> comma { get; set; }
}
/// <summary>
/// Parse node for a pattern guard ("| expression"), keeping the '|' token
/// position for the IDE.
/// </summary>
public class Pattern : AssociativeNode
{
    public Pattern()
    {
        InitializeIDEHelpNode();
    }

    public Node Expression
    {
        get;
        set;
    }

    //public override IEnumerable<Node> getContingents()
    //{
    //    List<Node> contingents = new List<Node>(1);
    //    contingents.Add(Expression);
    //    return contingents;
    //}
    /*
    public override void ConsolidateNames(ref Dictionary<string, List<Node>> names)
    {
        Expression.ConsolidateNames(ref(names));
    }*/

    void InitializeIDEHelpNode()
    {
        bitOr = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }

    // The '|' token introducing the pattern.
    public IDEHelpNode bitOr { get; set; }
}
/// <summary>
/// Parse node pairing a value expression with its replication guides.
/// </summary>
public class QualifiedNode : AssociativeNode
{
    public Node Value
    {
        get;
        set;
    }

    public List<Node> ReplicationGuides
    {
        get;
        set;
    }

    //public override IEnumerable<Node> getContingents()
    //{
    //    List<Node> contingents = new List<Node>(ReplicationGuides);
    //    contingents.Add(Value);
    //    return contingents;
    //}
    /*
    public override void ConsolidateNames(ref Dictionary<string, List<Node>> names)
    {
        Value.ConsolidateNames(ref(names));
    }
    */
}
/// <summary>
/// Parse node for a variable declaration (optionally static/typed, with
/// initializer), keeping name/'='/'static' token positions for the IDE.
/// </summary>
public class VarDeclNode : AssociativeNode
{
    public VarDeclNode()
    {
        name = new IDEHelpNode(IDEHelpNode.NodeType.IdentNode);
        equal = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        KwStatic = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
    }

    //public ProtoCore.DSASM.MemoryRegion memregion
    //{
    //    get;
    //    set;
    //}
    //public ProtoCore.Type ArgumentType
    //{
    //    get;
    //    set;
    //}

    // The declared name / initializer expression subtree.
    public Node NameNode
    {
        get;
        set;
    }

    ////public override void ConsolidateNames(ref Dictionary<string, List<Node>> names)
    ////{
    ////    if (names.ContainsKey(NameNode.Name))
    ////        throw new Exception();
    ////    List<Node> records = new List<Node>();
    ////    records.Add(NameNode);
    ////    names.Add(NameNode.Name, records);
    ////    Dictionary<string, List<Node>> localnames = new Dictionary<string, List<Node>>();
    ////    localnames.Add(NameNode.Name, records);
    //}

    public IDEHelpNode KwStatic { get; set; }
    public IDEHelpNode name { get; set; }
    // Type annotation of the declared variable, if any.
    public TypeNode IDEArgumentType { get; set; }
    public IDEHelpNode equal { get; set; }
}
/// <summary>
/// Parse node for a parameter list, keeping parenthesis/comma token
/// positions for the IDE.
/// </summary>
public class ArgumentSignatureNode : AssociativeNode
{
    public ArgumentSignatureNode()
    {
        Arguments = new List<Node>();
        InitializeHelpNode();
    }

    public List<Node> Arguments
    {
        get;
        set;
    }

    /// <summary>Appends one parameter declaration.</summary>
    public void AddArgument(Node arg)
    {
        Arguments.Add(arg);
    }

    //public List<KeyValuePair<ProtoCore.Type, Pattern>> ArgumentStructure
    //{
    //    get
    //    {
    //        List<KeyValuePair<ProtoCore.Type, Pattern>> argStructure = new List<KeyValuePair<ProtoCore.Type, Pattern>>();
    //        foreach (VarDeclNode i in Arguments)
    //        {
    //            argStructure.Add(new KeyValuePair<ProtoCore.Type, Pattern>(i.ArgumentType, i.Pattern));
    //        }
    //        return argStructure;
    //    }
    //}
    //public override IEnumerable<Node> getContingents()
    //{
    //    List<Node> contingents = new List<Node>(Arguments);
    //    return contingents;
    //}
    //public override void ConsolidateNames(ref Dictionary<string, List<Node>> names)
    //{
    //    foreach (Node node in Arguments)
    //        node.ConsolidateNames(ref(names));
    //}

    /// <summary>Records a parameter-separator comma token.</summary>
    public void AddComma(string _comma, int _line, int _col)
    {
        comma.Add(new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode) { Value = _comma, Line = _line, Col = _col });
    }

    void InitializeHelpNode()
    {
        openBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        comma = new List<IDEHelpNode>();
        closeBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }

    public IDEHelpNode openBrace { get; set; }
    public List<IDEHelpNode> comma { get; private set; }
    public IDEHelpNode closeBrace { get; set; }
}
/// <summary>
/// Parse node for a braced statement block, with its own symbol table and
/// brace token positions for the IDE.
/// </summary>
public class CodeBlockNode : AssociativeNode
{
    public ProtoCore.DSASM.SymbolTable symbols { get; set; }
    public ProtoCore.DSASM.ProcedureTable procTable { get; set; }

    public CodeBlockNode()
    {
        Body = new List<Node>();
        symbols = new ProtoCore.DSASM.SymbolTable("AST generated", ProtoCore.DSASM.Constants.kInvalidIndex);
        //procTable = new ProtoCore.DSASM.ProcedureTable(ProtoCore.DSASM.Constants.kInvalidIndex);
        InitializeIDEHelpNode();
    }

    public List<Node> Body
    {
        get;
        set;
    }

    //public override IEnumerable<Node> getContingents()
    //{
    //    return new List<Node>(Body);
    //}
    //public override void ConsolidateNames(ref Dictionary<string, List<Node>> names)
    //{
    //    //todo make a decision whether to pass out the local names.
    //    foreach (Node node in Body)
    //    {
    //        node.ConsolidateNames(ref(names));
    //    }
    //}

    void InitializeIDEHelpNode()
    {
        openBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        closeBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }

    public IDEHelpNode openBrace { get; set; }
    public IDEHelpNode closeBrace { get; set; }
}
/// <summary>
/// Parse node for a class declaration: member variable/function lists plus
/// keyword, brace, comma, base-class and access-modifier token positions
/// for the IDE.
/// </summary>
public class ClassDeclNode : AssociativeNode
{
    public ClassDeclNode()
    {
        varlist = new List<Node>();
        funclist = new List<Node>();
        InitializeIDEHelpNode();
    }

    // utilities added to store the pos info of tokens, for IDE use
    void InitializeIDEHelpNode()
    {
        Kwclass = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        ClassName = new IDEHelpNode(IDEHelpNode.NodeType.TypeNode);
        SuperClass = new List<IDEHelpNode>();
        Kwextend = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        openBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        closeBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        VarDeclCommas = new List<IDEHelpNode>();
        accessLabel = new List<IDEHelpNode>();
    }

    /// <summary>Records a base-class name token in the "extends" list.</summary>
    public void AddExt(string _name, int _line, int _col)
    {
        // Fix: the previous code redundantly re-assigned Type in the object
        // initializer after the constructor already set it.
        SuperClass.Add(new IDEHelpNode(IDEHelpNode.NodeType.TypeNode) { Value = _name, Line = _line, Col = _col });
    }

    /// <summary>Records a comma token separating member variable declarations.</summary>
    public void AddVarDeclComma(string _value, int _line, int _col)
    {
        VarDeclCommas.Add(new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode) { Value = _value, Col = _col, Line = _line });
    }

    /// <summary>Records an access-modifier keyword token (public/private/...).</summary>
    public void AddAccessLabel(string _value, int _line, int _col)
    {
        accessLabel.Add(new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode) { Value = _value, Line = _line, Col = _col });
    }

    public IDEHelpNode Kwclass { get; set; }
    public IDEHelpNode ClassName { get; set; }
    public IDEHelpNode Kwextend { get; set; }
    public List<IDEHelpNode> SuperClass { get; private set; }
    public IDEHelpNode openBrace { get; set; }
    public IDEHelpNode closeBrace { get; set; }
    public List<IDEHelpNode> VarDeclCommas { get; private set; }
    public List<IDEHelpNode> accessLabel { get; set; }
    public List<Node> varlist { get; set; }
    public List<Node> funclist { get; set; }
}
/// <summary>
/// Parse node for a class constructor definition, including an optional
/// base-constructor call (": base.Name(...)") and its token positions.
/// </summary>
public class ConstructorDefinitionNode : AssociativeNode
{
    // Optional "base" constructor call; null until CreateBaseConstructorNode().
    private DesignScript.Parser.Associative.FunctionCallNode baseConstr = null;

    public ConstructorDefinitionNode()
    {
        InitializeIDEHelpNode();
    }

    public ArgumentSignatureNode Signature
    {
        get;
        set;
    }

    public Pattern Pattern
    {
        get;
        set;
    }

    public TypeNode IDEReturnType { get; set; }

    public CodeBlockNode FunctionBody
    {
        get;
        set;
    }

    void InitializeIDEHelpNode()
    {
        Kwconstructor = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        name = new IDEHelpNode(IDEHelpNode.NodeType.IdentNode);
        KwStatic = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        KwBase = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
    }

    /// <summary>Allocates the base-constructor call node (parser calls this on ':' base).</summary>
    public void CreateBaseConstructorNode()
    {
        this.baseConstr = new DesignScript.Parser.Associative.FunctionCallNode();
    }

    /// <summary>Sets the callee of the base-constructor call; no-op if none was created.</summary>
    public void SetBaseConstructor(Node bnode)
    {
        if(null != this.baseConstr)
            this.baseConstr.Function = bnode;
    }

    /// <summary>Records the ':' token preceding the base-constructor call.</summary>
    public void SetColonToken(Token token)
    {
        this.Colon = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        this.Colon.SetValue(token.val, token.line, token.col);
    }

    /// <summary>Records the '.' token in "base.Name".</summary>
    public void SetDotToken(Token token)
    {
        this.Dot = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        this.Dot.SetValue(token.val, token.line, token.col);
    }

    public IDEHelpNode KwStatic { get; set; }
    public IDEHelpNode Kwconstructor { get; set; }
    public IDEHelpNode name { get; set; }
    public IDEHelpNode Colon { get; set; }
    public IDEHelpNode Dot { get; set; }
    public IDEHelpNode KwBase { get; set; }
    public DesignScript.Parser.Associative.FunctionCallNode BaseConstructorNode { get { return this.baseConstr; } }
}
/// <summary>
/// Parse node for a type annotation (": TypeName[]..[]"), keeping the
/// colon/bracket token positions for the IDE.
/// </summary>
public class TypeNode : AssociativeNode
{
    public TypeNode()
    {
        InitializeIDEHelpNode();
    }

    void InitializeIDEHelpNode()
    {
        colon = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        brackets = new List<IDEHelpNode>();
        multiDim = new List<IDEHelpNode>();
        op = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }

    /// <summary>Records one "[]" rank pair.</summary>
    public void AddBracket(string _open, int open_line, int open_col, string _close, int close_line, int close_col)
    {
        brackets.Add(new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode) { Value = _open, Col = open_col, Line = open_line });
        brackets.Add(new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode) { Value = _close, Col = close_col, Line = close_line });
    }

    /// <summary>Records one "[]..[]" arbitrary-rank pair.</summary>
    public void AddMultiDimNodes(string _mdopen, int mdopen_line, int mdopen_col, string _mdclose, int mdclose_line, int mdclose_col)
    {
        multiDim.Add(new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode) { Value = _mdopen, Col = mdopen_col, Line = mdopen_line });
        multiDim.Add(new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode) { Value = _mdclose, Col = mdclose_col, Line = mdclose_line });
    }

    /// <summary>Records a built-in type name token (keyword-colored).</summary>
    public void BuildInTypeSetValue(string _value, int _line, int _col)
    {
        typeName = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode) { Value = _value, Line = _line, Col = _col };
    }

    /// <summary>Records a user-defined type name token (type-colored).</summary>
    public void UserDefinedTypeSetValue(string _value, int _line, int _col)
    {
        typeName = new IDEHelpNode(IDEHelpNode.NodeType.TypeNode) { Value = _value, Line = _line, Col = _col };
    }

    public IDEHelpNode colon { get; set; }
    public IDEHelpNode typeName { get; set; }
    public IDEHelpNode varName { get; set; }
    public IDEHelpNode op { get; set; }
    public List<IDEHelpNode> brackets { get; set; }
    public List<IDEHelpNode> multiDim { get; set; }
}
/// <summary>
/// A function definition: signature, body, return type, and the
/// extern/native attributes parsed from source, with token positions
/// recorded for IDE support.
/// </summary>
public class FunctionDefinitionNode : AssociativeNode
{
    public FunctionDefinitionNode()
    {
        KwStatic = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        Kwexternal = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        Kwnative = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        Kwdef = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        name = new IDEHelpNode(IDEHelpNode.NodeType.IdentNode);
        libOpenBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        libCloseBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        endLine = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }
    public CodeBlockNode FunctionBody { get; set; }
    public TypeNode IDEReturnType { get; set; }
    public ProtoCore.Type ReturnType { get; set; }
    // NOTE: misspelling ("Singnature") is part of the public interface; kept
    // for compatibility with existing callers.
    public ArgumentSignatureNode Singnature { get; set; }
    public Node Pattern { get; set; }
    public bool IsExternLib { get; set; }
    public bool IsDNI { get; set; }
    public string ExternLibName { get; set; }
    public IDEHelpNode KwStatic { get; set; }
    public IDEHelpNode Kwexternal { get; set; }
    public IDEHelpNode Kwnative { get; set; }
    public IDEHelpNode libOpenBrace { get; set; }
    public IDEHelpNode libOpenQuote { get; set; }
    public IDEHelpNode libCloseQuote { get; set; }
    public StringNode libName { get; set; }
    public IDEHelpNode libCloseBrace { get; set; }
    public IDEHelpNode Kwdef { get; set; }
    public IDEHelpNode name { get; set; }
    public IDEHelpNode endLine { get; set; }
}
/// <summary>
/// A binary expression; <c>op</c> records the operator token's source position.
/// </summary>
public class BinaryExpressionNode : AssociativeNode
{
    public BinaryExpressionNode()
    {
        op = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }
    public Node LeftNode { get; set; }
    public Node RightNode { get; set; }
    public IDEHelpNode op { get; set; }
}
/// <summary>
/// A parenthesised expression, recording both parenthesis positions.
/// </summary>
public class ParenExpressionNode : AssociativeNode
{
    public ParenExpressionNode()
    {
        closeParen = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        openParen = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }
    public Node expression { get; set; }
    public IDEHelpNode openParen { get; set; }
    public IDEHelpNode closeParen { get; set; }
}
/// <summary>
/// A unary expression; <c>op</c> records the operator token's source position.
/// </summary>
public class UnaryExpressionNode : AssociativeNode
{
    public UnaryExpressionNode()
    {
        op = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }
    public Node Expression { get; set; }
    public IDEHelpNode op { get; set; }
}
/// <summary>
/// An inline conditional (condition ? trueExpr : falseExpr), recording the
/// positions of the '?' and ':' tokens.
/// </summary>
public class InlineConditionalNode : AssociativeNode
{
    public InlineConditionalNode()
    {
        Colon = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        Question = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }
    public IDEHelpNode Question { get; set; }
    public IDEHelpNode Colon { get; set; }
    public Node ConditionExpression { get; set; }
    public Node TrueExpression { get; set; }
    public Node FalseExpression { get; set; }
}
/// <summary>
/// A modifier-stack block. Holds the element expressions plus any labelled
/// ("@name") aliases declared inside, and records token positions for IDE use.
/// </summary>
public class ModifierStackNode : AssociativeNode
{
    public ModifierStackNode()
    {
        ElementNodes = new List<Node>();
        AtNames = new Dictionary<string, Node>();
        InitializeIDEHelpNode();
    }
    /// <summary>Records the position of a statement-terminating token.</summary>
    public void AddEndLine(string _value, int _line, int _col)
    {
        endline.Add(new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode) { Value = _value, Col = _col, Line = _line });
    }
    /// <summary>Records an arrow token and the label identifier that follows it.</summary>
    public void AddArrow(string _arr_value, int _arr_line, int _arr_col, string _label_value, int _label_line, int _label_col)
    {
        arrow.Add(new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode) { Value = _arr_value, Line = _arr_line, Col = _arr_col });
        arrow.Add(new IDEHelpNode(IDEHelpNode.NodeType.IdentNode) { Value = _label_value, Line = _label_line, Col = _label_col });
    }
    void InitializeIDEHelpNode()
    {
        openSharpBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        endline = new List<IDEHelpNode>();
        arrow = new List<IDEHelpNode>();
        closeBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }
    public IDEHelpNode openSharpBrace { get; set; }
    public List<IDEHelpNode> endline { get; private set; }
    public List<IDEHelpNode> arrow { get; private set; }
    public IDEHelpNode closeBrace { get; set; }
    /// <summary>
    /// Adds an element to the stack. When a non-empty, not-yet-seen label is
    /// supplied, registers the label and appends an extra binding
    /// (name = element's LHS identifier) to ElementNodes.
    /// </summary>
    public void AddElementNode(Node n, string name)
    {
        ElementNodes.Add(n);
        if (string.IsNullOrEmpty(name) || AtNames.ContainsKey(name))
        {
            return;
        }
        AtNames.Add(name, n);
        // BUGFIX: the original cast n with 'as' and dereferenced the result
        // without a null check, throwing NullReferenceException whenever a
        // labelled element was not a BinaryExpressionNode. Skip the alias
        // binding in that case instead of crashing.
        BinaryExpressionNode element = n as BinaryExpressionNode;
        if (element == null)
        {
            return;
        }
        IdentifierNode lhs = element.LeftNode as IdentifierNode;
        BinaryExpressionNode alias = new BinaryExpressionNode();
        alias.LeftNode = new IdentifierNode() { Name = name };
        alias.RightNode = lhs;
        ElementNodes.Add(alias);
    }
    public List<Node> ElementNodes
    {
        get;
        private set;
    }
    public Node ReturnNode
    {
        get;
        set;
    }
    public Dictionary<string, Node> AtNames
    {
        get;
        private set;
    }
}
/// <summary>
/// A range expression (from .. to .. step), recording operator token positions.
/// </summary>
public class RangeExprNode : AssociativeNode
{
    public RangeExprNode()
    {
        // A range defaults to a step of 1 when no explicit step is parsed.
        IntNode one = new IntNode();
        one.value = "1";
        StepNode = one;
        op = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        stepOp = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        stepOp2 = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }
    public Node FromNode { get; set; }
    public Node ToNode { get; set; }
    public Node StepNode { get; set; }
    public IDEHelpNode op { get; set; }
    public IDEHelpNode stepOp { get; set; }
    public IDEHelpNode stepOp2 { get; set; }
}
/// <summary>
/// An expression list, recording brace and comma token positions.
/// </summary>
public class ExprListNode : AssociativeNode
{
    public ExprListNode()
    {
        list = new List<Node>();
        openBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        closeBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        comma = new List<IDEHelpNode>();
    }
    /// <summary>The element expressions, in source order.</summary>
    public List<Node> list { get; set; }
    /// <summary>Records the position of a ',' separating two elements.</summary>
    public void AddComma(string _value, int _line, int _col)
    {
        IDEHelpNode separator = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        separator.Value = _value;
        separator.Line = _line;
        separator.Col = _col;
        comma.Add(separator);
    }
    public IDEHelpNode openBrace { get; set; }
    public List<IDEHelpNode> comma { get; private set; }
    public IDEHelpNode closeBrace { get; set; }
}
/// <summary>
/// A for-in loop over a collection, recording keyword/punctuation positions.
/// </summary>
public class ForLoopNode : AssociativeNode
{
    public ForLoopNode()
    {
        Kwfor = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        Kwin = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        openParen = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        closeParen = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        openBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        closeBrace = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }
    // Loop variable.
    public Node id { get; set; }
    // Expression being iterated over.
    public Node expression { get; set; }
    // Loop-body statements.
    public List<Node> body { get; set; }
    public IDEHelpNode Kwfor { get; set; }
    public IDEHelpNode openParen { get; set; }
    public IDEHelpNode Kwin { get; set; }
    public IDEHelpNode closeParen { get; set; }
    public IDEHelpNode openBrace { get; set; }
    public IDEHelpNode closeBrace { get; set; }
}
/// <summary>
/// A single statement plus the position of its terminating token.
/// </summary>
public class StatementNode : AssociativeNode
{
    public StatementNode()
    {
        // Statement defaults to null until the parser attaches one.
        endLine = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }
    public Node Statement { get; set; }
    public IDEHelpNode endLine { get; set; }
}
/// <summary>
/// An array-indexing construct, recording bracket token positions.
/// Ident/Expr/Type remain null until the parser assigns them.
/// </summary>
public class ArrayNode : AssociativeNode
{
    public ArrayNode()
    {
        openBracket = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        closeBracket = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }
    public Node Ident { get; set; }
    public Node Expr { get; set; }
    public Node Type { get; set; }
    public IDEHelpNode openBracket { get; set; }
    public IDEHelpNode closeBracket { get; set; }
}
/// <summary>
/// An import statement, recording keyword, identifier and punctuation
/// positions for IDE support.
/// </summary>
public class ImportNode : AssociativeNode
{
    public ImportNode()
    {
        KwImport = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        KwFrom = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        KwPrefix = new IDEHelpNode(IDEHelpNode.NodeType.KeywordNode);
        OpenParen = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        CloseParen = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        EndLine = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
        Identifier = new IDEHelpNode(IDEHelpNode.NodeType.IdentNode);
        PrefixIdent = new IDEHelpNode(IDEHelpNode.NodeType.IdentNode);
        HasBeenImported = false;
    }
    public CodeBlockNode CodeNode { get; set; }
    public bool HasBeenImported { get; set; }
    public IDEHelpNode OpenParen { get; set; }
    public IDEHelpNode CloseParen { get; set; }
    public IDEHelpNode KwImport { get; set; }
    public IDEHelpNode KwFrom { get; set; }
    public IDEHelpNode KwPrefix { get; set; }
    public IDEHelpNode PrefixIdent { get; set; }
    public StringNode Path { get; set; }
    public IDEHelpNode Identifier { get; set; }
    public IDEHelpNode EndLine { get; set; }
}
/// <summary>
/// A post-fix operation on an identifier; OperatorPos records the operator
/// token's source position.
/// </summary>
public class PostFixNode : AssociativeNode
{
    public PostFixNode()
    {
        OperatorPos = new IDEHelpNode(IDEHelpNode.NodeType.PunctuationNode);
    }
    public IDEHelpNode OperatorPos { get; set; }
    public Node Identifier { get; set; }
}
}
| |
#region Apache Notice
/*****************************************************************************
* $Header: $
* $Revision: 513043 $
* $Date: 2007-02-28 15:56:03 -0700 (Wed, 28 Feb 2007) $
*
* iBATIS.NET Data Mapper
* Copyright (C) 2004 - Gilles Bayon
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
********************************************************************************/
#endregion
#region Imports
using System;
using System.Data;
using System.Reflection;
using IBatisNet.Common;
using IBatisNet.Common.Logging;
using IBatisNet.DataAccess.Exceptions;
#endregion
namespace IBatisNet.DataAccess.DaoSessionHandlers
{
/// <summary>
/// An ADO.NET implementation of the DataAccess Session .
/// </summary>
/// <summary>
/// An ADO.NET implementation of the DataAccess Session.
/// </summary>
public class SimpleDaoSession : DaoSession
{
#region Fields
private static readonly ILog _logger = LogManager.GetLogger( MethodBase.GetCurrentMethod().DeclaringType );
private IDataSource _dataSource = null;
private bool _isTransactionOpen = false;
// Vote for the session outcome: commit on Dispose when true, otherwise roll back.
private bool _consistent = false;
/// <summary>
/// Holds value of connection
/// </summary>
private IDbConnection _connection = null;
/// <summary>
/// Holds value of transaction
/// </summary>
private IDbTransaction _transaction = null;
#endregion
#region Properties
/// <summary>
/// The data source use by the session.
/// </summary>
/// <value></value>
public override IDataSource DataSource
{
get { return _dataSource; }
}
/// <summary>
/// The Connection use by the session.
/// </summary>
/// <value></value>
public override IDbConnection Connection
{
get { return _connection; }
}
/// <summary>
/// The Transaction use by the session.
/// </summary>
/// <value></value>
public override IDbTransaction Transaction
{
get { return _transaction; }
}
/// <summary>
/// Indicates if a transaction is open on
/// the session.
/// </summary>
/// <value></value>
public override bool IsTransactionStart
{
get { return _isTransactionOpen; }
}
/// <summary>
/// Changes the vote for transaction to commit (true) or to abort (false).
/// </summary>
private bool Consistent
{
set { _consistent = value; }
}
#endregion
#region Constructor (s) / Destructor
/// <summary>
/// Creates a session bound to the given manager and data source.
/// </summary>
/// <param name="daoManager">The owning DAO manager.</param>
/// <param name="dataSource">The data source providing connections.</param>
public SimpleDaoSession(DaoManager daoManager, DataSource dataSource):base(daoManager)
{
_dataSource = dataSource;
}
#endregion
#region Methods
/// <summary>
/// Complete (commit) a transaction
/// </summary>
/// <remarks>
/// Use in 'using' syntax.
/// </remarks>
public override void Complete()
{
this.Consistent = true;
}
/// <summary>
/// Opens a database connection.
/// </summary>
public override void OpenConnection()
{
this.OpenConnection(_dataSource.ConnectionString);
}
/// <summary>
/// Open a connection, on the specified connection string.
/// </summary>
/// <param name="connectionString">The connection string</param>
public override void OpenConnection(string connectionString)
{
if (_connection == null)
{
_connection = _dataSource.DbProvider.CreateConnection();
_connection.ConnectionString = connectionString;
OpenCurrentConnection();
}
else if (_connection.State != ConnectionState.Open)
{
OpenCurrentConnection();
}
}
/// <summary>
/// Opens the already-created connection, logging on success and wrapping any
/// failure in a DataAccessException. (Extracted: the original duplicated this
/// open/log/catch logic in both branches of OpenConnection.)
/// </summary>
private void OpenCurrentConnection()
{
try
{
_connection.Open();
if (_logger.IsDebugEnabled)
{
_logger.Debug(string.Format("Open Connection \"{0}\" to \"{1}\".", _connection.GetHashCode().ToString(), _dataSource.DbProvider.Description) );
}
}
catch(Exception ex)
{
throw new DataAccessException( string.Format("Unable to open connection to \"{0}\".", _dataSource.DbProvider.Description), ex );
}
}
/// <summary>
/// Closes the connection
/// </summary>
public override void CloseConnection()
{
if ( (_connection != null) && (_connection.State == ConnectionState.Open) )
{
_connection.Close();
if (_logger.IsDebugEnabled)
{
_logger.Debug(string.Format("Close Connection \"{0}\" to \"{1}\".", _connection.GetHashCode().ToString(), _dataSource.DbProvider.Description));
}
}
_connection = null;
}
/// <summary>
/// Begins a transaction.
/// </summary>
/// <remarks>
/// Open a connection.
/// </remarks>
public override void BeginTransaction()
{
this.BeginTransaction( _dataSource.ConnectionString );
}
/// <summary>
/// Open a connection and begin a transaction on the specified connection string.
/// </summary>
/// <param name="connectionString">The connection string</param>
public override void BeginTransaction(string connectionString)
{
if (_connection == null || _connection.State != ConnectionState.Open)
{
this.OpenConnection( connectionString );
}
_transaction = _connection.BeginTransaction();
if (_logger.IsDebugEnabled)
{
_logger.Debug("Begin Transaction.");
}
_isTransactionOpen = true;
}
/// <summary>
/// Begins a database transaction
/// </summary>
/// <param name="openConnection">Open a connection.</param>
public override void BeginTransaction(bool openConnection)
{
if (openConnection)
{
this.BeginTransaction();
}
else
{
if (_connection == null || _connection.State != ConnectionState.Open)
{
throw new DataAccessException("SimpleDaoSession could not invoke BeginTransaction(). A Connection must be started. Call OpenConnection() first.");
}
_transaction = _connection.BeginTransaction();
if (_logger.IsDebugEnabled)
{
_logger.Debug("Begin Transaction.");
}
_isTransactionOpen = true;
}
}
/// <summary>
/// Begins a transaction at the data source with the specified IsolationLevel value.
/// </summary>
/// <param name="isolationLevel">The transaction isolation level for this connection.</param>
public override void BeginTransaction(IsolationLevel isolationLevel)
{
this.BeginTransaction( _dataSource.ConnectionString, isolationLevel );
}
/// <summary>
/// Open a connection and begin a transaction on the specified connection string.
/// </summary>
/// <param name="connectionString">The connection string</param>
/// <param name="isolationLevel">The transaction isolation level for this connection.</param>
public override void BeginTransaction(string connectionString, IsolationLevel isolationLevel)
{
if (_connection == null || _connection.State != ConnectionState.Open)
{
this.OpenConnection( connectionString );
}
_transaction = _connection.BeginTransaction(isolationLevel);
if (_logger.IsDebugEnabled)
{
_logger.Debug("Begin Transaction.");
}
_isTransactionOpen = true;
}
/// <summary>
/// Begins a transaction on the current connection
/// with the specified IsolationLevel value.
/// </summary>
/// <param name="isolationLevel">The transaction isolation level for this connection.</param>
/// <param name="openConnection">Open the connection ?</param>
public override void BeginTransaction(bool openConnection, IsolationLevel isolationLevel)
{
this.BeginTransaction( _dataSource.ConnectionString, openConnection, isolationLevel );
}
/// <summary>
/// Begins a transaction on the current connection
/// with the specified IsolationLevel value.
/// </summary>
/// <param name="isolationLevel">The transaction isolation level for this connection.</param>
/// <param name="connectionString">The connection string</param>
/// <param name="openConnection">Open a connection.</param>
public override void BeginTransaction(string connectionString, bool openConnection, IsolationLevel isolationLevel)
{
if (openConnection)
{
this.BeginTransaction(connectionString, isolationLevel);
}
else
{
if (_connection == null || _connection.State != ConnectionState.Open)
{
throw new DataAccessException("SimpleDaoSession could not invoke StartTransaction(). A Connection must be started. Call OpenConnection() first.");
}
_transaction = _connection.BeginTransaction(isolationLevel);
if (_logger.IsDebugEnabled)
{
_logger.Debug("Begin Transaction.");
}
_isTransactionOpen = true;
}
}
/// <summary>
/// Commits the database transaction.
/// </summary>
/// <remarks>
/// Will close the connection.
/// </remarks>
public override void CommitTransaction()
{
this.CommitTransaction(true);
}
/// <summary>
/// Commits the database transaction.
/// </summary>
/// <param name="closeConnection">Close the connection</param>
public override void CommitTransaction(bool closeConnection)
{
if (_logger.IsDebugEnabled)
{
_logger.Debug("Commit Transaction");
}
_transaction.Commit();
_transaction.Dispose();
_transaction= null;
_isTransactionOpen = false;
if (closeConnection)
{
// BUGFIX: guard against a null connection before reading State.
if (_connection != null && _connection.State != ConnectionState.Closed)
{
CloseConnection();
}
}
}
/// <summary>
/// Rolls back a transaction from a pending state.
/// </summary>
/// <remarks>
/// Will close the connection.
/// </remarks>
public override void RollBackTransaction()
{
this.RollBackTransaction(true);
}
/// <summary>
/// Rolls back a transaction from a pending state.
/// </summary>
/// <param name="closeConnection">Close the connection</param>
public override void RollBackTransaction(bool closeConnection)
{
if (_logger.IsDebugEnabled)
{
_logger.Debug("RollBack Transaction");
}
_transaction.Rollback();
_transaction.Dispose();
_transaction = null;
_isTransactionOpen = false;
if (closeConnection)
{
// BUGFIX: guard against a null connection before reading State.
if (_connection != null && _connection.State != ConnectionState.Closed)
{
CloseConnection();
}
}
}
/// <summary>
/// Creates a command of the given type, bound to the session's connection
/// and (when present) its transaction.
/// </summary>
/// <param name="commandType">The type of the command.</param>
/// <returns>A new IDbCommand.</returns>
public override IDbCommand CreateCommand(CommandType commandType)
{
IDbCommand command = null;
command = _dataSource.DbProvider.CreateCommand();
command.CommandType = commandType;
command.Connection = _connection;
// Assign transaction
if (_transaction != null)
{
try
{
command.Transaction = _transaction;
}
catch
{} // best-effort: some providers reject transaction assignment
}
// Assign connection timeout
if (_connection!= null)
{
try // MySql provider doesn't support it !
{
command.CommandTimeout = _connection.ConnectionTimeout;
}
catch(NotSupportedException e)
{
_logger.Info(e.Message);
}
}
return command;
}
/// <summary>
/// Create an IDataParameter
/// </summary>
/// <returns>An IDataParameter.</returns>
public override IDbDataParameter CreateDataParameter()
{
IDbDataParameter dataParameter = _dataSource.DbProvider.CreateDataParameter();
return dataParameter;
}
/// <summary>
/// Creates a data adapter from the underlying provider.
/// </summary>
/// <returns>A new IDbDataAdapter.</returns>
public override IDbDataAdapter CreateDataAdapter()
{
return _dataSource.DbProvider.CreateDataAdapter();
}
/// <summary>
/// Creates a data adapter whose select command is the given command.
/// </summary>
/// <param name="command">The select command.</param>
/// <returns>A new IDbDataAdapter.</returns>
public override IDbDataAdapter CreateDataAdapter(IDbCommand command)
{
IDbDataAdapter dataAdapter = null;
dataAdapter = _dataSource.DbProvider.CreateDataAdapter();
dataAdapter.SelectCommand = command;
return dataAdapter;
}
#endregion
#region IDisposable Members
/// <summary>
/// Commits or rolls back any open transaction (per the Consistent vote) and
/// releases the connection.
/// </summary>
public override void Dispose()
{
if (_logger.IsDebugEnabled)
{
_logger.Debug("Dispose DaoSession");
}
// BUGFIX: _connection may be null here (session never opened, or already
// closed and cleared by CloseConnection); the original dereferenced it
// unconditionally and threw NullReferenceException.
if (_isTransactionOpen == false)
{
if (_connection != null && _connection.State != ConnectionState.Closed)
{
daoManager.CloseConnection();
}
}
else
{
if (_consistent)
{
daoManager.CommitTransaction();
_isTransactionOpen = false;
}
else
{
if (_connection != null && _connection.State != ConnectionState.Closed)
{
daoManager.RollBackTransaction();
_isTransactionOpen = false;
}
}
}
}
#endregion
}
}
| |
namespace Microsoft.Azure.Management.Dns
{
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// RecordSetsOperations operations.
/// </summary>
public partial interface IRecordSetsOperations
{
/// <summary>
/// Updates a RecordSet within a DNS zone.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='zoneName'>
/// The name of the zone without a terminating dot.
/// </param>
/// <param name='relativeRecordSetName'>
/// The name of the RecordSet, relative to the name of the zone.
/// </param>
/// <param name='recordType'>
/// The type of DNS record. Possible values include: 'A', 'AAAA',
/// 'CNAME', 'MX', 'NS', 'PTR', 'SOA', 'SRV', 'TXT'
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the Update operation.
/// </param>
/// <param name='ifMatch'>
/// The etag of Zone.
/// </param>
/// <param name='ifNoneMatch'>
/// Defines the If-None-Match condition. Set to '*' to force
/// Create-If-Not-Exist. Other values will be ignored.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<RecordSet>> UpdateWithHttpMessagesAsync(string resourceGroupName, string zoneName, string relativeRecordSetName, RecordType recordType, RecordSet parameters, string ifMatch = default(string), string ifNoneMatch = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Creates or Updates a RecordSet within a DNS zone.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='zoneName'>
/// The name of the zone without a terminating dot.
/// </param>
/// <param name='relativeRecordSetName'>
/// The name of the RecordSet, relative to the name of the zone.
/// </param>
/// <param name='recordType'>
/// The type of DNS record. Possible values include: 'A', 'AAAA',
/// 'CNAME', 'MX', 'NS', 'PTR', 'SOA', 'SRV', 'TXT'
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the CreateOrUpdate operation.
/// </param>
/// <param name='ifMatch'>
/// The etag of Recordset.
/// </param>
/// <param name='ifNoneMatch'>
/// Defines the If-None-Match condition. Set to '*' to force
/// Create-If-Not-Exist. Other values will be ignored.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<RecordSet>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string zoneName, string relativeRecordSetName, RecordType recordType, RecordSet parameters, string ifMatch = default(string), string ifNoneMatch = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Removes a RecordSet from a DNS zone.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='zoneName'>
/// The name of the zone without a terminating dot.
/// </param>
/// <param name='relativeRecordSetName'>
/// The name of the RecordSet, relative to the name of the zone.
/// </param>
/// <param name='recordType'>
/// The type of DNS record. Possible values include: 'A', 'AAAA',
/// 'CNAME', 'MX', 'NS', 'PTR', 'SOA', 'SRV', 'TXT'
/// </param>
/// <param name='ifMatch'>
/// Defines the If-Match condition. The delete operation will be
/// performed only if the ETag of the zone on the server matches this
/// value.
/// </param>
/// <param name='ifNoneMatch'>
/// Defines the If-None-Match condition. The delete operation will be
/// performed only if the ETag of the zone on the server does not
/// match this value.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string zoneName, string relativeRecordSetName, RecordType recordType, string ifMatch = default(string), string ifNoneMatch = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets a RecordSet.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='zoneName'>
/// The name of the zone without a terminating dot.
/// </param>
/// <param name='relativeRecordSetName'>
/// The name of the RecordSet, relative to the name of the zone.
/// </param>
/// <param name='recordType'>
/// The type of DNS record. Possible values include: 'A', 'AAAA',
/// 'CNAME', 'MX', 'NS', 'PTR', 'SOA', 'SRV', 'TXT'
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<RecordSet>> GetWithHttpMessagesAsync(string resourceGroupName, string zoneName, string relativeRecordSetName, RecordType recordType, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Lists the RecordSets of a specified type in a DNS zone.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the zone.
/// </param>
/// <param name='zoneName'>
/// The name of the zone from which to enumerate RecordsSets.
/// </param>
/// <param name='recordType'>
/// The type of record sets to enumerate. Possible values include:
/// 'A', 'AAAA', 'CNAME', 'MX', 'NS', 'PTR', 'SOA', 'SRV', 'TXT'
/// </param>
/// <param name='top'>
/// Query parameters. If null is passed returns the default number of
/// zones.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<RecordSet>>> ListByTypeWithHttpMessagesAsync(string resourceGroupName, string zoneName, RecordType recordType, string top = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Lists all RecordSets in a DNS zone.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the zone.
/// </param>
/// <param name='zoneName'>
/// The name of the zone from which to enumerate RecordSets.
/// </param>
/// <param name='top'>
/// Query parameters. If null is passed returns the default number of
/// zones.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<RecordSet>>> ListAllInResourceGroupWithHttpMessagesAsync(string resourceGroupName, string zoneName, string top = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Lists the RecordSets of a specified type in a DNS zone.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<RecordSet>>> ListByTypeNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Lists all RecordSets in a DNS zone.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<RecordSet>>> ListAllInResourceGroupNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
}
}
| |
// ***********************************************************************
// Copyright (c) 2007 Charlie Poole
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System;
using System.Collections;
using NUnit.TestUtilities.Collections;
namespace NUnit.Framework.Assertions
{
/// <summary>
/// Verifies NUnit's equality assertions for arrays: numeric type coercion,
/// declared-type differences, multi-dimensional, jagged and nested arrays,
/// array/collection comparisons and (where the runtime supports it)
/// ArraySegment handling. Most tests exercise both the classic syntax
/// (Assert.AreEqual) and the constraint syntax (Expect/EqualTo).
/// </summary>
[TestFixture]
public class ArrayEqualsFixture : AssertionHelper
{
#pragma warning disable 183, 184 // error number varies in different runtimes
    // Used to detect runtimes where ArraySegments implement IEnumerable
    private static readonly bool ArraySegmentImplementsIEnumerable = new ArraySegment<int>() is IEnumerable;
#pragma warning restore 183, 184

    [Test]
    public void ArrayIsEqualToItself()
    {
        string[] array = { "one", "two", "three" };
        Assert.That( array, Is.SameAs(array) );
        Assert.AreEqual( array, array );
        Expect(array, EqualTo(array));
    }

    [Test]
    public void ArraysOfString()
    {
        // Distinct instances with equal contents: reference-unequal, value-equal.
        string[] array1 = { "one", "two", "three" };
        string[] array2 = { "one", "two", "three" };
        Assert.IsFalse( array1 == array2 );
        Assert.AreEqual(array1, array2);
        Expect(array1, EqualTo(array2));
        Assert.AreEqual(array2, array1);
        Expect(array2, EqualTo(array1));
    }

    [Test]
    public void ArraysOfInt()
    {
        int[] a = new int[] { 1, 2, 3 };
        int[] b = new int[] { 1, 2, 3 };
        Assert.AreEqual(a, b);
        Assert.AreEqual(b, a);
        Expect(a, EqualTo(b));
        Expect(b, EqualTo(a));
    }

    [Test]
    public void ArraysOfDouble()
    {
        double[] a = new double[] { 1.0, 2.0, 3.0 };
        double[] b = new double[] { 1.0, 2.0, 3.0 };
        Assert.AreEqual(a, b);
        Assert.AreEqual(b, a);
        Expect(a, EqualTo(b));
        Expect(b, EqualTo(a));
    }

    [Test]
    public void ArraysOfDecimal()
    {
        decimal[] a = new decimal[] { 1.0m, 2.0m, 3.0m };
        decimal[] b = new decimal[] { 1.0m, 2.0m, 3.0m };
        Assert.AreEqual(a, b);
        Assert.AreEqual(b, a);
        Expect(a, EqualTo(b));
        Expect(b, EqualTo(a));
    }

    [Test]
    public void ArrayOfIntAndArrayOfDouble()
    {
        // Cross-element-type comparison: 1 should compare equal to 1.0, etc.
        int[] a = new int[] { 1, 2, 3 };
        double[] b = new double[] { 1.0, 2.0, 3.0 };
        Assert.AreEqual(a, b);
        Assert.AreEqual(b, a);
        Expect(a, EqualTo(b));
        Expect(b, EqualTo(a));
    }

    [Test]
    public void ArraysDeclaredAsDifferentTypes()
    {
        // The declared array type (string[] vs object[]) must not matter.
        string[] array1 = { "one", "two", "three" };
        object[] array2 = { "one", "two", "three" };
        Assert.AreEqual( array1, array2, "String[] not equal to Object[]" );
        Assert.AreEqual( array2, array1, "Object[] not equal to String[]" );
        Expect(array1, EqualTo(array2), "String[] not equal to Object[]");
        Expect(array2, EqualTo(array1), "Object[] not equal to String[]");
    }

    [Test]
    public void ArraysOfMixedTypes()
    {
        DateTime now = DateTime.Now;
        object[] array1 = new object[] { 1, 2.0f, 3.5d, 7.000m, "Hello", now };
        object[] array2 = new object[] { 1.0d, 2, 3.5, 7, "Hello", now };
        Assert.AreEqual( array1, array2 );
        Assert.AreEqual(array2, array1);
        Expect(array1, EqualTo(array2));
        Expect(array2, EqualTo(array1));
    }

    [Test]
    public void DoubleDimensionedArrays()
    {
        int[,] a = new int[,] { { 1, 2, 3 }, { 4, 5, 6 }, { 7, 8, 9 } };
        int[,] b = new int[,] { { 1, 2, 3 }, { 4, 5, 6 }, { 7, 8, 9 } };
        Assert.AreEqual(a, b);
        Assert.AreEqual(b, a);
        Expect(a, EqualTo(b));
        Expect(b, EqualTo(a));
    }

    [Test]
    public void TripleDimensionedArrays()
    {
        int[, ,] expected = new int[,,] { { { 1, 2 }, { 3, 4 } }, { { 5, 6 }, { 7, 8 } } };
        int[,,] actual = new int[,,] { { { 1, 2 }, { 3, 4 } }, { { 5, 6 }, { 7, 8 } } };
        Assert.AreEqual(expected, actual);
        Expect(actual, EqualTo(expected));
    }

    [Test]
    public void FiveDimensionedArrays()
    {
        int[, , , ,] expected = new int[2, 2, 2, 2, 2] { { { { { 1, 2 }, { 3, 4 } }, { { 5, 6 }, { 7, 8 } } }, { { { 1, 2 }, { 3, 4 } }, { { 5, 6 }, { 7, 8 } } } }, { { { { 1, 2 }, { 3, 4 } }, { { 5, 6 }, { 7, 8 } } }, { { { 1, 2 }, { 3, 4 } }, { { 5, 6 }, { 7, 8 } } } } };
        int[, , , ,] actual = new int[2, 2, 2, 2, 2] { { { { { 1, 2 }, { 3, 4 } }, { { 5, 6 }, { 7, 8 } } }, { { { 1, 2 }, { 3, 4 } }, { { 5, 6 }, { 7, 8 } } } }, { { { { 1, 2 }, { 3, 4 } }, { { 5, 6 }, { 7, 8 } } }, { { { 1, 2 }, { 3, 4 } }, { { 5, 6 }, { 7, 8 } } } } };
        Assert.AreEqual(expected, actual);
        Expect(actual, EqualTo(expected));
    }

    [Test]
    public void ArraysOfArrays()
    {
        int[][] a = new int[][] { new int[] { 1, 2, 3 }, new int[] { 4, 5, 6 }, new int[] { 7, 8, 9 } };
        int[][] b = new int[][] { new int[] { 1, 2, 3 }, new int[] { 4, 5, 6 }, new int[] { 7, 8, 9 } };
        Assert.AreEqual(a, b);
        Assert.AreEqual(b, a);
        Expect(a, EqualTo(b));
        Expect(b, EqualTo(a));
    }

    [Test]
    public void JaggedArrays()
    {
        // Inner arrays of unequal lengths.
        int[][] expected = new int[][] { new int[] { 1, 2, 3 }, new int[] { 4, 5, 6, 7 }, new int[] { 8, 9 } };
        int[][] actual = new int[][] { new int[] { 1, 2, 3 }, new int[] { 4, 5, 6, 7 }, new int[] { 8, 9 } };
        Assert.AreEqual(expected, actual);
        Expect(actual, EqualTo(expected));
    }

    [Test]
    public void ArraysPassedAsObjects()
    {
        // Arrays hidden behind object references must still compare element-wise.
        object a = new int[] { 1, 2, 3 };
        object b = new double[] { 1.0, 2.0, 3.0 };
        Assert.AreEqual(a, b);
        Assert.AreEqual(b, a);
        Expect(a, EqualTo(b));
        Expect(b, EqualTo(a));
    }

    [Test]
    public void ArrayAndCollection()
    {
        int[] a = new int[] { 1, 2, 3 };
        ICollection b = new SimpleObjectCollection( 1, 2, 3 );
        Assert.AreEqual(a, b);
        Assert.AreEqual(b, a);
        Expect(a, EqualTo(b));
        Expect(b, EqualTo(a));
    }

    [Test]
    public void ArraysWithDifferentRanksComparedAsCollection()
    {
        // Different ranks are unequal as arrays, but equal when compared
        // element-by-element via AsCollection.
        int[] expected = new int[] { 1, 2, 3, 4 };
        int[,] actual = new int[,] { { 1, 2 }, { 3, 4 } };
        Assert.AreNotEqual(expected, actual);
        Expect(actual, Not.EqualTo(expected));
        Expect(actual, EqualTo(expected).AsCollection);
    }

    [Test]
    public void ArraysWithDifferentDimensionsMatchedAsCollection()
    {
        // Same rank and element sequence, different dimension lengths.
        int[,] expected = new int[,] { { 1, 2, 3 }, { 4, 5, 6 } };
        int[,] actual = new int[,] { { 1, 2 }, { 3, 4 }, { 5, 6 } };
        Assert.AreNotEqual(expected, actual);
        Expect(actual, Not.EqualTo(expected));
        Expect(actual, EqualTo(expected).AsCollection);
    }

#if !NETCF && !SILVERLIGHT && !PORTABLE
    // Shared backing store for the ArraySegment tests below.
    private static int[] underlyingArray = new int[] { 1, 2, 3, 4, 5 };

    [Test]
    public void ArraySegmentAndArray()
    {
        Assume.That(ArraySegmentImplementsIEnumerable);
        Assert.That(new ArraySegment<int>(underlyingArray, 1, 3), Is.EqualTo(new int[] { 2, 3, 4 }));
    }

    [Test]
    public void EmptyArraySegmentAndArray()
    {
        Assume.That(ArraySegmentImplementsIEnumerable);
        Assert.That(new ArraySegment<int>(), Is.Not.EqualTo(new int[] { 2, 3, 4 }));
    }

    [Test]
    public void ArrayAndArraySegment()
    {
        Assume.That(ArraySegmentImplementsIEnumerable);
        Assert.That(new int[] { 2, 3, 4 }, Is.EqualTo(new ArraySegment<int>(underlyingArray, 1, 3)));
    }

    [Test]
    public void ArrayAndEmptyArraySegment()
    {
        Assume.That(ArraySegmentImplementsIEnumerable);
        Assert.That(new int[] { 2, 3, 4 }, Is.Not.EqualTo(new ArraySegment<int>()));
    }

    [Test]
    public void TwoArraySegments()
    {
        Assume.That(ArraySegmentImplementsIEnumerable);
        Assert.That(new ArraySegment<int>(underlyingArray, 1, 3), Is.EqualTo(new ArraySegment<int>(underlyingArray, 1, 3)));
    }

    [Test]
    public void TwoEmptyArraySegments()
    {
        Assume.That(ArraySegmentImplementsIEnumerable);
        Assert.That(new ArraySegment<int>(), Is.EqualTo(new ArraySegment<int>()));
    }
#endif
}
}
| |
// (c) Copyright Esri, 2010 - 2013
// This source is subject to the Apache 2.0 License.
// Please see http://www.apache.org/licenses/LICENSE-2.0.html for details.
// All other rights reserved.
using System;
using System.Collections.Generic;
using System.Text;
using System.Runtime.InteropServices;
using ESRI.ArcGIS.Geoprocessing;
using ESRI.ArcGIS.Geodatabase;
using System.Resources;
using ESRI.ArcGIS.esriSystem;
using Microsoft.Win32;
using System.IO;
using System.Xml;
using System.Reflection;
namespace ESRI.ArcGIS.OSM.GeoProcessing
{
[Guid("5c1a4cc1-5cf3-474d-a209-d6cd05e0effc")]
[ClassInterface(ClassInterfaceType.None)]
[ProgId("OSMEditor.OSMGPFactory")]
public sealed class OSMGPFactory : ESRI.ArcGIS.Geoprocessing.IGPFunctionFactory
{
//private static readonly OSMGPFactory factoryInstance = new OSMGPFactory();

// Factory identity. The string values below are English defaults; the
// constructor overwrites them from the OSMGPToolsStrings resources when the
// resource lookup succeeds.
string m_FactoryName = "OpenStreetMap Tools";
string m_FactoryAlias = "OSMTools";
ResourceManager resourceManager = null;
string m_DataManagementCategory = "Data Management";

// Combine Layers tool
string m_CombineLayersDisplayName = "Combine Layers";
internal const string m_CombineLayersName = "GPCombineLayers";
string m_CombineLayersDescription = "Combines multiple layers into a single group layer.";
string m_Category = "OpenStreetMap Tools";

// Copy Layer Extensions tool
string m_CopyLayerExtensionDisplayName = "Copy Layer Extensions";
internal const string m_CopyLayerExtensionName = "GPCopyLayerExtensions";
string m_CopyLayerExtensionDescription = "Copies all layer extensions from the source layer to the target layer.";

// Add OSM Editor Extension tool
string m_AddExtensionDisplayName = "Add OSM Editor Extension";
internal const string m_AddExtensionName = "OSMGPAddExtension";
string m_AddExtensionDescription = "Adds the OSM feature class extension.";

// OSM Attribute Selector tool
string m_AttributeSelectorDisplayName = "OSM Attribute Selector";
internal const string m_AttributeSelectorName = "OSMGPAttributeSelector";
string m_AttributeSelectorDescription = "Selects tags from the tag collection and stores them as individual attributes in the feature class.";

// Combine OSM Attributes tool
string m_CombineAttributesDisplayName = "Combine OSM Attributes";
internal const string m_CombineAttributesName = "OSMGPCombineAttributes";
string m_CombineAttributesCategory = "Data Management";
string m_CombineAttributesDescription = "Combines attributes to a collection of OSM tags.";

// Load OSM Diff Files tool
string m_DiffLoaderDisplayName = "Load OSM Diff Files";
internal const string m_DiffLoaderName = "OSMGPDiffLoader";
string m_DiffLoaderCategory = "OpenStreetMap Tools";
string m_DiffLoaderDescription = "Loads OSM Planet Diff Files.";

// Download OSM Data tool
string m_DownloadDataDisplayName = "Download OSM Data";
internal const string m_DownloadDataName = "OSMGPDownload";
string m_DownloadDataCategory = "OpenStreetMap Tools";
string m_DownloadDataDescription = "Downloads data from a specified OSM Server";

// Export to OSM file tool
string m_Export2OSMDisplayName = "Export to OSM file";
internal const string m_Export2OSMName = "OSMGPExport2OSM";
string m_Export2OSMCategory = "OpenStreetMap Tools";
string m_Export2OSMDescription = "Export OpenStreetMap feature classes into the OSM XML format.";

// Feature Comparison tool
string m_FeatureComparisonDisplayName = "Feature Comparison";
internal const string m_FeatureComparisonName = "OSMGPFeatureComparison";
string m_FeatureComparisonCategory = "OpenStreetMap Tools";
string m_FeatureComparisonDescription = "Allows the comparison of two feature classes to discover geometric similarities.";

// Load OSM File tool
string m_FileLoaderDisplayName = "Load OSM File";
internal const string m_FileLoaderName = "OSMGPFileLoader";
string m_FileLoaderCategory = "OpenStreetMap Tools";
string m_FileLoaderDescription = "Loads stand-alone OSM file (planet files, etc.) into a geodatabase.";

// Upload OSM Data tool
string m_UploadDataDisplayName = "Upload OSM Data";
internal const string m_UploadDataName = "OSMGPUpload";
string m_UploadDataCategory = "OpenStreetMap Tools";
string m_UploadDataDescription = "Uploads current edits to the specified OSM server.";

// Remove OSM Editor Extension tool
string m_RemoveExtensionDisplayName = "Remove OSM Editor Extension";
internal const string m_RemoveExtensionName = "OSMGPRemoveExtension";
string m_RemoveExtensionCategory = "Data Management";
string m_RemoveExtensionDescription = "Removes the OSM feature class extension.";

// OSM Feature Symbolizer tool (not published in GetFunctionNames)
string m_FeatureSymbolizerDisplayName = "OSM Feature Symbolizer";
internal const string m_FeatureSymbolizerName = "OSMGPSymbolizer";
string m_FeatureSymbolizerCategory = "Data Management";
string m_FeatureSymbolizerDescription = "Assigns a set of predefined symbology and prepares the edit templates.";

// Create OSM Network Dataset tool
string m_CreateNetworkDatasetDisplayName = "Create OSM Network Dataset";
internal const string m_CreateNetworkDatasetName = "OSMGPCreateNetworkDataset";
string m_CreateNetworkDatasetCategory = "Data Management";
string m_CreateNetworkDatasetDescription = "Create a network dataset from a given OSM dataset.";
#region "Component Category Registration"
[ComRegisterFunction()]
[ComVisible(false)]
static void RegisterFunction(Type registerType)
{
    // Required for ArcGIS Component Category Registrar support
    ArcGISCategoryRegistration(registerType);
}

[ComUnregisterFunction()]
[ComVisible(false)]
static void UnregisterFunction(Type registerType)
{
    // Required for ArcGIS Component Category Registrar support
    ArcGISCategoryUnregistration(registerType);
}

#region ArcGIS Component Category Registrar generated code
/// <summary>
/// Required method for ArcGIS Component Category registration -
/// Do not modify the contents of this method with the code editor.
/// </summary>
private static void ArcGISCategoryRegistration(Type registerType)
{
    // Substring(18) strips the leading "HKEY_CLASSES_ROOT\" (18 chars) because
    // Registry.ClassesRoot is already rooted there.
    // NOTE(review): {FD939A4A-955D-4094-B440-77083E410F41} is presumably the
    // ArcGIS "GP Function Factories" component category -- confirm against the
    // ESRI component-category registration documentation.
    string regKey = string.Format("HKEY_CLASSES_ROOT\\CLSID\\{{{0}}}", registerType.GUID);
    Registry.ClassesRoot.CreateSubKey(regKey.Substring(18) + "\\Implemented Categories\\{FD939A4A-955D-4094-B440-77083E410F41}");
}

/// <summary>
/// Required method for ArcGIS Component Category unregistration -
/// Do not modify the contents of this method with the code editor.
/// </summary>
private static void ArcGISCategoryUnregistration(Type registerType)
{
    string regKey = string.Format("HKEY_CLASSES_ROOT\\CLSID\\{{{0}}}", registerType.GUID);
    Registry.ClassesRoot.DeleteSubKey(regKey.Substring(18) + "\\Implemented Categories\\{FD939A4A-955D-4094-B440-77083E410F41}");
}
#endregion
#endregion
/// <summary>
/// Initializes the factory, replacing the hard-coded English tool metadata
/// with localized strings from the OSMGPToolsStrings resources. Any failure
/// during resource lookup is logged to the debugger and the English defaults
/// declared on the fields remain in effect.
/// </summary>
public OSMGPFactory()
{
    try
    {
        resourceManager = new ResourceManager("ESRI.ArcGIS.OSM.GeoProcessing.OSMGPToolsStrings", this.GetType().Assembly);
        m_FactoryName = resourceManager.GetString("GPTools_factoryname");
        m_Category = resourceManager.GetString("GPTools_OSMGPAttributeSelector_categoryName");
        m_CombineLayersDisplayName = resourceManager.GetString("GPTools_GPCombineLayers_displayname");
        m_CombineLayersDescription = resourceManager.GetString("GPTools_GPCombineLayers_desc");
        // BUG FIX: the "_displayname" and "_desc" resource keys were swapped here,
        // so the tool's display name showed the description text and vice versa.
        m_CopyLayerExtensionDisplayName = resourceManager.GetString("GPTools_GPCopyLayerExtension_displayname");
        m_CopyLayerExtensionDescription = resourceManager.GetString("GPTools_GPCopyLayerExtension_desc");
        m_DataManagementCategory = resourceManager.GetString("GPTools_GPCopyLayerExtension_categoryname");
        m_AddExtensionDisplayName = resourceManager.GetString("GPTools_OSMGPAddExtension_displayName");
        m_AddExtensionDescription = resourceManager.GetString("GPTools_OSMGPAddExtension_desc");
        m_AttributeSelectorDisplayName = resourceManager.GetString("GPTools_OSMGPAttributeSelector_displayName");
        m_AttributeSelectorDescription = resourceManager.GetString("GPTools_OSMGPAttributeSelector_desc");
        m_CombineAttributesCategory = resourceManager.GetString("GPTools_OSMGPAttributeSelector_categoryName");
        m_CombineAttributesDisplayName = resourceManager.GetString("GPTools_OSMGPCombineAttributes_displayName");
        m_CombineAttributesDescription = resourceManager.GetString("GPTools_OSMGPCombineAttributes_desc");
        m_DiffLoaderCategory = resourceManager.GetString("GPTools_OSMGPDownload_categoryName");
        m_DiffLoaderDisplayName = resourceManager.GetString("GPTools_OSMGPDiffLoader_displayname");
        m_DiffLoaderDescription = resourceManager.GetString("GPTools_OSMGPDiffLoader_desc");
        m_DownloadDataCategory = resourceManager.GetString("GPTools_OSMGPDownload_categoryName");
        m_DownloadDataDisplayName = resourceManager.GetString("GPTools_OSMGPDownload_displayName");
        m_DownloadDataDescription = resourceManager.GetString("GPTools_OSMGPDownload_desc");
        m_Export2OSMCategory = resourceManager.GetString("GPTools_OSMGPAttributeSelector_categoryName");
        m_Export2OSMDisplayName = resourceManager.GetString("GPTools_OSMGPExport2OSM_displayname");
        m_Export2OSMDescription = resourceManager.GetString("GPTools_OSMGPExport2OSM_desc");
        m_FeatureComparisonCategory = resourceManager.GetString("GPTools_OSMGPDownload_categoryName");
        m_FeatureComparisonDisplayName = resourceManager.GetString("GPTools_OSMGPFeatureComparison_displayname");
        m_FeatureComparisonDescription = resourceManager.GetString("GPTools_OSMGPFeatureComparison_desc");
        m_FileLoaderCategory = resourceManager.GetString("GPTools_OSMGPDownload_categoryName");
        // NOTE(review): the key really is spelled "diplayname" -- it must match the
        // shipped .resx; do not "fix" the spelling here without updating the resources.
        m_FileLoaderDisplayName = resourceManager.GetString("GPTools_OSMGPFileReader_diplayname");
        m_FileLoaderDescription = resourceManager.GetString("GPTools_OSMGPFileReader_desc");
        m_UploadDataCategory = resourceManager.GetString("GPTools_OSMGPDownload_categoryName");
        m_UploadDataDisplayName = resourceManager.GetString("GPTools_OSMGPUpload_displayName");
        m_UploadDataDescription = resourceManager.GetString("GPTools_OSMGPUpload_desc");
        m_RemoveExtensionCategory = resourceManager.GetString("GPTools_OSMGPAttributeSelector_categoryName");
        m_RemoveExtensionDisplayName = resourceManager.GetString("GPTools_OSMGPRemoveExtension_displayName");
        m_RemoveExtensionDescription = resourceManager.GetString("GPTools_OSMGPRemoveExtension_desc");
        m_FeatureSymbolizerCategory = resourceManager.GetString("GPTools_OSMGPAttributeSelector_categoryName");
        m_FeatureSymbolizerDisplayName = resourceManager.GetString("GPTools_OSMGPSymbolizer_displayName");
        m_FeatureSymbolizerDescription = resourceManager.GetString("GPTools_OSMGPSymbolizer_desc");
        m_CreateNetworkDatasetCategory = resourceManager.GetString("GPTools_OSMGPAttributeSelector_categoryName");
        m_CreateNetworkDatasetDisplayName = resourceManager.GetString("GPTools_OSMGPCreateNetworkDataset_displayname");
        m_CreateNetworkDatasetDescription = resourceManager.GetString("GPTools_OSMGPCreateNetworkDataset_desc");
    }
    catch (Exception ex)
    {
        // Best effort: keep the English defaults if localization fails.
        System.Diagnostics.Debug.WriteLine(ex.Message);
    }
}
#region "IGPFunctionFactory Implementations"
/// <summary>
/// The (non-localized) alias of this geoprocessing function factory.
/// </summary>
public string Alias
{
    get
    {
        return m_FactoryAlias;
    }
}
/// <summary>
/// The COM class id of this factory; matches the [Guid] attribute on the class.
/// </summary>
public ESRI.ArcGIS.esriSystem.UID CLSID
{
    get
    {
        UID gpFactory = new UIDClass();
        gpFactory.Value = "{5c1a4cc1-5cf3-474d-a209-d6cd05e0effc}";
        return gpFactory;
    }
}
/// <summary>
/// Creates a new geoprocessing function instance for the given tool name.
/// Returns null for names this factory does not recognize.
/// </summary>
public ESRI.ArcGIS.Geoprocessing.IGPFunction GetFunction(string Name)
{
    switch (Name)
    {
        case m_DownloadDataName:
            return new OSMGPDownload() as IGPFunction;
        case m_UploadDataName:
            return new OSMGPUpload() as IGPFunction;
        case m_AttributeSelectorName:
            return new OSMGPAttributeSelector() as IGPFunction;
        case m_FeatureSymbolizerName:
            return new OSMGPSymbolizer() as IGPFunction;
        case m_AddExtensionName:
            return new OSMGPAddExtension() as IGPFunction;
        case m_RemoveExtensionName:
            return new OSMGPRemoveExtension() as IGPFunction;
        case m_FileLoaderName:
            return new OSMGPFileLoader() as IGPFunction;
        case m_CombineLayersName:
            return new GPCombineLayers() as IGPFunction;
        case m_CombineAttributesName:
            return new OSMGPCombineAttributes() as IGPFunction;
        case m_CopyLayerExtensionName:
            return new GPCopyLayerExtensions() as IGPFunction;
        case m_DiffLoaderName:
            return new OSMGPDiffLoader() as IGPFunction;
        case m_Export2OSMName:
            return new OSMGPExport2OSM() as IGPFunction;
        case m_FeatureComparisonName:
            return new OSMGPFeatureComparison() as IGPFunction;
        case m_CreateNetworkDatasetName:
            return new OSMGPCreateNetworkDataset() as IGPFunction;
        default:
            // Unknown tool name.
            return null;
    }
}
/// <summary>
/// This factory publishes no custom geoprocessing environments.
/// </summary>
public ESRI.ArcGIS.Geoprocessing.IEnumGPEnvironment GetFunctionEnvironments()
{
    // default(interface type) is null.
    return null;
}
/// <summary>
/// Builds the geoprocessing name object (name, display name, category and
/// description) for the given tool name, or returns null for unknown names.
/// (Also fixes a stray double semicolon that used to follow the
/// RemoveExtension case's break statement.)
/// </summary>
public ESRI.ArcGIS.Geodatabase.IGPName GetFunctionName(string Name)
{
    string displayName;
    string category;
    string description;
    switch (Name)
    {
        case m_DownloadDataName:
            displayName = m_DownloadDataDisplayName;
            category = m_DownloadDataCategory;
            description = m_DownloadDataDescription;
            break;
        case m_UploadDataName:
            displayName = m_UploadDataDisplayName;
            category = m_UploadDataCategory;
            description = m_UploadDataDescription;
            break;
        case m_AttributeSelectorName:
            displayName = m_AttributeSelectorDisplayName;
            category = m_DataManagementCategory;
            description = m_AttributeSelectorDescription;
            break;
        // The feature symbolizer tool (m_FeatureSymbolizerName) is intentionally
        // not published; its case was commented out in the original code.
        case m_AddExtensionName:
            displayName = m_AddExtensionDisplayName;
            category = m_DataManagementCategory;
            description = m_AddExtensionDescription;
            break;
        case m_RemoveExtensionName:
            displayName = m_RemoveExtensionDisplayName;
            category = m_RemoveExtensionCategory;
            description = m_RemoveExtensionDescription;
            break;
        case m_FileLoaderName:
            displayName = m_FileLoaderDisplayName;
            category = m_FileLoaderCategory;
            description = m_FileLoaderDescription;
            break;
        case m_CombineLayersName:
            displayName = m_CombineLayersDisplayName;
            category = m_DataManagementCategory;
            description = m_CombineLayersDescription;
            break;
        case m_CombineAttributesName:
            displayName = m_CombineAttributesDisplayName;
            category = m_DataManagementCategory;
            description = m_CombineAttributesDescription;
            break;
        case m_CopyLayerExtensionName:
            displayName = m_CopyLayerExtensionDisplayName;
            category = m_DataManagementCategory;
            description = m_CopyLayerExtensionDescription;
            break;
        case m_DiffLoaderName:
            displayName = m_DiffLoaderDisplayName;
            category = m_DiffLoaderCategory;
            description = m_DiffLoaderDescription;
            break;
        case m_Export2OSMName:
            displayName = m_Export2OSMDisplayName;
            category = m_Export2OSMCategory;
            description = m_Export2OSMDescription;
            break;
        case m_FeatureComparisonName:
            displayName = m_FeatureComparisonDisplayName;
            category = m_FeatureComparisonCategory;
            description = m_FeatureComparisonDescription;
            break;
        case m_CreateNetworkDatasetName:
            displayName = m_CreateNetworkDatasetDisplayName;
            category = m_CreateNetworkDatasetCategory;
            description = m_CreateNetworkDatasetDescription;
            break;
        default:
            // Unknown tool name.
            return null;
    }
    IGPName toolGPName = new GPFunctionNameClass() as IGPName;
    toolGPName.Factory = (IGPFunctionFactory)this;
    // Each case constant equals the incoming Name, so assign it once here.
    toolGPName.Name = Name;
    toolGPName.DisplayName = displayName;
    toolGPName.Category = category;
    toolGPName.Description = description;
    return toolGPName;
}
/// <summary>
/// Enumerates the name objects of all tools published by this factory.
/// The feature symbolizer and copy-layer-extension tools are deliberately
/// left out, as in the original hand-written list.
/// </summary>
public ESRI.ArcGIS.Geodatabase.IEnumGPName GetFunctionNames()
{
    // Registration order is preserved exactly.
    string[] publishedTools =
    {
        m_DownloadDataName,
        m_UploadDataName,
        m_AttributeSelectorName,
        m_AddExtensionName,
        m_RemoveExtensionName,
        m_FileLoaderName,
        m_CombineLayersName,
        m_CombineAttributesName,
        m_DiffLoaderName,
        m_FeatureComparisonName,
        m_Export2OSMName,
        m_CreateNetworkDatasetName
    };
    IArray allGPFunctionNames = new EnumGPNameClass();
    foreach (string toolName in publishedTools)
    {
        allGPFunctionNames.Add(this.GetFunctionName(toolName));
    }
    return (IEnumGPName)allGPFunctionNames;
}
/// <summary>
/// The (possibly localized) display name of this function factory.
/// </summary>
public string Name
{
    get
    {
        return m_FactoryName;
    }
}
#endregion
/// <summary>
/// Reads the ArcGIS runtime installation directory from the installed-runtimes
/// information exposed by ESRI.ArcGIS.RuntimeManager, preferring Desktop,
/// then Engine, then Server.
/// </summary>
/// <returns>The installation directory, or String.Empty when none is found
/// or the runtime information cannot be read.</returns>
public static string GetArcGIS10InstallLocation()
{
    string foundInstallationDirectory = String.Empty;
    try
    {
        IEnumerable<ESRI.ArcGIS.RuntimeInfo> installedRuntimes = ESRI.ArcGIS.RuntimeManager.InstalledRuntimes;
        // Same precedence as before: Desktop, then Engine, then Server.
        string installationDirectory = FindRuntimePath(installedRuntimes, "Desktop")
            ?? FindRuntimePath(installedRuntimes, "Engine")
            ?? FindRuntimePath(installedRuntimes, "Server");
        if (installationDirectory != null)
            foundInstallationDirectory = installationDirectory;
    }
    catch
    {
        // Best effort: any failure reading runtime information yields String.Empty.
    }
    return foundInstallationDirectory;
}

/// <summary>
/// Returns the path of the last installed runtime whose product name matches
/// <paramref name="productName"/> (case-insensitive), or null when none matches.
/// Keeps the last match to preserve the behavior of the original loops.
/// </summary>
private static string FindRuntimePath(IEnumerable<ESRI.ArcGIS.RuntimeInfo> runtimes, string productName)
{
    string path = null;
    foreach (RuntimeInfo item in runtimes)
    {
        // BUG FIX: the old String.Compare(a, b, true) used the current culture,
        // which can mismatch fixed identifiers like "Engine" (e.g. under tr-TR);
        // product names are not user text, so compare ordinally.
        if (String.Equals(item.Product.ToString(), productName, StringComparison.OrdinalIgnoreCase))
            path = item.Path;
    }
    return path;
}
/// <summary>
/// Reads the OSM editor configuration (server URL, diffs URL, domain and
/// feature-property file locations) from osmeditor.config, filling in
/// built-in defaults for any missing entry. Never throws; on any error the
/// settings collected so far are returned.
/// </summary>
public static Dictionary<string, string> ReadOSMEditorSettings()
{
    Dictionary<string, string> configurationSettings = new Dictionary<string, string>();
    try
    {
        // Preferred location: the per-user application data folder.
        string osmEditorFolder = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + System.IO.Path.DirectorySeparatorChar + "ESRI" + System.IO.Path.DirectorySeparatorChar + "OSMEditor";
        string configurationfile = osmEditorFolder + System.IO.Path.DirectorySeparatorChar + "osmeditor.config";
        // Fallback: the (server) web install location.
        if (!File.Exists(configurationfile))
            configurationfile = "c:\\inetpub\\wwwroot\\osm\\" + "osmeditor.config";
        // Setting the path for when the code runs inside VS2010 without being installed.
        if (!File.Exists(configurationfile))
        {
            configurationfile = System.AppDomain.CurrentDomain.BaseDirectory + "osmeditor.config";
            osmEditorFolder = System.AppDomain.CurrentDomain.BaseDirectory;
        }
        if (File.Exists(configurationfile))
        {
            using (XmlReader configurationFileReader = XmlReader.Create(configurationfile))
            {
                while (configurationFileReader.Read())
                {
                    if (configurationFileReader.IsStartElement())
                    {
                        switch (configurationFileReader.Name)
                        {
                            case "osmbaseurl":
                                configurationFileReader.Read();
                                configurationSettings.Add("osmbaseurl", configurationFileReader.Value.Trim());
                                break;
                            case "osmdomainsfilepath":
                                configurationFileReader.Read();
                                configurationSettings.Add("osmdomainsfilepath", configurationFileReader.Value.Trim());
                                break;
                            case "osmfeaturepropertiesfilepath":
                                configurationFileReader.Read();
                                configurationSettings.Add("osmfeaturepropertiesfilepath", configurationFileReader.Value.Trim());
                                break;
                            default:
                                break;
                        }
                    }
                }
            }
        }
        //else
        //    throw new Exception("Config file missing osmeditor.config at osm directory");
        if (configurationSettings.ContainsKey("osmbaseurl") == false)
        {
            // let's start with the very first default settings
            configurationSettings.Add("osmbaseurl", "http://www.openstreetmap.org");
        }
        if (configurationSettings.ContainsKey("osmdiffsurl") == false)
        {
            // let's start with the very first default settings
            configurationSettings.Add("osmdiffsurl", "http://planet.openstreetmap.org/replication");
        }
        string path = Assembly.GetExecutingAssembly().Location;
        FileInfo executingAssembly = new FileInfo(path);
        if (configurationSettings.ContainsKey("osmdomainsfilepath") == false)
        {
            // initialize with the default configuration files
            if (File.Exists(executingAssembly.Directory.FullName + System.IO.Path.DirectorySeparatorChar + "osm_domains.xml"))
            {
                configurationSettings.Add("osmdomainsfilepath", executingAssembly.Directory.FullName + System.IO.Path.DirectorySeparatorChar + "osm_domains.xml");
            }
        }
        if (configurationSettings.ContainsKey("osmfeaturepropertiesfilepath") == false)
        {
            if (File.Exists(executingAssembly.Directory.FullName + System.IO.Path.DirectorySeparatorChar + "OSMFeaturesProperties.xml"))
            {
                // BUG FIX: this default used to be stored under the misspelled key
                // "osmfeatureporpertiesfilepath", so the guard above never saw it
                // and readers of the correct key never found the default.
                configurationSettings.Add("osmfeaturepropertiesfilepath", executingAssembly.Directory.FullName + System.IO.Path.DirectorySeparatorChar + "OSMFeaturesProperties.xml");
            }
        }
    }
    catch { }
    return configurationSettings;
}
public static void StoreOSMEditorSettings(Dictionary<string, string> inputConfigurations)
{
try
{
string osmEditorFolder = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + System.IO.Path.DirectorySeparatorChar + "ESRI" + System.IO.Path.DirectorySeparatorChar + "OSMEditor";
if (Directory.Exists(osmEditorFolder) == false)
{
try
{
Directory.CreateDirectory(osmEditorFolder);
}
catch
{
return;
}
}
string configurationfile = osmEditorFolder + System.IO.Path.DirectorySeparatorChar + "osmeditor.config";
System.IO.FileStream configurationFileWriter = null;
try
{
if (File.Exists(configurationfile))
{
try
{
File.Delete(configurationfile);
}
catch { }
}
configurationFileWriter = File.Create(configurationfile);
MemoryStream memoryStream = new MemoryStream();
XmlWriterSettings xmlWriterSettings = new XmlWriterSettings();
xmlWriterSettings.Indent = true;
using (XmlWriter xmlWriter = XmlWriter.Create(memoryStream, xmlWriterSettings))
{
xmlWriter.WriteStartDocument();
xmlWriter.WriteStartElement("OSMEditor");
foreach (KeyValuePair<string, string> configurationItem in inputConfigurations)
{
xmlWriter.WriteElementString(configurationItem.Key, configurationItem.Value);
}
xmlWriter.WriteEndElement();
xmlWriter.WriteEndDocument();
xmlWriter.Close();
}
configurationFileWriter.Write(memoryStream.GetBuffer(), 0, Convert.ToInt32(memoryStream.Length));
memoryStream.Close();
}
catch { }
finally
{
if (configurationFileWriter != null)
{
configurationFileWriter.Close();
}
}
}
catch { }
}
}
}
| |
// SF API version v50.0
// Custom fields included: False
// Relationship objects included: True
using System;
using NetCoreForce.Client.Models;
using NetCoreForce.Client.Attributes;
using Newtonsoft.Json;
namespace NetCoreForce.Models
{
///<summary>
/// Holiday
///<para>SObject Name: Holiday</para>
///<para>Custom Object: False</para>
///</summary>
public class SfHoliday : SObject
{
[JsonIgnore]
public static string SObjectTypeName
{
get { return "Holiday"; }
}
///<summary>
/// Holiday ID
/// <para>Name: Id</para>
/// <para>SF Type: id</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "id")]
[Updateable(false), Createable(false)]
public string Id { get; set; }
///<summary>
/// Holiday Name
/// <para>Name: Name</para>
/// <para>SF Type: string</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "name")]
public string Name { get; set; }
///<summary>
/// Description
/// <para>Name: Description</para>
/// <para>SF Type: string</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "description")]
public string Description { get; set; }
///<summary>
/// All Day
/// <para>Name: IsAllDay</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "isAllDay")]
public bool? IsAllDay { get; set; }
///<summary>
/// Holiday Date
/// <para>Name: ActivityDate</para>
/// <para>SF Type: date</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "activityDate")]
public DateTime? ActivityDate { get; set; }
///<summary>
/// Start Time In Minutes From Midnight
/// <para>Name: StartTimeInMinutes</para>
/// <para>SF Type: int</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "startTimeInMinutes")]
public int? StartTimeInMinutes { get; set; }
///<summary>
/// End Time In Minutes From Midnight
/// <para>Name: EndTimeInMinutes</para>
/// <para>SF Type: int</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "endTimeInMinutes")]
public int? EndTimeInMinutes { get; set; }
///<summary>
/// Created Date
/// <para>Name: CreatedDate</para>
/// <para>SF Type: datetime</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "createdDate")]
[Updateable(false), Createable(false)]
public DateTimeOffset? CreatedDate { get; set; }
///<summary>
/// Created By ID
/// <para>Name: CreatedById</para>
/// <para>SF Type: reference</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "createdById")]
[Updateable(false), Createable(false)]
public string CreatedById { get; set; }
///<summary>
/// ReferenceTo: User
/// <para>RelationshipName: CreatedBy</para>
///</summary>
[JsonProperty(PropertyName = "createdBy")]
[Updateable(false), Createable(false)]
public SfUser CreatedBy { get; set; }
///<summary>
/// Last Modified Date
/// <para>Name: LastModifiedDate</para>
/// <para>SF Type: datetime</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "lastModifiedDate")]
[Updateable(false), Createable(false)]
public DateTimeOffset? LastModifiedDate { get; set; }
///<summary>
/// Last Modified By ID
/// <para>Name: LastModifiedById</para>
/// <para>SF Type: reference</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "lastModifiedById")]
[Updateable(false), Createable(false)]
public string LastModifiedById { get; set; }
///<summary>
/// ReferenceTo: User
/// <para>RelationshipName: LastModifiedBy</para>
///</summary>
[JsonProperty(PropertyName = "lastModifiedBy")]
[Updateable(false), Createable(false)]
public SfUser LastModifiedBy { get; set; }
///<summary>
/// System Modstamp
/// <para>Name: SystemModstamp</para>
/// <para>SF Type: datetime</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "systemModstamp")]
[Updateable(false), Createable(false)]
public DateTimeOffset? SystemModstamp { get; set; }
///<summary>
/// Recurring Holiday
/// <para>Name: IsRecurrence</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "isRecurrence")]
public bool? IsRecurrence { get; set; }
///<summary>
/// Recurrence Start
/// <para>Name: RecurrenceStartDate</para>
/// <para>SF Type: date</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "recurrenceStartDate")]
public DateTime? RecurrenceStartDate { get; set; }
///<summary>
/// Recurrence End
/// <para>Name: RecurrenceEndDateOnly</para>
/// <para>SF Type: date</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "recurrenceEndDateOnly")]
public DateTime? RecurrenceEndDateOnly { get; set; }
///<summary>
/// Recurrence Type
/// <para>Name: RecurrenceType</para>
/// <para>SF Type: picklist</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "recurrenceType")]
public string RecurrenceType { get; set; }
///<summary>
/// Recurrence Interval
/// <para>Name: RecurrenceInterval</para>
/// <para>SF Type: int</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "recurrenceInterval")]
public int? RecurrenceInterval { get; set; }
///<summary>
/// Recurrence Day of Week Mask
/// <para>Name: RecurrenceDayOfWeekMask</para>
/// <para>SF Type: int</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "recurrenceDayOfWeekMask")]
public int? RecurrenceDayOfWeekMask { get; set; }
///<summary>
/// Recurrence Day of Month
/// <para>Name: RecurrenceDayOfMonth</para>
/// <para>SF Type: int</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "recurrenceDayOfMonth")]
public int? RecurrenceDayOfMonth { get; set; }
///<summary>
/// Recurrence Instance
/// <para>Name: RecurrenceInstance</para>
/// <para>SF Type: picklist</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "recurrenceInstance")]
public string RecurrenceInstance { get; set; }
///<summary>
/// Recurrence Month of Year
/// <para>Name: RecurrenceMonthOfYear</para>
/// <para>SF Type: picklist</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "recurrenceMonthOfYear")]
public string RecurrenceMonthOfYear { get; set; }
}
}
| |
//! \file ArcWAG.cs
//! \date Tue Aug 11 08:28:28 2015
//! \brief Xuse/Eternal resource archive.
//
// Copyright (C) 2015-2016 by morkt
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using GameRes.Utility;
namespace GameRes.Formats.Xuse
{
    internal class WagArchive : ArcFile
    {
        // Per-archive XOR key used by WagOpener.OpenEntry to decrypt entry data.
        // Derived from the archive's embedded title string (see
        // WagOpener.IndexReader.ReadIndex, which exposes it as DataKey).
        public readonly byte[] Key;

        public WagArchive (ArcView arc, ArchiveFormat impl, ICollection<Entry> dir, byte[] key)
            : base (arc, impl, dir)
        {
            Key = key;
        }
    }
    [Export(typeof(ArchiveFormat))]
    public class WagOpener : ArchiveFormat
    {
        public override string Tag { get { return "WAG"; } }
        public override string Description { get { return "Xuse/Eternal resource archive"; } }
        public override uint Signature { get { return 0x40474157; } } // 'WAG@'
        public override bool IsHierarchic { get { return true; } }
        public override bool CanWrite { get { return false; } }
        public WagOpener ()
        {
            Extensions = new string[] { "wag", "4ag", "004" };
            Signatures = new uint[] { 0x40474157, 0x34464147 }; // 'GAF4'
        }
        /// <summary>
        /// Opens a WAG archive: versions 0x200 and 0x300 are supported; the entry
        /// count lives at fixed offset 0x46. Returns null when the header does not
        /// match or the index cannot be read.
        /// </summary>
        public override ArcFile TryOpen (ArcView file)
        {
            int version = file.View.ReadUInt16 (4);
            if (0x300 != version && 0x200 != version)
                return null;
            int count = file.View.ReadInt32 (0x46);
            if (!IsSaneCount (count))
                return null;
            var reader = new IndexReader (file, version, count);
            var dir = reader.ReadIndex();
            if (null == dir)
                return null;
            // Keep the derived data key so OpenEntry can decrypt entry contents.
            return new WagArchive (file, this, dir, reader.DataKey);
        }
        /// <summary>
        /// Returns a decrypted stream for an entry. Entry data is XOR-encrypted
        /// keyed on its absolute file offset (see Decrypt).
        /// </summary>
        public override Stream OpenEntry (ArcFile arc, Entry entry)
        {
            var warc = arc as WagArchive;
            if (null == warc)
                return arc.File.CreateStream (entry.Offset, entry.Size);
            var data = arc.File.View.ReadBytes (entry.Offset, entry.Size);
            Decrypt ((uint)entry.Offset, warc.Key, data);
            return new BinMemoryStream (data, entry.Name);
        }
        static private byte[] GenerateKey (byte[] keyword)
        {
            return GenerateKey (keyword, keyword.Length);
        }
        /// <summary>
        /// Derives a variable-length XOR key from the first <paramref name="length"/>
        /// bytes of <paramref name="keyword"/> via an additive/XOR hash. The key
        /// length is ((hash &amp; 0xFF) + 0x40) bytes; bytes 0-3 come from the hash
        /// plus two fixed constants (0x46, 0x88).
        /// </summary>
        static private byte[] GenerateKey (byte[] keyword, int length)
        {
            int hash = 0;
            for (int i = 0; i < length; ++i)
                hash = (((sbyte)keyword[i] + i) ^ hash) + length;
            int key_length = (hash & 0xFF) + 0x40;
            for (int i = 0; i < length; ++i)
                hash += (sbyte)keyword[i];
            // NOTE(review): key_length is post-decremented here, so the fill loop
            // below stops one byte short and the final key byte stays zero; Decrypt
            // likewise indexes modulo key.Length-1, never touching that byte. Looks
            // deliberate (matches the game's algorithm) -- confirm before changing.
            byte[] key = new byte[key_length--];
            key[1] = (byte)(hash >> 8);
            hash &= 0xF;
            key[0] = (byte)hash;
            key[2] = 0x46;
            key[3] = 0x88;
            for (int i = 4; i < key_length; ++i)
            {
                hash += (((sbyte)keyword[i % length] ^ hash) + i) & 0xFF;
                key[i] = (byte)hash;
            }
            return key;
        }
        static private void Decrypt (uint offset, byte[] key, byte[] index)
        {
            Decrypt (offset, key, index, 0, index.Length);
        }
        /// <summary>
        /// XORs <paramref name="length"/> bytes at index[pos] with the key, phased
        /// by the data's absolute file offset. Key bytes are selected modulo
        /// key.Length-1, so the key's last byte is never used (see GenerateKey).
        /// </summary>
        static private void Decrypt (uint offset, byte[] key, byte[] index, int pos, int length)
        {
            uint key_last = (uint)key.Length-1;
            for (uint i = 0; i < length; ++i)
                index[pos+i] ^= key[(offset + i) % key_last];
        }
        /// <summary>
        /// Reads and deobfuscates the WAG entry index. The index location and its
        /// decryption key are both derived from the archive's own file name; the
        /// per-entry data key is derived from the title string embedded at offset 6.
        /// </summary>
        internal class IndexReader
        {
            ArcView m_file;
            int m_version;
            int m_count;
            byte[] m_data_key;
            // Key for entry data decryption, valid after ReadIndex succeeds.
            public byte[] DataKey { get { return m_data_key; } }
            public IndexReader (ArcView file, int version, int count)
            {
                m_file = file;
                m_version = version;
                m_count = count;
            }
            // Scratch buffer reused by ReadChunk; grown on demand.
            byte[] m_chunk_buf = new byte[0x40];
            public List<Entry> ReadIndex ()
            {
                // Archive title (up to 0x40 bytes at offset 6, NUL-terminated)
                // seeds the entry-data key.
                byte[] title = m_file.View.ReadBytes (6, 0x40);
                int title_length = Array.IndexOf<byte> (title, 0);
                if (-1 == title_length)
                    title_length = title.Length;
                string arc_filename = Path.GetFileName (m_file.Name);
                // Version 0x300+ lower-cases the file name before hashing.
                if (0x200 != m_version)
                    arc_filename = arc_filename.ToLowerInvariant();
                string base_filename = Path.GetFileNameWithoutExtension (arc_filename);
                byte[] name_key = GenerateKey (Encodings.cp932.GetBytes (arc_filename));
                uint index_offset = 0x200 + (uint)name_key.Select (x => (int)x).Sum();
                // NOTE(review): this XOR/rotate pass runs twice back-to-back --
                // appears intentional (reproduces the game's offset obfuscation),
                // confirm before simplifying.
                for (int i = 0; i < name_key.Length; ++i)
                {
                    index_offset ^= name_key[i];
                    index_offset = Binary.RotR (index_offset, 1);
                }
                for (int i = 0; i < name_key.Length; ++i)
                {
                    index_offset ^= name_key[i];
                    index_offset = Binary.RotR (index_offset, 1);
                }
                index_offset %= 0x401;
                index_offset += 0x4A;
                // Index is a table of m_count little-endian uint32 entry offsets.
                byte[] index = m_file.View.ReadBytes (index_offset, (uint)(4*m_count));
                if (index.Length != 4*m_count)
                    return null;
                byte[] index_key = new byte[index.Length];
                for (int i = 0; i < index_key.Length; ++i)
                {
                    int v = name_key[(i+1) % name_key.Length] ^ (name_key[i % name_key.Length] + i);
                    index_key[i] = (byte)(m_count + v);
                }
                Decrypt (index_offset, index_key, index);
                m_data_key = GenerateKey (title, title_length);
                var dir = new List<Entry> (m_count);
                int current_offset = 0;
                uint next_offset = LittleEndian.ToUInt32 (index, current_offset);
                for (int i = 0; i < m_count; ++i)
                {
                    current_offset += 4;
                    uint entry_offset = next_offset;
                    if (entry_offset >= m_file.MaxOffset)
                        return null;
                    // Entry size is the distance to the next entry (or to EOF for
                    // the last entry).
                    if (i + 1 == m_count)
                        next_offset = (uint)m_file.MaxOffset;
                    else
                        next_offset = LittleEndian.ToUInt32 (index, current_offset);
                    uint entry_size = next_offset - entry_offset;
                    var entry = ParseEntry (entry_offset, entry_size);
                    // Entries without an embedded name get a synthetic one.
                    if (string.IsNullOrEmpty (entry.Name))
                        entry.Name = string.Format ("{0}#{1:D4}", base_filename, i);
                    dir.Add (entry);
                }
                return dir;
            }
            Entry ParseEntry (uint entry_offset, uint entry_size)
            {
                ReadChunk (entry_offset, 8);
                if (0x200 == m_version)
                    return ParseEntryV2 (entry_offset, entry_size);
                else
                    return ParseEntryV3 (entry_offset, entry_size);
            }
            // Reads chunk_size bytes at offset into m_chunk_buf and decrypts them
            // in place with the data key.
            void ReadChunk (uint offset, int chunk_size)
            {
                if (chunk_size > m_chunk_buf.Length)
                    m_chunk_buf = new byte[chunk_size];
                if (chunk_size != m_file.View.Read (offset, m_chunk_buf, 0, (uint)chunk_size))
                    throw new InvalidFormatException();
                Decrypt (offset, m_data_key, m_chunk_buf, 0, chunk_size);
            }
            // Version 0x200 entry: [data_size][name_length] header, then data,
            // then an optional '|'-terminated name after the data.
            Entry ParseEntryV2 (uint entry_offset, uint entry_size)
            {
                var data_size = LittleEndian.ToUInt32 (m_chunk_buf, 0);
                if (data_size >= entry_size)
                    throw new InvalidFormatException();
                var name_length = LittleEndian.ToInt32 (m_chunk_buf, 4);
                entry_offset += 0x10;
                var entry = new Entry();
                if (name_length > 0)
                {
                    ReadChunk (entry_offset+data_size, name_length);
                    if ('|' == m_chunk_buf[name_length-1])
                        --name_length;
                    entry.Name = Encodings.cp932.GetString (m_chunk_buf, 0, name_length);
                    entry.Type = FormatCatalog.Instance.GetTypeFromName (entry.Name);
                }
                entry.Offset = entry_offset;
                entry.Size = data_size;
                return entry;
            }
            // Version 0x300 entry: a 'DSET' container holding tagged chunks; the
            // first 'PICT' chunk supplies the image payload, 'FTAG' the file name.
            Entry ParseEntryV3 (uint entry_offset, uint entry_size)
            {
                if (!Binary.AsciiEqual (m_chunk_buf, "DSET"))
                    throw new InvalidFormatException();
                uint chunk_offset = entry_offset + 10;
                int chunk_count = LittleEndian.ToInt32 (m_chunk_buf, 4);
                var entry = new Entry();
                string filename = null;
                for (int chunk = 0; chunk < chunk_count; ++chunk)
                {
                    ReadChunk (chunk_offset, 8);
                    int chunk_size = LittleEndian.ToInt32 (m_chunk_buf, 4);
                    if (chunk_size <= 0)
                        throw new InvalidFormatException();
                    if (Binary.AsciiEqual (m_chunk_buf, "PICT"))
                    {
                        // Only the first PICT chunk is used.
                        if (string.IsNullOrEmpty (entry.Type))
                        {
                            entry.Type = "image";
                            entry_offset = chunk_offset + 0x10;
                            entry_size = (uint)chunk_size - 6;
                        }
                    }
                    else if (null == filename && Binary.AsciiEqual (m_chunk_buf, "FTAG"))
                    {
                        ReadChunk (chunk_offset+10, chunk_size-2);
                        filename = Encodings.cp932.GetString (m_chunk_buf, 0, chunk_size-2);
                    }
                    chunk_offset += 10 + (uint)chunk_size;
                }
                // Strip any drive prefix and leading backslashes from the name.
                if (!string.IsNullOrEmpty (filename))
                    entry.Name = DriveRe.Replace (filename, "");
                entry.Offset = entry_offset;
                entry.Size = entry_size;
                return entry;
            }
            static readonly Regex DriveRe = new Regex (@"^(?:.+:)?\\+");
        }
    }
}
| |
namespace Nancy.Tests.Unit.Conventions
{
using System;
using System.Collections.Generic;
using Nancy.Conventions;
using Nancy.ViewEngines;
using Xunit;
public class DefaultViewLocationConventionsFixture
{
private readonly NancyConventions conventions;
private readonly DefaultViewLocationConventions viewLocationConventions;
public DefaultViewLocationConventionsFixture()
{
this.conventions = new NancyConventions();
this.viewLocationConventions = new DefaultViewLocationConventions();
}
[Fact]
public void Should_not_be_valid_when_view_location_conventions_is_null()
{
// Given
this.conventions.ViewLocationConventions = null;
// When
var result = this.viewLocationConventions.Validate(this.conventions);
// Then
result.Item1.ShouldBeFalse();
}
[Fact]
public void Should_return_correct_error_message_when_not_valid_because_view_location_conventions_is_null()
{
// Given
this.conventions.ViewLocationConventions = null;
// When
var result = this.viewLocationConventions.Validate(this.conventions);
// Then
result.Item2.ShouldEqual("The view conventions cannot be null.");
}
[Fact]
public void Should_not_be_valid_when_view_location_conventions_is_empty()
{
// Given
this.conventions.ViewLocationConventions = new List<Func<string, dynamic, ViewLocationContext, string>>();
// When
var result = this.viewLocationConventions.Validate(this.conventions);
// Then
result.Item1.ShouldBeFalse();
}
[Fact]
public void Should_return_correct_error_message_when_not_valid_because_view_location_conventions_is_empty()
{
// Given
this.conventions.ViewLocationConventions = new List<Func<string, dynamic, ViewLocationContext, string>>();
// When
var result = this.viewLocationConventions.Validate(this.conventions);
// Then
result.Item2.ShouldEqual("The view conventions cannot be empty.");
}
[Fact]
public void Should_be_valid_when_view_conventions_is_not_empty()
{
// Given
this.conventions.ViewLocationConventions =
new List<Func<string, dynamic, ViewLocationContext, string>>
{
(viewName, model, viewLocationContext) => {
return string.Empty;
}
};
// When
var result = this.viewLocationConventions.Validate(this.conventions);
// Then
result.Item1.ShouldBeTrue();
}
[Fact]
public void Should_return_empty_error_message_when_valid()
{
// Given
this.conventions.ViewLocationConventions =
new List<Func<string, dynamic, ViewLocationContext, string>>
{
(viewName, model, viewLocationContext) => {
return string.Empty;
}
};
// When
var result = this.viewLocationConventions.Validate(this.conventions);
// Then
result.Item2.ShouldBeEmpty();
}
[Fact]
public void Should_add_conventions_when_initialised()
{
// Given, When
this.viewLocationConventions.Initialise(this.conventions);
// Then
this.conventions.ViewLocationConventions.Count.ShouldBeGreaterThan(0);
}
[Fact]
public void Should_define_convention_that_returns_viewname()
{
// Given
this.viewLocationConventions.Initialise(this.conventions);
var convention = this.conventions.ViewLocationConventions[0];
// When
var result = convention.Invoke(
"viewname",
null,
new ViewLocationContext { ModulePath = string.Empty });
// Then
result.ShouldEqual("viewname");
}
[Fact]
public void Should_define_convention_that_returns_viewname_in_views_folder()
{
// Given
this.viewLocationConventions.Initialise(this.conventions);
var convention = this.conventions.ViewLocationConventions[1];
// When
var result = convention.Invoke(
"viewname",
null,
new ViewLocationContext { ModulePath = string.Empty });
// Then
result.ShouldEqual("views/viewname");
}
[Fact]
public void Should_define_convention_that_returns_viewname_in_modulepath_subfolder_of_views_folder()
{
// Given
this.viewLocationConventions.Initialise(this.conventions);
var convention = this.conventions.ViewLocationConventions[2];
// When
var result = convention.Invoke(
"viewname",
null,
new ViewLocationContext { ModulePath = "modulepath" });
// Then
result.ShouldEqual("views/modulepath/viewname");
}
[Fact]
public void Should_define_convention_that_returns_viewname_in_modulepath_subfolder_of_views_folder_when_modulepath_contains_leading_slash()
{
// Given
this.viewLocationConventions.Initialise(this.conventions);
var convention = this.conventions.ViewLocationConventions[2];
// When
var result = convention.Invoke(
"viewname",
null,
new ViewLocationContext { ModulePath = "/modulepath" });
// Then
result.ShouldEqual("views/modulepath/viewname");
}
[Fact]
public void Should_define_convention_that_returns_viewname_in_modulepath_folder()
{
// Given
this.viewLocationConventions.Initialise(this.conventions);
var convention = this.conventions.ViewLocationConventions[3];
// When
var result = convention.Invoke(
"viewname",
null,
new ViewLocationContext { ModulePath = "modulepath" });
// Then
result.ShouldEqual("modulepath/viewname");
}
[Fact]
public void Should_define_convention_that_returns_viewname_in_modulepath_folder_when_modulepath_contains_leading_slash()
{
// Given
this.viewLocationConventions.Initialise(this.conventions);
var convention = this.conventions.ViewLocationConventions[3];
// When
var result = convention.Invoke(
"viewname",
null,
new ViewLocationContext { ModulePath = "/modulepath" });
// Then
result.ShouldEqual("modulepath/viewname");
}
[Fact]
public void Should_define_convention_that_returns_viewname_in_modulename_subfolder_of_views_folder()
{
// Given
this.viewLocationConventions.Initialise(this.conventions);
var convention = this.conventions.ViewLocationConventions[4];
// When
var result = convention.Invoke(
"viewname",
null,
new ViewLocationContext { ModuleName = "modulename" });
// Then
result.ShouldEqual("views/modulename/viewname");
}
[Fact]
public void Should_define_convention_that_returns_viewname_in_modulename_folder()
{
// Given
this.viewLocationConventions.Initialise(this.conventions);
var convention = this.conventions.ViewLocationConventions[5];
// When
var result = convention.Invoke(
"viewname",
null,
new ViewLocationContext { ModuleName = "modulename" });
// Then
result.ShouldEqual("modulename/viewname");
}
}
}
| |
using System;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Reflection;
using System.Security.Cryptography;
using ToolKit.Cryptography;
using Xunit;
namespace UnitTests.Cryptography
{
[SuppressMessage(
"StyleCop.CSharp.DocumentationRules",
"SA1600:ElementsMustBeDocumented",
Justification = "Test Suites do not need XML Documentation.")]
public class RsaEncryptionTests
{
private const string _targetString =
"The instinct of nearly all societies is to lock up anybody who is truly free. "
+ "First, society begins by trying to beat you up. If this fails, they try to poison you. "
+ "If this fails too, they finish by loading honors on your head."
+ " - Jean Cocteau (1889-1963)";
private static readonly string _assemblyPath =
Path.GetDirectoryName(Assembly.GetAssembly(typeof(RsaEncryptionTests)).Location)
+ Path.DirectorySeparatorChar;
public static string Secret { get; } = SHA256Hash.Create().Compute(_targetString);
[Fact]
public void Decrypt_Should_ReturnExpectedResult_When_KeyIsStoredInCertificate()
{
// Arrange
var cert = $"{_assemblyPath}RsaEncrypt";
var publicKey = RsaPublicKey.LoadFromCertificateFile(cert + ".cer");
var privateKey = RsaPrivateKey.LoadFromCertificateFile(cert + ".pfx", "password");
var e1 = new RsaEncryption();
var e2 = new RsaEncryption();
// Act
var encryptedData = e1.Encrypt(new EncryptionData(Secret), publicKey);
var decryptedData = e2.Decrypt(encryptedData, privateKey);
// Assert
Assert.Equal(decryptedData.Text, Secret);
}
[Fact]
public void Decrypt_Should_ReturnExpectedResult_When_KeyIsStoredInConfig()
{
// Arrange
AddKeysToEnvironment();
var publicKey = RsaPublicKey.LoadFromEnvironment();
var privateKey = RsaPrivateKey.LoadFromEnvironment();
var e1 = new RsaEncryption();
var e2 = new RsaEncryption();
// Act
var encryptedData = e1.Encrypt(new EncryptionData(Secret), publicKey);
var decryptedData = e2.Decrypt(encryptedData, privateKey);
// Assert
Assert.Equal(decryptedData.Text, Secret);
RemoveKeysToEnvironment();
}
[Fact]
public void Decrypt_Should_ReturnExpectedResult_When_KeyIsStoredInXml()
{
// Arrange
const string publicKeyXml = "<RSAKeyValue>" +
"<Modulus>0D59Km2Eo9oopcm7Y2wOXx0TRRXQFybl9HHe/ve47Qcf2EoKbs9nkuMmhCJlJ" +
"zrq6ZJzgQSEbpVyaWn8OHq0I50rQ13dJsALEquhlfwVWw6Hit7qRvveKlOAGfj8xdkaXJ" +
"LYS1tA06tKHfYxgt6ysMBZd0DIedYoE1fe3VlLZyE=</Modulus>" +
"<Exponent>AQAB</Exponent>" +
"</RSAKeyValue>";
const string privateKeyXml = "<RSAKeyValue>" +
"<Modulus>0D59Km2Eo9oopcm7Y2wOXx0TRRXQFybl9HHe/ve47Qcf2EoKbs9nkuMmhCJlJ" +
"zrq6ZJzgQSEbpVyaWn8OHq0I50rQ13dJsALEquhlfwVWw6Hit7qRvveKlOAGfj8xdkaXJ" +
"LYS1tA06tKHfYxgt6ysMBZd0DIedYoE1fe3VlLZyE=</Modulus>" +
"<Exponent>AQAB</Exponent>" +
"<P>/1cvDks8qlF1IXKNwcXW8tjTlhjidjGtbT9k7FCYug+P6ZBDfqhUqfvjgLFF" +
"/+dAkoofNqliv89b8DRy4gS4qQ==</P>" +
"<Q>0Mgq7lyvmVPR1r197wnba1bWbJt8W2Ki8ilUN6lX6Lkk04ds9y3A0txy0ESya7dyg" +
"9NLscfU3NQMH8RRVnJtuQ==</Q>" +
"<DP>+uwfRumyxSDlfSgInFqh/+YKD5+GtGXfKtO4hu4xF+8BGqJ1YXtkL" +
"+Njz2zmADOt5hOr1tigPSQ2EhhIqUnAeQ==</DP>" +
"<DQ>M5Ofd28SOjCIwCHjwG+Q8v1qzz3CBNljI6uuEGoXO3ix" +
"bkggVRfKcMzg2C6AXTfeZE6Ifoy9OyhvLlHTPiXakQ==</DQ>" +
"<InverseQ>yQIJMLdL6kU4VK7M5b5PqWS8XzkgxfnaowRs9mhSEDdwwWPtUXO8aQ9G3" +
"zuiDUqNq9j5jkdt77+c2stBdV97ew==</InverseQ>" +
"<D>HOpQXu/OFyJXuo2EY43BgRt8bX9V4aEZFRQqrqSfHOp8VYASasiJzS+VTYupGAVqUP" +
"xw5V1HNkOyG0kIKJ+BG6BpIwLIbVKQn/ROs7E3/vBdg2+QXKhikMz/4gY" +
"x2oEsXW2kzN1GMRop2lrrJZJNGE/eG6i4lQ1/inj1Tk/sqQE=</D>" +
"</RSAKeyValue>";
var publicKey = new RsaPublicKey(publicKeyXml);
var privateKey = new RsaPrivateKey(privateKeyXml);
var e1 = new RsaEncryption();
var e2 = new RsaEncryption();
// Act
var encryptedData = e1.Encrypt(new EncryptionData(Secret), publicKey);
var decryptedData = e2.Decrypt(encryptedData, privateKey);
// Assert
Assert.Equal(decryptedData.Text, Secret);
}
[Fact]
public void Decrypt_Should_ReturnExpectedResult_When_UsingDefaultKeys()
{
// Arrange
AddKeysToEnvironment();
var e1 = new RsaEncryption();
var e2 = new RsaEncryption();
// Act
var encryptedData = e1.Encrypt(new EncryptionData(Secret));
var decryptedData = e2.Decrypt(encryptedData);
// Assert
Assert.Equal(decryptedData.Text, Secret);
RemoveKeysToEnvironment();
}
[Fact]
public void Decrypt_Should_ReturnExpectedResult_When_UsingDefaultKeySizeAndGeneratedKeys()
{
// Arrange
var publicKey = new RsaPublicKey();
var privateKey = new RsaPrivateKey();
var e1 = new RsaEncryption();
var e2 = new RsaEncryption();
e1.GenerateNewKeyset(ref publicKey, ref privateKey);
// Act
var encryptedData = e1.Encrypt(new EncryptionData(Secret), publicKey);
var decryptedData = e2.Decrypt(encryptedData, privateKey);
// Assert
Assert.Equal(decryptedData.Text, Secret);
}
[Fact]
public void Decrypt_Should_ReturnExpectedResult_When_UsingExplicitKeySizeAndGeneratedKeys()
{
// Arrange
var publicKey = new RsaPublicKey();
var privateKey = new RsaPrivateKey();
var e1 = new RsaEncryption(4096);
var e2 = new RsaEncryption(4096);
e1.GenerateNewKeyset(ref publicKey, ref privateKey);
// Act
var encryptedData = e1.Encrypt(new EncryptionData(Secret), publicKey);
var decryptedData = e2.Decrypt(encryptedData, privateKey);
// Assert
Assert.Equal(decryptedData.Text, Secret);
}
[Fact]
public void DefaultPrivateKey_Should_ReturnExpectedResult()
{
// Arrange
AddKeysToEnvironment();
const string expected = "AQAB";
// Act
var actual = RsaEncryption.DefaultPublicKey.Exponent;
// Assert
Assert.Equal(expected, actual);
RemoveKeysToEnvironment();
}
[Fact]
public void DefaultPublicKey_Should_ReturnExpectedResult()
{
// Arrange
AddKeysToEnvironment();
const string expected = "ksvo/EqBn9XRzvH826npSQdCYv1G5gyEnzQeC4qPidEm"
+ "Ub6Yan12cWYlt4CsK5umYGwWmRSL20Ufc+gnZQo6Pw==";
// Act
var actual = RsaEncryption.DefaultPrivateKey.PrimeExponentP;
// Assert
Assert.Equal(expected, actual);
RemoveKeysToEnvironment();
}
[Fact]
public void Encrypt_Should_ThrowException_When_DataIsNull()
{
// Arrange
var publicKey = new RsaPublicKey();
var privateKey = new RsaPrivateKey();
var e1 = new RsaEncryption();
e1.GenerateNewKeyset(ref publicKey, ref privateKey);
EncryptionData data = null;
// Act & Assert
Assert.Throws<ArgumentNullException>(() => e1.Encrypt(data, publicKey));
}
[Fact]
public void Encrypt_Should_ThrowException_When_EncryptingToMuchData()
{
// Arrange
var publicKey = new RsaPublicKey();
var privateKey = new RsaPrivateKey();
var e1 = new RsaEncryption();
// Act
e1.GenerateNewKeyset(ref publicKey, ref privateKey);
// Assert
Assert.Throws<CryptographicException>(() => e1.Encrypt(new EncryptionData(_targetString), publicKey));
}
[Fact]
public void GenerateNewKeyset_Should_ThrowException_When_PrivateKeyIsNull()
{
// Arrange
var key = new RsaEncryption();
var publicKey = new RsaPublicKey();
RsaPrivateKey privateKey = null;
// Act & Assert
Assert.Throws<ArgumentNullException>(() => key.GenerateNewKeyset(ref publicKey, ref privateKey));
}
[Fact]
public void GenerateNewKeyset_Should_ThrowException_When_PublicKeyIsNull()
{
// Arrange
var key = new RsaEncryption();
var privateKey = new RsaPrivateKey();
RsaPublicKey publicKey = null;
// Act & Assert
Assert.Throws<ArgumentNullException>(() => key.GenerateNewKeyset(ref publicKey, ref privateKey));
}
[Fact]
public void KeySizeMaxBits_Should_ExpectedResult()
{
// Arrange
const int expected = 16384;
// Act
var rsa = new RsaEncryption();
// Assert
Assert.Equal(expected, rsa.KeySizeMaxBits);
}
[Fact]
public void KeySizeMinBits_Should_ExpectedResult()
{
// Arrange
const int expected = 384;
// Act
var rsa = new RsaEncryption();
// Assert
Assert.Equal(expected, rsa.KeySizeMinBits);
}
[Fact]
public void KeySizeStepBits_Should_ExpectedResult()
{
// Arrange
const int expected = 8;
// Act
var rsa = new RsaEncryption();
// Assert
Assert.Equal(expected, rsa.KeySizeStepBits);
}
[Fact]
public void Sign_Should_CorrectlyCreateProperSignature()
{
// Arrange
var secretData = new EncryptionData(Secret);
var xml = File.ReadAllText($"{_assemblyPath}privateKey.xml");
var privateKey = RsaPrivateKey.LoadFromXml(xml);
const string expected = "kZmV1cUO91lpOQkgz5HLbWsfeXabJOPfcWjH72EytH95AAJEVq+nonJm9A"
+ "UjHy53VAIagJFJYiORcgsHC1klkppM71hRD1xUs70ggPiMIcTv/CDij3"
+ "6FYxGd7n9GAh5LikojbWJxJHc3A5LqnAwSBBfOfY2K4gY5lZ3rSmhNHDM=";
var e1 = new RsaEncryption();
// Act
var signature = e1.Sign(secretData, privateKey);
// Assert
Assert.Equal(signature.Base64, expected);
}
[Fact]
public void Verify_Should_ReturnFalse_When_ValidatingChangedSignedData()
{
// Arrange
var secretData = new EncryptionData(Secret);
var publicKey = new RsaPublicKey();
var privateKey = new RsaPrivateKey();
var e1 = new RsaEncryption();
e1.GenerateNewKeyset(ref publicKey, ref privateKey);
// Act
var signature = e1.Sign(secretData, privateKey);
secretData.Text += "3";
var actual = e1.Verify(secretData, signature, publicKey);
// Assert
Assert.False(actual);
}
[Fact]
public void Verify_Should_ReturnTrue_When_ValidatingUnChangedSignedData()
{
// Arrange
var secretData = new EncryptionData(Secret);
var publicKey = new RsaPublicKey();
var privateKey = new RsaPrivateKey();
var e1 = new RsaEncryption();
e1.GenerateNewKeyset(ref publicKey, ref privateKey);
// Act
var signature = e1.Sign(secretData, privateKey);
var actual = e1.Verify(secretData, signature, publicKey);
// Assert
Assert.True(actual);
}
// Seeds environment variables with a fixed RSA keyset so tests can load
// keys from the environment. (The original comments labelled the public
// key block "Private Key" and vice versa; corrected here.)
private void AddKeysToEnvironment()
{
    // Public key components (modulus + exponent).
    Environment.SetEnvironmentVariable("PublicKey.Modulus", "3uWxbWSnlL2ntr/gcJ0NQeiWRfzj/72zIDuBW/TmegeodMdPUvI5vXur0fKp6RbSU112oPf9o7hoAF8bdR9YOiJg6axZYKh+BxEH6pUPLbrtn1dPCUgTxlMeo0IhKvih1Q90Bz+ZxCp/V8Hcf86p+4LPeb1o9EOa01zd0yUwvkE=");
    Environment.SetEnvironmentVariable("PublicKey.Exponent", "AQAB");
    // Private key components (CRT parameters + private exponent).
    Environment.SetEnvironmentVariable("PrivateKey.P", "76iHZusdN1TYrTqf1gExNMMWbiHS7zSB/bi/xeUR0F3fjvnvsayn6s5ShM0jxYHVVkRyVoH16PwLW6Tt2gpdYw==");
    Environment.SetEnvironmentVariable("PrivateKey.Q", "7hiVRmx0z1KERw+Zy86MmlvuODUsn2kuM06kLsSHbznSkYl5lekH9RFxFemNkGGMBg8OT5+EVtWAOdto8KTJCw==");
    Environment.SetEnvironmentVariable("PrivateKey.DP", "ksvo/EqBn9XRzvH826npSQdCYv1G5gyEnzQeC4qPidEmUb6Yan12cWYlt4CsK5umYGwWmRSL20Ufc+gnZQo6Pw==");
    Environment.SetEnvironmentVariable("PrivateKey.DQ", "QliLUCJsslDWF08blhUqTOENEpCOrKUMgLOLQJT3AGFmcbOTM9jJpNqFXovELNVhxVZwsHNM1z2LC5Q+O8BPXQ==");
    Environment.SetEnvironmentVariable("PrivateKey.InverseQ", "pjEtLwYB4yeDpdORNFxhFVXWZCqoky86bmAnrrG4+FvwkH/2dNe65Wmp62JvZ7dwgPBIA+uA/LF+C1LXcXe9Aw==");
    Environment.SetEnvironmentVariable("PrivateKey.D", "EmuZBhlTYA9sVMX2nlfcSJ4YDSChFvluXDOOtTK/+UW4vi3aeFhcPTSDNo5/TCv+pbULoLHd3DHZJm61rjAw8jV5n09Trufg/Z3ybzUrAOzT3iTR2rvg7mNS2IBmaTyJgemNKQDeFW81UOELVszUXNjhVex+k67Ma4omR6iTHSE=");
}
// Clears the environment variables set by AddKeysToEnvironment.
// NOTE(review): "RemoveKeysFromEnvironment" would be a clearer name, but
// renaming would break callers elsewhere in this file.
private void RemoveKeysToEnvironment()
{
    // Public key components.
    Environment.SetEnvironmentVariable("PublicKey.Modulus", null);
    Environment.SetEnvironmentVariable("PublicKey.Exponent", null);
    // Private key components.
    Environment.SetEnvironmentVariable("PrivateKey.P", null);
    Environment.SetEnvironmentVariable("PrivateKey.Q", null);
    Environment.SetEnvironmentVariable("PrivateKey.DP", null);
    Environment.SetEnvironmentVariable("PrivateKey.DQ", null);
    Environment.SetEnvironmentVariable("PrivateKey.InverseQ", null);
    Environment.SetEnvironmentVariable("PrivateKey.D", null);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Orleans.Configuration;
using Orleans.Hosting;
using Orleans.Providers.Streams.AzureQueue;
using Orleans.Runtime;
using Orleans.Streams;
using Orleans.TestingHost;
using Tester;
using Tester.AzureUtils;
using Tester.AzureUtils.Streaming;
using TestExtensions;
using UnitTests.GrainInterfaces;
using UnitTests.Grains;
using Xunit;
using Xunit.Abstractions;
namespace UnitTests.StreamingTests
{
/// <summary>
/// Load/limit tests for Orleans streaming: discovers the maximum number of
/// producers/consumers a single stream supports, then exercises streams at
/// and around those limits, and measures subscription-churn throughput.
/// </summary>
[TestCategory("Streaming"), TestCategory("Limits")]
public class StreamLimitTests : TestClusterPerTest
{
    public const string AzureQueueStreamProviderName = StreamTestsConstants.AZURE_QUEUE_STREAM_PROVIDER_NAME;
    public const string SmsStreamProviderName = StreamTestsConstants.SMS_STREAM_PROVIDER_NAME;

    // Upper bound for the "find max" search loops below.
    private static int MaxExpectedPerStream = 500;

    // Populated by the SMS_Limits_FindMax_* tests and reused by the
    // *_Max_* tests; 0 means "not discovered yet".
    private static int MaxConsumersPerStream;
    private static int MaxProducersPerStream;

    private const int MessagePipelineSize = 1000;
    private const int InitPipelineSize = 500;

    private IManagementGrain mgmtGrain;
    private string StreamNamespace;
    private readonly ITestOutputHelper output;
    private const int queueCount = 8;

    protected override void ConfigureTestCluster(TestClusterBuilder builder)
    {
        TestUtils.CheckForAzureStorage();
        builder.AddSiloBuilderConfigurator<SiloBuilderConfigurator>();
    }

    /// <summary>
    /// Silo configuration: SMS + Azure Queue stream providers and the grain
    /// storage providers the streaming infrastructure needs.
    /// </summary>
    private class SiloBuilderConfigurator : ISiloConfigurator
    {
        public void Configure(ISiloBuilder hostBuilder)
        {
            hostBuilder
                .AddSimpleMessageStreamProvider(SmsStreamProviderName)
                .AddSimpleMessageStreamProvider("SMSProviderDoNotOptimizeForImmutableData", options => options.OptimizeForImmutableData = false)
                .AddAzureTableGrainStorage("AzureStore", builder => builder.Configure<IOptions<ClusterOptions>>((options, silo) =>
                {
                    options.ConfigureTestDefaults();
                    options.DeleteStateOnClear = true;
                }))
                .AddAzureTableGrainStorage("PubSubStore", builder => builder.Configure<IOptions<ClusterOptions>>((options, silo) =>
                {
                    options.DeleteStateOnClear = true;
                    options.ConfigureTestDefaults();
                }))
                .AddAzureQueueStreams(AzureQueueStreamProviderName, ob => ob.Configure<IOptions<ClusterOptions>>(
                    (options, dep) =>
                    {
                        options.ConfigureTestDefaults();
                        options.QueueNames = AzureQueueUtilities.GenerateQueueNames(dep.Value.ClusterId, queueCount);
                    }))
                .AddAzureQueueStreams("AzureQueueProvider2", ob => ob.Configure<IOptions<ClusterOptions>>(
                    (options, dep) =>
                    {
                        options.ConfigureTestDefaults();
                        options.QueueNames = AzureQueueUtilities.GenerateQueueNames($"{dep.Value.ClusterId}2", queueCount);
                    }))
                .AddMemoryGrainStorage("MemoryStore", options => options.NumStorageGrains = 1);
        }
    }

    public StreamLimitTests(ITestOutputHelper output)
    {
        this.output = output;
        StreamNamespace = StreamTestsConstants.StreamLifecycleTestsNamespace;
    }

    public override async Task InitializeAsync()
    {
        await base.InitializeAsync();
        this.mgmtGrain = this.GrainFactory.GetGrain<IManagementGrain>(0);
    }

    /// <summary>
    /// Tears down the cluster and deletes the Azure queues created for both
    /// queue-stream providers (skipped when no data connection is configured).
    /// </summary>
    public override async Task DisposeAsync()
    {
        await base.DisposeAsync();
        if (!string.IsNullOrWhiteSpace(TestDefaultConfiguration.DataConnectionString))
        {
            await AzureQueueStreamProviderUtils.DeleteAllUsedAzureQueues(NullLoggerFactory.Instance,
                AzureQueueUtilities.GenerateQueueNames(this.HostedCluster.Options.ClusterId, queueCount),
                new AzureQueueOptions().ConfigureTestDefaults());
            await AzureQueueStreamProviderUtils.DeleteAllUsedAzureQueues(NullLoggerFactory.Instance,
                AzureQueueUtilities.GenerateQueueNames($"{this.HostedCluster.Options.ClusterId}2", queueCount),
                new AzureQueueOptions().ConfigureTestDefaults());
        }
    }

    [SkippableFact]
    public async Task SMS_Limits_FindMax_Consumers()
    {
        // 1 Stream, 1 Producer, X Consumers: add consumers until something breaks.
        Guid streamId = Guid.NewGuid();
        string streamProviderName = SmsStreamProviderName;
        output.WriteLine("Starting search for MaxConsumersPerStream value using stream {0}", streamId);
        IStreamLifecycleProducerGrain producer = this.GrainFactory.GetGrain<IStreamLifecycleProducerGrain>(Guid.NewGuid());
        await producer.BecomeProducer(streamId, this.StreamNamespace, streamProviderName);
        int loopCount = 0;
        try
        {
            // Loop until something breaks!
            for (loopCount = 1; loopCount <= MaxExpectedPerStream; loopCount++)
            {
                IStreamLifecycleConsumerGrain consumer = this.GrainFactory.GetGrain<IStreamLifecycleConsumerGrain>(Guid.NewGuid());
                await consumer.BecomeConsumer(streamId, this.StreamNamespace, streamProviderName);
            }
        }
        catch (Exception exc)
        {
            this.output.WriteLine("Stopping loop at loopCount={0} due to exception {1}", loopCount, exc);
        }
        // loopCount is the first count that failed (or MaxExpectedPerStream + 1).
        MaxConsumersPerStream = loopCount - 1;
        output.WriteLine("Finished search for MaxConsumersPerStream with value {0}", MaxConsumersPerStream);
        Assert.NotEqual(0, MaxConsumersPerStream); // "MaxConsumersPerStream should be greater than zero."
        output.WriteLine("MaxConsumersPerStream={0}", MaxConsumersPerStream);
    }

    [SkippableFact, TestCategory("Functional")]
    public async Task SMS_Limits_FindMax_Producers()
    {
        // 1 Stream, X Producers, 1 Consumer: add producers until something breaks.
        Guid streamId = Guid.NewGuid();
        string streamProviderName = SmsStreamProviderName;
        output.WriteLine("Starting search for MaxProducersPerStream value using stream {0}", streamId);
        IStreamLifecycleConsumerGrain consumer = this.GrainFactory.GetGrain<IStreamLifecycleConsumerGrain>(Guid.NewGuid());
        await consumer.BecomeConsumer(streamId, this.StreamNamespace, streamProviderName);
        int loopCount = 0;
        try
        {
            // Loop until something breaks!
            for (loopCount = 1; loopCount <= MaxExpectedPerStream; loopCount++)
            {
                IStreamLifecycleProducerGrain producer = this.GrainFactory.GetGrain<IStreamLifecycleProducerGrain>(Guid.NewGuid());
                await producer.BecomeProducer(streamId, this.StreamNamespace, streamProviderName);
            }
        }
        catch (Exception exc)
        {
            this.output.WriteLine("Stopping loop at loopCount={0} due to exception {1}", loopCount, exc);
        }
        MaxProducersPerStream = loopCount - 1;
        output.WriteLine("Finished search for MaxProducersPerStream with value {0}", MaxProducersPerStream);
        Assert.NotEqual(0, MaxProducersPerStream); // "MaxProducersPerStream should be greater than zero."
        output.WriteLine("MaxProducersPerStream={0}", MaxProducersPerStream);
    }

    [SkippableFact, TestCategory("Functional")]
    public async Task SMS_Limits_P1_C128_S1()
    {
        // 1 Stream, 1 Producer, 128 Consumers
        await Test_Stream_Limits(
            SmsStreamProviderName,
            1, 1, 128);
    }

    [SkippableFact, TestCategory("Failures")]
    public async Task SMS_Limits_P128_C1_S1()
    {
        // 1 Stream, 128 Producers, 1 Consumer
        await Test_Stream_Limits(
            SmsStreamProviderName,
            1, 128, 1);
    }

    [SkippableFact, TestCategory("Failures")]
    public async Task SMS_Limits_P128_C128_S1()
    {
        // 1 Stream, 128 Producers, 128 Consumers
        await Test_Stream_Limits(
            SmsStreamProviderName,
            1, 128, 128);
    }

    [SkippableFact, TestCategory("Failures")]
    public async Task SMS_Limits_P1_C400_S1()
    {
        // 1 Stream, 1 Producer, 400 Consumers
        int numConsumers = 400;
        await Test_Stream_Limits(
            SmsStreamProviderName,
            1, 1, numConsumers);
    }

    [SkippableFact, TestCategory("Burst")]
    public async Task SMS_Limits_Max_Producers_Burst()
    {
        if (MaxProducersPerStream == 0) await SMS_Limits_FindMax_Producers();
        output.WriteLine("Using MaxProducersPerStream={0}", MaxProducersPerStream);
        // 1 Stream, Max Producers, 1 Consumer
        await Test_Stream_Limits(
            SmsStreamProviderName,
            1, MaxProducersPerStream, 1, useFanOut: true);
    }

    [SkippableFact, TestCategory("Functional")]
    public async Task SMS_Limits_Max_Producers_NoBurst()
    {
        if (MaxProducersPerStream == 0) await SMS_Limits_FindMax_Producers();
        output.WriteLine("Using MaxProducersPerStream={0}", MaxProducersPerStream);
        // 1 Stream, Max Producers, 1 Consumer
        await Test_Stream_Limits(
            SmsStreamProviderName,
            1, MaxProducersPerStream, 1, useFanOut: false);
    }

    [SkippableFact, TestCategory("Burst")]
    public async Task SMS_Limits_Max_Consumers_Burst()
    {
        if (MaxConsumersPerStream == 0) await SMS_Limits_FindMax_Consumers();
        output.WriteLine("Using MaxConsumersPerStream={0}", MaxConsumersPerStream);
        // 1 Stream, 1 Producer, Max Consumers
        await Test_Stream_Limits(
            SmsStreamProviderName,
            1, 1, MaxConsumersPerStream, useFanOut: true);
    }

    [SkippableFact]
    public async Task SMS_Limits_Max_Consumers_NoBurst()
    {
        if (MaxConsumersPerStream == 0) await SMS_Limits_FindMax_Consumers();
        output.WriteLine("Using MaxConsumersPerStream={0}", MaxConsumersPerStream);
        // 1 Stream, 1 Producer, Max Consumers
        await Test_Stream_Limits(
            SmsStreamProviderName,
            1, 1, MaxConsumersPerStream, useFanOut: false);
    }

    [SkippableFact, TestCategory("Failures"), TestCategory("Burst")]
    public async Task SMS_Limits_P9_C9_S152_Burst()
    {
        // 152 * 9 ~= 1360 target per second
        // 152 Streams, x9 Producers, x9 Consumers
        int numStreams = 152;
        await Test_Stream_Limits(
            SmsStreamProviderName,
            numStreams, 9, 9, useFanOut: true);
    }

    [SkippableFact, TestCategory("Failures")]
    public async Task SMS_Limits_P9_C9_S152_NoBurst()
    {
        // 152 * 9 ~= 1360 target per second
        // 152 Streams, x9 Producers, x9 Consumers
        int numStreams = 152;
        await Test_Stream_Limits(
            SmsStreamProviderName,
            numStreams, 9, 9, useFanOut: false);
    }

    [SkippableFact, TestCategory("Failures"), TestCategory("Burst")]
    public async Task SMS_Limits_P1_C9_S152_Burst()
    {
        // 152 * 9 ~= 1360 target per second
        // 152 Streams, x1 Producer, x9 Consumers
        int numStreams = 152;
        await Test_Stream_Limits(
            SmsStreamProviderName,
            numStreams, 1, 9, useFanOut: true);
    }

    [SkippableFact, TestCategory("Failures")]
    public async Task SMS_Limits_P1_C9_S152_NoBurst()
    {
        // 152 * 9 ~= 1360 target per second
        // 152 Streams, x1 Producer, x9 Consumers
        int numStreams = 152;
        await Test_Stream_Limits(
            SmsStreamProviderName,
            numStreams, 1, 9, useFanOut: false);
    }

    [SkippableFact(Skip = "Ignore"), TestCategory("Performance"), TestCategory("Burst")]
    public async Task SMS_Churn_Subscribers_P0_C10_ManyStreams()
    {
        int numStreams = 2000;
        int pipelineSize = 10000;
        await Test_Stream_Churn_NumStreams(
            SmsStreamProviderName,
            pipelineSize,
            numStreams,
            numConsumers: 10,
            numProducers: 0
        );
    }

    [SkippableFact, TestCategory("Performance"), TestCategory("Burst")]
    public async Task SMS_Churn_FewPublishers_C9_ManyStreams()
    {
        int numProducers = 0;
        int numStreams = 1000;
        int pipelineSize = 100;
        await Test_Stream_Churn_NumStreams_FewPublishers(
            SmsStreamProviderName,
            pipelineSize,
            numStreams,
            numProducers: numProducers,
            warmUpPubSub: true
        );
    }

    [SkippableFact, TestCategory("Performance"), TestCategory("Burst")]
    public async Task SMS_Churn_FewPublishers_C9_ManyStreams_PubSubDirect()
    {
        int numProducers = 0;
        int numStreams = 1000;
        int pipelineSize = 100;
        await Test_Stream_Churn_NumStreams_FewPublishers(
            SmsStreamProviderName,
            pipelineSize,
            numStreams,
            numProducers: numProducers,
            warmUpPubSub: true,
            normalSubscribeCalls: false
        );
    }

    /// <summary>
    /// Measures subscription churn when a small, fixed pool of producers is
    /// shared round-robin across many streams. Verifies grain activation
    /// counts before/after and reports subscriptions-per-second.
    /// </summary>
    private Task Test_Stream_Churn_NumStreams_FewPublishers(
        string streamProviderName,
        int pipelineSize,
        int numStreams,
        int numConsumers = 9,
        int numProducers = 4,
        bool warmUpPubSub = true,
        bool warmUpProducers = false,
        bool normalSubscribeCalls = true)
    {
        output.WriteLine("Testing churn with {0} Streams on {1} Producers with {2} Consumers per Stream",
            numStreams, numProducers, numConsumers);

        AsyncPipeline pipeline = new AsyncPipeline(pipelineSize);

        // Create streamId Guids
        StreamId[] streamIds = new StreamId[numStreams];
        for (int i = 0; i < numStreams; i++)
        {
            streamIds[i] = StreamId.Create(this.StreamNamespace, Guid.NewGuid());
        }

        int activeConsumerGrains = ActiveGrainCount(typeof(StreamLifecycleConsumerGrain).FullName);
        Assert.Equal(0, activeConsumerGrains); // "Initial Consumer count should be zero"
        int activeProducerGrains = ActiveGrainCount(typeof(StreamLifecycleProducerGrain).FullName);
        Assert.Equal(0, activeProducerGrains); // "Initial Producer count should be zero"

        if (warmUpPubSub)
        {
            WarmUpPubSub(streamProviderName, streamIds, pipeline);
            pipeline.Wait();
            int activePubSubGrains = ActiveGrainCount(typeof(PubSubRendezvousGrain).FullName);
            Assert.Equal(streamIds.Length, activePubSubGrains); // "Initial PubSub count -- should all be warmed up"
        }

        Guid[] producerIds = new Guid[numProducers];
        if (numProducers > 0 && warmUpProducers)
        {
            // Warm up Producers to pre-create grains
            for (int i = 0; i < numProducers; i++)
            {
                producerIds[i] = Guid.NewGuid();
                var grain = this.GrainFactory.GetGrain<IStreamLifecycleProducerGrain>(producerIds[i]);
                Task promise = grain.Ping();
                pipeline.Add(promise);
            }
            pipeline.Wait();
            int activePublisherGrains = this.ActiveGrainCount(typeof(StreamLifecycleProducerGrain).FullName);
            Assert.Equal(numProducers, activePublisherGrains); // "Initial Publisher count -- should all be warmed up"
        }

        var promises = new List<Task>();
        Stopwatch sw = Stopwatch.StartNew();

        if (numProducers > 0)
        {
            // Producers: assign streams round-robin over the producer pool.
            for (int i = 0; i < numStreams; i++)
            {
                StreamId streamId = streamIds[i];
                Guid producerId = producerIds[i % numProducers];
                var grain = this.GrainFactory.GetGrain<IStreamLifecycleProducerGrain>(producerId);
                Task promise = grain.BecomeProducer(streamId, streamProviderName);
                promises.Add(promise);
                pipeline.Add(promise);
            }
            pipeline.Wait();
            promises.Clear();
        }

        // Consumers
        for (int i = 0; i < numStreams; i++)
        {
            StreamId streamId = streamIds[i];
            Task promise = SetupOneStream(streamId, streamProviderName, pipeline, numConsumers, 0, normalSubscribeCalls);
            promises.Add(promise);
        }
        pipeline.Wait();

        // NOTE(review): intentionally blocking in this synchronous test driver.
        Task.WhenAll(promises).Wait();
        sw.Stop();

        int consumerCount = ActiveGrainCount(typeof(StreamLifecycleConsumerGrain).FullName);
        Assert.Equal(activeConsumerGrains + (numStreams * numConsumers), consumerCount); // "The right number of Consumer grains are active"
        int producerCount = ActiveGrainCount(typeof(StreamLifecycleProducerGrain).FullName);
        Assert.Equal(activeProducerGrains + (numStreams * numProducers), producerCount); // "The right number of Producer grains are active"
        int pubSubCount = ActiveGrainCount(typeof(PubSubRendezvousGrain).FullName);
        Assert.Equal(streamIds.Length, pubSubCount); // "Final PubSub count -- no more started"

        TimeSpan elapsed = sw.Elapsed;
        int totalSubscriptions = numStreams * numConsumers;
        double rps = totalSubscriptions / elapsed.TotalSeconds;
        output.WriteLine("Subscriptions-per-second = {0} during period {1}", rps, elapsed);
        Assert.NotEqual(0.0, rps); // "RPS greater than zero"
        return Task.CompletedTask;
    }

    /// <summary>
    /// Measures subscription churn when every stream gets its own producers
    /// and consumers. Reports subscriptions-per-second.
    /// </summary>
    private Task Test_Stream_Churn_NumStreams(
        string streamProviderName,
        int pipelineSize,
        int numStreams,
        int numConsumers = 9,
        int numProducers = 1,
        bool warmUpPubSub = true,
        bool normalSubscribeCalls = true)
    {
        output.WriteLine("Testing churn with {0} Streams with {1} Consumers and {2} Producers per Stream NormalSubscribe={3}",
            numStreams, numConsumers, numProducers, normalSubscribeCalls);

        AsyncPipeline pipeline = new AsyncPipeline(pipelineSize);
        var promises = new List<Task>();

        // Create streamId Guids
        StreamId[] streamIds = new StreamId[numStreams];
        for (int i = 0; i < numStreams; i++)
        {
            streamIds[i] = StreamId.Create(this.StreamNamespace, Guid.NewGuid());
        }

        if (warmUpPubSub)
        {
            WarmUpPubSub(streamProviderName, streamIds, pipeline);
            pipeline.Wait();
            int activePubSubGrains = ActiveGrainCount(typeof(PubSubRendezvousGrain).FullName);
            Assert.Equal(streamIds.Length, activePubSubGrains); // "Initial PubSub count -- should all be warmed up"
        }

        int activeConsumerGrains = ActiveGrainCount(typeof(StreamLifecycleConsumerGrain).FullName);
        Assert.Equal(0, activeConsumerGrains); // "Initial Consumer count should be zero"

        Stopwatch sw = Stopwatch.StartNew();
        for (int i = 0; i < numStreams; i++)
        {
            Task promise = SetupOneStream(streamIds[i], streamProviderName, pipeline, numConsumers, numProducers, normalSubscribeCalls);
            promises.Add(promise);
        }

        // NOTE(review): intentionally blocking in this synchronous test driver.
        Task.WhenAll(promises).Wait();
        sw.Stop();

        int consumerCount = ActiveGrainCount(typeof(StreamLifecycleConsumerGrain).FullName);
        Assert.Equal(activeConsumerGrains + (numStreams * numConsumers), consumerCount); // "The correct number of new Consumer grains are active"

        TimeSpan elapsed = sw.Elapsed;
        int totalSubscriptions = numStreams * numConsumers;
        double rps = totalSubscriptions / elapsed.TotalSeconds;
        output.WriteLine("Subscriptions-per-second = {0} during period {1}", rps, elapsed);
        Assert.NotEqual(0.0, rps); // "RPS greater than zero"
        return Task.CompletedTask;
    }

    /// <summary>
    /// Pre-activates the PubSub rendezvous grain for each stream so that the
    /// churn measurements do not include first-activation cost.
    /// </summary>
    private void WarmUpPubSub(string streamProviderName, StreamId[] streamIds, AsyncPipeline pipeline)
    {
        int numStreams = streamIds.Length;
        // Warm up PubSub for the appropriate streams
        for (int i = 0; i < numStreams; i++)
        {
            var streamId = new InternalStreamId(streamProviderName, streamIds[i]);
            IPubSubRendezvousGrain pubsub = this.GrainFactory.GetGrain<IPubSubRendezvousGrain>(streamId.ToString());
            Task promise = pubsub.Validate();
            pipeline.Add(promise);
        }
        pipeline.Wait();
    }

    // Whether producers are registered before consumers in SetupOneStream.
    private static bool producersFirst = true;
    private SimpleGrainStatistic[] grainCounts;

    /// <summary>
    /// Registers the requested producers and consumers for one stream and
    /// returns a task that completes when all registrations complete.
    /// </summary>
    private Task SetupOneStream(
        StreamId streamId,
        string streamProviderName,
        AsyncPipeline pipeline,
        int numConsumers,
        int numProducers,
        bool normalSubscribeCalls)
    {
        List<Task> promises = new List<Task>();

        if (producersFirst && numProducers > 0)
        {
            // Producers
            var p1 = SetupProducers(streamId, this.StreamNamespace, streamProviderName, pipeline, numProducers);
            promises.AddRange(p1);
        }

        // Consumers
        if (numConsumers > 0)
        {
            var c = SetupConsumers(streamId, this.StreamNamespace, streamProviderName, pipeline, numConsumers, normalSubscribeCalls);
            promises.AddRange(c);
        }

        if (!producersFirst && numProducers > 0)
        {
            // Producers
            var p2 = SetupProducers(streamId, this.StreamNamespace, streamProviderName, pipeline, numProducers);
            promises.AddRange(p2);
        }

        return Task.WhenAll(promises);
    }

    private IList<Task> SetupProducers(StreamId streamId, string streamNamespace, string streamProviderName, AsyncPipeline pipeline, int numProducers)
    {
        var producers = new List<IStreamLifecycleProducerGrain>();
        var promises = new List<Task>();
        for (int loopCount = 0; loopCount < numProducers; loopCount++)
        {
            var grain = this.GrainFactory.GetGrain<IStreamLifecycleProducerGrain>(Guid.NewGuid());
            producers.Add(grain);
            Task promise = grain.BecomeProducer(streamId, streamProviderName);
            if (loopCount == 0)
            {
                // First call for this stream, so wait for call to complete successfully so we know underlying infrastructure is set up.
                promise.Wait();
            }
            promises.Add(promise);
            pipeline.Add(promise);
        }
        return promises;
    }

    private IList<Task> SetupConsumers(StreamId streamId, string streamNamespace, string streamProviderName, AsyncPipeline pipeline, int numConsumers, bool normalSubscribeCalls)
    {
        var consumers = new List<IStreamLifecycleConsumerGrain>();
        var promises = new List<Task>();
        for (int loopCount = 0; loopCount < numConsumers; loopCount++)
        {
            var grain = this.GrainFactory.GetGrain<IStreamLifecycleConsumerGrain>(Guid.NewGuid());
            consumers.Add(grain);
            Task promise;
            if (normalSubscribeCalls)
            {
                promise = grain.BecomeConsumer(streamId, streamProviderName);
            }
            else
            {
                // Bypass the normal subscribe path and talk to PubSub directly.
                promise = grain.TestBecomeConsumerSlim(streamId, streamProviderName);
            }
            promises.Add(promise);
            pipeline.Add(promise);
        }
        return promises;
    }

    /// <summary>
    /// Runs TestOneStream over <paramref name="numStreams"/> streams, either
    /// fanned out in parallel (useFanOut) or strictly sequentially, and logs
    /// aggregate message throughput.
    /// </summary>
    private async Task Test_Stream_Limits(
        string streamProviderName,
        int numStreams,
        int numProducers,
        int numConsumers,
        int numMessages = 1,
        bool useFanOut = true)
    {
        // Fixed: the original passed the literal 1 for the "{0} Streams"
        // placeholder instead of numStreams.
        output.WriteLine("Testing {0} Streams x Producers={1} Consumers={2} per stream with {3} messages each",
            numStreams, numProducers, numConsumers, numMessages);

        Stopwatch sw = Stopwatch.StartNew();
        var promises = new List<Task<double>>();
        for (int s = 0; s < numStreams; s++)
        {
            Guid streamId = Guid.NewGuid();
            Task<double> promise = Task.Run(
                () => TestOneStream(streamId, streamProviderName, numProducers, numConsumers, numMessages, useFanOut));
            promises.Add(promise);
            if (!useFanOut)
            {
                await promise;
            }
        }
        if (useFanOut)
        {
            output.WriteLine("Test: Waiting for {0} streams to finish", promises.Count);
        }
        double rps = (await Task.WhenAll(promises)).Sum();
        promises.Clear();
        output.WriteLine("Got total {0} RPS on {1} streams, or {2} RPS per streams",
            rps, numStreams, rps / numStreams);
        sw.Stop();

        int totalMessages = numMessages * numStreams * numProducers;
        output.WriteLine("Sent {0} messages total on {1} Streams from {2} Producers to {3} Consumers in {4} at {5} RPS",
            totalMessages, numStreams, numStreams * numProducers, numStreams * numConsumers,
            sw.Elapsed, totalMessages / sw.Elapsed.TotalSeconds);
    }

    /// <summary>
    /// Sets up one stream's topology, sends numMessages from each producer,
    /// verifies PubSub producer/consumer counts and per-consumer receive
    /// counts, and returns the measured messages-per-second.
    /// </summary>
    private async Task<double> TestOneStream(Guid streamId, string streamProviderName,
        int numProducers, int numConsumers, int numMessages,
        bool useFanOut = true)
    {
        output.WriteLine("Testing Stream {0} with Producers={1} Consumers={2} x {3} messages",
            streamId, numProducers, numConsumers, numMessages);

        Stopwatch sw = Stopwatch.StartNew();

        List<IStreamLifecycleConsumerGrain> consumers = new List<IStreamLifecycleConsumerGrain>();
        List<IStreamLifecycleProducerGrain> producers = new List<IStreamLifecycleProducerGrain>();

        await InitializeTopology(streamId, this.StreamNamespace, streamProviderName,
            numProducers, numConsumers,
            producers, consumers, useFanOut);

        var promises = new List<Task>();

        // Producers send M message each
        int item = 1;
        AsyncPipeline pipeline = new AsyncPipeline(MessagePipelineSize);
        foreach (var grain in producers)
        {
            for (int m = 0; m < numMessages; m++)
            {
                Task promise = grain.SendItem(item++);
                if (useFanOut)
                {
                    pipeline.Add(promise);
                    promises.Add(promise);
                }
                else
                {
                    await promise;
                }
            }
        }
        if (useFanOut)
        {
            await Task.WhenAll(promises);
            promises.Clear();
        }

        var pubSub = StreamTestUtils.GetStreamPubSub(this.InternalClient);

        // Check Consumer counts
        var streamId1 = new InternalStreamId(streamProviderName, StreamId.Create(StreamNamespace, streamId));
        int consumerCount = await pubSub.ConsumerCount(streamId1);
        Assert.Equal(numConsumers, consumerCount); // "ConsumerCount for Stream {0}", streamId

        // Check Producer counts
        int producerCount = await pubSub.ProducerCount(streamId1);
        Assert.Equal(numProducers, producerCount); // "ProducerCount for Stream {0}", streamId

        // Check message counts received by consumers.
        // NOTE(review): each producer appears to contribute numMessages + 1
        // received items (the extra presumably from BecomeProducer) -- confirm
        // against StreamLifecycleProducerGrain before relying on this.
        int totalMessages = (numMessages + 1) * numProducers;
        foreach (var grain in consumers)
        {
            int count = await grain.GetReceivedCount();
            Assert.Equal(totalMessages, count); // "ReceivedCount for Consumer grain {0}", grain.GetPrimaryKey());
        }

        double rps = totalMessages / sw.Elapsed.TotalSeconds;
        return rps;
    }

    /// <summary>
    /// Creates the consumer grains (first) and producer grains for a stream,
    /// registering each with the stream; fan-out mode pushes the calls
    /// through an AsyncPipeline instead of awaiting them one at a time.
    /// </summary>
    private async Task InitializeTopology(Guid streamId, string streamNamespace, string streamProviderName,
        int numProducers, int numConsumers,
        List<IStreamLifecycleProducerGrain> producers, List<IStreamLifecycleConsumerGrain> consumers,
        bool useFanOut)
    {
        AsyncPipeline pipeline = new AsyncPipeline(InitPipelineSize);

        // Consumers
        for (int loopCount = 0; loopCount < numConsumers; loopCount++)
        {
            var grain = this.GrainFactory.GetGrain<IStreamLifecycleConsumerGrain>(Guid.NewGuid());
            consumers.Add(grain);
            Task promise = grain.BecomeConsumer(streamId, streamNamespace, streamProviderName);
            if (useFanOut)
            {
                pipeline.Add(promise);
            }
            else
            {
                await promise;
            }
        }
        if (useFanOut)
        {
            pipeline.Wait();
        }

        // Producers
        pipeline = new AsyncPipeline(InitPipelineSize);
        for (int loopCount = 0; loopCount < numProducers; loopCount++)
        {
            var grain = this.GrainFactory.GetGrain<IStreamLifecycleProducerGrain>(Guid.NewGuid());
            producers.Add(grain);
            Task promise = grain.BecomeProducer(streamId, streamNamespace, streamProviderName);
            if (useFanOut)
            {
                pipeline.Add(promise);
            }
            else
            {
                await promise;
            }
        }
        if (useFanOut)
        {
            pipeline.Wait();
        }
    }

    /// <summary>
    /// Returns the cluster-wide activation count for the given grain type,
    /// queried via the management grain.
    /// </summary>
    private int ActiveGrainCount(string grainTypeName)
    {
        grainCounts = mgmtGrain.GetSimpleGrainStatistics().Result; // Blocking Wait
        int grainCount = grainCounts
            .Where(g => g.GrainType == grainTypeName)
            .Select(s => s.ActivationCount)
            .Sum();
        return grainCount;
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using System.Globalization;
/// <summary>
/// UInt64.Parse(System.string)
/// </summary>
public class UInt64Parse1
{
// Test entry point: runs all scenarios and returns 100 on success
// (the harness convention), 0 on failure.
public static int Main()
{
    var testCase = new UInt64Parse1();
    TestLibrary.TestFramework.BeginTestCase("UInt64Parse1");

    bool passed = testCase.RunTests();
    TestLibrary.TestFramework.EndTestCase();

    if (passed)
    {
        TestLibrary.TestFramework.LogInformation("PASS");
        return 100;
    }

    TestLibrary.TestFramework.LogInformation("FAIL");
    return 0;
}
// Runs every scenario unconditionally (no short-circuiting) and reports
// whether all of them passed.
public bool RunTests()
{
    bool allPassed = true;

    TestLibrary.TestFramework.LogInformation("[Positive]");
    allPassed &= PosTest1();
    allPassed &= PosTest2();
    allPassed &= PosTest3();
    allPassed &= PosTest4();
    allPassed &= PosTest5();
    allPassed &= PosTest6();
    allPassed &= PosTest7();

    TestLibrary.TestFramework.LogInformation("[Negative]");
    allPassed &= NegTest1();
    allPassed &= NegTest2();
    allPassed &= NegTest3();
    allPassed &= NegTest4();
    allPassed &= NegTest5();

    return allPassed;
}
#region PositiveTest
// Parsing the string form of UInt64.MinValue must yield 0.
public bool PosTest1()
{
    bool passed = true;

    TestLibrary.TestFramework.BeginScenario("PosTest1: the string corresponding UInt64 is UInt64 MinValue ");
    try
    {
        string text = UInt64.MinValue.ToString();
        UInt64 parsed = UInt64.Parse(text);
        if (parsed != 0)
        {
            TestLibrary.TestFramework.LogError("001", "the ActualResult is not the ExpectResult");
            passed = false;
        }
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("002", "Unexpected exception: " + e);
        passed = false;
    }

    return passed;
}
// Parsing the string form of UInt64.MaxValue must round-trip exactly.
public bool PosTest2()
{
    bool passed = true;

    TestLibrary.TestFramework.BeginScenario("PosTest2: the string corresponding UInt64 is UInt64 MaxValue ");
    try
    {
        string text = UInt64.MaxValue.ToString();
        UInt64 parsed = UInt64.Parse(text);
        if (parsed != UInt64.MaxValue)
        {
            TestLibrary.TestFramework.LogError("003", "the ActualResult is not the ExpectResult");
            passed = false;
        }
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("004", "Unexpected exception: " + e);
        passed = false;
    }

    return passed;
}
// A random in-range UInt64 must round-trip through ToString/Parse.
public bool PosTest3()
{
    bool passed = true;

    TestLibrary.TestFramework.BeginScenario("PosTest3: the string corresponding UInt64 is normal UInt64 ");
    try
    {
        UInt64 expected = (UInt64)this.GetInt64(0, UInt64.MaxValue);
        string text = expected.ToString();
        UInt64 parsed = UInt64.Parse(text);
        if (parsed != expected)
        {
            TestLibrary.TestFramework.LogError("005", "the ActualResult is not the ExpectResult,The UInt64 is:" + expected);
            passed = false;
        }
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("006", "Unexpected exception: " + e);
        passed = false;
    }

    return passed;
}
// Parse must accept a leading '+' sign ([ws][sign]digits[ws] format).
public bool PosTest4()
{
    bool passed = true;

    TestLibrary.TestFramework.BeginScenario("PosTest4: the string format is [ws][sign]digits[ws] 1");
    try
    {
        UInt64 expected = (UInt64)this.GetInt64(0, UInt64.MaxValue);
        string text = "+" + expected.ToString();
        UInt64 parsed = UInt64.Parse(text);
        if (parsed != expected)
        {
            TestLibrary.TestFramework.LogError("007", "the ActualResult is not the ExpectResult,UInt64 is:" + expected);
            passed = false;
        }
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("008", "Unexpected exception: " + e);
        passed = false;
    }

    return passed;
}
// Parse must tolerate leading whitespace (U+0020).
public bool PosTest5()
{
    bool passed = true;

    TestLibrary.TestFramework.BeginScenario("PosTest5: the string format is [ws][sign]digits[ws] 2");
    try
    {
        UInt64 expected = (UInt64)this.GetInt64(0, UInt64.MaxValue);
        string text = "\u0020" + expected.ToString();
        UInt64 parsed = UInt64.Parse(text);
        if (parsed != expected)
        {
            TestLibrary.TestFramework.LogError("009", "the ActualResult is not the ExpectResult");
            passed = false;
        }
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("010", "Unexpected exception: " + e);
        passed = false;
    }

    return passed;
}
// Parse must tolerate trailing whitespace (U+0020).
public bool PosTest6()
{
    bool passed = true;

    TestLibrary.TestFramework.BeginScenario("PosTest6: the string format is [ws][sign]digits[ws] 3");
    try
    {
        UInt64 expected = (UInt64)this.GetInt64(0, UInt64.MaxValue);
        string text = expected.ToString() + "\u0020";
        UInt64 parsed = UInt64.Parse(text);
        if (parsed != expected)
        {
            TestLibrary.TestFramework.LogError("011", "the ActualResult is not the ExpectResult");
            passed = false;
        }
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("012", "Unexpected exception: " + e);
        passed = false;
    }

    return passed;
}
// Verifies that UInt64.Parse accepts leading tab, '+' sign, and trailing space together.
public bool PosTest7()
{
    bool passed = true;
    TestLibrary.TestFramework.BeginScenario("PosTest7: the string format is [ws][sign]digits[ws] 4");
    try
    {
        UInt64 expected = (UInt64)this.GetInt64(0, UInt64.MaxValue);
        string source = "\u0009" + "+" + expected.ToString() + "\u0020";
        if (UInt64.Parse(source) != expected)
        {
            TestLibrary.TestFramework.LogError("013", "the ActualResult is not the ExpectResult");
            passed = false;
        }
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("014", "Unexpected exception: " + e);
        passed = false;
    }
    return passed;
}
#endregion
#region NegativeTest
// Verifies that UInt64.Parse(null) throws ArgumentNullException.
public bool NegTest1()
{
    bool passed = true;
    TestLibrary.TestFramework.BeginScenario("NegTest1: the parameter string is null");
    try
    {
        string source = null;
        UInt64.Parse(source);
        // Reaching this point means no exception was thrown -> failure.
        passed = false;
    }
    catch (ArgumentNullException) { }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("N001", "Unexpected exception: " + e);
        passed = false;
    }
    return passed;
}
// Verifies that a fully non-numeric string triggers FormatException.
public bool NegTest2()
{
    bool passed = true;
    TestLibrary.TestFramework.BeginScenario("NegTest2: the parameter string is not of the correct format 1");
    try
    {
        UInt64.Parse("abcd");
        // Reaching this point means no exception was thrown -> failure.
        passed = false;
    }
    catch (FormatException) { }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("N002", "Unexpected exception: " + e);
        passed = false;
    }
    return passed;
}
// Verifies that digits surrounded by letters trigger FormatException.
public bool NegTest3()
{
    bool passed = true;
    TestLibrary.TestFramework.BeginScenario("NegTest3: the parameter string is not of the correct format 2");
    try
    {
        UInt64.Parse("b12345d");
        // Reaching this point means no exception was thrown -> failure.
        passed = false;
    }
    catch (FormatException) { }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("N003", "Unexpected exception: " + e);
        passed = false;
    }
    return passed;
}
// Verifies that a negative value (below UInt64.MinValue) triggers OverflowException.
public bool NegTest4()
{
    bool passed = true;
    TestLibrary.TestFramework.BeginScenario("NegTest4: the parameter string corresponding number is less than UInt64 minValue");
    try
    {
        Int64 negative = (-1) * Convert.ToInt64(this.GetInt64(1, Int64.MaxValue));
        UInt64.Parse(negative.ToString());
        // Reaching this point means no exception was thrown -> failure.
        passed = false;
    }
    catch (OverflowException) { }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("N004", "Unexpected exception: " + e);
        passed = false;
    }
    return passed;
}
// Verifies that UInt64.MaxValue + 1 (as a decimal string) triggers OverflowException.
public bool NegTest5()
{
    bool passed = true;
    TestLibrary.TestFramework.BeginScenario("NegTest5: the parameter string corresponding number is larger than UInt64 maxValue");
    try
    {
        UInt64.Parse("18446744073709551616");
        // Reaching this point means no exception was thrown -> failure.
        passed = false;
    }
    catch (OverflowException) { }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("N005", "Unexpected exception: " + e);
        passed = false;
    }
    return passed;
}
#endregion
#region ForTestObject
/// <summary>
/// Returns a pseudo-random UInt64 in the range [minValue, maxValue).
/// Returns minValue when the range is empty or inverted (minValue >= maxValue),
/// matching the original fall-through behavior.
/// </summary>
/// <param name="minValue">Inclusive lower bound.</param>
/// <param name="maxValue">Exclusive upper bound.</param>
/// <returns>A value in [minValue, maxValue), or minValue for an empty range.</returns>
private UInt64 GetInt64(UInt64 minValue, UInt64 maxValue)
{
    // The original wrapped this in `try { ... } catch { throw; }`, which is a
    // no-op rethrow; removed for clarity with identical behavior.
    if (minValue >= maxValue)
    {
        return minValue;
    }
    // The cast may reinterpret a negative Int64 as a large UInt64; the modulo
    // (which binds tighter than '+') still confines the offset to
    // [0, maxValue - minValue), so the result stays in range.
    return minValue + (UInt64)TestLibrary.Generator.GetInt64(-55) % (maxValue - minValue);
}
#endregion
}
| |
// Copyright 2017 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using UnityEditor;
using UnityEditor.SceneManagement;
using UnityEngine;
using UnityEngine.SceneManagement;
using System;
using System.Collections.Generic;
/// An editor window that provides UI for reverb baking related tasks:
/// 1. Select reverb probes and bake reverb to them.
/// 2. Modify the material mappings.
public class ResonanceAudioReverbBakingWindow : EditorWindow {
  private SerializedProperty reverbLayerMask = null;
  private SerializedProperty includeNonStaticGameObjects = null;
  private SerializedProperty materialMap = null;

  private GUIContent materialMapLabel = new GUIContent("Material Map",
      "ResonanceAudioMaterialMap asset to use.");
  private GUIContent reverbLayerMaskLabel = new GUIContent("Reverb Mask",
      "Which layers of game objects are included in reverb computation.");
  private GUIContent nonStaticGameObjectLabel = new GUIContent("Include Non-Static Game Objects",
      "Should non-static game objects be included in reverb computation?");
  private GUIContent visualizeModeLabel = new GUIContent("Visualize Mode",
      "Toggle to visualize the material mapping in the Scene View.");
  private GUIContent selectReverbProbesLabel = new GUIContent("Select Reverb Probes",
      "Reverb probe selections for baking.");
  private GUIContent selectAllProbesLabel = new GUIContent("Select All",
      "Selects all reverb probes.");
  private GUIContent clearAllProbesLabel = new GUIContent("Clear",
      "Clears reverb probe selections.");
  private GUIContent bakeLabel = new GUIContent("Bake", "Bake reverb to selected reverb probes.");

  // Whether to visualize the material mapping.
  private bool isInVisualizeMode = false;

  // The material mapper instance.
  private ResonanceAudioMaterialMapper materialMapper = null;

  // The material mapper updater instance.
  private ResonanceAudioMaterialMapperUpdater materialMapperUpdater = null;

  // The serialized object of the material mapper.
  private SerializedObject serializedMaterialMapper = null;

  // Whether the scene view needs to be redrawn. True when some things are changed (e.g. material
  // mappings changed or objects moved).
  private bool redraw = false;

  // The set of scene views whose shaders have been updated. This is used to make sure that each
  // scene view is at least updated once after OnEnable() (during OnEnable() the scene views might
  // not be available yet).
  private HashSet<int> updatedSceneViews = null;

  // Shader to visualize surface materials.
  private Shader surfaceMaterialShader = null;

  // This is to accommodate the long |nonStaticGameObjectLabel|.
  private const float propertyLabelWidth = 184.0f;

  // The scroll position of the reverb probe selection UI.
  private Vector2 probeSelectionScrollPosition = Vector2.zero;

  // The foldout of the reverb probe selection.
  private bool showReverbProbeSelection = true;

  // The path to the material mapper asset.
  private const string materialMapperAssetPath =
      "Assets/ResonanceAudio/Resources/ResonanceAudioMaterialMapper.asset";

  [MenuItem("ResonanceAudio/Reverb Baking")]
  private static void Initialize() {
    ResonanceAudioReverbBakingWindow window =
        EditorWindow.GetWindow<ResonanceAudioReverbBakingWindow>();
    window.Show();
  }

  void OnEnable() {
    updatedSceneViews = new HashSet<int>();
    InitializeColorArrayInShader();
    InitializeSurfaceMaterialShader();
    LoadOrCreateMaterialMapper();
    LoadOrCreateMaterialMapperUpdater();
    isInVisualizeMode = false;
    // Subscribe named methods (not lambdas) so that OnDisable() can unsubscribe the exact same
    // delegate instances. The previous code subscribed one lambda here and tried to unsubscribe a
    // *new* lambda in OnDisable(); '-=' never matched, so handlers leaked on every
    // enable/disable cycle.
    EditorSceneManager.sceneOpened += OnSceneOpened;
    EditorSceneManager.sceneClosed += OnSceneClosed;
#if UNITY_2017_2_OR_NEWER
    EditorApplication.playModeStateChanged += OnPlayModeStateChanged;
#else
    // Use '+=' instead of '=' so other subscribers to this legacy delegate field are preserved.
    EditorApplication.playmodeStateChanged += OnSceneOrModeSwitch;
#endif  // UNITY_2017_2_OR_NEWER
    SceneView.onSceneGUIDelegate += OnSceneGUI;
  }

  void OnDisable() {
    EditorSceneManager.sceneOpened -= OnSceneOpened;
    EditorSceneManager.sceneClosed -= OnSceneClosed;
#if UNITY_2017_2_OR_NEWER
    EditorApplication.playModeStateChanged -= OnPlayModeStateChanged;
#else
    // '-=' removes only our handler; assigning null here would have wiped out everyone's.
    EditorApplication.playmodeStateChanged -= OnSceneOrModeSwitch;
#endif  // UNITY_2017_2_OR_NEWER
    SceneView.onSceneGUIDelegate -= OnSceneGUI;

    // Destroy the material mapper updater if not null.
    if (!EditorApplication.isPlaying && materialMapperUpdater != null) {
      DestroyImmediate(materialMapperUpdater.gameObject);
    }

    if (isInVisualizeMode) {
      isInVisualizeMode = false;
      RefreshMaterialMapper();
      UpdateShader();
    }
  }

  // Named event adapters; each simply forwards to OnSceneOrModeSwitch(). Kept as methods so
  // subscription and unsubscription use identical delegates.
  private void OnSceneOpened(Scene scene, OpenSceneMode mode) {
    OnSceneOrModeSwitch();
  }

  private void OnSceneClosed(Scene scene) {
    OnSceneOrModeSwitch();
  }

#if UNITY_2017_2_OR_NEWER
  private void OnPlayModeStateChanged(PlayModeStateChange state) {
    OnSceneOrModeSwitch();
  }
#endif  // UNITY_2017_2_OR_NEWER

  /// @cond
  void OnGUI() {
    serializedMaterialMapper.Update();
    EditorGUI.BeginDisabledGroup(EditorApplication.isPlaying);
    var savedWidth = EditorGUIUtility.labelWidth;
    EditorGUIUtility.labelWidth = propertyLabelWidth;
    DrawMaterialMapSelection();
    EditorGUILayout.Separator();
    EditorGUI.BeginDisabledGroup(materialMap.objectReferenceValue == null);
    DrawObjectFiltering();
    EditorGUILayout.Separator();
    DrawVisualizeModeCheckbox();
    EditorGUIUtility.labelWidth = savedWidth;
    EditorGUILayout.Separator();
    showReverbProbeSelection = EditorGUILayout.Foldout(showReverbProbeSelection,
                                                      selectReverbProbesLabel);
    if (showReverbProbeSelection) {
      ++EditorGUI.indentLevel;
      DrawProbeSelection();
      --EditorGUI.indentLevel;
    }
    EditorGUILayout.Separator();
    DrawBakeButton();
    EditorGUI.EndDisabledGroup();  // Disabled if no material map is selected.
    EditorGUI.EndDisabledGroup();  // Disabled if in Play mode.
    serializedMaterialMapper.ApplyModifiedProperties();
  }
  /// @endcond

  // Loads the material mapper asset; creates one if not found.
  private void LoadOrCreateMaterialMapper() {
    materialMapper = AssetDatabase.LoadAssetAtPath<ResonanceAudioMaterialMapper>(
        materialMapperAssetPath);
    if (materialMapper == null) {
      materialMapper = ScriptableObject.CreateInstance<ResonanceAudioMaterialMapper>();
      AssetDatabase.CreateAsset(materialMapper, materialMapperAssetPath);
      AssetDatabase.SaveAssets();
    }
    serializedMaterialMapper = new UnityEditor.SerializedObject(materialMapper);
    reverbLayerMask = serializedMaterialMapper.FindProperty("reverbLayerMask");
    includeNonStaticGameObjects =
        serializedMaterialMapper.FindProperty("includeNonStaticGameObjects");
    materialMap = serializedMaterialMapper.FindProperty("materialMap");
    materialMapper.Initialize();
    RefreshMaterialMapper();
    UpdateShader();
  }

  // Loads the unique material mapper updater; creates one if not found.
  private void LoadOrCreateMaterialMapperUpdater() {
    if (EditorApplication.isPlayingOrWillChangePlaymode) {
      return;
    }
    var scene = EditorSceneManager.GetActiveScene();
    GameObject[] rootGameObjects = scene.GetRootGameObjects();
    for (int i = 0; i < rootGameObjects.Length; ++i) {
      var foundUpdater =
          rootGameObjects[i].GetComponentInChildren<ResonanceAudioMaterialMapperUpdater>();
      if (foundUpdater != null) {
        ResetMaterialMapperUpdater(foundUpdater);
        return;
      }
    }

    // Create an empty GameObject at the root, which is hidden and not saved, to hold a
    // ResonanceAudioMaterialMapperUpdater.
    GameObject updaterObject = new GameObject("Holder of mapper updater ID = ");
    updaterObject.hideFlags = HideFlags.HideAndDontSave;
    var newUpdater = updaterObject.AddComponent<ResonanceAudioMaterialMapperUpdater>();
    updaterObject.name += newUpdater.GetInstanceID();
    ResetMaterialMapperUpdater(newUpdater);
  }

  // Resets the |materialMapperUpdater| to |newUpdater| and destroys the old one if necessary.
  private void ResetMaterialMapperUpdater(ResonanceAudioMaterialMapperUpdater newUpdater) {
    if (newUpdater != materialMapperUpdater) {
      if (materialMapperUpdater != null) {
        DestroyImmediate(materialMapperUpdater.gameObject);
      }
      materialMapperUpdater = newUpdater;
    }
    materialMapperUpdater.RefreshMaterialMapper = RefreshMaterialMapperOnlyInVisualizeMode;
  }

  // Initializes the surface material colors in a global vector array for shaders.
  private void InitializeColorArrayInShader() {
    var numSurfaceMaterials =
        Enum.GetValues(typeof(ResonanceAudioRoomManager.SurfaceMaterial)).Length;
    Vector4[] vectorArray = new Vector4[numSurfaceMaterials];
    for (int surfaceMaterialIndex = 0; surfaceMaterialIndex < numSurfaceMaterials;
         ++surfaceMaterialIndex) {
      var color = ResonanceAudioMaterialMap.surfaceMaterialColors[surfaceMaterialIndex];
      // Alpha is fixed at 0.5 so the visualization overlay is semi-transparent.
      vectorArray[surfaceMaterialIndex] = new Vector4(color.r, color.g, color.b, 0.5f);
    }
    Shader.SetGlobalVectorArray("_SurfaceMaterialColors", vectorArray);
  }

  // Initializes the surface material shader which visualizes surface materials as colors.
  private void InitializeSurfaceMaterialShader() {
    surfaceMaterialShader = Shader.Find("ResonanceAudio/SurfaceMaterial");
    if (surfaceMaterialShader == null) {
      Debug.LogError("Surface material shader not found");
      return;
    }
  }

  // Refreshes the material mapper's data to reflect external changes (e.g. scene modified,
  // material mapping changed).
  private void RefreshMaterialMapper() {
    if (EditorApplication.isPlaying) {
      return;
    }
    if (materialMap.objectReferenceValue == null) {
      return;
    }
    MeshRenderer[] meshRenderers = null;
    List<string>[] guidsForMeshRenderers = null;
    GatherMeshRenderersAndGuids(ref meshRenderers, ref guidsForMeshRenderers);
    Terrain[] activeTerrains = null;
    string[] guidsForTerrains = null;
    GatherTerrainsAndGuids(ref activeTerrains, ref guidsForTerrains);
    materialMapper.ApplyMaterialMapping(meshRenderers, guidsForMeshRenderers, activeTerrains,
                                        guidsForTerrains, surfaceMaterialShader);
    redraw = true;
  }

  // Refreshes the material mapper's data only in visualize mode.
  private void RefreshMaterialMapperOnlyInVisualizeMode() {
    if (isInVisualizeMode) {
      RefreshMaterialMapper();
    }
  }

  // Gathers the mesh renderers of game objects, and the GUIDs of the Unity Materials of
  // each sub-mesh.
  private void GatherMeshRenderersAndGuids(ref MeshRenderer[] meshRenderers,
                                           ref List<string>[] guidsForMeshRenderers) {
    List<MeshRenderer> meshRenderersList = new List<MeshRenderer>();
    List<List<string>> guidsForMeshRenderersList = new List<List<string>>();

    // Gather mesh renderers from all scenes.
    for (int sceneIndex = 0; sceneIndex < EditorSceneManager.sceneCount; ++sceneIndex) {
      Scene scene = EditorSceneManager.GetSceneAt(sceneIndex);
      if (!scene.isLoaded) {
        continue;
      }
      // Get the root game objects in this loaded scene.
      GameObject[] rootGameObjects = scene.GetRootGameObjects();
      for (int rootGameObjectIndex = 0; rootGameObjectIndex < rootGameObjects.Length;
           ++rootGameObjectIndex) {
        var rootGameObject = rootGameObjects[rootGameObjectIndex];
        var meshRenderersInChildren = rootGameObject.GetComponentsInChildren<MeshRenderer>();
        for (int meshRenderIndex = 0; meshRenderIndex < meshRenderersInChildren.Length;
             ++meshRenderIndex) {
          var meshRenderer = meshRenderersInChildren[meshRenderIndex];
          meshRenderersList.Add(meshRenderer);

          // Each Unity Material of a mesh renderer corresponds to a sub-mesh.
          var unityMaterials = meshRenderer.sharedMaterials;
          var guidsForMeshRenderer = new List<string>();
          for (int subMeshIndex = 0; subMeshIndex < unityMaterials.Length; ++subMeshIndex) {
            // Find the GUID that identifies this Unity Material.
            var unityMaterial = unityMaterials[subMeshIndex];
            string assetPath = AssetDatabase.GetAssetPath(unityMaterial);
            guidsForMeshRenderer.Add(AssetDatabase.AssetPathToGUID(assetPath));
          }
          guidsForMeshRenderersList.Add(guidsForMeshRenderer);
        }
      }
    }
    meshRenderers = meshRenderersList.ToArray();
    guidsForMeshRenderers = guidsForMeshRenderersList.ToArray();
  }

  // Gathers the terrains and the GUIDs of the terrain data.
  private void GatherTerrainsAndGuids(ref Terrain[] activeTerrains, ref string[] guidsForTerrains) {
    List<string> guidsForTerrainsList = new List<string>();

    // Gather from |activeTerrains|, the terrains in all loaded scenes.
    activeTerrains = Terrain.activeTerrains;
    foreach (var terrain in activeTerrains) {
      // Finds the GUID that identifies this terrain data.
      string assetPath = AssetDatabase.GetAssetPath(terrain.terrainData);
      guidsForTerrainsList.Add(AssetDatabase.AssetPathToGUID(assetPath));
    }
    guidsForTerrains = guidsForTerrainsList.ToArray();
  }

  // Attempts to update the scene views' shader, using the surface material shader stored in
  // |materialMapper| if |isInVisualizeMode| is true, and using the default shader otherwise.
  // Defers the updating to OnSceneGUI() if the scene views are not ready yet.
  private void UpdateShader() {
    var sceneViews = SceneView.sceneViews;

    // Defer the updating if the scene views are not ready.
    if (sceneViews.Count == 0) {
      updatedSceneViews.Clear();
      return;
    }

    // Update all ready scene views.
    for (int i = 0; i < sceneViews.Count; ++i) {
      UpdateShaderForSceneView((SceneView) sceneViews[i]);
    }
  }

  // Updates the shader of a specific scene view.
  private void UpdateShaderForSceneView(SceneView sceneView) {
    if (isInVisualizeMode) {
      sceneView.SetSceneViewShaderReplace(surfaceMaterialShader, "RenderType");
    } else {
      sceneView.SetSceneViewShaderReplace(null, null);
    }
    sceneView.Repaint();
    updatedSceneViews.Add(sceneView.GetInstanceID());
  }

  // The UI for selecting a ResonanceAudioMaterialMap asset to use.
  private void DrawMaterialMapSelection() {
    EditorGUILayout.PropertyField(materialMap, materialMapLabel);
  }

  // Draws the objects filtering GUI. Users can decide which layers to include, and whether to
  // include non-static objects.
  private void DrawObjectFiltering() {
    EditorGUILayout.PropertyField(reverbLayerMask, reverbLayerMaskLabel);
    EditorGUILayout.Separator();
    EditorGUILayout.PropertyField(includeNonStaticGameObjects, nonStaticGameObjectLabel);
  }

  // Draws the "Visualize Mode" checkbox.
  private void DrawVisualizeModeCheckbox() {
    if (isInVisualizeMode != EditorGUILayout.Toggle(visualizeModeLabel, isInVisualizeMode)) {
      isInVisualizeMode = !isInVisualizeMode;
      RefreshMaterialMapper();
      UpdateShader();
    }
  }

  // The UI for selecting a subset of reverb probes to bake reverb to.
  private void DrawProbeSelection() {
    ResonanceAudioReverbProbe[] allReverbProbes =
        UnityEngine.Object.FindObjectsOfType<ResonanceAudioReverbProbe>();

    // Clean up the deleted reverb probes.
    var selectedReverbProbes = ResonanceAudioReverbComputer.selectedReverbProbes;
    selectedReverbProbes.RemoveAll(reverbProbe => reverbProbe == null);
    probeSelectionScrollPosition = EditorGUILayout.BeginScrollView(probeSelectionScrollPosition,
                                                                   GUILayout.ExpandHeight(false));
    for (int i = 0; i < allReverbProbes.Length; ++i) {
      var reverbProbe = allReverbProbes[i];
      bool currentlySelected = selectedReverbProbes.Contains(reverbProbe);
      if (EditorGUILayout.ToggleLeft(reverbProbe.name, currentlySelected)) {
        if (!currentlySelected) {
          // Reverb probe selected.
          selectedReverbProbes.Add(reverbProbe);
        }
      } else {
        if (currentlySelected) {
          // Reverb probe de-selected.
          selectedReverbProbes.Remove(reverbProbe);
        }
      }
    }
    EditorGUILayout.EndScrollView();

    if (allReverbProbes.Length > 0) {
      EditorGUILayout.Separator();
      EditorGUILayout.BeginHorizontal();
      GUILayout.Space(15 * EditorGUI.indentLevel);
      if (GUILayout.Button(selectAllProbesLabel)) {
        for (int i = 0; i < allReverbProbes.Length; ++i) {
          if (!selectedReverbProbes.Contains(allReverbProbes[i])) {
            selectedReverbProbes.Add(allReverbProbes[i]);
          }
        }
      }
      if (GUILayout.Button(clearAllProbesLabel)) {
        selectedReverbProbes.Clear();
      }
      EditorGUILayout.EndHorizontal();
    } else {
      EditorGUILayout.HelpBox("No ResonanceAudioReverbProbe exists in the scene.",
                              MessageType.Warning);
    }
  }

  // The UI to compute reverb and bake the results to the selected probes.
  private void DrawBakeButton() {
    // Only enable the "Bake" button when at least one reverb probe is selected and the scene
    // is loaded.
    var scene = EditorSceneManager.GetActiveScene();
    EditorGUI.BeginDisabledGroup(ResonanceAudioReverbComputer.selectedReverbProbes.Count == 0 ||
                                 !scene.isLoaded);
    EditorGUILayout.BeginHorizontal();
    GUILayout.Space(15 * EditorGUI.indentLevel);
    if (GUILayout.Button(bakeLabel)) {
      // We allow only one material mapper in the scene. Find the unique one and ask for acoustic
      // meshes that should be included in the reverb computation.
      if (materialMapper != null) {
        // Compute the reverb for the selected reverb probes using the included acoustic meshes.
        RefreshMaterialMapper();
        ResonanceAudioReverbComputer.ComputeReverb(materialMapper.GetIncludedAcousticMeshes());
      }
    }
    EditorGUILayout.EndHorizontal();
    EditorGUI.EndDisabledGroup();
  }

  private void OnSceneGUI(SceneView sceneView) {
    // Deferred update of the scene view if it is not updated yet.
    if (!updatedSceneViews.Contains(sceneView.GetInstanceID())) {
      UpdateShaderForSceneView(sceneView);
    }
    if (isInVisualizeMode && redraw) {
      materialMapper.RenderAcousticMeshes();
      redraw = false;
    }
  }

  private void OnSceneOrModeSwitch() {
    LoadOrCreateMaterialMapperUpdater();
    // Force repaint this window to reflect the scene changes, which may have a different set of
    // Unity Materials and Terrain data.
    Repaint();
  }
}
| |
using System;
using System.Globalization;
using System.Linq;
using System.Text;
using GroupDocs.Viewer.Domain;
using GroupDocs.Viewer.Domain.Containers;
namespace MvcSample.Helpers
{
/// <summary>
/// Class DocumentInfoJsonSerializer.
/// </summary>
public class DocumentInfoJsonSerializer
{
    /// <summary>
    /// The document info to serialize.
    /// </summary>
    private readonly DocumentInfoContainer _documentInfo;

    /// <summary>
    /// The serialization options.
    /// </summary>
    private readonly SerializationOptions _options;

    /// <summary>
    /// The culture used for numeric formatting. Invariant so the JSON output is
    /// machine-readable regardless of the server locale.
    /// </summary>
    private readonly CultureInfo _defaultCulture = CultureInfo.InvariantCulture;

    /// <summary>
    /// Two decimals places format.
    /// </summary>
    private const string TwoDecimalPlacesFormat = "0.##";

    /// <summary>
    /// Initializes a new instance of the <see cref="DocumentInfoJsonSerializer"/> class.
    /// </summary>
    /// <param name="documentInfo">The document info.</param>
    /// <param name="options">The options.</param>
    public DocumentInfoJsonSerializer(DocumentInfoContainer documentInfo, SerializationOptions options)
    {
        _documentInfo = documentInfo;
        _options = options;
    }

    /// <summary>
    /// Serializes this instance.
    /// </summary>
    /// <returns>The JSON representation of the document info.</returns>
    public string Serialize()
    {
        // Named pages indicate a cells (spreadsheet) document; in HTML mode those are
        // rendered as sheets rather than fixed-size pages.
        var isCellsFileData = _documentInfo.Pages.Any(_ => !string.IsNullOrEmpty(_.Name));
        if (isCellsFileData && _options.IsHtmlMode)
            return SerializeCells();

        return SerializeDefault();
    }

    /// <summary>
    /// Serializes page dimensions (plus optional row/text data) for non-cells documents.
    /// </summary>
    /// <returns>System.String.</returns>
    private string SerializeDefault()
    {
        StringBuilder json = new StringBuilder();

        // Track the dimensions of the tallest page; emitted at the end so clients can
        // size their viewport.
        var maxWidth = 0;
        var maxHeight = 0;
        foreach (var pageData in _documentInfo.Pages)
        {
            if (pageData.Height > maxHeight)
            {
                maxHeight = pageData.Height;
                maxWidth = pageData.Width;
            }
        }

        json.Append("{\"pages\":[");
        int pageCount = _documentInfo.Pages.Count;
        for (int i = 0; i < pageCount; i++)
        {
            PageData pageData = _documentInfo.Pages[i];
            bool needSeparator = i > 0;
            if (needSeparator)
                json.Append(",");

            // AppendPage opens the page object; the matching '}' is appended below.
            AppendPage(pageData, json);

            bool includeRows = _options.UsePdf && pageData.Rows.Count > 0;
            if (includeRows)
            {
                json.Append(",\"rows\":[");
                for (int j = 0; j < pageData.Rows.Count; j++)
                {
                    bool appendRowSeaparator = j != 0;
                    if (appendRowSeaparator)
                        json.Append(",");
                    AppendRow(pageData.Rows[j], json);
                }
                json.Append("]"); // rows
            }
            json.Append("}"); // page
        }
        json.Append("]"); // pages

        json.Append(string.Format(",\"maxPageHeight\":{0},\"widthForMaxHeight\":{1}",
            maxHeight, maxWidth));

        json.Append("}"); // document

        return json.ToString();
    }

    /// <summary>
    /// Serializes sheet names for cells (spreadsheet) documents.
    /// </summary>
    /// <returns>System.String.</returns>
    private string SerializeCells()
    {
        StringBuilder json = new StringBuilder();
        json.Append("{\"sheets\":[");
        int pageCount = _documentInfo.Pages.Count;
        for (int i = 0; i < pageCount; i++)
        {
            PageData pageData = _documentInfo.Pages[i];
            bool needSeparator = i > 0;
            if (needSeparator)
                json.Append(",");

            // JsonEncode the sheet name: previously the raw name was interpolated into the
            // string literal, producing invalid JSON for names containing quotes,
            // backslashes, or control characters.
            json.Append(string.Format("{{\"name\":\"{0}\"}}", JsonEncode(pageData.Name)));
        }
        json.Append("]"); // pages
        json.Append("}"); // document

        return json.ToString();
    }

    /// <summary>
    /// Appends the page as an (unclosed) JSON object; the caller appends the closing brace.
    /// </summary>
    /// <param name="pageData">The page data.</param>
    /// <param name="json">The json.</param>
    private void AppendPage(PageData pageData, StringBuilder json)
    {
        if (pageData.Angle == 0)
        {
            json.Append(string.Format("{{\"w\":{0},\"h\":{1},\"number\":{2}",
                pageData.Width.ToString(_defaultCulture),
                pageData.Height.ToString(_defaultCulture),
                (pageData.Number).ToString(_defaultCulture)));
        }
        else
        {
            // Only emit "rotation" when the page is actually rotated.
            json.Append(string.Format("{{\"w\":{0},\"h\":{1},\"number\":{2},\"rotation\":{3}",
                pageData.Width.ToString(_defaultCulture),
                pageData.Height.ToString(_defaultCulture),
                (pageData.Number).ToString(_defaultCulture),
                pageData.Angle));
        }
    }

    /// <summary>
    /// Appends the row (line geometry, text, and per-character coordinates).
    /// </summary>
    /// <param name="rowData">The row data.</param>
    /// <param name="json">The json.</param>
    private void AppendRow(RowData rowData, StringBuilder json)
    {
        string[] textCoordinates = new string[rowData.TextCoordinates.Count];
        for (int i = 0; i < rowData.TextCoordinates.Count; i++)
            textCoordinates[i] = rowData.TextCoordinates[i].ToString(TwoDecimalPlacesFormat, _defaultCulture);

        string[] characterCoordinates = new string[rowData.CharacterCoordinates.Count];
        for (int i = 0; i < rowData.CharacterCoordinates.Count; i++)
            characterCoordinates[i] = rowData.CharacterCoordinates[i].ToString(TwoDecimalPlacesFormat, _defaultCulture);

        json.Append(String.Format("{{\"l\":{0},\"t\":{1},\"w\":{2},\"h\":{3},\"c\":[{4}],\"s\":\"{5}\",\"ch\":[{6}]}}",
            rowData.LineLeft.ToString(TwoDecimalPlacesFormat, _defaultCulture),
            rowData.LineTop.ToString(TwoDecimalPlacesFormat, _defaultCulture),
            rowData.LineWidth.ToString(TwoDecimalPlacesFormat, _defaultCulture),
            rowData.LineHeight.ToString(TwoDecimalPlacesFormat, _defaultCulture),
            string.Join(",", textCoordinates),
            JsonEncode(rowData.Text),
            string.Join(",", characterCoordinates)));
    }

    /// <summary>
    /// Escapes a string for embedding inside a JSON string literal.
    /// </summary>
    /// <param name="text">The text.</param>
    /// <returns>System.String.</returns>
    private string JsonEncode(string text)
    {
        if (string.IsNullOrEmpty(text))
            return string.Empty;

        int i;
        int length = text.Length;
        StringBuilder stringBuilder = new StringBuilder(length + 4);

        for (i = 0; i < length; i += 1)
        {
            char c = text[i];
            switch (c)
            {
                case '\\':
                case '"':
                case '/':
                    // '/' escaping is optional in JSON but harmless; kept for
                    // compatibility with consumers embedding the output in HTML.
                    stringBuilder.Append('\\');
                    stringBuilder.Append(c);
                    break;
                case '\b':
                    stringBuilder.Append("\\b");
                    break;
                case '\t':
                    stringBuilder.Append("\\t");
                    break;
                case '\n':
                    stringBuilder.Append("\\n");
                    break;
                case '\f':
                    stringBuilder.Append("\\f");
                    break;
                case '\r':
                    stringBuilder.Append("\\r");
                    break;
                default:
                    if (c < ' ')
                    {
                        // Remaining control characters become \u00XX escapes.
                        string t = "000" + Convert.ToByte(c).ToString("X");
                        stringBuilder.Append("\\u" + t.Substring(t.Length - 4));
                    }
                    else
                    {
                        stringBuilder.Append(c);
                    }
                    break;
            }
        }

        return stringBuilder.ToString();
    }
}
/// <summary>
/// Options controlling how <see cref="DocumentInfoJsonSerializer"/> shapes its output.
/// </summary>
public class SerializationOptions
{
    // When true, per-page row/text data is included (see SerializeDefault).
    public bool UsePdf { get; set; }
    // When true, cells documents are serialized as sheet lists (see Serialize).
    public bool IsHtmlMode { get; set; }
    // NOTE(review): the two flags below are not read anywhere in this file —
    // presumably consumed by callers elsewhere; confirm before removing.
    public bool SupportListOfBookmarks { get; set; }
    public bool SupportListOfContentControls { get; set; }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using System.Drawing;
using System.Drawing.Imaging;
using AForge.Video;
using AForge.Video.DirectShow;
using MatterHackers.Agg;
using MatterHackers.Agg.Image;
namespace AForge
{
/// <summary>
/// Minimal camera abstraction: exposes the latest captured frame as an Agg
/// ImageBuffer plus basic exposure / white-balance settings.
/// </summary>
public interface ICamera
{
    /// <summary>Requests a snapshot from the device.</summary>
    void TakeSnapShot();
    /// <summary>The most recently processed frame (see IsNewImageReady).</summary>
    ImageBuffer CurrentImage { get; }
    // Value ranges are implied by the property names; actual device support is
    // implementation-specific.
    int Exposure0To511 { get; set; }
    int RedBalance0To255 { get; set; }
    int GreenBalance0To255 { get; set; }
    int BlueBalance0To255 { get; set; }
    /// <summary>Returns true when a new frame has been made available via CurrentImage.</summary>
    bool IsNewImageReady();
    /// <summary>Stops and releases the underlying video source.</summary>
    void CloseCurrentVideoSource();
    /// <summary>Opens the device's settings / property page UI.</summary>
    void OpenSettings();
}
/// <summary>
/// ICamera implementation backed by an AForge DirectShow capture device. Frames
/// arrive asynchronously via the NewFrame event (source_NewFrame) and are copied
/// into an Agg ImageBuffer; consumers poll IsNewImageReady() and then read
/// CurrentImage.
/// </summary>
public class AForgeCamera : ICamera
{
    public enum DownSample { None, HalfSize };

    VideoCaptureDevice videoCaptureDevice;
    // Set by the capture callback when a fresh frame has been copied into
    // |asyncCopiedVideoImage|; cleared by IsNewImageReady() after transfer.
    bool newImageReady = false;
    // Frame pixels copied from the DirectShow callback (32 bpp BGRA buffer).
    ImageBuffer asyncCopiedVideoImage = new ImageBuffer();
    // The image handed out through CurrentImage (possibly half-size downsampled).
    ImageBuffer imageForExternalUse = new ImageBuffer();
    DownSample downSampleVideo = DownSample.None;
    // When true, UpdateImageBuffer writes rows bottom-up (destination row
    // destHeight-1-y). NOTE(review): presumably converts the top-down DirectShow
    // frame to the ImageBuffer's row order — confirm against ImageBuffer docs.
    bool flipY = true;

    /// <summary>
    /// Opens the named capture device if found; otherwise shows AForge's device
    /// picker dialog. Starts streaming immediately on success.
    /// </summary>
    /// <param name="preferedCameraName">Substring matched against device names; null to always prompt.</param>
    /// <param name="preferedWidth">Requested frame width.</param>
    /// <param name="preferedHeight">Requested frame height.</param>
    /// <param name="downSampleVideo">Whether CurrentImage is half-size.</param>
    public AForgeCamera(string preferedCameraName = null, int preferedWidth = 640, int preferedHeight = 480, DownSample downSampleVideo = DownSample.None)
    {
        this.downSampleVideo = downSampleVideo;
        if (preferedCameraName != null)
        {
            // Pick the first device whose name contains the requested substring.
            FilterInfoCollection videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
            foreach (FilterInfo info in videoDevices)
            {
                if (info.Name.Contains(preferedCameraName))
                {
                    videoCaptureDevice = new VideoCaptureDevice(info.MonikerString);
                    videoCaptureDevice.DesiredFrameSize = new Size(preferedWidth, preferedHeight);
                    break;
                }
            }
        }

        if (videoCaptureDevice == null)
        {
            // No match (or no name given): let the user choose interactively.
            VideoCaptureDeviceForm form = new VideoCaptureDeviceForm();
            if (form.ShowDialog(null) == DialogResult.OK)
            {
                // create video source
                videoCaptureDevice = form.VideoDevice;
            }
        }

        if (videoCaptureDevice != null)
        {
            //videoCaptureDevice.DesiredFrameRate = 5;
            //videoCaptureDevice.ProvideSnapshots = true;
            //videoCaptureDevice.DesiredSnapshotSize = new Size(preferedWidth, preferedHeight);
            //videoCaptureDevice.SnapshotFrame += new NewFrameEventHandler(videoCaptureDevice_SnapshotFrame);

            asyncCopiedVideoImage = new ImageBuffer(videoCaptureDevice.DesiredFrameSize.Width, videoCaptureDevice.DesiredFrameSize.Height, 32, new BlenderBGRA());
            if (downSampleVideo == DownSample.HalfSize)
            {
                imageForExternalUse = new ImageBuffer(videoCaptureDevice.DesiredFrameSize.Width / 2, videoCaptureDevice.DesiredFrameSize.Height / 2, 32, new BlenderBGRA());
            }
            else
            {
                imageForExternalUse = new ImageBuffer(videoCaptureDevice.DesiredFrameSize.Width, videoCaptureDevice.DesiredFrameSize.Height, 32, new BlenderBGRA());
            }

            videoCaptureDevice.Start();
            videoCaptureDevice.NewFrame += new NewFrameEventHandler(source_NewFrame);
        }
    }

    /// <summary>The most recently transferred frame; updated by IsNewImageReady().</summary>
    public ImageBuffer CurrentImage
    {
        get
        {
            return imageForExternalUse;
        }
    }

    // NOTE(review): these settings are auto-properties only — nothing in this class
    // forwards them to the capture device; confirm whether that is intentional.
    public int Exposure0To511 { get; set; }
    public int RedBalance0To255 { get; set; }
    public int GreenBalance0To255 { get; set; }
    public int BlueBalance0To255 { get; set; }

    /// <summary>Shows the DirectShow driver's property page.</summary>
    public void OpenSettings()
    {
        videoCaptureDevice.DisplayPropertyPage(IntPtr.Zero);
    }

    // Guard flag shared between the capture thread and the polling thread.
    // NOTE(review): this test-then-set is not atomic, so it can race; at worst a
    // frame is skipped or torn — confirm whether a lock/Interlocked is warranted.
    bool currentlyUsingCameraImage = false;

    /// <summary>
    /// If a new frame is pending, copies (and optionally half-size downsamples)
    /// it into CurrentImage and returns true; otherwise returns false.
    /// </summary>
    public bool IsNewImageReady()
    {
        if (newImageReady)
        {
            if (!currentlyUsingCameraImage)
            {
                currentlyUsingCameraImage = true;
                lock (asyncCopiedVideoImage)
                {
                    if (downSampleVideo == DownSample.HalfSize)
                    {
                        // Render at half scale into the external image.
                        imageForExternalUse.NewGraphics2D().Render(asyncCopiedVideoImage, 0, 0, 0, .5, .5);
                    }
                    else
                    {
                        imageForExternalUse.NewGraphics2D().Render(asyncCopiedVideoImage, 0, 0);
                    }
                }
                imageForExternalUse.MarkImageChanged();
                newImageReady = false;
                currentlyUsingCameraImage = false;
                return true;
            }
        }

        return false;
    }

    // Capture-thread callback: copies the incoming bitmap into
    // |asyncCopiedVideoImage|, reallocating if the frame size changed.
    void source_NewFrame(object sender, NewFrameEventArgs eventArgs)
    {
        if (!currentlyUsingCameraImage)
        {
            currentlyUsingCameraImage = true;
            Bitmap bitmap = eventArgs.Frame;
            if (bitmap.Width != asyncCopiedVideoImage.Width || bitmap.Height != asyncCopiedVideoImage.Height)
            {
                asyncCopiedVideoImage = new ImageBuffer(bitmap.Width, bitmap.Height, 32, new BlenderBGRA());
            }
            UpdateImageBuffer(asyncCopiedVideoImage, bitmap);
            newImageReady = true;
            currentlyUsingCameraImage = false;
        }
    }

    /// <summary>
    /// Restarts the device with snapshots enabled and fires a trigger.
    /// NOTE(review): ProvideSnapshots stays enabled afterwards — confirm intended.
    /// </summary>
    public void TakeSnapShot()
    {
        videoCaptureDevice.Stop();
        videoCaptureDevice.ProvideSnapshots = true;
        videoCaptureDevice.SimulateTrigger();
        videoCaptureDevice.Start();
    }

    // Snapshot callback (currently unsubscribed — see the commented-out wiring in
    // the constructor). Writes the frame to a fixed file in the working directory.
    void videoCaptureDevice_SnapshotFrame(object sender, NewFrameEventArgs eventArgs)
    {
        Bitmap bitmap = eventArgs.Frame;
        bitmap.Save("snapshot.png");
    }

    // Close video source if it is running
    public void CloseCurrentVideoSource()
    {
        if (videoCaptureDevice != null)
        {
            videoCaptureDevice.SignalToStop();
            // wait ~ 3 seconds
            for (int i = 0; i < 30; i++)
            {
                if (!videoCaptureDevice.IsRunning)
                {
                    break;
                }
                System.Threading.Thread.Sleep(100);
            }
            if (videoCaptureDevice.IsRunning)
            {
                videoCaptureDevice.Stop();
            }
            // NOTE(review): this second Stop() call is unconditional and appears
            // redundant with the conditional one above — confirm and remove.
            videoCaptureDevice.Stop();
            videoCaptureDevice = null;
        }
    }

    /// <summary>
    /// Copies |sourceBitmap| (read as 3 bytes per pixel, so it assumes a 24 bpp
    /// source — TODO confirm) into the 32 bpp |destImageBuffer|, forcing alpha to
    /// 255 and optionally flipping rows vertically per |flipY|.
    /// </summary>
    internal void UpdateImageBuffer(ImageBuffer destImageBuffer, Bitmap sourceBitmap)
    {
        BitmapData bitmapData = null;
        // NOTE(review): |isLocked| is never set to true, so both guarded branches
        // always run; the flag looks like a vestige of removed code.
        bool isLocked = false;
        if (destImageBuffer != null)
        {
            if (!isLocked)
            {
                bitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), System.Drawing.Imaging.ImageLockMode.ReadWrite, sourceBitmap.PixelFormat);
            }

            int destBufferStrideInBytes = destImageBuffer.StrideInBytes();
            int destBufferHeight = destImageBuffer.Height;
            int destBufferWidth = destImageBuffer.Width;
            int destBufferHeightMinusOne = destBufferHeight - 1;
            int bitmapDataStride = bitmapData.Stride;
            int offset;
            byte[] buffer = destImageBuffer.GetBuffer(out offset);
            if (flipY)
            {
                unsafe
                {
                    byte* bitmapDataScan0 = (byte*)bitmapData.Scan0;
                    fixed (byte* pDestFixed = &buffer[offset])
                    {
                        byte* pSource = bitmapDataScan0;
                        for (int y = 0; y < destBufferHeight; y++)
                        {
                            // Write destination rows bottom-up.
                            byte* pDest = pDestFixed + destBufferStrideInBytes * (destBufferHeight - 1 - y);
                            for (int x = 0; x < destBufferWidth; x++)
                            {
                                // Expand 3-byte source pixels to 4-byte dest, alpha = 255.
                                pDest[x * 4 + 0] = pSource[x * 3 + 0];
                                pDest[x * 4 + 1] = pSource[x * 3 + 1];
                                pDest[x * 4 + 2] = pSource[x * 3 + 2];
                                pDest[x * 4 + 3] = 255;
                            }
                            pSource += bitmapDataStride;
                        }
                    }
                }
            }
            else
            {
                unsafe
                {
                    byte* bitmapDataScan0 = (byte*)bitmapData.Scan0;
                    fixed (byte* pDestFixed = &buffer[offset])
                    {
                        byte* pSource = bitmapDataScan0;
                        for (int y = 0; y < destBufferHeight; y++)
                        {
                            // Write destination rows top-down.
                            byte* pDest = pDestFixed + destBufferStrideInBytes * (y);
                            for (int x = 0; x < destBufferWidth; x++)
                            {
                                pDest[x * 4 + 0] = pSource[x * 3 + 0];
                                pDest[x * 4 + 1] = pSource[x * 3 + 1];
                                pDest[x * 4 + 2] = pSource[x * 3 + 2];
                                pDest[x * 4 + 3] = 255;
                            }
                            pSource += bitmapDataStride;
                        }
                    }
                }
            }

            if (!isLocked)
            {
                sourceBitmap.UnlockBits(bitmapData);
            }
        }
    }
}
}
| |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="InflateBlocks.cs" company="XamlNinja">
// 2011 Richard Griffin and Ollie Riches
// </copyright>
// <summary>
// http://www.sharpgis.net/post/2011/08/28/GZIP-Compressed-Web-Requests-in-WP7-Take-2.aspx
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace WP7Contrib.Communications.Compression
{
using System;
internal sealed class InflateBlocks
{
// inflate_mask[n] has the low n bits set; used to extract n bits from the
// bit buffer (indices 0..16).
static readonly int[] inflate_mask = new int[17]
{
    0,
    1,
    3,
    7,
    15,
    31,
    63,
    (int) sbyte.MaxValue,
    (int) byte.MaxValue,
    511,
    1023,
    2047,
    4095,
    8191,
    16383,
    (int) short.MaxValue,
    (int) ushort.MaxValue
};
// Order in which code-length code lengths appear in a dynamic-block header
// (the DEFLATE "border" permutation).
internal static readonly int[] border = new int[19]
{
    16,
    17,
    18,
    0,
    8,
    7,
    9,
    6,
    10,
    5,
    11,
    4,
    12,
    3,
    13,
    2,
    14,
    1,
    15
};
// Maximum number of Huffman-table entries.
const int MANY = 1440;
// Named values for the Mode state machine (Process() uses the raw numbers).
const int TYPE = 0;    // reading the 3-bit block type
const int LENS = 1;    // stored block: reading length/one's-complement pair
const int STORED = 2;  // stored block: copying raw bytes
const int TABLE = 3;   // dynamic block: reading table sizes
const int BTREE = 4;   // dynamic block: reading code-length code lengths
const int DTREE = 5;   // dynamic block: reading literal/distance code lengths
const int CODES = 6;   // decoding literal/length/distance codes
const int DRY = 7;     // flushing remaining window output
const int DONE = 8     // finished
    ;
const int BAD = 9;     // data error; stream is unusable
internal int[] Bb = new int[1];      // bits per entry of the bit-length tree (single-element out param)
internal int[] Tb = new int[1];      // index of the bit-length tree in Hufts
internal InflateCodes Codes = new InflateCodes(); // current CODES-state decoder
internal InfTree Inftree = new InfTree();         // Huffman tree builder
internal int Mode;                   // current state (see constants above)
internal int Left;                   // bytes left to copy in a STORED block
internal int Table;                  // table sizes from a dynamic block header (14 bits)
internal int Index;                  // index into Blens while reading code lengths
internal int[] Blens;                // code lengths for the dynamic trees
internal int Last;                   // nonzero if the current block is the final one
internal ZlibCodec Codec;            // owning codec (input/output buffers, counters)
internal int Bitk;                   // number of valid bits in Bitb
internal int Bitb;                   // bit buffer (bits enter at the high end)
internal int[] Hufts;                // Huffman table storage (3 ints per entry)
internal byte[] Window;              // sliding output window
internal int End;                    // one past the last valid Window index
internal int Read;                   // window read pointer (next byte to flush)
internal int Write;                  // window write pointer (next byte to fill)
internal object Checkfn;             // non-null => maintain an Adler-32 check over output
internal long Check;                 // running check value
#region Constructors
// Explicit empty static constructor — a decompiler artifact; it suppresses
// the beforefieldinit flag, so it is kept rather than removed.
static InflateBlocks()
{
}
// codec:   the owning ZlibCodec (buffers, byte counters, Adler field)
// Checkfn: non-null to enable checksum maintenance over decompressed output
// w:       sliding-window size in bytes (End is set to this)
internal InflateBlocks(ZlibCodec codec, object Checkfn, int w)
{
    this.Codec = codec;
    this.Hufts = new int[4320]; // presumably MANY (1440) entries * 3 ints each — TODO confirm
    this.Window = new byte[w];
    this.End = w;
    this.Checkfn = Checkfn;
    this.Mode = 0; // TYPE: start by reading a block header
    this.Reset((long[])null);
}
#endregion Constructors
// Resets the block-decoding state machine for a new stream.
// If c is non-null, the current check value is returned in c[0] before the
// state is cleared. When a check function is configured, the running check
// (and the codec's Adler32) is re-seeded to the Adler-32 initial value.
// Fix: removed two empty `if` statements (decompiler artifacts) that tested
// Mode but had no body and therefore no effect.
internal void Reset(long[] c)
{
    if (c != null)
        c[0] = this.Check;
    this.Mode = 0;
    this.Bitk = 0;
    this.Bitb = 0;
    this.Read = this.Write = 0;
    if (this.Checkfn == null)
        return;
    this.Codec.Adler32 = this.Check = Adler.Adler32(0L, (byte[])null, 0, 0);
}
// Core DEFLATE block decoder (decompiled zlib infblock_blocks machinery).
// Consumes bytes from Codec.InputBuffer, decodes them into the sliding
// Window, and flushes decoded bytes to Codec.OutputBuffer via Flush().
// r is the current return status; the return value is the (possibly updated)
// status after this pass. Mode values correspond to the TYPE..BAD constants
// declared on this class. The goto labels are decompiler output for the
// original C macro exits; the code is left byte-for-byte intact because its
// behavior depends on exact statement order.
internal int Process(int r)
{
    // Local working copies of the codec/bit-buffer state; written back at
    // every exit point before Flush() is called.
    int sourceIndex = this.Codec.NextIn;        // next input byte index
    int num1 = this.Codec.AvailableBytesIn;     // input bytes remaining
    int number1 = this.Bitb;                    // bit buffer
    int num2 = this.Bitk;                       // bits currently in the buffer
    int destinationIndex = this.Write;          // window write pointer
    // Free space in the window (accounts for wrap-around relative to Read).
    int num3 = destinationIndex < this.Read ? this.Read - destinationIndex - 1 : this.End - destinationIndex;
    int num4;
    int num5;
    while (true)
    {
        switch (this.Mode)
        {
            case 0: // TYPE: read the 3-bit block header (1 final-bit + 2 type bits)
                while (num2 < 3)
                {
                    if (num1 != 0)
                    {
                        r = 0;
                        --num1;
                        number1 |= ((int)this.Codec.InputBuffer[sourceIndex++] & (int)byte.MaxValue) << num2;
                        num2 += 8;
                    }
                    else
                    {
                        // Out of input: save state and flush what we have.
                        this.Bitb = number1;
                        this.Bitk = num2;
                        this.Codec.AvailableBytesIn = num1;
                        this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
                        this.Codec.NextIn = sourceIndex;
                        this.Write = destinationIndex;
                        return this.Flush(r);
                    }
                }
                int number2 = number1 & 7;
                this.Last = number2 & 1; // low bit: this is the final block
                switch (SharedUtils.URShift(number2, 1)) // block type: 0=stored, 1=fixed, 2=dynamic, 3=invalid
                {
                    case 0: // stored block: discard bits to the next byte boundary
                        int number3 = SharedUtils.URShift(number1, 3);
                        int num6 = num2 - 3;
                        int bits1 = num6 & 7;
                        number1 = SharedUtils.URShift(number3, bits1);
                        num2 = num6 - bits1;
                        this.Mode = 1; // LENS
                        break;
                    case 1: // fixed Huffman codes: install the predefined trees
                        int[] bl1 = new int[1];
                        int[] bd1 = new int[1];
                        int[][] tl1 = new int[1][];
                        int[][] td1 = new int[1][];
                        InfTree.inflate_trees_fixed(bl1, bd1, tl1, td1, this.Codec);
                        this.Codes.Init(bl1[0], bd1[0], tl1[0], 0, td1[0], 0);
                        number1 = SharedUtils.URShift(number1, 3);
                        num2 -= 3;
                        this.Mode = 6; // CODES
                        break;
                    case 2: // dynamic Huffman codes: read the table header next
                        number1 = SharedUtils.URShift(number1, 3);
                        num2 -= 3;
                        this.Mode = 3; // TABLE
                        break;
                    case 3: // invalid block type
                        goto label_9;
                }
                break;
            case 1: // LENS: read LEN and its one's complement (32 bits total)
                while (num2 < 32)
                {
                    if (num1 != 0)
                    {
                        r = 0;
                        --num1;
                        number1 |= ((int)this.Codec.InputBuffer[sourceIndex++] & (int)byte.MaxValue) << num2;
                        num2 += 8;
                    }
                    else
                    {
                        this.Bitb = number1;
                        this.Bitk = num2;
                        this.Codec.AvailableBytesIn = num1;
                        this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
                        this.Codec.NextIn = sourceIndex;
                        this.Write = destinationIndex;
                        return this.Flush(r);
                    }
                }
                // Upper 16 bits must be the complement of the lower 16.
                if ((SharedUtils.URShift(~number1, 16) & (int)ushort.MaxValue) == (number1 & (int)ushort.MaxValue))
                {
                    this.Left = number1 & (int)ushort.MaxValue;
                    number1 = num2 = 0;
                    // Empty stored block: go straight to DRY (7) or TYPE (0).
                    this.Mode = this.Left != 0 ? 2 : (this.Last != 0 ? 7 : 0);
                    break;
                }
                else
                    goto label_15;
            case 2: // STORED: copy Left raw bytes from input to the window
                if (num1 != 0)
                {
                    if (num3 == 0)
                    {
                        // Window full: wrap and/or flush to make room.
                        if (destinationIndex == this.End && this.Read != 0)
                        {
                            destinationIndex = 0;
                            num3 = destinationIndex < this.Read ? this.Read - destinationIndex - 1 : this.End - destinationIndex;
                        }
                        if (num3 == 0)
                        {
                            this.Write = destinationIndex;
                            r = this.Flush(r);
                            destinationIndex = this.Write;
                            num3 = destinationIndex < this.Read ? this.Read - destinationIndex - 1 : this.End - destinationIndex;
                            if (destinationIndex == this.End && this.Read != 0)
                            {
                                destinationIndex = 0;
                                num3 = destinationIndex < this.Read ? this.Read - destinationIndex - 1 : this.End - destinationIndex;
                            }
                            if (num3 == 0)
                                goto label_26;
                        }
                    }
                    r = 0;
                    // Copy min(Left, available input, window space) bytes.
                    int length = this.Left;
                    if (length > num1)
                        length = num1;
                    if (length > num3)
                        length = num3;
                    Array.Copy((Array)this.Codec.InputBuffer, sourceIndex, (Array)this.Window, destinationIndex, length);
                    sourceIndex += length;
                    num1 -= length;
                    destinationIndex += length;
                    num3 -= length;
                    if ((this.Left -= length) == 0)
                    {
                        // Block done: DRY if it was the last block, else next TYPE.
                        this.Mode = this.Last != 0 ? 7 : 0;
                        break;
                    }
                    else
                        break;
                }
                else
                    goto label_18;
            case 3: // TABLE: read the 14-bit table-size header (HLIT/HDIST/HCLEN)
                while (num2 < 14)
                {
                    if (num1 != 0)
                    {
                        r = 0;
                        --num1;
                        number1 |= ((int)this.Codec.InputBuffer[sourceIndex++] & (int)byte.MaxValue) << num2;
                        num2 += 8;
                    }
                    else
                    {
                        this.Bitb = number1;
                        this.Bitk = num2;
                        this.Codec.AvailableBytesIn = num1;
                        this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
                        this.Codec.NextIn = sourceIndex;
                        this.Write = destinationIndex;
                        return this.Flush(r);
                    }
                }
                int num7;
                this.Table = num7 = number1 & 16383;
                // Literal and distance code counts must each be <= 29 (+257/+1 base).
                if ((num7 & 31) <= 29 && (num7 >> 5 & 31) <= 29)
                {
                    int length = 258 + (num7 & 31) + (num7 >> 5 & 31);
                    if (this.Blens == null || this.Blens.Length < length)
                    {
                        this.Blens = new int[length];
                    }
                    else
                    {
                        for (int Index = 0; Index < length; ++Index)
                            this.Blens[Index] = 0;
                    }
                    number1 = SharedUtils.URShift(number1, 14);
                    num2 -= 14;
                    this.Index = 0;
                    this.Mode = 4; // BTREE
                    goto case 4;
                }
                else
                    goto label_39;
            case 4: // BTREE: read 3-bit code lengths for the code-length alphabet
                while (this.Index < 4 + SharedUtils.URShift(this.Table, 10))
                {
                    while (num2 < 3)
                    {
                        if (num1 != 0)
                        {
                            r = 0;
                            --num1;
                            number1 |= ((int)this.Codec.InputBuffer[sourceIndex++] & (int)byte.MaxValue) << num2;
                            num2 += 8;
                        }
                        else
                        {
                            this.Bitb = number1;
                            this.Bitk = num2;
                            this.Codec.AvailableBytesIn = num1;
                            this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
                            this.Codec.NextIn = sourceIndex;
                            this.Write = destinationIndex;
                            return this.Flush(r);
                        }
                    }
                    // Lengths arrive in the "border" permutation order.
                    this.Blens[InflateBlocks.border[this.Index++]] = number1 & 7;
                    number1 = SharedUtils.URShift(number1, 3);
                    num2 -= 3;
                }
                while (this.Index < 19)
                    this.Blens[InflateBlocks.border[this.Index++]] = 0;
                this.Bb[0] = 7;
                num4 = this.Inftree.inflate_trees_bits(this.Blens, this.Bb, this.Tb, this.Hufts, this.Codec);
                if (num4 == 0)
                {
                    this.Index = 0;
                    this.Mode = 5; // DTREE
                    goto case 5;
                }
                else
                    goto label_55;
            case 5: // DTREE: decode literal/distance code lengths via the bit-length tree
                while (true)
                {
                    int num8 = this.Table;
                    if (this.Index < 258 + (num8 & 31) + (num8 >> 5 & 31))
                    {
                        int Index = this.Bb[0];
                        while (num2 < Index)
                        {
                            if (num1 != 0)
                            {
                                r = 0;
                                --num1;
                                number1 |= ((int)this.Codec.InputBuffer[sourceIndex++] & (int)byte.MaxValue) << num2;
                                num2 += 8;
                            }
                            else
                            {
                                this.Bitb = number1;
                                this.Bitk = num2;
                                this.Codec.AvailableBytesIn = num1;
                                this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
                                this.Codec.NextIn = sourceIndex;
                                this.Write = destinationIndex;
                                return this.Flush(r);
                            }
                        }
                        if (this.Tb[0] != -1)
                        {} // decompiler artifact: empty body, no effect
                        int bits2 = this.Hufts[(this.Tb[0] + (number1 & InflateBlocks.inflate_mask[Index])) * 3 + 1];
                        int num9 = this.Hufts[(this.Tb[0] + (number1 & InflateBlocks.inflate_mask[bits2])) * 3 + 2];
                        if (num9 < 16)
                        {
                            // Literal code length 0..15.
                            number1 = SharedUtils.URShift(number1, bits2);
                            num2 -= bits2;
                            this.Blens[this.Index++] = num9;
                        }
                        else
                        {
                            // Repeat codes: 16 = repeat previous, 17/18 = repeat zero.
                            int bits3 = num9 == 18 ? 7 : num9 - 14;
                            int num10 = num9 == 18 ? 11 : 3;
                            while (num2 < bits2 + bits3)
                            {
                                if (num1 != 0)
                                {
                                    r = 0;
                                    --num1;
                                    number1 |= ((int)this.Codec.InputBuffer[sourceIndex++] & (int)byte.MaxValue) << num2;
                                    num2 += 8;
                                }
                                else
                                {
                                    this.Bitb = number1;
                                    this.Bitk = num2;
                                    this.Codec.AvailableBytesIn = num1;
                                    this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
                                    this.Codec.NextIn = sourceIndex;
                                    this.Write = destinationIndex;
                                    return this.Flush(r);
                                }
                            }
                            int number4 = SharedUtils.URShift(number1, bits2);
                            int num11 = num2 - bits2;
                            int num12 = num10 + (number4 & InflateBlocks.inflate_mask[bits3]);
                            number1 = SharedUtils.URShift(number4, bits3);
                            num2 = num11 - bits3;
                            int num13 = this.Index;
                            int num14 = this.Table;
                            // Repeat must stay in range, and "repeat previous" needs a previous entry.
                            if (num13 + num12 <= 258 + (num14 & 31) + (num14 >> 5 & 31) && (num9 != 16 || num13 >= 1))
                            {
                                int num15 = num9 == 16 ? this.Blens[num13 - 1] : 0;
                                do
                                {
                                    this.Blens[num13++] = num15;
                                }
                                while (--num12 != 0);
                                this.Index = num13;
                            }
                            else
                                goto label_73;
                        }
                    }
                    else
                        break;
                }
                // All code lengths read: build the dynamic trees.
                this.Tb[0] = -1;
                int[] bl2 = new int[1]
                {
                    9
                };
                int[] bd2 = new int[1]
                {
                    6
                };
                int[] tl2 = new int[1];
                int[] td2 = new int[1];
                int num16 = this.Table;
                num5 = this.Inftree.inflate_trees_dynamic(257 + (num16 & 31), 1 + (num16 >> 5 & 31), this.Blens, bl2, bd2, tl2, td2, this.Hufts, this.Codec);
                switch (num5)
                {
                    case 0:
                        this.Codes.Init(bl2[0], bd2[0], this.Hufts, tl2[0], this.Hufts, td2[0]);
                        this.Mode = 6; // CODES
                        goto label_83;
                    case -3: // data error while building trees
                        goto label_80;
                    default:
                        goto label_81;
                }
            case 6: // CODES: hand off to the InflateCodes decoder
                label_83:
                this.Bitb = number1;
                this.Bitk = num2;
                this.Codec.AvailableBytesIn = num1;
                this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
                this.Codec.NextIn = sourceIndex;
                this.Write = destinationIndex;
                if ((r = this.Codes.Process(this, r)) == 1)
                {
                    // End of block: reload locals and continue or finish.
                    r = 0;
                    sourceIndex = this.Codec.NextIn;
                    num1 = this.Codec.AvailableBytesIn;
                    number1 = this.Bitb;
                    num2 = this.Bitk;
                    destinationIndex = this.Write;
                    num3 = destinationIndex < this.Read ? this.Read - destinationIndex - 1 : this.End - destinationIndex;
                    if (this.Last == 0)
                    {
                        this.Mode = 0; // TYPE: more blocks follow
                        break;
                    }
                    else
                        goto label_87;
                }
                else
                    goto label_84;
            case 7: // DRY: flush remaining window contents
                goto label_88;
            case 8: // DONE
                goto label_91;
            case 9: // BAD
                goto label_92;
            default: // unknown mode => internal error
                goto label_93;
        }
    }
    // --- exit paths (state save + Flush) ---
    label_9: // invalid block type
    int num17 = SharedUtils.URShift(number1, 3);
    int num18 = num2 - 3;
    this.Mode = 9;
    this.Codec.Message = "invalid block type";
    r = -3;
    this.Bitb = num17;
    this.Bitk = num18;
    this.Codec.AvailableBytesIn = num1;
    this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
    this.Codec.NextIn = sourceIndex;
    this.Write = destinationIndex;
    return this.Flush(r);
    label_15: // stored-block length check failed
    this.Mode = 9;
    this.Codec.Message = "invalid stored block lengths";
    r = -3;
    this.Bitb = number1;
    this.Bitk = num2;
    this.Codec.AvailableBytesIn = num1;
    this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
    this.Codec.NextIn = sourceIndex;
    this.Write = destinationIndex;
    return this.Flush(r);
    label_18: // STORED: out of input
    this.Bitb = number1;
    this.Bitk = num2;
    this.Codec.AvailableBytesIn = num1;
    this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
    this.Codec.NextIn = sourceIndex;
    this.Write = destinationIndex;
    return this.Flush(r);
    label_26: // STORED: no window space after flush
    this.Bitb = number1;
    this.Bitk = num2;
    this.Codec.AvailableBytesIn = num1;
    this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
    this.Codec.NextIn = sourceIndex;
    this.Write = destinationIndex;
    return this.Flush(r);
    label_39: // TABLE header out of range
    this.Mode = 9;
    this.Codec.Message = "too many length or distance symbols";
    r = -3;
    this.Bitb = number1;
    this.Bitk = num2;
    this.Codec.AvailableBytesIn = num1;
    this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
    this.Codec.NextIn = sourceIndex;
    this.Write = destinationIndex;
    return this.Flush(r);
    label_55: // bit-length tree construction failed
    r = num4;
    if (r == -3)
    {
        this.Blens = (int[])null;
        this.Mode = 9;
    }
    this.Bitb = number1;
    this.Bitk = num2;
    this.Codec.AvailableBytesIn = num1;
    this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
    this.Codec.NextIn = sourceIndex;
    this.Write = destinationIndex;
    return this.Flush(r);
    label_73: // repeat code ran past the table
    this.Blens = (int[])null;
    this.Mode = 9;
    this.Codec.Message = "invalid bit length repeat";
    r = -3;
    this.Bitb = number1;
    this.Bitk = num2;
    this.Codec.AvailableBytesIn = num1;
    this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
    this.Codec.NextIn = sourceIndex;
    this.Write = destinationIndex;
    return this.Flush(r);
    label_80: // dynamic-tree data error
    this.Blens = (int[])null;
    this.Mode = 9;
    label_81: // dynamic-tree non-data error (falls through from label_80)
    r = num5;
    this.Bitb = number1;
    this.Bitk = num2;
    this.Codec.AvailableBytesIn = num1;
    this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
    this.Codec.NextIn = sourceIndex;
    this.Write = destinationIndex;
    return this.Flush(r);
    label_84: // Codes.Process returned non-1: state already saved above
    return this.Flush(r);
    label_87: // final block finished decoding
    this.Mode = 7;
    label_88: // DRY: flush; stay in DRY until the window drains
    this.Write = destinationIndex;
    r = this.Flush(r);
    destinationIndex = this.Write;
    int num19 = destinationIndex < this.Read ? this.Read - destinationIndex - 1 : this.End - destinationIndex;
    if (this.Read != this.Write)
    {
        this.Bitb = number1;
        this.Bitk = num2;
        this.Codec.AvailableBytesIn = num1;
        this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
        this.Codec.NextIn = sourceIndex;
        this.Write = destinationIndex;
        return this.Flush(r);
    }
    else
        this.Mode = 8;
    label_91: // DONE: report stream end (1)
    r = 1;
    this.Bitb = number1;
    this.Bitk = num2;
    this.Codec.AvailableBytesIn = num1;
    this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
    this.Codec.NextIn = sourceIndex;
    this.Write = destinationIndex;
    return this.Flush(r);
    label_92: // BAD: data error (-3)
    r = -3;
    this.Bitb = number1;
    this.Bitk = num2;
    this.Codec.AvailableBytesIn = num1;
    this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
    this.Codec.NextIn = sourceIndex;
    this.Write = destinationIndex;
    return this.Flush(r);
    label_93: // internal error (-2)
    r = -2;
    this.Bitb = number1;
    this.Bitk = num2;
    this.Codec.AvailableBytesIn = num1;
    this.Codec.TotalBytesIn += (long)(sourceIndex - this.Codec.NextIn);
    this.Codec.NextIn = sourceIndex;
    this.Write = destinationIndex;
    return this.Flush(r);
}
// Releases the large buffers held by this instance after saving no state;
// the object must not be used for decoding afterwards.
internal void Free()
{
    Reset(null);
    Window = null;
    Hufts = null;
}
// Preloads the sliding window with n bytes of dictionary data taken from
// d[start..start+n), and positions both window pointers just past it.
internal void SetDictionary(byte[] d, int start, int n)
{
    Array.Copy(d, start, Window, 0, n);
    Write = n;
    Read = n;
}
// Returns 1 when the decoder sits at a stored-block length field (mode LENS),
// which is a safe synchronization point; otherwise 0.
internal int SyncPoint()
{
    if (Mode == 1)
    {
        return 1;
    }
    return 0;
}
// Moves as much decoded data as possible from the sliding window to the
// codec's output buffer, updating the Adler check when enabled. Handles the
// circular window in up to two copies: Read..Write (or Read..End when the
// window has wrapped), then 0..Write after wrapping. Returns the status r,
// upgraded from -5 (presumably Z_BUF_ERROR — TODO confirm) to 0 once any
// bytes could be delivered.
internal int Flush(int r)
{
    // First chunk: from the read pointer to the write pointer, or to the
    // physical end of the window when Write has wrapped behind Read.
    int destinationIndex1 = this.Codec.NextOut;
    int num1 = this.Read;
    int num2 = (num1 <= this.Write ? this.Write : this.End) - num1;
    if (num2 > this.Codec.AvailableBytesOut)
        num2 = this.Codec.AvailableBytesOut;
    if (num2 != 0 && r == -5)
        r = 0;
    this.Codec.AvailableBytesOut -= num2;
    this.Codec.TotalBytesOut += (long)num2;
    if (this.Checkfn != null)
        this.Codec.Adler32 = this.Check = Adler.Adler32(this.Check, this.Window, num1, num2);
    Array.Copy((Array)this.Window, num1, (Array)this.Codec.OutputBuffer, destinationIndex1, num2);
    int destinationIndex2 = destinationIndex1 + num2;
    int num3 = num1 + num2;
    if (num3 == this.End)
    {
        // The copy reached the end of the window: wrap and copy a second
        // chunk from the window start up to Write.
        int num4 = 0;
        if (this.Write == this.End)
            this.Write = 0;
        int num5 = this.Write - num4;
        if (num5 > this.Codec.AvailableBytesOut)
            num5 = this.Codec.AvailableBytesOut;
        if (num5 != 0 && r == -5)
            r = 0;
        this.Codec.AvailableBytesOut -= num5;
        this.Codec.TotalBytesOut += (long)num5;
        if (this.Checkfn != null)
            this.Codec.Adler32 = this.Check = Adler.Adler32(this.Check, this.Window, num4, num5);
        Array.Copy((Array)this.Window, num4, (Array)this.Codec.OutputBuffer, destinationIndex2, num5);
        destinationIndex2 += num5;
        num3 = num4 + num5;
    }
    this.Codec.NextOut = destinationIndex2;
    this.Read = num3;
    return r;
}
}
}
| |
using System;
using UnityEngine;
using UnitySampleAssets.CrossPlatformInput.PlatformSpecific;
namespace UnitySampleAssets.CrossPlatformInput
{
public static class CrossPlatformInputManager
{
// Active platform-specific input backend; every public member of this class
// delegates to it.
private static VirtualInput virtualInput;
static CrossPlatformInputManager()
{
    // Defaults to the standalone (desktop) backend. NOTE(review): nothing in
    // this file swaps in a mobile backend — presumably done elsewhere; confirm.
    virtualInput = new StandaloneInput();
}
// Adds a virtual axis to the active input backend.
public static void RegisterVirtualAxis(VirtualAxis axis)
{
    virtualInput.RegisterVirtualAxis(axis);
}
// Adds a virtual button to the active input backend.
public static void RegisterVirtualButton(VirtualButton button)
{
    virtualInput.RegisterVirtualButton(button);
}
// Removes the named virtual axis from the active input backend.
public static void UnRegisterVirtualAxis(string _name)
{
    if (_name == null)
    {
        throw new ArgumentNullException("_name");
    }
    virtualInput.UnRegisterVirtualAxis(_name);
}
// Removes the named virtual button from the active input backend.
public static void UnRegisterVirtualButton(string name)
{
    // Null check added for consistency with UnRegisterVirtualAxis.
    if (name == null)
    {
        throw new ArgumentNullException("name");
    }
    virtualInput.UnRegisterVirtualButton(name);
}
// returns a reference to a named virtual axis if it exists otherwise null
public static VirtualAxis VirtualAxisReference(string name)
{
    return virtualInput.VirtualAxisReference(name);
}
// returns the platform appropriate (smoothed) axis value for the given name
public static float GetAxis(string name)
{
    return GetAxis(name, false);
}
// returns the platform appropriate raw (unsmoothed) axis value for the given name
public static float GetAxisRaw(string name)
{
    return GetAxis(name, true);
}
// private function handles both types of axis (raw and not raw)
private static float GetAxis(string name, bool raw)
{
    return virtualInput.GetAxis(name, raw);
}
// -- Button handling --
// True while the named button is held down.
public static bool GetButton(string name)
{
    return virtualInput.GetButton(name);
}
// True only on the frame the named button was pressed.
public static bool GetButtonDown(string name)
{
    return virtualInput.GetButtonDown(name);
}
// True only on the frame the named button was released.
public static bool GetButtonUp(string name)
{
    return virtualInput.GetButtonUp(name);
}
public static void SetButtonDown(string name)
{
    virtualInput.SetButtonDown(name);
}
public static void SetButtonUp(string name)
{
    virtualInput.SetButtonUp(name);
}
public static void SetAxisPositive(string name)
{
    virtualInput.SetAxisPositive(name);
}
public static void SetAxisNegative(string name)
{
    virtualInput.SetAxisNegative(name);
}
public static void SetAxisZero(string name)
{
    virtualInput.SetAxisZero(name);
}
public static void SetAxis(string name, float value)
{
    virtualInput.SetAxis(name, value);
}
// Mouse position as reported by the active backend (lower-case name matches
// UnityEngine.Input.mousePosition).
public static Vector3 mousePosition
{
    get { return virtualInput.MousePosition(); }
}
public static void SetVirtualMousePositionX(float f)
{
    virtualInput.SetVirtualMousePositionX(f);
}
public static void SetVirtualMousePositionY(float f)
{
    virtualInput.SetVirtualMousePositionY(f);
}
public static void SetVirtualMousePositionZ(float f)
{
    virtualInput.SetVirtualMousePositionZ(f);
}
// virtual axis and button classes - applies to mobile input
// Can be mapped to touch joysticks, tilt, gyro, etc, depending on desired implementation.
// Could also be implemented by other input devices - kinect, electronic sensors, etc
// A single named virtual input axis. Controller objects (virtual thumbsticks,
// tilt, gyro, etc.) feed values in via Update(); game code reads them back
// through CrossPlatformInputManager.GetAxis.
public class VirtualAxis
{
    public string name { get; private set; }
    public bool matchWithInputManager { get; private set; }
    private float currentValue;

    public VirtualAxis(string name) : this(name, true)
    {
    }

    public VirtualAxis(string name, bool matchToInputSettings)
    {
        this.name = name;
        matchWithInputManager = matchToInputSettings;
        // Axes self-register on construction.
        RegisterVirtualAxis(this);
    }

    // Removes this axis from the cross-platform input system.
    public void Remove()
    {
        UnRegisterVirtualAxis(name);
    }

    // Called by a controller gameobject to publish the latest axis value.
    public void Update(float value)
    {
        currentValue = value;
    }

    public float GetValue
    {
        get { return currentValue; }
    }

    // Raw value is identical to the smoothed value for virtual axes.
    public float GetValueRaw
    {
        get { return currentValue; }
    }
}
// a controller gameobject (eg. a virtual GUI button) should call the
// 'pressed' function of this class. Other objects can then read the
// Get/Down/Up state of this button.
// A single named virtual button. A controller gameobject calls Pressed()/
// Released(); game code reads the Get/Down/Up state through
// CrossPlatformInputManager.
public class VirtualButton
{
    public string name { get; private set; }
    public bool matchWithInputManager { get; private set; }

    // -5 keeps Down/Up false on the first frames after construction.
    private int frameLastPressed = -5;
    private int frameLastReleased = -5;
    private bool isDown;

    public VirtualButton(string name) : this(name, true)
    {
    }

    public VirtualButton(string name, bool matchToInputSettings)
    {
        this.name = name;
        matchWithInputManager = matchToInputSettings;
        // RegisterVirtualButton(this);
        // NOTE(review): unlike VirtualAxis, buttons are never auto-registered
        // here — confirm this is intentional.
    }

    // A controller gameobject should call this when the button is pressed down.
    public void Pressed()
    {
        if (isDown)
        {
            return;
        }
        isDown = true;
        frameLastPressed = Time.frameCount;
    }

    // A controller gameobject should call this when the button is released.
    public void Released()
    {
        isDown = false;
        frameLastReleased = Time.frameCount;
    }

    // The controller gameobject should call Remove when the button is
    // destroyed or disabled.
    public void Remove()
    {
        UnRegisterVirtualButton(name);
    }

    public bool GetButton
    {
        get { return isDown; }
    }

    // True only during the frame in which Pressed() was called.
    public bool GetButtonDown
    {
        get { return frameLastPressed == Time.frameCount; }
    }

    // True only during the frame in which Released() was called.
    // NOTE(review): the original wrote "Time.frameCount - 0"; it is kept as
    // same-frame semantics here, but "- 1" may have been intended — confirm.
    public bool GetButtonUp
    {
        get { return frameLastReleased == Time.frameCount; }
    }
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.